Such a hard problem...
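Roughly, the code below handles the values that are powers of two with a DP over the binary carries, and multiplies by 2^(n - pcnt) for the remaining values, which can be taken freely.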
#include <iostream>
#include <queue>
#include <stack>
#include <map>
#include <set>
#include <bitset>
#include <cstdio>
#include <algorithm>
#include <cstring>
#include <climits>
#include <cstdlib>
#include <cmath>
#include <time.h>
#define maxn 100005
#define maxm 100005
#define eps 1e-10
#define mod 998244353
#define INF 999999999
#define lowbit(x) (x&(-x))
#define ls o<<1
#define rs o<<1 | 1
#define lson o->ch[0], L, mid
#define rson o->ch[1], mid+1, R
typedef long long LL;
//typedef int LL;
using namespace std;
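// Fast modular exponentiation: a^b mod `mod`.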
LL powmod(LL a, LL b) { LL res = 1, base = a; while(b) { if(b%2) res = res * base % mod; base = base * base % mod; b /= 2; } return res; }
LL f[maxn], g[maxn];     // f[i] = i! mod p, g[i] = inverse of i! mod p
int cnt[maxn], pcnt, n;  // cnt[x] = number of inputs equal to x; pcnt = number of inputs that are powers of two
LL dp[15][maxn];         // dp[i][j]: ways (scaled by inverse factorials) to take some of the values 1..2^i with carry j into bit i+1
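// Precompute factorials f[] and inverse factorials g[] modulo `mod`, up to 100000.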
void handle(void)
{
    f[0] = 1;
    for(int i = 1; i <= 100000; i++) f[i] = f[i-1] * i % mod;
    g[100000] = powmod(f[100000], mod-2);
    for(int i = 99999; i >= 0; i--) g[i] = g[i+1] * (i+1) % mod;
}
void init(void)
{
    memset(dp, 0, sizeof dp);
    memset(cnt, 0, sizeof cnt);
}
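// Read one non-negative integer with getchar().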
void read_int(int &x)
{
    x = 0;
    char ch = getchar();
    while(ch < '0' || ch > '9') ch = getchar();  // skip any non-digit separators
    while(ch >= '0' && ch <= '9') x = x*10 + ch - '0', ch = getchar();
}
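// Read the n input values, count how many times each value occurs, and count how many of them are powers of two (1..2048).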
void read(void)
{
    int x;
    for(int i = 1; i <= n; i++) {
        read_int(x);
        cnt[x]++;
    }
    pcnt = 0;
    for(int i = 0; i <= 11; i++) pcnt += cnt[1 << i];
}
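// One test case: run the carry DP over the power-of-two counts; ans = (2^pcnt - dp[11][0]*last) * 2^(n-pcnt) mod p.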
void work(int _)
{
    int ct = cnt[1];
    LL last = f[cnt[1]], ans;
    // Base case: take j of the ct ones; g[ct-j]*g[j] = C(ct,j) / ct!, and j>>1 carries into bit 1.
    for(int j = 0; j <= ct; j++) {
        dp[0][j>>1] = (dp[0][j>>1] + g[ct-j] * g[j]) % mod;
    }
    for(int i = 0; i < 11; i++) {
        ct = cnt[1<<(i + 1)];
        last = last * f[ct] % mod;  // collect the factorials that later turn the g-products back into binomials
        for(int j = 0; j < (1 << (10 - i)); j++)
            if(dp[i][j])
                // take k of the ct copies of 2^(i+1); with the incoming carry j, (j+k)>>1 carries into the next bit
                for(int k = 0; k < (1 << (10 - i)); k++) {
                    if(k > ct || (j + k) > (1 << (10 - i))) break;
                    dp[i+1][(j+k)>>1] = (dp[i+1][(j+k)>>1] + dp[i][j] * g[ct - k] % mod * g[k] % mod) % mod;
                }
    }
    ans = (powmod(2, pcnt) - dp[11][0] * last % mod) % mod * powmod(2, n - pcnt) % mod;
    if(ans < 0) ans += mod;
    printf("Case #%d: %I64d\n", _, ans);  // %I64d is the MSVC/Windows long-long specifier; use %lld elsewhere
}
int main(void)
{
    handle();
    int _ = 0;
    while(scanf("%d", &n) == 1 && n != 0) {
        init();
        read();
        work(++_);
    }
    return 0;
}