# vector1/vector2/vector3: [batch_size, hidden_dim]
# labels: [batch_size, 1]
# logits: [batch_size, 1]
#
# Score each example by combining two interaction terms over the hidden
# dimension: a multiplicative term (element-wise triple product) and an
# additive term (element-wise sum). Each term is reduced to one scalar per
# example with a mean over axis -1; keepdims=True keeps the trailing
# dimension so logits matches the [batch_size, 1] shape of labels.
# NOTE: `keepdims` is the current spelling; `keep_dims` was deprecated in
# TF 1.5 and removed in TF 2.x.
logits = tf.reduce_mean(vector1 * vector2 * vector3, axis=-1, keepdims=True) \
    + tf.reduce_mean(vector1 + vector2 + vector3, axis=-1, keepdims=True)
# Mixed add/multiply variants were tried and performed worse in practice:
# + tf.reduce_mean(vector1 + vector2 * vector3, axis=-1, keepdims=True)
# + tf.reduce_mean(vector1 * vector2 + vector3, axis=-1, keepdims=True)
# + tf.reduce_mean(vector1 * vector3 + vector2, axis=-1, keepdims=True)

# Per-example sigmoid cross-entropy between `labels` and `logits`
# (both [batch_size, 1]), averaged over the batch into a scalar loss.
per_example_loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=labels,
                                                           logits=logits)
loss = tf.reduce_mean(per_example_loss)

# In practice only addition and multiplication are usable interaction ops here;
# subtraction is equivalent to addition (the sign can be absorbed into the learned vectors).