cell = multilayer_dropout_cell(
    RNN_CELL_DICT[cell_type],
    hidden_size, n_layers, rnn_dropout)

if bidirectional:
  cell_bw = multilayer_dropout_cell(
      RNN_CELL_DICT[cell_type],
      hidden_size, n_layers, rnn_dropout)
else:
  cell_bw = None

# Add attention.
if attn_inputs is not None:
  cell = tf_utils.SeqAttentionCellWrapper(
      cell, attn_inputs=attn_inputs,
      attn_size=attn_size, attn_vec_size=attn_vec_size,
      output_size=hidden_size, attn_masks=attn_masks)
  if bidirectional:
    cell_bw = tf_utils.SeqAttentionCellWrapper(
        cell_bw, attn_inputs=attn_inputs,
        attn_size=attn_size, attn_vec_size=attn_vec_size,
        output_size=hidden_size, attn_masks=attn_masks)

if initial_state is None:
  # Create zero state.
  zero_state = cell.zero_state(batch_size, tf.float32)

  if bidirectional:
    zero_state_bw = cell_bw.zero_state(batch_size, tf.float32)
    zero_state = (zero_state, zero_state_bw)

  initial_state = zero_state

# Create RNN.
outputs, final_state = create_rnn(
    cell, initial_state, embedded_seq_inputs.tensor,
    embedded_seq_inputs.sequence_length,
    hidden_size=hidden_size,
    bidirectional=bidirectional, cell_bw=cell_bw)
rnn_outputs = outputs
SeqAttentionCellWrapper is similar to AttentionCellWrapper, except that it attends over an externally supplied memory (attn_inputs, restricted by attn_masks) rather than a fixed-length window of the cell's own recent outputs, and projects the combined result to output_size.
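
For comparison, the stock tf.contrib.rnn.AttentionCellWrapper (TF 1.x) only takes an attn_length, because its attention memory is built from the cell's own previous outputs; a minimal usage sketch (the GRU choice and hidden_size value are illustrative):

import tensorflow as tf  # TF 1.x

hidden_size = 128  # illustrative value
base_cell = tf.nn.rnn_cell.GRUCell(hidden_size)

# Attends over a sliding window of the last 16 cell outputs; no external
# memory (attn_inputs) or masks are involved.
attn_cell = tf.contrib.rnn.AttentionCellWrapper(
    base_cell, attn_length=16,
    attn_size=hidden_size, attn_vec_size=hidden_size,
    state_is_tuple=True)

SeqAttentionCellWrapper, by contrast, receives the memory and its masks explicitly, which is what lets it attend over encoder outputs of varying length.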