Python tensorflow.python.ops.rnn_cell.MultiRNNCell() Examples

The following are eight code examples of tensorflow.python.ops.rnn_cell.MultiRNNCell(), taken from open-source projects. Each example notes the source file and project it comes from. You may also want to check out all available functions and classes of the tensorflow.python.ops.rnn_cell module.
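Before the project examples, here is a minimal, self-contained sketch of the API itself (TF 1.x graph mode; the layer sizes are illustrative). MultiRNNCell stacks several RNNCell instances into a single multi-layer cell that can then be driven by tf.nn.dynamic_rnn; depending on your TF 1.x release the same classes are also exposed as tf.nn.rnn_cell or tf.contrib.rnn.

import tensorflow as tf
from tensorflow.python.ops import rnn_cell

# Stack two independent LSTM layers into one cell.
stacked_cell = rnn_cell.MultiRNNCell(
    [rnn_cell.LSTMCell(64) for _ in range(2)], state_is_tuple=True)

# Run it over a batch of sequences shaped [batch, time, features].
inputs = tf.placeholder(tf.float32, [32, 10, 16])
outputs, final_state = tf.nn.dynamic_rnn(stacked_cell, inputs, dtype=tf.float32)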
Example #1
Source File: dynamic_rnn_estimator.py    From deep_image_model with Apache License 2.0
def _get_rnn_cell(cell_type, num_units, num_layers):
  """Constructs and return an `RNNCell`.

  Args:
    cell_type: either a string identifying the `RNNCell` type, or a subclass of
      `RNNCell`.
    num_units: the number of units in the `RNNCell`.
    num_layers: the number of layers in the RNN.
  Returns:
    An initialized `RNNCell`.
  Raises:
    ValueError: `cell_type` is an invalid `RNNCell` name.
    TypeError: `cell_type` is not a string or a subclass of `RNNCell`.
  """
  if isinstance(cell_type, str):
    cell_name = cell_type
    cell_type = _CELL_TYPES.get(cell_name)
    if cell_type is None:
      raise ValueError('The supported cell types are {}; got {}'.format(
          list(_CELL_TYPES.keys()), cell_name))
  if not issubclass(cell_type, rnn_cell.RNNCell):
    raise TypeError(
        'cell_type must be a subclass of RNNCell or one of {}.'.format(
            list(_CELL_TYPES.keys())))
  if num_layers > 1:
    # Build a distinct cell instance per layer; the [cell] * num_layers
    # idiom repeats a single object, which ties the layers together and
    # is rejected by newer TF releases.
    return rnn_cell.MultiRNNCell(
        [cell_type(num_units=num_units) for _ in range(num_layers)],
        state_is_tuple=True)
  return cell_type(num_units=num_units)
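The _CELL_TYPES mapping referenced above is defined elsewhere in dynamic_rnn_estimator.py; a plausible stand-in (the exact keys are an assumption, not the verbatim source) and a usage line:

# Hypothetical reconstruction of the name-to-class lookup.
_CELL_TYPES = {
    'basic_rnn': rnn_cell.BasicRNNCell,
    'gru': rnn_cell.GRUCell,
    'lstm': rnn_cell.LSTMCell,
}

# Usage: a two-layer LSTM stack.
cell = _get_rnn_cell('lstm', num_units=128, num_layers=2)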
Example #2
Source File: model.py    From chatbot-rnn with MIT License
def save_variables_list(self):
        # Return a list of the variables created within the rnnlm scope: the two
        # softmax projection variables (softmax_w and softmax_b), the embedding,
        # and all of the weights and biases in the MultiRNNCell, plus the
        # bookkeeping tensors needed to resume training. Everything else,
        # including optimizer state, is deliberately left out of checkpoints.
        save_vars = set(tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='rnnlm'))
        save_vars.update({self.lr, self.global_epoch_fraction, self.global_seconds_elapsed})
        return list(save_vars) 
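A hedged usage sketch: the returned list is meant to be handed to tf.train.Saver, so checkpoints carry the model variables and bookkeeping tensors but no optimizer slots. Here `model`, `sess`, and `checkpoint_path` stand in for your own objects:

saver = tf.train.Saver(var_list=model.save_variables_list(), max_to_keep=3)
saver.save(sess, checkpoint_path)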
Example #3
Source File: rnn.py    From rnnprop with MIT License
def _build_pre(self):
        self.dimA = 20
        # Two distinct LSTM cells; [LSTMCell(self.dimA)] * 2 would repeat a
        # single cell object, which newer TF releases reject.
        self.cellA = MultiRNNCell([LSTMCell(self.dimA) for _ in range(2)])
        self.b1 = 0.95
        self.b2 = 0.95
        self.lr = 0.1
        self.eps = 1e-8 
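The list-comprehension form above matters because Python list multiplication copies the reference, not the cell; a quick identity check makes the difference visible:

from tensorflow.python.ops.rnn_cell import LSTMCell

shared = [LSTMCell(20)] * 2                    # one cell object, listed twice
distinct = [LSTMCell(20) for _ in range(2)]    # two independent cells
assert shared[0] is shared[1]
assert distinct[0] is not distinct[1]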
Example #4
Source File: deepmind.py    From rnnprop with MIT License
def _build_pre(self):
        self.dimH = 20
        # As in the previous example, build two distinct cells rather than
        # repeating one object.
        self.cellH = MultiRNNCell([LSTMCell(self.dimH) for _ in range(2)])
        self.lr = 0.1 
Example #5
Source File: seq2seq_model.py    From AmusingPythonCodes with MIT License
def _create_encoder_cell(self):
        return MultiRNNCell([self._create_rnn_cell() for _ in range(self.cfg.num_layers)]) 
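The _create_rnn_cell helper is defined elsewhere in this model. A plausible stand-in (an assumption, not the project's verbatim code; self.keep_prob is a hypothetical attribute) is a dropout-wrapped LSTM cell, assuming LSTMCell and DropoutWrapper are imported alongside MultiRNNCell:

def _create_rnn_cell(self):
    # Hypothetical reconstruction: one LSTM layer with output dropout.
    cell = LSTMCell(self.cfg.num_units)
    return DropoutWrapper(cell, output_keep_prob=self.keep_prob)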
Example #6
Source File: seq2seq_model.py    From AmusingPythonCodes with MIT License
def _create_decoder_cell(self):
        enc_outputs, enc_states, enc_seq_len = self.enc_outputs, self.enc_states, self.enc_seq_len
        batch_size = self.batch_size * self.cfg.beam_size if self.use_beam_search else self.batch_size
        with tf.variable_scope("attention"):
            if self.cfg.attention == "luong":  # Luong attention mechanism
                attention_mechanism = LuongAttention(num_units=self.cfg.num_units, memory=enc_outputs,
                                                     memory_sequence_length=enc_seq_len)
            else:  # default to the Bahdanau attention mechanism
                attention_mechanism = BahdanauAttention(num_units=self.cfg.num_units, memory=enc_outputs,
                                                        memory_sequence_length=enc_seq_len)

        def cell_input_fn(inputs, attention):
            # Cell input function: with attention input feeding enabled, project
            # the concatenated [inputs; attention] back to num_units so the cell
            # input size stays constant across steps.
            # Reference: https://www.tensorflow.org/api_docs/python/tf/contrib/seq2seq/AttentionWrapper
            if not self.cfg.use_attention_input_feeding:
                return inputs
            input_project = tf.layers.Dense(self.cfg.num_units, dtype=tf.float32, name='attn_input_feeding')
            return input_project(tf.concat([inputs, attention], axis=-1))

        if self.cfg.top_attention:  # apply attention mechanism only on the top decoder layer
            cells = [self._create_rnn_cell() for _ in range(self.cfg.num_layers)]
            cells[-1] = AttentionWrapper(cells[-1], attention_mechanism=attention_mechanism, name="Attention_Wrapper",
                                         attention_layer_size=self.cfg.num_units, initial_cell_state=enc_states[-1],
                                         cell_input_fn=cell_input_fn)
            initial_state = list(enc_states)
            initial_state[-1] = cells[-1].zero_state(batch_size=batch_size, dtype=tf.float32)
            dec_init_states = tuple(initial_state)
            cells = MultiRNNCell(cells)
        else:
            cells = MultiRNNCell([self._create_rnn_cell() for _ in range(self.cfg.num_layers)])
            cells = AttentionWrapper(cells, attention_mechanism=attention_mechanism, name="Attention_Wrapper",
                                     attention_layer_size=self.cfg.num_units, initial_cell_state=enc_states,
                                     cell_input_fn=cell_input_fn)
            dec_init_states = cells.zero_state(batch_size=batch_size, dtype=tf.float32).clone(cell_state=enc_states)
        return cells, dec_init_states 
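A hedged sketch of how the returned pair is typically consumed at training time with tf.contrib.seq2seq; dec_inputs_embedded, dec_seq_len, and vocab_size are illustrative names, not identifiers from the source:

cell, dec_init_states = self._create_decoder_cell()
helper = TrainingHelper(inputs=dec_inputs_embedded, sequence_length=dec_seq_len)
decoder = BasicDecoder(cell, helper, initial_state=dec_init_states,
                       output_layer=tf.layers.Dense(vocab_size))
dec_outputs, _, _ = dynamic_decode(decoder)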
Example #7
Source File: blstm_cnn_crf_model.py    From neural_sequence_labeling with MIT License
def _create_rnn_cell(self):
        if self.cfg["num_layers"] is None or self.cfg["num_layers"] <= 1:
            return self._create_single_rnn_cell(self.cfg["num_units"])
        else:
            if self.cfg["use_stack_rnn"]:
                return [self._create_single_rnn_cell(self.cfg["num_units"]) for _ in range(self.cfg["num_layers"])]
            else:
                return MultiRNNCell([self._create_single_rnn_cell(self.cfg["num_units"])
                                     for _ in range(self.cfg["num_layers"])]) 
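When use_stack_rnn is set, the list of per-layer cells is presumably consumed by tf.contrib.rnn.stack_bidirectional_dynamic_rnn, which expects one cell per layer rather than a single MultiRNNCell. A sketch under that assumption, with inputs and seq_len as illustrative names:

cells_fw = self._create_rnn_cell()   # list of forward cells
cells_bw = self._create_rnn_cell()   # list of backward cells
outputs, _, _ = tf.contrib.rnn.stack_bidirectional_dynamic_rnn(
    cells_fw, cells_bw, inputs, sequence_length=seq_len, dtype=tf.float32)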
Example #8
Source File: model.py    From ATRank with Apache License 2.0
def build_cell(hidden_units, depth=1):
  cell_list = [build_single_cell(hidden_units) for _ in range(depth)]
  return MultiRNNCell(cell_list) 
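build_single_cell is defined elsewhere in ATRank's model.py; a plausible stand-in (an assumption, not the verbatim source) plus a usage line:

def build_single_cell(hidden_units):
    # Hypothetical reconstruction: a single GRU layer.
    return GRUCell(hidden_units)

# Usage: a three-layer stack.
cell = build_cell(hidden_units=128, depth=3)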