Python keras.layers.RNN Examples

The following are 5 code examples of keras.layers.RNN(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module keras.layers, or try the search function.
Example #1
Source File: Recurrent.py    From dts with MIT License 6 votes vote down vote up
def __init__(self, layers, cell_type, cell_params):
        """
        Build the RNN with the given number of layers.
        :param layers: list
            list of integers. The i-th element of the list is the number of hidden neurons for the i-th layer.
        :param cell_type: 'gru', 'rnn', 'lstm'
        :param cell_params: dict
            A dictionary containing all the parameters for the RNN cell.
            see keras.layers.LSTMCell, keras.layers.GRUCell or keras.layers.SimpleRNNCell for more details.
        """
        # Placeholders populated later by the rest of the class.
        self.model = None
        self.horizon = None
        self.layers = layers
        self.cell_params = cell_params
        # Resolve the requested cell type to its Keras cell class.
        cell_classes = {
            'lstm': LSTMCell,
            'gru': GRUCell,
            'rnn': SimpleRNNCell,
        }
        if cell_type not in cell_classes:
            raise NotImplementedError('{0} is not a valid cell type.'.format(cell_type))
        self.cell = cell_classes[cell_type]
        # Build deep rnn
        self.rnn = self._build_rnn()
Example #2
Source File: Recurrent.py    From dts with MIT License 5 votes vote down vote up
def _build_rnn(self):
        """
        Build the stacked (deep) RNN from ``self.layers``.

        Bug fix: ``self.layers`` is documented in ``__init__`` as a *list* of
        per-layer hidden sizes, but the original code called
        ``range(self.layers)``, which raises ``TypeError`` on a list. We now
        derive the layer count from the list length, while still accepting a
        plain int for backward compatibility.

        :return: a keras.layers.RNN wrapping one cell per layer, with
            return_sequences=False and return_state=False.
        """
        # Accept either an int (layer count) or a list (one size per layer).
        n_layers = self.layers if isinstance(self.layers, int) else len(self.layers)
        # Each cell is configured solely by self.cell_params (NOTE(review):
        # the per-layer sizes in the list are not forwarded here — confirm
        # whether cell_params already carries 'units').
        cells = [self.cell(**self.cell_params) for _ in range(n_layers)]
        return RNN(cells, return_sequences=False, return_state=False)
Example #3
Source File: Seq2Seq.py    From dts with MIT License 5 votes vote down vote up
def __init__(self,
                 encoder_layers,
                 decoder_layers,
                 output_sequence_length,
                 dropout=0.0,
                 l2=0.01,
                 cell_type='lstm'):
        """
        Store the Seq2Seq hyper-parameters and pick the recurrent cell class.

        :param encoder_layers: list
            encoder (RNN) architecture: [n_hidden_units_1st_layer, n_hidden_units_2nd_layer, ...]
        :param decoder_layers: list
            decoder (RNN) architecture: [n_hidden_units_1st_layer, n_hidden_units_2nd_layer, ...]
        :param output_sequence_length: int
            number of timesteps to be predicted.
        :param dropout: float
            dropout rate passed to every recurrent cell.
        :param l2: float
            L2 regularization factor applied to kernel and recurrent weights.
        :param cell_type: str
            gru or lstm.
        """
        self.encoder_layers = encoder_layers
        self.decoder_layers = decoder_layers
        self.output_sequence_length = output_sequence_length
        self.dropout = dropout
        self.l2 = l2
        # Map the cell-type name to the corresponding Keras cell class.
        supported_cells = {'lstm': LSTMCell, 'gru': GRUCell}
        if cell_type not in supported_cells:
            raise ValueError('{0} is not a valid cell type. Choose between gru and lstm.'.format(cell_type))
        self.cell = supported_cells[cell_type]
Example #4
Source File: Seq2Seq.py    From dts with MIT License 5 votes vote down vote up
def _build_encoder(self):
        """
        Build the encoder multilayer RNN (stacked RNN).

        Creates one recurrent cell per entry of ``self.encoder_layers`` and
        stacks them inside a single efficient RNN layer. The layer is built
        with return_state=True, so calling it yields the final cell states.
        """
        # One cell per layer; all cells share the same dropout and L2 setup.
        encoder_cells = [
            self.cell(units=units,
                      dropout=self.dropout,
                      kernel_regularizer=l2(self.l2),
                      recurrent_regularizer=l2(self.l2))
            for units in self.encoder_layers
        ]
        self.encoder = RNN(encoder_cells, return_state=True, name='encoder')
Example #5
Source File: Seq2Seq.py    From dts with MIT License 5 votes vote down vote up
def _build_decoder(self):
        """
        Build the decoder multilayer RNN (stacked RNN).

        Mirrors the encoder construction, one cell per entry of
        ``self.decoder_layers``, but with return_sequences=True so an output
        is produced for EACH timestep (plus the final states via
        return_state=True).
        """
        # One cell per layer; all cells share the same dropout and L2 setup.
        decoder_cells = [
            self.cell(units=units,
                      dropout=self.dropout,
                      kernel_regularizer=l2(self.l2),
                      recurrent_regularizer=l2(self.l2))
            for units in self.decoder_layers
        ]
        # return output for EACH timestamp
        self.decoder = RNN(decoder_cells, return_sequences=True, return_state=True, name='decoder')