Python tensorflow.contrib.cudnn_rnn.python.ops.cudnn_rnn_ops.CudnnGRU() Examples
The following are 9 code examples of tensorflow.contrib.cudnn_rnn.python.ops.cudnn_rnn_ops.CudnnGRU().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module
tensorflow.contrib.cudnn_rnn.python.ops.cudnn_rnn_ops, or try the search function.
Example #1
Source File: test_rnn.py From MMdnn with MIT License | 5 votes |
def create_symbol(X, num_classes=0, is_training=False, CUDNN=False, maxf=30000, edim=125, nhid=100, batchs=64):
    """Build a 2-class text classifier over embedded word sequences.

    Embeds the integer token ids in ``X``, runs them through either a
    stacked LSTM+GRU (vanilla TF path) or a single-layer cuDNN GRU
    (``CUDNN=True``), and projects the last time step to 2 logits.

    Args:
        X: integer tensor of token ids, shape (batch, time).
        num_classes: unused here; the dense head is hard-wired to 2 units.
        is_training: unused in this graph definition.
        CUDNN: when True, use the fused cuDNN GRU kernel instead of the
            tf.contrib.rnn cells.
        maxf: vocabulary size for the embedding table.
        edim: embedding dimension.
        nhid: hidden units per RNN layer.
        batchs: batch size (only needed to shape the cuDNN initial state).

    Returns:
        A pair ``(logits, logits)`` — the same tensor twice, matching the
        caller's expected two-element return.
    """
    word_vectors = tf.contrib.layers.embed_sequence(X, vocab_size=maxf, embed_dim=edim)
    # static_rnn / CudnnGRU both consume a time-major list of (batch, edim) steps.
    word_list = tf.unstack(word_vectors, axis=1)
    if CUDNN:
        # Using cuDNN since vanilla RNN
        from tensorflow.contrib.cudnn_rnn.python.ops import cudnn_rnn_ops
        cudnn_cell = cudnn_rnn_ops.CudnnGRU(num_layers=1, num_units=nhid, input_size=edim, input_mode='linear_input')
        # cuDNN packs all weights into one flat opaque parameter buffer.
        params_size_t = cudnn_cell.params_size()
        params = tf.Variable(tf.random_uniform([params_size_t], -0.1, 0.1), validate_shape=False)
        # Zero initial hidden state: (num_layers, batch, nhid).
        input_h = tf.Variable(tf.zeros([1, batchs, nhid]))
        outputs, states = cudnn_cell(input_data=word_list, input_h=input_h, params=params)
    else:
        lstm_layer = tf.contrib.rnn.LSTMCell(nhid)
        gru_layer = tf.contrib.rnn.GRUCell(nhid)
        stacked = tf.nn.rnn_cell.MultiRNNCell([lstm_layer, gru_layer])
        outputs, states = tf.nn.static_rnn(stacked, word_list, dtype=tf.float32)
    # Classify from the final time step only.
    logits = tf.layers.dense(outputs[-1], 2, activation=None, name='output')
    return logits, logits
Example #2
Source File: cudnn_recurrent.py From GraphicDesignPatternByPython with MIT License | 4 votes |
def build(self, input_shape):
    """Create this layer's weights and expose per-gate views of them.

    Allocates one fused ``kernel``, ``recurrent_kernel`` and ``bias``
    variable (gate order z, r, h along the last axis), instantiates the
    backing ``cudnn_rnn_ops.CudnnGRU`` op, and stores sliced views of
    each weight per gate for later use (e.g. weight conversion/export).
    """
    super(CuDNNGRU, self).build(input_shape)
    # Keras may pass a list of shapes for multi-input layers; only the
    # first input feeds the GRU.
    if isinstance(input_shape, list):
        input_shape = input_shape[0]
    input_dim = input_shape[-1]

    from tensorflow.contrib.cudnn_rnn.python.ops import cudnn_rnn_ops
    self._cudnn_gru = cudnn_rnn_ops.CudnnGRU(
        num_layers=1,
        num_units=self.units,
        input_size=input_dim,
        input_mode='linear_input')

    units = self.units
    self.kernel = self.add_weight(
        shape=(input_dim, units * 3),
        name='kernel',
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint)
    self.recurrent_kernel = self.add_weight(
        shape=(units, units * 3),
        name='recurrent_kernel',
        initializer=self.recurrent_initializer,
        regularizer=self.recurrent_regularizer,
        constraint=self.recurrent_constraint)
    # cuDNN keeps separate input-side and recurrent-side biases, hence 6x.
    self.bias = self.add_weight(
        shape=(units * 6,),
        name='bias',
        initializer=self.bias_initializer,
        regularizer=self.bias_regularizer,
        constraint=self.bias_constraint)

    # Column slices per gate: [z | r | h].
    self.kernel_z = self.kernel[:, :units]
    self.kernel_r = self.kernel[:, units: units * 2]
    self.kernel_h = self.kernel[:, units * 2:]
    self.recurrent_kernel_z = self.recurrent_kernel[:, :units]
    self.recurrent_kernel_r = self.recurrent_kernel[:, units: units * 2]
    self.recurrent_kernel_h = self.recurrent_kernel[:, units * 2:]

    # Bias layout: input-side (z, r, h) then recurrent-side (z, r, h).
    self.bias_z_i = self.bias[:units]
    self.bias_r_i = self.bias[units: units * 2]
    self.bias_h_i = self.bias[units * 2: units * 3]
    self.bias_z = self.bias[units * 3: units * 4]
    self.bias_r = self.bias[units * 4: units * 5]
    self.bias_h = self.bias[units * 5:]

    self.built = True
Example #3
Source File: cudnn_recurrent.py From DeepLearning_Wavelet-LSTM with MIT License | 4 votes |
def build(self, input_shape):
    """Create this layer's weights and expose per-gate views of them.

    Allocates one fused ``kernel``, ``recurrent_kernel`` and ``bias``
    variable (gate order z, r, h along the last axis), instantiates the
    backing ``cudnn_rnn_ops.CudnnGRU`` op, and stores sliced views of
    each weight per gate for later use (e.g. weight conversion/export).
    """
    super(CuDNNGRU, self).build(input_shape)
    # Keras may pass a list of shapes for multi-input layers; only the
    # first input feeds the GRU.
    if isinstance(input_shape, list):
        input_shape = input_shape[0]
    input_dim = input_shape[-1]

    from tensorflow.contrib.cudnn_rnn.python.ops import cudnn_rnn_ops
    self._cudnn_gru = cudnn_rnn_ops.CudnnGRU(
        num_layers=1,
        num_units=self.units,
        input_size=input_dim,
        input_mode='linear_input')

    units = self.units
    self.kernel = self.add_weight(
        shape=(input_dim, units * 3),
        name='kernel',
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint)
    self.recurrent_kernel = self.add_weight(
        shape=(units, units * 3),
        name='recurrent_kernel',
        initializer=self.recurrent_initializer,
        regularizer=self.recurrent_regularizer,
        constraint=self.recurrent_constraint)
    # cuDNN keeps separate input-side and recurrent-side biases, hence 6x.
    self.bias = self.add_weight(
        shape=(units * 6,),
        name='bias',
        initializer=self.bias_initializer,
        regularizer=self.bias_regularizer,
        constraint=self.bias_constraint)

    # Column slices per gate: [z | r | h].
    self.kernel_z = self.kernel[:, :units]
    self.kernel_r = self.kernel[:, units: units * 2]
    self.kernel_h = self.kernel[:, units * 2:]
    self.recurrent_kernel_z = self.recurrent_kernel[:, :units]
    self.recurrent_kernel_r = self.recurrent_kernel[:, units: units * 2]
    self.recurrent_kernel_h = self.recurrent_kernel[:, units * 2:]

    # Bias layout: input-side (z, r, h) then recurrent-side (z, r, h).
    self.bias_z_i = self.bias[:units]
    self.bias_r_i = self.bias[units: units * 2]
    self.bias_h_i = self.bias[units * 2: units * 3]
    self.bias_z = self.bias[units * 3: units * 4]
    self.bias_r = self.bias[units * 4: units * 5]
    self.bias_h = self.bias[units * 5:]

    self.built = True
Example #4
Source File: cudnn_recurrent.py From DeepLearning_Wavelet-LSTM with MIT License | 4 votes |
def build(self, input_shape):
    """Create this layer's weights and expose per-gate views of them.

    Allocates one fused ``kernel``, ``recurrent_kernel`` and ``bias``
    variable (gate order z, r, h along the last axis), instantiates the
    backing ``cudnn_rnn_ops.CudnnGRU`` op, and stores sliced views of
    each weight per gate for later use (e.g. weight conversion/export).
    """
    super(CuDNNGRU, self).build(input_shape)
    # Keras may pass a list of shapes for multi-input layers; only the
    # first input feeds the GRU.
    if isinstance(input_shape, list):
        input_shape = input_shape[0]
    input_dim = input_shape[-1]

    from tensorflow.contrib.cudnn_rnn.python.ops import cudnn_rnn_ops
    self._cudnn_gru = cudnn_rnn_ops.CudnnGRU(
        num_layers=1,
        num_units=self.units,
        input_size=input_dim,
        input_mode='linear_input')

    units = self.units
    self.kernel = self.add_weight(
        shape=(input_dim, units * 3),
        name='kernel',
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint)
    self.recurrent_kernel = self.add_weight(
        shape=(units, units * 3),
        name='recurrent_kernel',
        initializer=self.recurrent_initializer,
        regularizer=self.recurrent_regularizer,
        constraint=self.recurrent_constraint)
    # cuDNN keeps separate input-side and recurrent-side biases, hence 6x.
    self.bias = self.add_weight(
        shape=(units * 6,),
        name='bias',
        initializer=self.bias_initializer,
        regularizer=self.bias_regularizer,
        constraint=self.bias_constraint)

    # Column slices per gate: [z | r | h].
    self.kernel_z = self.kernel[:, :units]
    self.kernel_r = self.kernel[:, units: units * 2]
    self.kernel_h = self.kernel[:, units * 2:]
    self.recurrent_kernel_z = self.recurrent_kernel[:, :units]
    self.recurrent_kernel_r = self.recurrent_kernel[:, units: units * 2]
    self.recurrent_kernel_h = self.recurrent_kernel[:, units * 2:]

    # Bias layout: input-side (z, r, h) then recurrent-side (z, r, h).
    self.bias_z_i = self.bias[:units]
    self.bias_r_i = self.bias[units: units * 2]
    self.bias_h_i = self.bias[units * 2: units * 3]
    self.bias_z = self.bias[units * 3: units * 4]
    self.bias_r = self.bias[units * 4: units * 5]
    self.bias_h = self.bias[units * 5:]

    self.built = True
Example #5
Source File: cudnn_recurrent.py From DeepLearning_Wavelet-LSTM with MIT License | 4 votes |
def build(self, input_shape):
    """Create this layer's weights and expose per-gate views of them.

    Allocates one fused ``kernel``, ``recurrent_kernel`` and ``bias``
    variable (gate order z, r, h along the last axis), instantiates the
    backing ``cudnn_rnn_ops.CudnnGRU`` op, and stores sliced views of
    each weight per gate for later use (e.g. weight conversion/export).
    """
    super(CuDNNGRU, self).build(input_shape)
    # Keras may pass a list of shapes for multi-input layers; only the
    # first input feeds the GRU.
    if isinstance(input_shape, list):
        input_shape = input_shape[0]
    input_dim = input_shape[-1]

    from tensorflow.contrib.cudnn_rnn.python.ops import cudnn_rnn_ops
    self._cudnn_gru = cudnn_rnn_ops.CudnnGRU(
        num_layers=1,
        num_units=self.units,
        input_size=input_dim,
        input_mode='linear_input')

    units = self.units
    self.kernel = self.add_weight(
        shape=(input_dim, units * 3),
        name='kernel',
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint)
    self.recurrent_kernel = self.add_weight(
        shape=(units, units * 3),
        name='recurrent_kernel',
        initializer=self.recurrent_initializer,
        regularizer=self.recurrent_regularizer,
        constraint=self.recurrent_constraint)
    # cuDNN keeps separate input-side and recurrent-side biases, hence 6x.
    self.bias = self.add_weight(
        shape=(units * 6,),
        name='bias',
        initializer=self.bias_initializer,
        regularizer=self.bias_regularizer,
        constraint=self.bias_constraint)

    # Column slices per gate: [z | r | h].
    self.kernel_z = self.kernel[:, :units]
    self.kernel_r = self.kernel[:, units: units * 2]
    self.kernel_h = self.kernel[:, units * 2:]
    self.recurrent_kernel_z = self.recurrent_kernel[:, :units]
    self.recurrent_kernel_r = self.recurrent_kernel[:, units: units * 2]
    self.recurrent_kernel_h = self.recurrent_kernel[:, units * 2:]

    # Bias layout: input-side (z, r, h) then recurrent-side (z, r, h).
    self.bias_z_i = self.bias[:units]
    self.bias_r_i = self.bias[units: units * 2]
    self.bias_h_i = self.bias[units * 2: units * 3]
    self.bias_z = self.bias[units * 3: units * 4]
    self.bias_r = self.bias[units * 4: units * 5]
    self.bias_h = self.bias[units * 5:]

    self.built = True
Example #6
Source File: cudnn_recurrent.py From DeepLearning_Wavelet-LSTM with MIT License | 4 votes |
def build(self, input_shape):
    """Create this layer's weights and expose per-gate views of them.

    Allocates one fused ``kernel``, ``recurrent_kernel`` and ``bias``
    variable (gate order z, r, h along the last axis), instantiates the
    backing ``cudnn_rnn_ops.CudnnGRU`` op, and stores sliced views of
    each weight per gate for later use (e.g. weight conversion/export).
    """
    super(CuDNNGRU, self).build(input_shape)
    # Keras may pass a list of shapes for multi-input layers; only the
    # first input feeds the GRU.
    if isinstance(input_shape, list):
        input_shape = input_shape[0]
    input_dim = input_shape[-1]

    from tensorflow.contrib.cudnn_rnn.python.ops import cudnn_rnn_ops
    self._cudnn_gru = cudnn_rnn_ops.CudnnGRU(
        num_layers=1,
        num_units=self.units,
        input_size=input_dim,
        input_mode='linear_input')

    units = self.units
    self.kernel = self.add_weight(
        shape=(input_dim, units * 3),
        name='kernel',
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint)
    self.recurrent_kernel = self.add_weight(
        shape=(units, units * 3),
        name='recurrent_kernel',
        initializer=self.recurrent_initializer,
        regularizer=self.recurrent_regularizer,
        constraint=self.recurrent_constraint)
    # cuDNN keeps separate input-side and recurrent-side biases, hence 6x.
    self.bias = self.add_weight(
        shape=(units * 6,),
        name='bias',
        initializer=self.bias_initializer,
        regularizer=self.bias_regularizer,
        constraint=self.bias_constraint)

    # Column slices per gate: [z | r | h].
    self.kernel_z = self.kernel[:, :units]
    self.kernel_r = self.kernel[:, units: units * 2]
    self.kernel_h = self.kernel[:, units * 2:]
    self.recurrent_kernel_z = self.recurrent_kernel[:, :units]
    self.recurrent_kernel_r = self.recurrent_kernel[:, units: units * 2]
    self.recurrent_kernel_h = self.recurrent_kernel[:, units * 2:]

    # Bias layout: input-side (z, r, h) then recurrent-side (z, r, h).
    self.bias_z_i = self.bias[:units]
    self.bias_r_i = self.bias[units: units * 2]
    self.bias_h_i = self.bias[units * 2: units * 3]
    self.bias_z = self.bias[units * 3: units * 4]
    self.bias_r = self.bias[units * 4: units * 5]
    self.bias_h = self.bias[units * 5:]

    self.built = True
Example #7
Source File: cudnn_recurrent.py From DeepLearning_Wavelet-LSTM with MIT License | 4 votes |
def build(self, input_shape):
    """Create this layer's weights and expose per-gate views of them.

    Allocates one fused ``kernel``, ``recurrent_kernel`` and ``bias``
    variable (gate order z, r, h along the last axis), instantiates the
    backing ``cudnn_rnn_ops.CudnnGRU`` op, and stores sliced views of
    each weight per gate for later use (e.g. weight conversion/export).
    """
    super(CuDNNGRU, self).build(input_shape)
    # Keras may pass a list of shapes for multi-input layers; only the
    # first input feeds the GRU.
    if isinstance(input_shape, list):
        input_shape = input_shape[0]
    input_dim = input_shape[-1]

    from tensorflow.contrib.cudnn_rnn.python.ops import cudnn_rnn_ops
    self._cudnn_gru = cudnn_rnn_ops.CudnnGRU(
        num_layers=1,
        num_units=self.units,
        input_size=input_dim,
        input_mode='linear_input')

    units = self.units
    self.kernel = self.add_weight(
        shape=(input_dim, units * 3),
        name='kernel',
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint)
    self.recurrent_kernel = self.add_weight(
        shape=(units, units * 3),
        name='recurrent_kernel',
        initializer=self.recurrent_initializer,
        regularizer=self.recurrent_regularizer,
        constraint=self.recurrent_constraint)
    # cuDNN keeps separate input-side and recurrent-side biases, hence 6x.
    self.bias = self.add_weight(
        shape=(units * 6,),
        name='bias',
        initializer=self.bias_initializer,
        regularizer=self.bias_regularizer,
        constraint=self.bias_constraint)

    # Column slices per gate: [z | r | h].
    self.kernel_z = self.kernel[:, :units]
    self.kernel_r = self.kernel[:, units: units * 2]
    self.kernel_h = self.kernel[:, units * 2:]
    self.recurrent_kernel_z = self.recurrent_kernel[:, :units]
    self.recurrent_kernel_r = self.recurrent_kernel[:, units: units * 2]
    self.recurrent_kernel_h = self.recurrent_kernel[:, units * 2:]

    # Bias layout: input-side (z, r, h) then recurrent-side (z, r, h).
    self.bias_z_i = self.bias[:units]
    self.bias_r_i = self.bias[units: units * 2]
    self.bias_h_i = self.bias[units * 2: units * 3]
    self.bias_z = self.bias[units * 3: units * 4]
    self.bias_r = self.bias[units * 4: units * 5]
    self.bias_h = self.bias[units * 5:]

    self.built = True
Example #8
Source File: cudnn_recurrent.py From DeepLearning_Wavelet-LSTM with MIT License | 4 votes |
def build(self, input_shape):
    """Create this layer's weights and expose per-gate views of them.

    Allocates one fused ``kernel``, ``recurrent_kernel`` and ``bias``
    variable (gate order z, r, h along the last axis), instantiates the
    backing ``cudnn_rnn_ops.CudnnGRU`` op, and stores sliced views of
    each weight per gate for later use (e.g. weight conversion/export).
    """
    super(CuDNNGRU, self).build(input_shape)
    # Keras may pass a list of shapes for multi-input layers; only the
    # first input feeds the GRU.
    if isinstance(input_shape, list):
        input_shape = input_shape[0]
    input_dim = input_shape[-1]

    from tensorflow.contrib.cudnn_rnn.python.ops import cudnn_rnn_ops
    self._cudnn_gru = cudnn_rnn_ops.CudnnGRU(
        num_layers=1,
        num_units=self.units,
        input_size=input_dim,
        input_mode='linear_input')

    units = self.units
    self.kernel = self.add_weight(
        shape=(input_dim, units * 3),
        name='kernel',
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint)
    self.recurrent_kernel = self.add_weight(
        shape=(units, units * 3),
        name='recurrent_kernel',
        initializer=self.recurrent_initializer,
        regularizer=self.recurrent_regularizer,
        constraint=self.recurrent_constraint)
    # cuDNN keeps separate input-side and recurrent-side biases, hence 6x.
    self.bias = self.add_weight(
        shape=(units * 6,),
        name='bias',
        initializer=self.bias_initializer,
        regularizer=self.bias_regularizer,
        constraint=self.bias_constraint)

    # Column slices per gate: [z | r | h].
    self.kernel_z = self.kernel[:, :units]
    self.kernel_r = self.kernel[:, units: units * 2]
    self.kernel_h = self.kernel[:, units * 2:]
    self.recurrent_kernel_z = self.recurrent_kernel[:, :units]
    self.recurrent_kernel_r = self.recurrent_kernel[:, units: units * 2]
    self.recurrent_kernel_h = self.recurrent_kernel[:, units * 2:]

    # Bias layout: input-side (z, r, h) then recurrent-side (z, r, h).
    self.bias_z_i = self.bias[:units]
    self.bias_r_i = self.bias[units: units * 2]
    self.bias_h_i = self.bias[units * 2: units * 3]
    self.bias_z = self.bias[units * 3: units * 4]
    self.bias_r = self.bias[units * 4: units * 5]
    self.bias_h = self.bias[units * 5:]

    self.built = True
Example #9
Source File: cudnn_recurrent.py From DeepLearning_Wavelet-LSTM with MIT License | 4 votes |
def build(self, input_shape):
    """Create this layer's weights and expose per-gate views of them.

    Allocates one fused ``kernel``, ``recurrent_kernel`` and ``bias``
    variable (gate order z, r, h along the last axis), instantiates the
    backing ``cudnn_rnn_ops.CudnnGRU`` op, and stores sliced views of
    each weight per gate for later use (e.g. weight conversion/export).
    """
    super(CuDNNGRU, self).build(input_shape)
    # Keras may pass a list of shapes for multi-input layers; only the
    # first input feeds the GRU.
    if isinstance(input_shape, list):
        input_shape = input_shape[0]
    input_dim = input_shape[-1]

    from tensorflow.contrib.cudnn_rnn.python.ops import cudnn_rnn_ops
    self._cudnn_gru = cudnn_rnn_ops.CudnnGRU(
        num_layers=1,
        num_units=self.units,
        input_size=input_dim,
        input_mode='linear_input')

    units = self.units
    self.kernel = self.add_weight(
        shape=(input_dim, units * 3),
        name='kernel',
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint)
    self.recurrent_kernel = self.add_weight(
        shape=(units, units * 3),
        name='recurrent_kernel',
        initializer=self.recurrent_initializer,
        regularizer=self.recurrent_regularizer,
        constraint=self.recurrent_constraint)
    # cuDNN keeps separate input-side and recurrent-side biases, hence 6x.
    self.bias = self.add_weight(
        shape=(units * 6,),
        name='bias',
        initializer=self.bias_initializer,
        regularizer=self.bias_regularizer,
        constraint=self.bias_constraint)

    # Column slices per gate: [z | r | h].
    self.kernel_z = self.kernel[:, :units]
    self.kernel_r = self.kernel[:, units: units * 2]
    self.kernel_h = self.kernel[:, units * 2:]
    self.recurrent_kernel_z = self.recurrent_kernel[:, :units]
    self.recurrent_kernel_r = self.recurrent_kernel[:, units: units * 2]
    self.recurrent_kernel_h = self.recurrent_kernel[:, units * 2:]

    # Bias layout: input-side (z, r, h) then recurrent-side (z, r, h).
    self.bias_z_i = self.bias[:units]
    self.bias_r_i = self.bias[units: units * 2]
    self.bias_h_i = self.bias[units * 2: units * 3]
    self.bias_z = self.bias[units * 3: units * 4]
    self.bias_r = self.bias[units * 4: units * 5]
    self.bias_h = self.bias[units * 5:]

    self.built = True