Python tensorflow.nn.relu() Examples

The following are 3 code examples of tensorflow.nn.relu(), taken from open-source projects. Each example notes its source file, project, and license. You may also want to look at the other functions and classes available in the tensorflow.nn module.
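As a quick reference before the project examples, tensorflow.nn.relu computes the element-wise rectifier max(x, 0). The snippet below is a minimal, self-contained sketch; the input values are illustrative only:

import tensorflow as tf

x = tf.constant([-2.0, 0.0, 3.5])
y = tf.nn.relu(x)  # element-wise max(x, 0) -> [0.0, 0.0, 3.5]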
Example #1
Source File: test_mlp_classifier.py, from muffnn (BSD 3-Clause "New" or "Revised" License)
# __init__ of a test subclass whose enclosing class (not shown) extends
# muffnn's MLPClassifier; `nn` here is tensorflow.nn, so nn.relu is the
# default activation.
def __init__(self, hidden_units=(256,), batch_size=64,
             keep_prob=1.0, activation=nn.relu):
    super(MLPClassifierManyEpochs, self).__init__(
        hidden_units=hidden_units, batch_size=batch_size,
        n_epochs=100, keep_prob=keep_prob,
        activation=activation,
        random_state=42)
Example #2
Source File: test_mlp_regressor.py, from muffnn (BSD 3-Clause "New" or "Revised" License)
# __init__ of a test subclass whose enclosing class (not shown) extends
# muffnn's MLPRegressor; again `nn.relu` is the default activation.
def __init__(self, hidden_units=(256,), batch_size=64, n_epochs=5,
             keep_prob=1.0, activation=nn.relu,
             random_state=None):
    super(MLPRegressorFewerParams, self).__init__(
        hidden_units=hidden_units, batch_size=batch_size,
        n_epochs=n_epochs, keep_prob=keep_prob,
        activation=activation,
        random_state=random_state)
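Both muffnn examples above simply pass tensorflow.nn.relu through the activation keyword of a scikit-learn-style estimator. The sketch below shows how such an estimator might be used end to end; it assumes muffnn's MLPClassifier accepts the constructor arguments shown above and exposes the usual fit/predict API, and the data is synthetic and purely illustrative:

import numpy as np
import tensorflow as tf
from muffnn import MLPClassifier

# Synthetic binary classification data, for illustration only.
X = np.random.rand(100, 20).astype(np.float32)
y = (X.sum(axis=1) > 10).astype(int)

clf = MLPClassifier(hidden_units=(256,), activation=tf.nn.relu, n_epochs=5)
clf.fit(X, y)
print(clf.predict(X[:5]))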
Example #3
Source File: fractal_block.py, from FractalNet (MIT License)
def fractal_conv2d(inputs,
                   num_columns,
                   num_outputs,
                   kernel_size,
                   joined=True,
                   stride=1,
                   padding='SAME',
                   # rate=1,
                   activation_fn=nn.relu,
                   normalizer_fn=slim.batch_norm,
                   normalizer_params=None,
                   weights_initializer=initializers.xavier_initializer(),
                   weights_regularizer=None,
                   biases_initializer=None,
                   biases_regularizer=None,
                   reuse=None,
                   variables_collections=None,
                   outputs_collections=None,
                   is_training=True,
                   trainable=True,
                   scope=None):
  """Builds a fractal block with slim.conv2d.
  The fractal will have `num_columns` columns, and have
  Args:
    inputs: a 4-D tensor  `[batch_size, height, width, channels]`.
    num_columns: integer, the columns in the fractal.
  """
  locs = locals()
  fractal_args = ['inputs','num_columns','joined','is_training']
  asc_fn = lambda : slim.arg_scope([slim.conv2d],
                                   **{arg:val for (arg,val) in locs.items()
                                      if arg not in fractal_args})
  return fractal_template(inputs, num_columns, slim.conv2d, asc_fn,
                          joined, is_training, reuse, scope)
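A hedged usage sketch of the fractal block above: it assumes the rest of fractal_block.py (in particular fractal_template, slim, and initializers) is importable, and that the code runs under TF1-era graph mode, as the use of slim implies. The import path and argument values are illustrative only:

import tensorflow as tf
from fractal_block import fractal_conv2d  # hypothetical import path

# NHWC image batch placeholder (TF1 graph mode).
images = tf.placeholder(tf.float32, [None, 32, 32, 3])

# Build a 3-column fractal block of 64-channel 3x3 convolutions with
# tf.nn.relu activations; the remaining arguments keep their defaults.
net = fractal_conv2d(images,
                     num_columns=3,
                     num_outputs=64,
                     kernel_size=3,
                     activation_fn=tf.nn.relu,
                     is_training=True,
                     scope='fractal1')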