Python tensorflow.histogram_summary() Examples

The following are 30 code examples of tensorflow.histogram_summary(), drawn from open-source projects. tf.histogram_summary() belongs to the pre-1.0 TensorFlow summary API: it was deprecated in TensorFlow 0.12 and removed in TensorFlow 1.0 in favor of tf.summary.histogram(), which is why several of the examples below comment the call out or branch on the installed version.
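For orientation, here is a minimal sketch of the same summary written against both APIs; the variable w and the tag 'layer1/weights' are illustrative, not taken from any example below:

import tensorflow as tf

# Pre-1.0 API, as used throughout this page:
w = tf.Variable(tf.truncated_normal([784, 100]), name='weights')
tf.histogram_summary('layer1/weights', w)
# TensorFlow >= 1.0 equivalent (same data, renamed op):
# tf.summary.histogram('layer1/weights', w)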
Example #1
Source File: model.py    From web_page_classification with MIT License
def _activation_summary(self, x):
        """Helper to create summaries for activations.
        Creates a summary that provides a histogram of activations.
        Creates a summary that measures the sparsity of activations.
        Args:
            x: Tensor
        Returns:
            nothing
        """
        # Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
        # session. This helps the clarity of presentation on tensorboard.
        # NOTE: enabling these summaries caused high classifier error in this
        # project; TF raised "All inputs to node MergeSummary/MergeSummary
        # must be from the same frame." They are therefore left disabled.

        # tensor_name = re.sub('%s_[0-9]*/' % "tower", '', x.op.name)
        # tf.histogram_summary(tensor_name + '/activations', x)
        # tf.scalar_summary(tensor_name + '/sparsity', tf.nn.zero_fraction(x)) 
Example #2
Source File: model_deploy.py    From shuttleNet with GNU General Public License v3.0
def _add_gradients_summaries(grads_and_vars):
  """Add histogram summaries to gradients.

  Note: The summaries are also added to the SUMMARIES collection.

  Args:
    grads_and_vars: A list of gradient to variable pairs (tuples).

  Returns:
    The _list_ of the added summaries for grads_and_vars.
  """
  summaries = []
  for grad, var in grads_and_vars:
    if grad is not None:
      if isinstance(grad, tf.IndexedSlices):
        grad_values = grad.values
      else:
        grad_values = grad
      summaries.append(tf.histogram_summary(var.op.name + ':gradient',
                                            grad_values))
      summaries.append(tf.histogram_summary(var.op.name + ':gradient_norm',
                                            tf.global_norm([grad_values])))
    else:
      tf.logging.info('Var %s has no gradient', var.op.name)
  return summaries 
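A minimal usage sketch for _add_gradients_summaries(), assuming a pre-1.0 TensorFlow graph; the variable, loss, and optimizer here are hypothetical stand-ins:

import tensorflow as tf

x = tf.Variable(tf.ones([10]), name='x')
loss = tf.reduce_sum(tf.square(x))
optimizer = tf.train.GradientDescentOptimizer(0.1)
grads_and_vars = optimizer.compute_gradients(loss)
# Registers one histogram per gradient and one per gradient norm:
summaries = _add_gradients_summaries(grads_and_vars)
train_op = optimizer.apply_gradients(grads_and_vars)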
Example #3
Source File: tf_networks.py    From learn_prox_ops with GNU General Public License v3.0
def init_summaries(self):
        """
        Initialize summaries for TensorBoard.
        """
        # train
        # for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES):
        #    tf.histogram_summary(v.name, v, collections=['train'], name='variables')
        tf.summary.scalar('LOSS/batch_train_loss', self.loss, collections=['train'])
        if hasattr(self, 'learning_rate'):
            tf.summary.scalar('learning_rate', self.learning_rate, collections=['train'])

        # test
        for v in tf.get_collection('moving_avgs'):
            tf.summary.histogram(v.name, v, collections=['test'], name='moving_avgs')

        # images
        nb_imgs = 3
        tf.summary.image('data', self.data, max_outputs=nb_imgs, collections=['images'])
        tf.summary.image('output', self.output_clipped, max_outputs=nb_imgs, collections=['images'])
        tf.summary.image('label', self.labels, max_outputs=nb_imgs, collections=['images']) 
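Because init_summaries() registers every summary under a named collection ('train', 'test', 'images') rather than the default tf.GraphKeys.SUMMARIES, each group has to be merged by key before it can be run. A sketch of that step, assuming TensorFlow >= 1.0 to match the tf.summary calls above; the op names are illustrative:

train_summary_op = tf.summary.merge_all(key='train')
test_summary_op = tf.summary.merge_all(key='test')
image_summary_op = tf.summary.merge_all(key='images')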
Example #4
Source File: tfbasemodel.py    From Supply-demand-forecasting with MIT License
def nn_layer_(self,input_tensor, input_dim, output_dim, layer_name, act=tf.nn.relu):
        """Reusable code for making a simple neural net layer.
        It does a matrix multiply, a bias add, and then applies a nonlinearity
        (ReLU by default).
        It also sets up name scoping so that the resultant graph is easy to read,
        and adds a number of summary ops.
        """
        # Adding a name scope ensures logical grouping of the layers in the graph.
        with tf.name_scope(layer_name):
            with tf.name_scope('weights'):
                weights = self.weight_variable([input_dim, output_dim])
                self.variable_summaries(weights, layer_name + '/weights')
            with tf.name_scope('biases'):
                biases = self.bias_variable([output_dim])
                self.variable_summaries(biases, layer_name + '/biases')
            with tf.name_scope('Wx_plus_b'):
                preactivate = tf.matmul(input_tensor, weights) + biases
                tf.histogram_summary(layer_name + '/pre_activations', preactivate)               
            activations = act(preactivate, 'activation')
            tf.histogram_summary(layer_name + '/activations', activations)
            
        return activations 
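A hypothetical two-layer use of nn_layer_(), following the MNIST-tutorial pattern this helper derives from; the sizes, layer names, and x are illustrative:

hidden1 = self.nn_layer_(x, 784, 500, 'layer1')
logits = self.nn_layer_(hidden1, 500, 10, 'layer2', act=tf.identity)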
Example #5
Source File: neural_network.py    From sentiment-analysis-tensorflow with Apache License 2.0
def variable_summaries(var, name):
        """
        Attach a lot of summaries to a Tensor for TensorBoard visualization.
        Ref: https://www.tensorflow.org/versions/r0.11/how_tos/summaries_and_tensorboard/index.html
        :param var: Variable to summarize
        :param name: Summary name
        """
        with tf.name_scope('summaries'):
            mean = tf.reduce_mean(var)
            tf.scalar_summary('mean/' + name, mean)
            with tf.name_scope('stddev'):
                stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))
            tf.scalar_summary('stddev/' + name, stddev)
            tf.scalar_summary('max/' + name, tf.reduce_max(var))
            tf.scalar_summary('min/' + name, tf.reduce_min(var))
            tf.histogram_summary(name, var) 
Example #6
Source File: cifar10.py    From deep_image_model with Apache License 2.0
def _activation_summary(x):
  """Helper to create summaries for activations.

  Creates a summary that provides a histogram of activations.
  Creates a summary that measures the sparsity of activations.

  Args:
    x: Tensor
  Returns:
    nothing
  """
  # Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
  # session. This helps the clarity of presentation on tensorboard.
  tensor_name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', x.op.name)
  tf.histogram_summary(tensor_name + '/activations', x)
  tf.scalar_summary(tensor_name + '/sparsity', tf.nn.zero_fraction(x)) 
Example #7
Source File: LSPModels.py    From deeppose with GNU General Public License v3.0
def _activation_summary(x):
    """Helper to create summaries for activations.
    
    Creates a summary that provides a histogram of activations.
    Creates a summary that measures the sparsity of activations.
    
    Args:
      x: Tensor
    Returns:
      nothing
    """
    # Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
    # session. This helps the clarity of presentation on tensorboard.
    tensor_name = re.sub('%s_[0-9]*/' % LSPGlobals.TOWER_NAME, '', x.op.name)
    tf.histogram_summary(tensor_name + '/activations', x)
    tf.scalar_summary(tensor_name + '/sparsity', tf.nn.zero_fraction(x)) 
Example #8
Source File: convNet.py    From adascan-public with GNU General Public License v3.0
def fc_layer(self, bottom, name):
        with tf.variable_scope(name) as scope:
            shape = bottom.get_shape().as_list()
            dim = 1
            for d in shape[1:]:
                dim *= d
            x = tf.reshape(bottom, [-1, dim])

            with tf.device('/cpu:0'):
                weights = self.get_fc_weight(name)
                biases = self.get_fc_bias(name)

            # Fully connected layer; tf.nn.bias_add broadcasts the biases
            # across the batch dimension.
            fc = tf.nn.bias_add(tf.matmul(x, weights), biases)
            #tf.histogram_summary('adascan/'+name+'_activations', fc)
            #tf.histogram_summary('adascan/'+name+'_weights', weights)
            scope.reuse_variables()
            return fc 
Example #9
Source File: model_deploy.py    From Action_Recognition_Zoo with MIT License
def _add_gradients_summaries(grads_and_vars):
  """Add histogram summaries to gradients.

  Note: The summaries are also added to the SUMMARIES collection.

  Args:
    grads_and_vars: A list of gradient to variable pairs (tuples).

  Returns:
    The _list_ of the added summaries for grads_and_vars.
  """
  summaries = []
  for grad, var in grads_and_vars:
    if grad is not None:
      if isinstance(grad, tf.IndexedSlices):
        grad_values = grad.values
      else:
        grad_values = grad
      summaries.append(tf.histogram_summary(var.op.name + ':gradient',
                                            grad_values))
      summaries.append(tf.histogram_summary(var.op.name + ':gradient_norm',
                                            tf.global_norm([grad_values])))
    else:
      tf.logging.info('Var %s has no gradient', var.op.name)
  return summaries 
Example #10
Source File: mnist_with_summary.py    From MachineLearning with Apache License 2.0
def nn_layer(input_tensor, input_dim, output_dim, layer_name, act=tf.nn.relu):
  """Reusable code for making a simple neural net layer.
    
  It does a matrix multiply, a bias add, and then applies a nonlinearity
  (ReLU by default).
  It also sets up name scoping so that the resultant graph is easy to read,
  and adds a number of summary ops.
  """
  # Adding a name scope ensures logical grouping of the layers in the graph.
  with tf.name_scope(layer_name):
    # This Variable will hold the state of the weights for the layer
    with tf.name_scope('weights'):
      weights = weight_variable([input_dim, output_dim])
      variable_summaries(weights, layer_name + '/weights')
    with tf.name_scope('biases'):
      biases = bias_variable([output_dim])
      variable_summaries(biases, layer_name + '/biases')
    with tf.name_scope('Wx_plus_b'):
      preactivate = tf.matmul(input_tensor, weights) + biases
      tf.histogram_summary(layer_name + '/pre_activations', preactivate)
    activations = act(preactivate, 'activation')
    tf.histogram_summary(layer_name + '/activations', activations)
    return activations 
Example #11
Source File: ranknet.py    From tfranknet with GNU General Public License v2.0
def _setup_training(self):
        """
        Set up a data flow graph for fine tuning
        """
        layer_num = self.layer_num
        act_func = ACTIVATE_FUNC[self.activate_func]
        sigma = self.sigma
        lr = self.learning_rate
        weights = self.weights
        biases = self.biases
        data1, data2 = self.data1, self.data2
        batch_size = self.batch_size
        optimizer = OPTIMIZER[self.optimizer]
        with tf.name_scope("training"):
            s1 = self._obtain_score(data1, weights, biases, act_func, "1")
            s2 = self._obtain_score(data2, weights, biases, act_func, "2")
            with tf.name_scope("cost"):
                sum_cost = tf.reduce_sum(tf.log(1 + tf.exp(-sigma*(s1-s2))))
                self.cost = cost = sum_cost / batch_size
        self.optimize = optimizer(lr).minimize(cost)

        for n in range(layer_num-1):
            tf.histogram_summary("weight"+str(n), weights[n])
            tf.histogram_summary("bias"+str(n), biases[n])
        tf.scalar_summary("cost", cost) 
Example #12
Source File: cifar10.py    From ml with Apache License 2.0
def _activation_summary(x):
  """Helper to create summaries for activations.

  Creates a summary that provides a histogram of activations.
  Creates a summary that measures the sparsity of activations.

  Args:
    x: Tensor
  Returns:
    nothing
  """
  # Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
  # session. This helps the clarity of presentation on tensorboard.
  tensor_name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', x.op.name)
  # tf.histogram_summary(tensor_name + '/activations', x)
  tf.summary.histogram(tensor_name + '/activations', x)
  # tf.scalar_summary(tensor_name + '/sparsity', tf.nn.zero_fraction(x))
  tf.summary.scalar(tensor_name + '/sparsity', tf.nn.zero_fraction(x)) 
Example #13
Source File: cifar10.py    From ml with Apache License 2.0
def _activation_summary(x):
  """Helper to create summaries for activations.

  Creates a summary that provides a histogram of activations.
  Creates a summary that measures the sparsity of activations.

  Args:
    x: Tensor
  Returns:
    nothing
  """
  # Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
  # session. This helps the clarity of presentation on tensorboard.
  tensor_name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', x.op.name)
  # tf.histogram_summary(tensor_name + '/activations', x)
  tf.summary.histogram(tensor_name + '/activations', x)
  # tf.scalar_summary(tensor_name + '/sparsity', tf.nn.zero_fraction(x))
  tf.summary.scalar(tensor_name + '/sparsity', tf.nn.zero_fraction(x)) 
Example #14
Source File: trainer.py    From StackGAN with MIT License
def define_summaries(self):
        '''Helper function for init_opt'''
        all_sum = {'g': [], 'd': [], 'hr_g': [], 'hr_d': [], 'hist': []}
        for k, v in self.log_vars:
            if k.startswith('g'):
                all_sum['g'].append(tf.scalar_summary(k, v))
            elif k.startswith('d'):
                all_sum['d'].append(tf.scalar_summary(k, v))
            elif k.startswith('hr_g'):
                all_sum['hr_g'].append(tf.scalar_summary(k, v))
            elif k.startswith('hr_d'):
                all_sum['hr_d'].append(tf.scalar_summary(k, v))
            elif k.startswith('hist'):
                all_sum['hist'].append(tf.histogram_summary(k, v))

        self.g_sum = tf.merge_summary(all_sum['g'])
        self.d_sum = tf.merge_summary(all_sum['d'])
        self.hr_g_sum = tf.merge_summary(all_sum['hr_g'])
        self.hr_d_sum = tf.merge_summary(all_sum['hr_d'])
        self.hist_sum = tf.merge_summary(all_sum['hist']) 
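Every op used in define_summaries() was renamed in TensorFlow 1.0 while keeping the same semantics, so the grouping logic carries over unchanged. The correspondence, for reference:

# tf.scalar_summary(k, v)    -> tf.summary.scalar(k, v)
# tf.histogram_summary(k, v) -> tf.summary.histogram(k, v)
# tf.merge_summary(inputs)   -> tf.summary.merge(inputs)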
Example #15
Source File: clock_model.py    From deep-time-reading with MIT License
def _activation_summary(x):
    """Helper to create summaries for activations.

    Creates a summary that provides a histogram of activations.
    Creates a summary that measures the sparsity of activations.

    Args:
      x: Tensor
    Returns:
      nothing
    """
    # Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
    # session. This helps the clarity of presentation on tensorboard.
    tensor_name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', x.op.name)
    tf.histogram_summary(tensor_name + '/activations', x)
    tf.scalar_summary(tensor_name + '/sparsity', tf.nn.zero_fraction(x)) 
Example #16
Source File: model_deploy.py    From ECO-pytorch with BSD 2-Clause "Simplified" License
def _add_gradients_summaries(grads_and_vars):
  """Add histogram summaries to gradients.

  Note: The summaries are also added to the SUMMARIES collection.

  Args:
    grads_and_vars: A list of gradient to variable pairs (tuples).

  Returns:
    The _list_ of the added summaries for grads_and_vars.
  """
  summaries = []
  for grad, var in grads_and_vars:
    if grad is not None:
      if isinstance(grad, tf.IndexedSlices):
        grad_values = grad.values
      else:
        grad_values = grad
      summaries.append(tf.histogram_summary(var.op.name + ':gradient',
                                            grad_values))
      summaries.append(tf.histogram_summary(var.op.name + ':gradient_norm',
                                            tf.global_norm([grad_values])))
    else:
      tf.logging.info('Var %s has no gradient', var.op.name)
  return summaries 
Example #17
Source File: deeplab_v3plus_model.py    From SketchySceneColorization with MIT License
def _decay(self):
        """L2 weight decay loss."""
        costs = []
        for var in tf.trainable_variables():
            if var.op.name.find(r'weights') > 0:
                costs.append(tf.nn.l2_loss(var))
                # tf.histogram_summary(var.op.name, var)

        return tf.multiply(self.weight_decay_rate, tf.add_n(costs)) 
Example #18
Source File: train_image_classifier.py    From Action_Recognition_Zoo with MIT License
def _add_variables_summaries(learning_rate):
  summaries = []
  for variable in slim.get_model_variables():
    summaries.append(tf.histogram_summary(variable.op.name, variable))
  summaries.append(tf.scalar_summary('training/Learning Rate', learning_rate))
  return summaries 
Example #19
Source File: resnet_model_basic.py    From DualLearning with MIT License
def GetWeightDecay(self):
    """L2 weight decay loss."""
    costs = []
    for var in self.trainable_variables:
      if var.op.name.find(r'DW') > 0:
        costs.append(tf.nn.l2_loss(var))
        # tf.histogram_summary(var.op.name, var)

    return tf.mul(self.hps.weight_decay_rate, tf.add_n(costs)) 
Example #20
Source File: resnet_model.py    From Action_Recognition_Zoo with MIT License
def _decay(self):
    """L2 weight decay loss."""
    costs = []
    for var in tf.trainable_variables():
      if var.op.name.find(r'DW') > 0:
        costs.append(tf.nn.l2_loss(var))
        # tf.histogram_summary(var.op.name, var)

    return tf.mul(self.hps.weight_decay_rate, tf.add_n(costs)) 
Example #21
Source File: model.py    From personalized-dialog with MIT License
def _init_summaries(self):
        self.accuracy = tf.placeholder_with_default(0.0, shape=(), name='Accuracy')
        self.accuracy_summary = tf.scalar_summary('Accuracy summary', self.accuracy)

        self.f_pos_summary = tf.histogram_summary('f_pos', self.f_pos)
        self.f_neg_summary = tf.histogram_summary('f_neg', self.f_neg)

        self.loss_summary = tf.scalar_summary('Mini-batch loss', self.loss)
        self.summary_op = tf.merge_summary(
            [
                self.f_pos_summary,
                self.f_neg_summary,
                self.loss_summary
            ]
        ) 
Example #22
Source File: resnet_model_basic.py    From DualLearning with MIT License
def _decay(self):
    """L2 weight decay loss."""
    costs = []
    for var in tf.trainable_variables():
      if var.op.name.find(r'DW') > 0:
        costs.append(tf.nn.l2_loss(var))
        # tf.histogram_summary(var.op.name, var)

    return tf.multiply(self.hps.weight_decay_rate, tf.add_n(costs)) 
Example #23
Source File: segnet_model.py    From SketchySceneColorization with MIT License
def _decay(self):
        """L2 weight decay loss."""
        costs = []
        for var in tf.trainable_variables():
            if var.op.name.find(r'DW') > 0:
                costs.append(tf.nn.l2_loss(var))
                # tf.histogram_summary(var.op.name, var)

        return tf.multiply(self.weight_decay_rate, tf.add_n(costs)) 
Example #24
Source File: fcn8s_model.py    From SketchySceneColorization with MIT License
def _decay(self):
        """L2 weight decay loss."""
        costs = []
        for var in tf.trainable_variables():
            if var.op.name.find(r'DW') > 0:
                costs.append(tf.nn.l2_loss(var))
                # tf.histogram_summary(var.op.name, var)

        return tf.multiply(self.weight_decay_rate, tf.add_n(costs)) 
Example #25
Source File: deeplab_model.py    From SketchySceneColorization with MIT License
def _decay(self):
        """L2 weight decay loss."""
        costs = []
        for var in tf.trainable_variables():
            if var.op.name.find(r'DW') > 0:
                costs.append(tf.nn.l2_loss(var))
                # tf.histogram_summary(var.op.name, var)

        return tf.multiply(self.weight_decay_rate, tf.add_n(costs)) 
Example #26
Source File: TensorflowUtils_plus.py    From AutoPortraitMatting with Apache License 2.0
def add_activation_summary(var):
    if var is not None:
        tf.histogram_summary(var.op.name + "/activation", var)
        tf.scalar_summary(var.op.name + "/sparsity", tf.nn.zero_fraction(var)) 
Example #27
Source File: retrain.py    From Tensorflow_Object_Tracking_Video with MIT License
def variable_summaries(var, name):
  """Attach a lot of summaries to a Tensor (for TensorBoard visualization)."""
  with tf.name_scope('summaries'):
    mean = tf.reduce_mean(var)
    # scalar_summary is presumably aliased elsewhere in retrain.py to the
    # version-appropriate op (tf.scalar_summary or tf.summary.scalar).
    scalar_summary('mean/' + name, mean)
    with tf.name_scope('stddev'):
      # Use the mean (not the sum) of squared deviations for a true stddev.
      stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))
    scalar_summary('stddev/' + name, stddev)
    scalar_summary('max/' + name, tf.reduce_max(var))
    scalar_summary('min/' + name, tf.reduce_min(var))
    if int(tf.__version__.split(".")[0]) < 1:  # for tf < 1.0
      tf.histogram_summary(name, var)
    else:  # for tf >= 1.0
      tf.summary.histogram(name, var)
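The per-call version branch above can be hoisted into a single alias so the rest of the file reads uniformly; a sketch assuming only that tf.__version__ begins with the major version number:

if int(tf.__version__.split('.')[0]) < 1:
    histogram_summary = tf.histogram_summary   # tf < 1.0
else:
    histogram_summary = tf.summary.histogram   # tf >= 1.0

histogram_summary(name, var)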
Example #28
Source File: q_network.py    From agent-trainer with MIT License
def _activation_summary(self, tensor):
        tensor_name = tensor.op.name
        tf.histogram_summary(tensor_name + '/activations', tensor)
        tf.scalar_summary(tensor_name + '/sparsity', tf.nn.zero_fraction(tensor)) 
Example #29
Source File: utils.py    From WassersteinGAN.tensorflow with MIT License
def add_gradient_summary(grad, var):
    if grad is not None:
        tf.histogram_summary(var.op.name + "/gradient", grad) 
Example #30
Source File: utils.py    From WassersteinGAN.tensorflow with MIT License 5 votes vote down vote up
def add_activation_summary(var):
    tf.histogram_summary(var.op.name + "/activation", var)
    tf.scalar_summary(var.op.name + "/sparsity", tf.nn.zero_fraction(var))
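For comparison, a TensorFlow >= 1.0 rendering of this last helper; a sketch, not part of the WassersteinGAN.tensorflow source:

def add_activation_summary(var):
    tf.summary.histogram(var.op.name + "/activation", var)
    tf.summary.scalar(var.op.name + "/sparsity", tf.nn.zero_fraction(var))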