Python tensorflow.summary() Examples

The following are 30 code examples of tensorflow.summary(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module tensorflow, or try the search function.
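Before diving into the examples, here is a minimal sketch of the TF 2.x tf.summary workflow that most of the snippets below revolve around. The log directory and the logged value are illustrative placeholders, not taken from any of the projects.

import tensorflow as tf

# Create a writer for a (placeholder) log directory and record one scalar per step.
writer = tf.summary.create_file_writer("logs/demo")
with writer.as_default():
    for step in range(100):
        loss = 1.0 / (step + 1)  # stand-in value for a real training metric
        tf.summary.scalar("loss", loss, step=step)
writer.flush()

In TF 1.x, the same module instead exposes graph-building ops such as tf.summary.scalar and tf.summary.merge_all together with tf.summary.FileWriter, which several of the examples below use.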
Example #1
Source File: tf_summary_test.py    From tensorboard with Apache License 2.0
def test_tf_summary_export(self):
        # Ensure that TF wasn't already imported, since we want this test to cover
        # the entire flow of "import tensorflow; use tf.summary" and if TF was in
        # fact already imported that reduces the comprehensiveness of the test.
        # This means this test has to be kept in its own file and that no other
        # test methods in this file should import tensorflow.
        self.assertEqual("notfound", sys.modules.get("tensorflow", "notfound"))
        import tensorflow as tf

        if not tf.__version__.startswith("2."):
            if hasattr(tf, "compat") and hasattr(tf.compat, "v2"):
                tf = tf.compat.v2
            else:
                self.skipTest("TF v2 summary API not available")
        # Check that tf.summary contains both TB-provided and TF-provided symbols.
        expected_symbols = frozenset(
            ["scalar", "image", "audio", "histogram", "text"]
            + ["write", "create_file_writer", "SummaryWriter"]
        )
        self.assertLessEqual(expected_symbols, frozenset(dir(tf.summary)))
        # Ensure we can dereference symbols as well.
        print(tf.summary.scalar)
        print(tf.summary.write) 
Example #2
Source File: kerascallback.py    From delve with MIT License
def on_epoch_begin(self, epoch, logs=None):
        """Add user-def. op to Model eval_function callbacks, reset batch count."""

        # check if histogram summary should be run for this epoch
        if self.user_defined_freq and epoch % self.user_defined_freq == 0:
            self._epoch = epoch
            # pylint: disable=protected-access
            # add the user-defined summary ops if it should run this epoch
            self.model._make_eval_function()
            if self.merged not in self.model._eval_function.fetches:
                self.model._eval_function.fetches.append(self.merged)
                self.model._eval_function.fetch_callbacks[
                    self.merged] = self._fetch_callback
            # pylint: enable=protected-access

        super(CustomTensorBoard, self).on_epoch_begin(epoch, logs=None) 
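Example #2 reaches into Keras internals to run histogram summaries only on selected epochs. For comparison, here is a minimal sketch of the built-in tf.keras.callbacks.TensorBoard callback, whose histogram_freq argument provides similar periodic histogram logging (log directory, model, and frequency here are illustrative assumptions):

import tensorflow as tf

# Built-in callback: write weight histograms every 2 epochs to a placeholder log dir.
model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
model.compile(optimizer="adam", loss="mse")
tb_callback = tf.keras.callbacks.TensorBoard(log_dir="logs/keras", histogram_freq=2)
# model.fit(x_train, y_train, epochs=10, callbacks=[tb_callback])  # x_train/y_train assumed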
Example #3
Source File: component.py    From rlgraph with Apache License 2.0
def propagate_summary(self, summary_key):
        """
        Propagates a single summary op of this Component to its parents' summaries registries.

        Args:
            summary_key (str): The lookup key for the summary to propagate.
        """
        # Return if there is no parent.
        if self.parent_component is None:
            return

        # If already there -> Error.
        if summary_key in self.parent_component.summaries:
            raise RLGraphError("ERROR: Summary registry of '{}' already has a summary under key '{}'!".
                               format(self.parent_component.name, summary_key))
        self.parent_component.summaries[summary_key] = self.summaries[summary_key]

        # Recurse up the container hierarchy.
        self.parent_component.propagate_summary(summary_key) 
Example #4
Source File: layers.py    From PADME with MIT License
def set_summary(self, summary_op, summary_description=None, collections=None):
    """Annotates a tensor with a tf.summary operation

    This causes self.out_tensor to be logged to Tensorboard.

    Parameters
    ----------
    summary_op: str
      summary operation to annotate node
    summary_description: object, optional
      Optional summary_pb2.SummaryDescription()
    collections: list of graph collections keys, optional
      New summary op is added to these collections. Defaults to [GraphKeys.SUMMARIES]
    """
    supported_ops = {'tensor_summary', 'scalar', 'histogram'}
    if summary_op not in supported_ops:
      raise ValueError(
          "Invalid summary_op arg. Only 'tensor_summary', 'scalar', 'histogram' supported"
      )
    self.summary_op = summary_op
    self.summary_description = summary_description
    self.collections = collections
    self.tensorboard = True 
Example #5
Source File: layers.py    From PADME with MIT License
def add_summary_to_tg(self, tb_input=None):
    """
    Create the summary operation for this layer, if set_summary() has been called on it.
    Can only be called after self.create_layer to guarantee that name is not None.

    Parameters
    ----------
    tb_input: tensor
      the tensor to log to TensorBoard. If None, self.out_tensor is used.
    """
    if not self.tensorboard:
      return
    if tb_input is None:
      tb_input = self.out_tensor
    if self.summary_op == "tensor_summary":
      tf.summary.tensor_summary(self.name, tb_input, self.summary_description, 
                                self.collections)
    elif self.summary_op == 'scalar':
      tf.summary.scalar(self.name, tb_input, self.collections)
    elif self.summary_op == 'histogram':
      tf.summary.histogram(self.name, tb_input, self.collections) 
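Examples #4 and #5 belong together: set_summary() records what to log and add_summary_to_tg() emits the actual op. Here is a hedged sketch of how a caller might use them in a TF 1.x graph (the layer object and its tensor are hypothetical):

# Hypothetical usage of the two PADME layer methods above (TF 1.x graph context).
layer.set_summary('histogram')      # annotate the layer; raises ValueError for unsupported ops
layer.add_summary_to_tg()           # emits tf.summary.histogram(layer.name, layer.out_tensor, ...)
merged = tf.summary.merge_all()     # later evaluated and written via a tf.summary.FileWriter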
Example #6
Source File: transformer_vae.py    From BERT with Apache License 2.0
def estimator_spec_eval(self, features, logits, labels, loss, losses_dict):
    """Constructs `tf.estimator.EstimatorSpec` for EVAL (evaluation) mode."""
    estimator_spec = super(TransformerAE, self).estimator_spec_eval(
        features, logits, labels, loss, losses_dict)
    if common_layers.is_xla_compiled():
      # For TPUs (and XLA more broadly?), do not add summary hooks that depend
      # on losses; they are not supported.
      return estimator_spec

    summary_op = tf.get_collection(tf.GraphKeys.SUMMARIES, scope="losses")
    summary_op.extend(tf.get_collection(tf.GraphKeys.SUMMARIES, scope="loss"))
    summary_op.append(tf.summary.scalar("loss", loss))
    summary_saver_hook = tf.train.SummarySaverHook(
        save_steps=100,
        summary_op=summary_op,
        output_dir=os.path.join(self.hparams.model_dir, "eval"))

    hooks = list(estimator_spec.evaluation_hooks)
    hooks.append(summary_saver_hook)
    return estimator_spec._replace(evaluation_hooks=hooks) 
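Outside of the Estimator subclass above, the same hook can be constructed directly. A stripped-down TF 1.x sketch (output directory and save interval are arbitrary):

# Save all merged summaries every 100 steps to a placeholder directory.
summary_hook = tf.train.SummarySaverHook(
    save_steps=100,
    output_dir="model_dir/eval",
    summary_op=tf.summary.merge_all())
# estimator.train(input_fn, hooks=[summary_hook])  # hypothetical estimator and input_fn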
Example #7
Source File: summary_util.py    From guildai with Apache License 2.0
def _try_listen_tf_v1(self):
        if not _tf_version().startswith("1."):
            raise util.TryFailed()
        try:
            # pylint: disable=import-error,no-name-in-module
            from tensorflow.compat.v1.summary import FileWriter
        except Exception as e:
            self.log.debug(
                "error importing tensorflow.compat.v1.summary.FileWriter: %s", e
            )
            raise util.TryFailed()
        else:
            self.log.debug(
                "wrapping tensorflow.compat.v1.summary.FileWriter.add_summary"
            )
            python_util.listen_method(FileWriter, "add_summary", self._handle_summary) 
Example #8
Source File: t2t_model.py    From BERT with Apache License 2.0
def summarize_features(features, num_shards=1):
  """Generate summaries for features."""
  if not common_layers.should_generate_summaries():
    return

  with tf.name_scope("input_stats"):
    for (k, v) in sorted(six.iteritems(features)):
      if (isinstance(v, tf.Tensor) and (v.get_shape().ndims > 1) and
          (v.dtype != tf.string)):
        tf.summary.scalar("%s_batch" % k, tf.shape(v)[0] // num_shards)
        tf.summary.scalar("%s_length" % k, tf.shape(v)[1])
        nonpadding = tf.to_float(tf.not_equal(v, 0))
        nonpadding_tokens = tf.reduce_sum(nonpadding)
        tf.summary.scalar("%s_nonpadding_tokens" % k, nonpadding_tokens)
        tf.summary.scalar("%s_nonpadding_fraction" % k,
                          tf.reduce_mean(nonpadding)) 
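The same per-feature statistics can be expressed with the TF 2.x summary API. A rough sketch, assuming `features` is a dict of batched tensors and an eager context (the writer path is a placeholder):

import tensorflow as tf

def log_feature_stats(features, logdir="logs/input_stats"):
    # TF 2.x sketch analogous to summarize_features() above.
    writer = tf.summary.create_file_writer(logdir)
    with writer.as_default():
        for name, v in sorted(features.items()):
            if isinstance(v, tf.Tensor) and len(v.shape) > 1 and v.dtype != tf.string:
                tf.summary.scalar("%s_batch" % name, tf.shape(v)[0], step=0)
                tf.summary.scalar("%s_length" % name, tf.shape(v)[1], step=0)
                nonpadding = tf.cast(tf.not_equal(v, 0), tf.float32)
                tf.summary.scalar("%s_nonpadding_fraction" % name, tf.reduce_mean(nonpadding), step=0)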
Example #9
Source File: base_algorithm.py    From ULTRA with Apache License 2.0
def step(self, session, input_feed, forward_only):
        """Run a step of the model feeding the given inputs.

        Args:
            session: (tf.Session) tensorflow session to use.
            input_feed: (dictionary) A dictionary containing all the input feed data.
            forward_only: whether to do the backward step (False) or only forward (True).

        Returns:
            A triple consisting of the loss, outputs (None if we do backward),
            and a tf.summary containing related information about the step.

        """
        pass 
Example #10
Source File: model.py    From vae-seq with Apache License 2.0
def _make_output_summary(self, tag, observed):
        """Returns a tf.summary to display this sequence..""" 
Example #11
Source File: profiling.py    From seed_rl with Apache License 2.0
def __exit__(self, exc_type, exc_value, traceback):
    self.elapsed_s = time.time() - self.start_time_s
    aggregator = self.aggregators[self.summary_name]
    aggregator.add(self.elapsed_s)
    if aggregator.count >= self.aggregation_window_size:
      tf.summary.scalar(self.summary_name, aggregator.average())
      aggregator.reset() 
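Example #11 shows only the __exit__ of a timing context manager. Here is a self-contained sketch of the same idea, assuming a default summary writer and step have already been set elsewhere (class and attribute names are assumptions, not the seed_rl implementation):

import time
import tensorflow as tf

class SummaryTimer:
    # Times a code block and periodically logs the mean elapsed seconds.
    def __init__(self, name, window=100):
        self.name, self.window = name, window
        self.total, self.count = 0.0, 0

    def __enter__(self):
        self.start = time.time()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.total += time.time() - self.start
        self.count += 1
        if self.count >= self.window:
            # Requires an active default writer (writer.as_default()) and a default
            # step, e.g. set via tf.summary.experimental.set_step(step).
            tf.summary.scalar(self.name, self.total / self.count)
            self.total, self.count = 0.0, 0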
Example #12
Source File: utils.py    From dynamic-training-bench with Mozilla Public License 2.0
def tf_log(summary, collection=SCALAR_SUMMARIES):
    """Add tf.summary object to collection named collection"""
    tf.add_to_collection(collection, summary) 
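A sketch of how such a collection can later be merged and evaluated in TF 1.x, reusing the SCALAR_SUMMARIES name from the example (the accuracy tensor and the session code are hypothetical):

# Register a scalar summary in the named collection, then merge the whole collection.
acc_summary = tf.summary.scalar("accuracy", accuracy)   # `accuracy` tensor assumed to exist
tf_log(acc_summary)                                      # adds it to SCALAR_SUMMARIES
merged = tf.summary.merge(tf.get_collection(SCALAR_SUMMARIES))
# summary_str = sess.run(merged); writer.add_summary(summary_str, step)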
Example #13
Source File: regression_EM.py    From ULTRA with Apache License 2.0
def step(self, session, input_feed, forward_only):
        """Run a step of the model feeding the given inputs.

        Args:
            session: (tf.Session) tensorflow session to use.
            input_feed: (dictionary) A dictionary containing all the input feed data.
            forward_only: whether to do the backward step (False) or only forward (True).

        Returns:
            A triple consisting of the loss, outputs (None if we do backward),
            and a tf.summary containing related information about the step.

        """

        # Output feed: depends on whether we do a backward step or not.
        if not forward_only:
            input_feed[self.is_training.name] = True
            output_feed = [
                self.updates,    # Update Op that does SGD.
                self.loss,    # Loss for this batch.
                self.update_propensity_op,
                self.train_summary  # Summarize statistics.
            ]
        else:
            input_feed[self.is_training.name] = False
            output_feed = [
                self.eval_summary,  # Summarize statistics.
                self.output   # Model outputs
            ]

        outputs = session.run(output_feed, input_feed)
        if not forward_only:
            # loss, no outputs, summary.
            return outputs[1], None, outputs[-1]
        else:
            return None, outputs[1], outputs[0]    # loss, outputs, summary. 
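Example #13 and the other step() implementations below share the same calling convention. A hedged sketch of a TF 1.x driver loop that consumes the returned serialized summary (the writer path, model object, and feed construction are hypothetical):

# Hypothetical training loop around the ULTRA-style step() API.
writer = tf.summary.FileWriter("logs/train", session.graph)
for global_step in range(10000):
    input_feed = build_input_feed()   # assumed helper producing the feed dict
    loss, _, summary = model.step(session, input_feed, forward_only=False)
    writer.add_summary(summary, global_step)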
Example #14
Source File: nsgd.py    From ULTRA with Apache License 2.0
def step(self, session, input_feed, forward_only):
        """Run a step of the model feeding the given inputs.

        Args:
            session: (tf.Session) tensorflow session to use.
            input_feed: (dictionary) A dictionary containing all the input feed data.
            forward_only: whether to do the backward step (False) or only forward (True).

        Returns:
            A triple consisting of the loss, outputs (None if we do backward),
            and a tf.summary containing related information about the step.

        """
        #print ("!!!!!!!!!!!!!", tf.shape(self.new_output))

        if not forward_only:
            input_feed[self.is_training.name] = True
            output_feed = [
                self.updates,    # Update Op that does SGD.
                self.loss,    # Loss for this batch.
                self.train_summary  # Summarize statistics.
            ] + self.update_ops_list
            outputs = session.run(output_feed, input_feed)
            # loss, no outputs, summary.
            return outputs[1], None, outputs[2]
        else:
            input_feed[self.is_training.name] = False
            output_feed = [
                self.eval_summary,  # Summarize statistics.
                self.output   # Model outputs
            ]
            outputs = session.run(output_feed, input_feed)
            return None, outputs[1], outputs[0]    # loss, outputs, summary. 
Example #15
Source File: SuperResolution.py    From VideoSuperResolution with MIT License
def summary(self):
    return tf.get_default_session().run(self.summary_op,
                                        feed_dict=self.feed_dict) 
Example #16
Source File: pairwise_debias.py    From ULTRA with Apache License 2.0
def step(self, session, input_feed, forward_only):
        """Run a step of the model feeding the given inputs.

        Args:
            session: (tf.Session) tensorflow session to use.
            input_feed: (dictionary) A dictionary containing all the input feed data.
            forward_only: whether to do the backward step (False) or only forward (True).

        Returns:
            A triple consisting of the loss, outputs (None if we do backward),
            and a tf.summary containing related information about the step.

        """

        # Output feed: depends on whether we do a backward step or not.
        if not forward_only:
            input_feed[self.is_training.name] = True
            output_feed = [
                self.updates,    # Update Op that does SGD.
                self.loss,    # Loss for this batch.
                self.update_propensity_op,
                self.train_summary  # Summarize statistics.
            ]
        else:
            input_feed[self.is_training.name] = False
            output_feed = [
                self.eval_summary,  # Summarize statistics.
                self.output   # Model outputs
            ]

        outputs = session.run(output_feed, input_feed)
        if not forward_only:
            # loss, no outputs, summary.
            return outputs[1], None, outputs[-1]
        else:
            return None, outputs[1], outputs[0]    # loss, outputs, summary. 
Example #17
Source File: dbgd.py    From ULTRA with Apache License 2.0
def step(self, session, input_feed, forward_only):
        """Run a step of the model feeding the given inputs.

        Args:
            session: (tf.Session) tensorflow session to use.
            input_feed: (dictionary) A dictionary containing all the input feed data.
            forward_only: whether to do the backward step (False) or only forward (True).

        Returns:
            A triple consisting of the loss, outputs (None if we do backward),
            and a tf.summary containing related information about the step.

        """

        if not forward_only:
            input_feed[self.is_training.name] = True
            output_feed = [
                self.updates,    # Update Op that does SGD.
                self.loss,    # Loss for this batch.
                self.train_summary  # Summarize statistics.
            ]
            outputs = session.run(output_feed, input_feed)
            # loss, no outputs, summary.
            return outputs[1], None, outputs[-1]
        else:
            input_feed[self.is_training.name] = False
            output_feed = [
                self.eval_summary,  # Summarize statistics.
                self.output   # Model outputs
            ]
            outputs = session.run(output_feed, input_feed)
            return None, outputs[1], outputs[0]    # loss, outputs, summary. 
Example #18
Source File: dla.py    From ULTRA with Apache License 2.0
def step(self, session, input_feed, forward_only):
        """Run a step of the model feeding the given inputs.

        Args:
            session: (tf.Session) tensorflow session to use.
            input_feed: (dictionary) A dictionary containing all the input feed data.
            forward_only: whether to do the backward step (False) or only forward (True).

        Returns:
            A triple consisting of the loss, outputs (None if we do backward),
            and a tf.summary containing related information about the step.

        """

        # Output feed: depends on whether we do a backward step or not.
        if not forward_only:
            input_feed[self.is_training.name] = True
            output_feed = [self.updates,    # Update Op that does SGD.
                           self.loss,    # Loss for this batch.
                           self.train_summary  # Summarize statistics.
                           ]
        else:
            input_feed[self.is_training.name] = False
            output_feed = [
                self.eval_summary,  # Summarize statistics.
                self.output   # Model outputs
            ]

        outputs = session.run(output_feed, input_feed)
        if not forward_only:
            # loss, no outputs, summary.
            return outputs[1], None, outputs[-1]
        else:
            return None, outputs[1], outputs[0]    # no loss, outputs, summary. 
Example #19
Source File: navie_algorithm.py    From ULTRA with Apache License 2.0
def step(self, session, input_feed, forward_only):
        """Run a step of the model feeding the given inputs.

        Args:
            session: (tf.Session) tensorflow session to use.
            input_feed: (dictionary) A dictionary containing all the input feed data.
            forward_only: whether to do the backward step (False) or only forward (True).

        Returns:
            A triple consisting of the loss, outputs (None if we do backward),
            and a tf.summary containing related information about the step.

        """

        # Output feed: depends on whether we do a backward step or not.
        if not forward_only:
            input_feed[self.is_training.name] = True
            output_feed = [
                self.updates,    # Update Op that does SGD.
                self.loss,    # Loss for this batch.
                self.train_summary  # Summarize statistics.
            ]
        else:
            input_feed[self.is_training.name] = False
            output_feed = [
                self.eval_summary,  # Summarize statistics.
                self.output   # Model outputs
            ]

        outputs = session.run(output_feed, input_feed)
        if not forward_only:
            # loss, no outputs, summary.
            return outputs[1], None, outputs[-1]
        else:
            return None, outputs[1], outputs[0]    # loss, outputs, summary. 
Example #20
Source File: SuperResolution.py    From VideoSuperResolution with MIT License
def __init__(self, scale, channel, weight_decay=0, **kwargs):
    """Common initialize parameters

    Args:
        scale: the scale factor; can be a list of 2 integers to specify
          different stretches in width and height
        channel: input color channel
        weight_decay: decay of L2 regularization on trainable weights
    """

    self.scale = to_list(scale, repeat=2)
    self.channel = channel
    self.weight_decay = weight_decay  # weights regularization
    self.rgba = False  # deprecated
    self._trainer = VSR  # default trainer

    self.inputs = []  # hold placeholder for model inputs
    # hold some image preprocessing for inputs (e.g. RGB->YUV, if needed)
    self.inputs_preproc = []
    self.label = []  # hold placeholder for model labels
    self.outputs = []  # hold output tensors
    self.loss = []  # this is the optimize op
    self.train_metric = {}  # metrics shown during the training phase
    self.metrics = {}  # metrics recorded in tf.summary and shown at benchmark
    self.feed_dict = {}
    self.savers = {}
    self.global_steps = None
    self.training_phase = None  # only useful for bn
    self.learning_rate = None
    self.summary_op = None
    self.summary_writer = None
    self.compiled = False
    self.pre_ckpt = None
    self.unknown_args = kwargs 
Example #21
Source File: SuperResolution.py    From VideoSuperResolution with MIT License
def compile(self):
    """build entire graph and training ops"""

    self.global_steps = tf.Variable(0, trainable=False, name='global_step')
    self.training_phase = tf.placeholder(tf.bool, name='is_training')
    self.learning_rate = tf.placeholder(tf.float32, name='learning_rate')
    self.build_graph()
    self.build_loss()
    self.build_summary()
    self.summary_op = tf.summary.merge_all()
    self.build_saver()
    self.compiled = True
    return self 
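After compile() has produced summary_op via tf.summary.merge_all(), a session loop can evaluate and record it. A rough TF 1.x sketch using the attributes set up in Example #20 (the writer path and session setup are assumptions):

# Hypothetical evaluation of the compiled summary_op from Example #21.
writer = tf.summary.FileWriter("logs/vsr")
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    summary_str, step = sess.run([model.summary_op, model.global_steps],
                                 feed_dict=model.feed_dict)
    writer.add_summary(summary_str, step)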
Example #22
Source File: SuperResolution.py    From VideoSuperResolution with MIT License
def build_summary(self):
    """summary scalars in metrics"""
    for k, v in self.metrics.items():
      tf.summary.scalar(k, v) 
Example #23
Source File: tpu_estimator.py    From estimator with Apache License 2.0
def AddOp(self, op):
    if op.type in [
        'AudioSummary', 'AudioSummaryV2', 'HistogramSummary', 'ImageSummary',
        'MergeSummary', 'ScalarSummary', 'TensorSummary', 'TensorSummaryV2'
    ]:
      raise ValueError('Please use tf.contrib.summary instead of tf.summary '
                       'inside of host_calls.') 
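The error above steers host_call users toward tf.contrib.summary. A hedged sketch of what such a host_call could look like in TF 1.x; the bucket path and tensor shapes are illustrative, and this follows the documented pattern rather than code from the estimator project:

# Sketch of a TPU host_call that writes scalars with tf.contrib.summary (TF 1.x).
def host_call_fn(global_step, loss):
    writer = tf.contrib.summary.create_file_writer("gs://bucket/logs")  # placeholder path
    with writer.as_default(), tf.contrib.summary.always_record_summaries():
        tf.contrib.summary.scalar("loss", tf.reduce_mean(loss), step=global_step[0])
        return tf.contrib.summary.all_summary_ops()

# host_call = (host_call_fn, [tf.reshape(global_step, [1]), tf.reshape(loss, [1])])
# spec = tf.contrib.tpu.TPUEstimatorSpec(mode, loss=loss, train_op=train_op, host_call=host_call)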
Example #24
Source File: summary_util.py    From guildai with Apache License 2.0
def _patch_guild_summary(self):
        from guild import summary

        python_util.listen_method(
            summary.SummaryWriter, "add_scalar", self._handle_guild_scalar
        ) 
Example #25
Source File: summary_util.py    From guildai with Apache License 2.0
def _listen_tf_summary(self):
        # pylint: disable=import-error,no-name-in-module
        from tensorflow import summary

        self.log.debug("wrapping tensorflow.summary.scalar")
        python_util.listen_function(summary, "scalar", self._handle_scalar) 
Example #26
Source File: summary_util.py    From guildai with Apache License 2.0
def _try_listen_tf_legacy(self):
        if not _tf_version().startswith("1."):
            raise util.TryFailed()
        try:
            # pylint: disable=import-error,no-name-in-module
            from tensorflow.summary import FileWriter
        except Exception as e:
            self.log.debug("error importing tensorflow.summary.FileWriter: %s", e)
            raise util.TryFailed()
        else:
            self.log.debug("wrapping tensorflow.summary.FileWriter.add_summary")
            python_util.listen_method(FileWriter, "add_summary", self._handle_summary) 
Example #27
Source File: summary_util.py    From guildai with Apache License 2.0
def _listen_tf_failed(self):
        self.log.warning(
            "unable to find TensorFlow summary writer, skipping " "summaries for %s",
            self.name,
        ) 
Example #28
Source File: summary_util.py    From guildai with Apache License 2.0
def _handle_summary(self, add_summary, _summary, global_step=None):
        """Callback to apply summary values via add_summary callback.

        This is the TF 1.x API for logging scalars.

        See SummaryPlugin docstring above for background.
        """
        vals = self._summary_values(global_step)
        if vals:
            self.log.debug("summary values via add_summary: %s", vals)
            summary = tf_scalar_summary(vals)
            add_summary(summary, global_step) 
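tf_scalar_summary() here is a Guild helper. As an assumption about what such a helper resembles, here is a sketch of building a TF 1.x scalar Summary protobuf directly (the function name is made up):

# Hypothetical equivalent of a scalar-summary helper: build a tf.Summary proto
# from a dict of tag -> value, suitable for passing to add_summary().
def scalar_summary_proto(vals):
    return tf.Summary(value=[
        tf.Summary.Value(tag=tag, simple_value=float(val))
        for tag, val in vals.items() if val is not None
    ])
# add_summary(scalar_summary_proto(vals), global_step)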
Example #29
Source File: summary_util.py    From guildai with Apache License 2.0
def _summary_values(self, global_step):
        if self._summary_cache.expired():
            self.log.debug("reading summary values")
            try:
                vals = self.read_summary_values(global_step)
            except:
                self.log.exception("reading summary values")
                vals = {}
            self._summary_cache.reset_for_step(global_step, vals)
        return self._summary_cache.for_step(global_step) 
Example #30
Source File: summary_util.py    From guildai with Apache License 2.0
def _handle_scalar(self, scalar, _name, _data, step=None, description=None):
        """Callback to apply summary values via scalars API.

        This is the TF 2.x and tensorboardX API for logging scalars.
        """
        # pylint: disable=unused-argument
        vals = self._summary_values(step)
        if vals:
            self.log.debug("summary values via scalar: %s", vals)
            for tag, val in vals.items():
                if val is None:
                    continue
                scalar(tag, val, step)