Python tensorflow.python.summary.summary.FileWriter() Examples

The following are 5 code examples of tensorflow.python.summary.summary.FileWriter(), drawn from open-source projects. The originating project and source file are noted above each example. You may also want to check out the other available functions and classes of the module tensorflow.python.summary.summary.
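For orientation: summary.FileWriter is the TF 1.x event-file writer re-exported by tensorflow.python.summary.summary (the class behind tf.summary.FileWriter). It serializes graphs and summary protos into a log directory that TensorBoard can read. A minimal sketch, valid for TF 1.x graph mode only and using a made-up log directory, looks roughly like this:

from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.summary import summary

# Build a trivial graph so there is something for TensorBoard to display.
graph = ops.Graph()
with graph.as_default():
    array_ops.ones([2, 2], name='demo_tensor')

# FileWriter requires graph mode; it raises if eager execution is enabled.
writer = summary.FileWriter('/tmp/filewriter_demo')  # hypothetical log dir
writer.add_graph(graph)
writer.flush()
writer.close()
# Inspect with: tensorboard --logdir=/tmp/filewriter_demo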
Example #1
Source File: summaries_test.py    From tf-slim with Apache License 2.0
def verify_scalar_summary_is_written(self, print_summary):
    value = 3
    tensor = array_ops.ones([]) * value
    name = 'my_score'
    prefix = 'eval'
    summaries.add_scalar_summary(tensor, name, prefix, print_summary)

    output_dir = tempfile.mkdtemp('scalar_summary_no_print_test')
    summary_op = summary.merge_all()

    summary_writer = summary.FileWriter(output_dir)
    with self.cached_session() as sess:
      new_summary = sess.run(summary_op)
      summary_writer.add_summary(new_summary, 1)
      summary_writer.flush()

    self.assert_scalar_summary(output_dir, {
        '%s/%s' % (prefix, name): value
    }) 
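This snippet is a method from a tf-slim test class, so its imports and helpers are not shown; cached_session and assert_scalar_summary come from the enclosing test case class. A best guess at the imports it relies on, based on the names used:

import tempfile

from tensorflow.python.ops import array_ops
from tensorflow.python.summary import summary
from tf_slim import summaries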
Example #2
Source File: all_models_to_tensorboard.py    From realtime_object_detection with MIT License
def create_tfevent_from_pb(model, optimized=False):
    print("> creating tfevent of model: {}".format(model))

    if optimized:
        model_path = ROOT_DIR + '/models/{}/optimized_inference_graph.pb'.format(model)
        log_dir = ROOT_DIR + '/models/{}/log_opt/'.format(model)
    else:
        model_path = ROOT_DIR + '/models/{}/frozen_inference_graph.pb'.format(model)
        log_dir = ROOT_DIR + '/models/{}/log/'.format(model)

    with session.Session(graph=ops.Graph()) as sess:
        # Load the frozen GraphDef and import it into the session's graph.
        with gfile.FastGFile(model_path, "rb") as f:
            graph_def = graph_pb2.GraphDef()
            graph_def.ParseFromString(f.read())
            importer.import_graph_def(graph_def)
        # Write the imported graph as a tfevent file for TensorBoard.
        pb_visual_writer = summary.FileWriter(log_dir)
        pb_visual_writer.add_graph(sess.graph)
    print("> Model {} imported.\nVisualize by running: "
          "tensorboard --logdir={}".format(model_path, log_dir))

# Gather all Model Names in models/ 
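ROOT_DIR is defined elsewhere in all_models_to_tensorboard.py, and the remaining names map to standard TensorFlow internals. A hedged sketch of the likely imports, plus a hypothetical call (the model name is made up and must match a directory under models/):

from tensorflow.core.framework import graph_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops
from tensorflow.python.platform import gfile
from tensorflow.python.summary import summary

create_tfevent_from_pb('ssd_mobilenet_v1_coco')  # hypothetical model name
# Then: tensorboard --logdir=<ROOT_DIR>/models/ssd_mobilenet_v1_coco/log/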
Example #3
Source File: convert_oss.py    From code-snippets with Apache License 2.0
def import_to_tensorboard(saved_model, output_dir):
  """View an imported saved_model.pb as a graph in Tensorboard.

  Args:
    saved_model: The location of the saved_model.pb to visualize.
    output_dir: The location for the Tensorboard log to begin visualization from.

  Usage:
    Call this function with your model location and desired log directory.
    Launch Tensorboard by pointing it to the log directory.
    View your imported `.pb` model as a graph.
  """
  with open(saved_model, "rb") as f:
    sm = saved_model_pb2.SavedModel()
    sm.ParseFromString(f.read())
    if len(sm.meta_graphs) != 1:
      print('Expected exactly one MetaGraph in the SavedModel, found {}. '
            'Not sure which to write.'.format(len(sm.meta_graphs)))
      sys.exit(1)
    graph_def = sm.meta_graphs[0].graph_def

    pb_visual_writer = summary.FileWriter(output_dir)
    pb_visual_writer.add_graph(None, graph_def=graph_def)
    print("Model Imported. Visualize by running: "
          "tensorboard --logdir={}".format(output_dir)) 
Example #4
Source File: tfcompat.py    From keras-onnx with MIT License
def dump_graph_into_tensorboard(tf_graph):
    # type: (_tf.Graph) -> None
    _tb_log_dir = os.environ.get('TB_LOG_DIR')
    if _tb_log_dir:
        if is_tf2:
            from tensorflow.python.ops.summary_ops_v2 import graph as write_graph
            pb_visual_writer = _tf.summary.create_file_writer(_tb_log_dir)
            with pb_visual_writer.as_default():
                write_graph(tf_graph)
        else:
            from tensorflow.python.summary import summary
            pb_visual_writer = summary.FileWriter(_tb_log_dir)
            pb_visual_writer.add_graph(tf_graph) 
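This helper only writes a graph when the TB_LOG_DIR environment variable is set; _tf (the TensorFlow import alias) and is_tf2 are defined elsewhere in tfcompat.py. A hypothetical way to exercise it, with a made-up log directory:

import os
import tensorflow as tf

os.environ['TB_LOG_DIR'] = '/tmp/keras_onnx_graph'  # hypothetical log directory

graph = tf.Graph()
with graph.as_default():
    tf.constant(1.0, name='probe')  # trivial op so the graph is not empty

dump_graph_into_tensorboard(graph)
# Then: tensorboard --logdir=/tmp/keras_onnx_graph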
Example #5
Source File: callbacks.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def set_model(self, model):
    self.model = model
    self.sess = K.get_session()
    if self.histogram_freq and self.merged is None:
      for layer in self.model.layers:
        for weight in layer.weights:
          mapped_weight_name = weight.name.replace(':', '_')
          tf_summary.histogram(mapped_weight_name, weight)
          if self.write_grads:
            grads = model.optimizer.get_gradients(model.total_loss, weight)

            def is_indexed_slices(grad):
              return type(grad).__name__ == 'IndexedSlices'

            grads = [grad.values if is_indexed_slices(grad) else grad
                     for grad in grads]
            tf_summary.histogram('{}_grad'.format(mapped_weight_name), grads)
          if self.write_images:
            w_img = array_ops.squeeze(weight)
            shape = K.int_shape(w_img)
            if len(shape) == 2:  # dense layer kernel case
              if shape[0] > shape[1]:
                w_img = array_ops.transpose(w_img)
                shape = K.int_shape(w_img)
              w_img = array_ops.reshape(w_img, [1, shape[0], shape[1], 1])
            elif len(shape) == 3:  # convnet case
              if K.image_data_format() == 'channels_last':
                # switch to channels_first to display
                # every kernel as a separate image
                w_img = array_ops.transpose(w_img, perm=[2, 0, 1])
                shape = K.int_shape(w_img)
              w_img = array_ops.reshape(w_img,
                                        [shape[0], shape[1], shape[2], 1])
            elif len(shape) == 1:  # bias case
              w_img = array_ops.reshape(w_img, [1, shape[0], 1, 1])
            else:
              # not possible to handle 3D convnets etc.
              continue

            shape = K.int_shape(w_img)
            assert len(shape) == 4 and shape[-1] in [1, 3, 4]
            tf_summary.image(mapped_weight_name, w_img)

        if hasattr(layer, 'output'):
          tf_summary.histogram('{}_out'.format(layer.name), layer.output)
    self.merged = tf_summary.merge_all()

    if self.write_graph:
      self.writer = tf_summary.FileWriter(self.log_dir, self.sess.graph)
    else:
      self.writer = tf_summary.FileWriter(self.log_dir)
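set_model() is invoked by Keras when the callback is attached to a model, not called directly. The excerpt belongs to a TensorBoard-style callback class whose name is not shown here; as a rough usage sketch, the stock tf.keras TensorBoard callback (which exposes similar log_dir, histogram_freq, write_graph, and write_images options) is wired up like this:

import numpy as np
from tensorflow import keras

x_train = np.random.rand(64, 8).astype('float32')
y_train = np.random.rand(64, 1).astype('float32')

model = keras.Sequential([
    keras.layers.Dense(16, activation='relu', input_shape=(8,)),
    keras.layers.Dense(1),
])
model.compile(optimizer='adam', loss='mse')

# Keras calls set_model() on the callback when fit() starts.
tb_cb = keras.callbacks.TensorBoard(log_dir='/tmp/tb_logs',  # hypothetical path
                                    histogram_freq=1,
                                    write_graph=True,
                                    write_images=True)
model.fit(x_train, y_train,
          validation_data=(x_train, y_train),
          epochs=2,
          callbacks=[tb_cb])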