Python tensorflow.local_variables_initializer() Examples

The following are 30 code examples of tensorflow.local_variables_initializer(), drawn from open-source projects. Each example notes the source file and project it comes from. You may also want to check out all other available functions and classes of the tensorflow module.
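tf.local_variables_initializer() returns an op that initializes every variable in the tf.GraphKeys.LOCAL_VARIABLES collection. Local variables are not saved to checkpoints and are not covered by tf.global_variables_initializer(); the tf.metrics.* functions, for instance, keep their running accumulators in local variables, which is why most of the tests below run both initializers. A minimal sketch, assuming TensorFlow 1.x graph mode (this snippet is illustrative and not taken from the projects below):

import tensorflow as tf

predictions = tf.constant([1, 0, 1, 1])
labels = tf.constant([1, 0, 0, 1])

# tf.metrics.accuracy returns (value, update_op) and creates the local
# variables "total" and "count" that hold its running statistics.
accuracy, update_op = tf.metrics.accuracy(labels=labels, predictions=predictions)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())  # model variables (none in this sketch)
    sess.run(tf.local_variables_initializer())   # the metric's accumulators
    sess.run(update_op)                          # accumulate one batch
    print(sess.run(accuracy))                    # 0.75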
Example #1
Source File: optimization_test.py    From Bert-TextClassification with MIT License
def test_adam(self):
        with self.test_session() as sess:
            w = tf.get_variable(
                "w",
                shape=[3],
                initializer=tf.constant_initializer([0.1, -0.2, -0.1]))
            x = tf.constant([0.4, 0.2, -0.5])
            loss = tf.reduce_mean(tf.square(x - w))
            tvars = tf.trainable_variables()
            grads = tf.gradients(loss, tvars)
            global_step = tf.train.get_or_create_global_step()
            optimizer = optimization.AdamWeightDecayOptimizer(learning_rate=0.2)
            train_op = optimizer.apply_gradients(zip(grads, tvars), global_step)
            init_op = tf.group(tf.global_variables_initializer(),
                               tf.local_variables_initializer())
            sess.run(init_op)
            for _ in range(100):
                sess.run(train_op)
            w_np = sess.run(w)
            self.assertAllClose(w_np.flat, [0.4, 0.2, -0.5], rtol=1e-2, atol=1e-2) 
Example #2
Source File: metrics_test.py    From fine-lm with MIT License
def testRocAuc(self):
    logits = np.array([
        [-1., 1.],
        [1., -1.],
        [1., -1.],
        [1., -1.]
    ])
    labels = np.array([
        [1],
        [0],
        [1],
        [0]
    ])
    logits = np.expand_dims(np.expand_dims(logits, 1), 1)
    labels = np.expand_dims(np.expand_dims(labels, 1), 1)

    with self.test_session() as session:
      score, _ = metrics.roc_auc(logits, labels)
      session.run(tf.global_variables_initializer())
      session.run(tf.local_variables_initializer())
      s = session.run(score)
    self.assertAlmostEqual(s, 0.750, places=3) 
Example #3
Source File: metrics_test.py    From fine-lm with MIT License
def testSigmoidCrossEntropyOneHot(self):
    logits = np.array([
        [-1., 1.],
        [1., -1.],
        [1., -1.],
        [1., -1.]
    ])
    labels = np.array([
        [0, 1],
        [1, 0],
        [0, 0],
        [0, 1]
    ])
    logits = np.expand_dims(np.expand_dims(logits, 1), 1)
    labels = np.expand_dims(np.expand_dims(labels, 1), 1)

    with self.test_session() as session:
      score, _ = metrics.sigmoid_cross_entropy_one_hot(logits, labels)
      session.run(tf.global_variables_initializer())
      session.run(tf.local_variables_initializer())
      s = session.run(score)
    self.assertAlmostEqual(s, 0.688, places=3) 
Example #4
Source File: model_test.py    From DOTA_models with Apache License 2.0
def test_create_summaries_is_runnable(self):
    ocr_model = self.create_model()
    data = data_provider.InputEndpoints(
        images=self.fake_images,
        images_orig=self.fake_images,
        labels=self.fake_labels,
        labels_one_hot=slim.one_hot_encoding(self.fake_labels,
                                             self.num_char_classes))
    endpoints = ocr_model.create_base(
        images=self.fake_images, labels_one_hot=None)
    charset = create_fake_charset(self.num_char_classes)
    summaries = ocr_model.create_summaries(
        data, endpoints, charset, is_training=False)
    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      sess.run(tf.local_variables_initializer())
      tf.tables_initializer().run()
      sess.run(summaries)  # just check it is runnable 
Example #5
Source File: metrics_test.py    From fine-lm with MIT License
def testSigmoidRecallOneHot(self):
    logits = np.array([
        [-1., 1.],
        [1., -1.],
        [1., -1.],
        [1., -1.]
    ])
    labels = np.array([
        [0, 1],
        [0, 1],
        [0, 1],
        [0, 1]
    ])
    logits = np.expand_dims(np.expand_dims(logits, 1), 1)
    labels = np.expand_dims(np.expand_dims(labels, 1), 1)

    with self.test_session() as session:
      score, _ = metrics.sigmoid_recall_one_hot(logits, labels)
      session.run(tf.global_variables_initializer())
      session.run(tf.local_variables_initializer())
      s = session.run(score)
    self.assertEqual(s, 0.25) 
Example #6
Source File: metrics_test.py    From fine-lm with MIT License
def testSigmoidPrecisionOneHot(self):
    logits = np.array([
        [-1., 1.],
        [1., -1.],
        [1., -1.],
        [1., -1.]
    ])
    labels = np.array([
        [0, 1],
        [0, 1],
        [0, 1],
        [0, 1]
    ])
    logits = np.expand_dims(np.expand_dims(logits, 1), 1)
    labels = np.expand_dims(np.expand_dims(labels, 1), 1)

    with self.test_session() as session:
      score, _ = metrics.sigmoid_precision_one_hot(logits, labels)
      session.run(tf.global_variables_initializer())
      session.run(tf.local_variables_initializer())
      s = session.run(score)
    self.assertEqual(s, 0.25) 
Example #7
Source File: metrics_test.py    From fine-lm with MIT License
def testSigmoidAccuracyOneHot(self):
    logits = np.array([
        [-1., 1.],
        [1., -1.],
        [-1., 1.],
        [1., -1.]
    ])
    labels = np.array([
        [0, 1],
        [1, 0],
        [1, 0],
        [0, 1]
    ])
    logits = np.expand_dims(np.expand_dims(logits, 1), 1)
    labels = np.expand_dims(np.expand_dims(labels, 1), 1)

    with self.test_session() as session:
      score, _ = metrics.sigmoid_accuracy_one_hot(logits, labels)
      session.run(tf.global_variables_initializer())
      session.run(tf.local_variables_initializer())
      s = session.run(score)
    self.assertEqual(s, 0.5) 
Example #8
Source File: TestUpd.py    From NTM-One-Shot-TF with MIT License
def omniglot():

    sess = tf.InteractiveSession()

    """    def wrapper(v):
        return tf.Print(v, [v], message="Printing v")

    v = tf.Variable(initial_value=np.arange(0, 36).reshape((6, 6)), dtype=tf.float32, name='Matrix')

    sess.run(tf.global_variables_initializer())
    sess.run(tf.local_variables_initializer())

    temp = tf.Variable(initial_value=np.arange(0, 36).reshape((6, 6)), dtype=tf.float32, name='temp')
    temp = wrapper(v)
    #with tf.control_dependencies([temp]):
    temp.eval()
    print 'Hello'"""

    def update_tensor(V, dim2, val):  # Update tensor V at index (:, dim2[:]) with val[:]
        val = tf.cast(val, V.dtype)
        def body(_, elems):
            # Tuple parameters in the signature are Python 2 only syntax; unpack here instead.
            v, d2, chg = elems
            d2_int = tf.cast(d2, tf.int32)
            # tf.concat_v2 was renamed to tf.concat in TensorFlow 1.0.
            return tf.slice(tf.concat([v[:d2_int], [chg], v[d2_int + 1:]], axis=0),
                            [0], [v.get_shape().as_list()[0]])
        Z = tf.scan(body, elems=(V, dim2, val),
                    initializer=tf.constant(1, shape=V.get_shape().as_list()[1:], dtype=tf.float32),
                    name="Scan_Update")
        return Z
Example #9
Source File: test_case.py    From Person-Detection-and-Tracking with MIT License
def execute_cpu(self, graph_fn, inputs):
    """Constructs the graph, executes it on CPU and returns the result.

    Args:
      graph_fn: a callable that constructs the tensorflow graph to test. The
        arguments of this function should correspond to `inputs`.
      inputs: a list of numpy arrays to feed input to the computation graph.

    Returns:
      A list of numpy arrays or a scalar returned from executing the tensorflow
      graph.
    """
    with self.test_session(graph=tf.Graph()) as sess:
      placeholders = [tf.placeholder_with_default(v, v.shape) for v in inputs]
      results = graph_fn(*placeholders)
      sess.run([tf.global_variables_initializer(), tf.tables_initializer(),
                tf.local_variables_initializer()])
      materialized_results = sess.run(results, feed_dict=dict(zip(placeholders,
                                                                  inputs)))
      if (len(materialized_results) == 1
          and (isinstance(materialized_results, list)
               or isinstance(materialized_results, tuple))):
        materialized_results = materialized_results[0]
    return materialized_results 
Example #10
Source File: optimization_test.py    From BERT with Apache License 2.0
def test_adam(self):
    with self.test_session() as sess:
      w = tf.get_variable(
          "w",
          shape=[3],
          initializer=tf.constant_initializer([0.1, -0.2, -0.1]))
      x = tf.constant([0.4, 0.2, -0.5])
      loss = tf.reduce_mean(tf.square(x - w))
      tvars = tf.trainable_variables()
      grads = tf.gradients(loss, tvars)
      global_step = tf.train.get_or_create_global_step()
      optimizer = optimization.AdamWeightDecayOptimizer(learning_rate=0.2)
      train_op = optimizer.apply_gradients(zip(grads, tvars), global_step)
      init_op = tf.group(tf.global_variables_initializer(),
                         tf.local_variables_initializer())
      sess.run(init_op)
      for _ in range(100):
        sess.run(train_op)
      w_np = sess.run(w)
      self.assertAllClose(w_np.flat, [0.4, 0.2, -0.5], rtol=1e-2, atol=1e-2) 
Example #11
Source File: metrics_test.py    From BERT with Apache License 2.0
def testMultilabelMatch3(self):
    predictions = np.random.randint(1, 5, size=(100, 1, 1, 1))
    targets = np.random.randint(1, 5, size=(100, 10, 1, 1))
    weights = np.random.randint(0, 2, size=(100, 1, 1, 1))
    targets *= weights

    predictions_repeat = np.repeat(predictions, 10, axis=1)
    expected = (predictions_repeat == targets).astype(float)
    expected = np.sum(expected, axis=(1, 2, 3))
    expected = np.minimum(expected / 3.0, 1.)
    expected = np.sum(expected * weights[:, 0, 0, 0]) / weights.shape[0]
    with self.test_session() as session:
      scores, weights_ = metrics.multilabel_accuracy_match3(
          tf.one_hot(predictions, depth=5, dtype=tf.float32),
          tf.constant(targets, dtype=tf.int32))
      a, a_op = tf.metrics.mean(scores, weights_)
      session.run(tf.local_variables_initializer())
      session.run(tf.global_variables_initializer())
      _ = session.run(a_op)
      actual = session.run(a)
    self.assertAlmostEqual(actual, expected, places=6) 
Example #12
Source File: metrics_test.py    From BERT with Apache License 2.0
def testRocAuc(self):
    logits = np.array([
        [-1., 1.],
        [1., -1.],
        [1., -1.],
        [1., -1.]
    ])
    labels = np.array([
        [1],
        [0],
        [1],
        [0]
    ])
    logits = np.expand_dims(np.expand_dims(logits, 1), 1)
    labels = np.expand_dims(np.expand_dims(labels, 1), 1)

    with self.test_session() as session:
      score, _ = metrics.roc_auc(logits, labels)
      session.run(tf.global_variables_initializer())
      session.run(tf.local_variables_initializer())
      s = session.run(score)
    self.assertAlmostEqual(s, 0.750, places=3) 
Example #13
Source File: optimization_test.py    From Extending-Google-BERT-as-Question-and-Answering-model-and-Chatbot with Apache License 2.0
def test_adam(self):
    with self.test_session() as sess:
      w = tf.get_variable(
          "w",
          shape=[3],
          initializer=tf.constant_initializer([0.1, -0.2, -0.1]))
      x = tf.constant([0.4, 0.2, -0.5])
      loss = tf.reduce_mean(tf.square(x - w))
      tvars = tf.trainable_variables()
      grads = tf.gradients(loss, tvars)
      global_step = tf.train.get_or_create_global_step()
      optimizer = optimization.AdamWeightDecayOptimizer(learning_rate=0.2)
      train_op = optimizer.apply_gradients(zip(grads, tvars), global_step)
      init_op = tf.group(tf.global_variables_initializer(),
                         tf.local_variables_initializer())
      sess.run(init_op)
      for _ in range(100):
        sess.run(train_op)
      w_np = sess.run(w)
      self.assertAllClose(w_np.flat, [0.4, 0.2, -0.5], rtol=1e-2, atol=1e-2) 
Example #14
Source File: metrics_test.py    From BERT with Apache License 2.0
def testSigmoidCrossEntropyOneHot(self):
    logits = np.array([
        [-1., 1.],
        [1., -1.],
        [1., -1.],
        [1., -1.]
    ])
    labels = np.array([
        [0, 1],
        [1, 0],
        [0, 0],
        [0, 1]
    ])
    logits = np.expand_dims(np.expand_dims(logits, 1), 1)
    labels = np.expand_dims(np.expand_dims(labels, 1), 1)

    with self.test_session() as session:
      score, _ = metrics.sigmoid_cross_entropy_one_hot(logits, labels)
      session.run(tf.global_variables_initializer())
      session.run(tf.local_variables_initializer())
      s = session.run(score)
    self.assertAlmostEqual(s, 0.688, places=3) 
Example #15
Source File: metrics_test.py    From BERT with Apache License 2.0
def testSigmoidPrecisionOneHot(self):
    logits = np.array([
        [-1., 1.],
        [1., -1.],
        [1., -1.],
        [1., -1.]
    ])
    labels = np.array([
        [0, 1],
        [0, 1],
        [0, 1],
        [0, 1]
    ])
    logits = np.expand_dims(np.expand_dims(logits, 1), 1)
    labels = np.expand_dims(np.expand_dims(labels, 1), 1)

    with self.test_session() as session:
      score, _ = metrics.sigmoid_precision_one_hot(logits, labels)
      session.run(tf.global_variables_initializer())
      session.run(tf.local_variables_initializer())
      s = session.run(score)
    self.assertEqual(s, 0.25) 
Example #16
Source File: metrics_test.py    From BERT with Apache License 2.0
def testSigmoidAccuracyOneHot(self):
    logits = np.array([
        [-1., 1.],
        [1., -1.],
        [-1., 1.],
        [1., -1.]
    ])
    labels = np.array([
        [0, 1],
        [1, 0],
        [1, 0],
        [0, 1]
    ])
    logits = np.expand_dims(np.expand_dims(logits, 1), 1)
    labels = np.expand_dims(np.expand_dims(labels, 1), 1)

    with self.test_session() as session:
      score, _ = metrics.sigmoid_accuracy_one_hot(logits, labels)
      session.run(tf.global_variables_initializer())
      session.run(tf.local_variables_initializer())
      s = session.run(score)
    self.assertEqual(s, 0.5) 
Example #17
Source File: TestUpd.py    From How-to-Learn-from-Little-Data with MIT License
def omniglot():

    sess = tf.InteractiveSession()

    """    def wrapper(v):
        return tf.Print(v, [v], message="Printing v")

    v = tf.Variable(initial_value=np.arange(0, 36).reshape((6, 6)), dtype=tf.float32, name='Matrix')

    sess.run(tf.global_variables_initializer())
    sess.run(tf.local_variables_initializer())

    temp = tf.Variable(initial_value=np.arange(0, 36).reshape((6, 6)), dtype=tf.float32, name='temp')
    temp = wrapper(v)
    #with tf.control_dependencies([temp]):
    temp.eval()
    print 'Hello'"""

    def update_tensor(V, dim2, val):  # Update tensor V at index (:, dim2[:]) with val[:]
        val = tf.cast(val, V.dtype)
        def body(_, elems):
            # Tuple parameters in the signature are Python 2 only syntax; unpack here instead.
            v, d2, chg = elems
            d2_int = tf.cast(d2, tf.int32)
            # tf.concat_v2 was renamed to tf.concat in TensorFlow 1.0.
            return tf.slice(tf.concat([v[:d2_int], [chg], v[d2_int + 1:]], axis=0),
                            [0], [v.get_shape().as_list()[0]])
        Z = tf.scan(body, elems=(V, dim2, val),
                    initializer=tf.constant(1, shape=V.get_shape().as_list()[1:], dtype=tf.float32),
                    name="Scan_Update")
        return Z
Example #18
Source File: test_case.py    From ros_people_object_detection_tensorflow with Apache License 2.0
def execute_cpu(self, graph_fn, inputs):
    """Constructs the graph, executes it on CPU and returns the result.

    Args:
      graph_fn: a callable that constructs the tensorflow graph to test. The
        arguments of this function should correspond to `inputs`.
      inputs: a list of numpy arrays to feed input to the computation graph.

    Returns:
      A list of numpy arrays or a scalar returned from executing the tensorflow
      graph.
    """
    with self.test_session(graph=tf.Graph()) as sess:
      placeholders = [tf.placeholder_with_default(v, v.shape) for v in inputs]
      results = graph_fn(*placeholders)
      sess.run([tf.global_variables_initializer(), tf.tables_initializer(),
                tf.local_variables_initializer()])
      materialized_results = sess.run(results, feed_dict=dict(zip(placeholders,
                                                                  inputs)))
      if (len(materialized_results) == 1
          and (isinstance(materialized_results, list)
               or isinstance(materialized_results, tuple))):
        materialized_results = materialized_results[0]
    return materialized_results 
Example #19
Source File: optimization_test.py    From BERT-Classification-Tutorial with Apache License 2.0
def test_adam(self):
        with self.test_session() as sess:
            w = tf.get_variable(
                "w",
                shape=[3],
                initializer=tf.constant_initializer([0.1, -0.2, -0.1]))
            x = tf.constant([0.4, 0.2, -0.5])
            loss = tf.reduce_mean(tf.square(x - w))
            tvars = tf.trainable_variables()
            grads = tf.gradients(loss, tvars)
            global_step = tf.train.get_or_create_global_step()
            optimizer = optimization.AdamWeightDecayOptimizer(learning_rate=0.2)
            train_op = optimizer.apply_gradients(zip(grads, tvars), global_step)
            init_op = tf.group(tf.global_variables_initializer(),
                               tf.local_variables_initializer())
            sess.run(init_op)
            for _ in range(100):
                sess.run(train_op)
            w_np = sess.run(w)
            self.assertAllClose(w_np.flat, [0.4, 0.2, -0.5], rtol=1e-2, atol=1e-2) 
Example #20
Source File: variables.py    From auto-alt-text-lambda-api with MIT License
def initialize_local_variables():
  """See `tf.local_variables_initializer`."""
  return local_variables_initializer() 
Example #21
Source File: test_case.py    From ros_people_object_detection_tensorflow with Apache License 2.0
def execute_tpu(self, graph_fn, inputs):
    """Constructs the graph, executes it on TPU and returns the result.

    Args:
      graph_fn: a callable that constructs the tensorflow graph to test. The
        arguments of this function should correspond to `inputs`.
      inputs: a list of numpy arrays to feed input to the computation graph.

    Returns:
      A list of numpy arrays or a scalar returned from executing the tensorflow
      graph.
    """
    with self.test_session(graph=tf.Graph()) as sess:
      placeholders = [tf.placeholder_with_default(v, v.shape) for v in inputs]
      tpu_computation = tpu.rewrite(graph_fn, placeholders)
      sess.run(tpu.initialize_system())
      sess.run([tf.global_variables_initializer(), tf.tables_initializer(),
                tf.local_variables_initializer()])
      materialized_results = sess.run(tpu_computation,
                                      feed_dict=dict(zip(placeholders, inputs)))
      sess.run(tpu.shutdown_system())
      if (len(materialized_results) == 1
          and (isinstance(materialized_results, list)
               or isinstance(materialized_results, tuple))):
        materialized_results = materialized_results[0]
    return materialized_results 
Example #22
Source File: infer_detections.py    From ros_people_object_detection_tensorflow with Apache License 2.0
def main(_):
  tf.logging.set_verbosity(tf.logging.INFO)

  required_flags = ['input_tfrecord_paths', 'output_tfrecord_path',
                    'inference_graph']
  for flag_name in required_flags:
    if not getattr(FLAGS, flag_name):
      raise ValueError('Flag --{} is required'.format(flag_name))

  with tf.Session() as sess:
    input_tfrecord_paths = [
        v for v in FLAGS.input_tfrecord_paths.split(',') if v]
    tf.logging.info('Reading input from %d files', len(input_tfrecord_paths))
    serialized_example_tensor, image_tensor = detection_inference.build_input(
        input_tfrecord_paths)
    tf.logging.info('Reading graph and building model...')
    (detected_boxes_tensor, detected_scores_tensor,
     detected_labels_tensor) = detection_inference.build_inference_graph(
         image_tensor, FLAGS.inference_graph)

    tf.logging.info('Running inference and writing output to {}'.format(
        FLAGS.output_tfrecord_path))
    sess.run(tf.local_variables_initializer())
    tf.train.start_queue_runners()
    with tf.python_io.TFRecordWriter(
        FLAGS.output_tfrecord_path) as tf_record_writer:
      try:
        for counter in itertools.count():
          tf.logging.log_every_n(tf.logging.INFO, 'Processed %d images...', 10,
                                 counter)
          tf_example = detection_inference.infer_detections_and_add_to_example(
              serialized_example_tensor, detected_boxes_tensor,
              detected_scores_tensor, detected_labels_tensor,
              FLAGS.discard_image_pixels)
          tf_record_writer.write(tf_example.SerializeToString())
      except tf.errors.OutOfRangeError:
        tf.logging.info('Finished processing records') 
Example #23
Source File: train.py    From MobileNet with Apache License 2.0
def __init_model(self):
        print("Initializing the model...")
        self.init = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
        self.sess.run(self.init)
        print("Model initialized successfully\n\n") 
Example #24
Source File: metrics_test.py    From BERT with Apache License 2.0
def testSigmoidAccuracy(self):
    logits = np.array([
        [-1., 1.],
        [1., -1.],
        [-1., 1.],
        [1., -1.]
    ])
    labels = np.array([1, 0, 0, 1])

    with self.test_session() as session:
      score, _ = metrics.sigmoid_accuracy(logits, labels)
      session.run(tf.global_variables_initializer())
      session.run(tf.local_variables_initializer())
      s = session.run(score)
    self.assertEqual(s, 0.5) 
Example #25
Source File: task.py    From cloudml-samples with Apache License 2.0
def main_op():
    init_local = variables.local_variables_initializer()
    init_tables = lookup_ops.tables_initializer()
    return control_flow_ops.group(init_local, init_tables) 
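A hypothetical follow-up sketch (not part of the original cloudml-samples code, assuming the TensorFlow 1.x SavedModelBuilder API): a main_op like the one above is typically passed to the SavedModel export so that local variables and lookup tables are initialized when the exported model is loaded for serving.

import tensorflow as tf

# '/tmp/exported_model' is a placeholder export path for illustration.
builder = tf.saved_model.builder.SavedModelBuilder('/tmp/exported_model')
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    builder.add_meta_graph_and_variables(
        sess,
        tags=[tf.saved_model.tag_constants.SERVING],
        main_op=main_op())  # the group of local-variable and table initializers above
    builder.save()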
Example #26
Source File: task.py    From cloudml-samples with Apache License 2.0
def _run_eval(self):
        """Run model evaluation and generate summaries."""
        coord = tf.train.Coordinator(clean_stop_exception_types=(
            tf.errors.CancelledError, tf.errors.OutOfRangeError))

        with tf.Session(graph=self._graph) as session:
            # Restores previously saved variables from latest checkpoint
            self._saver.restore(session, self._latest_checkpoint)

            session.run([
                tf.tables_initializer(),
                tf.local_variables_initializer()])
            tf.train.start_queue_runners(coord=coord, sess=session)
            train_step = session.run(self._gs)

            tf.logging.info(
                'Starting Evaluation For Step: {}'.format(train_step))
            with coord.stop_on_exception():
                eval_step = 0
                while not coord.should_stop() and (self._eval_steps is None or
                                                   eval_step <
                                                   self._eval_steps):
                    summaries, final_values, _ = session.run(
                        [self._summary_op, self._final_ops_dict,
                         self._eval_ops])
                    if eval_step % 100 == 0:
                        tf.logging.info(
                            'On Evaluation Step: {}'.format(eval_step))
                    eval_step += 1

            # Write the summaries
            self._file_writer.add_summary(summaries, global_step=train_step)
            self._file_writer.flush()
            tf.logging.info(final_values) 
Example #27
Source File: utility.py    From soccer-matlab with BSD 2-Clause "Simplified" License
def initialize_variables(sess, saver, logdir, checkpoint=None, resume=None):
  """Initialize or restore variables from a checkpoint if available.

  Args:
    sess: Session to initialize variables in.
    saver: Saver to restore variables.
    logdir: Directory to search for checkpoints.
    checkpoint: Specify what checkpoint name to use; defaults to most recent.
    resume: Whether to expect recovering a checkpoint or starting a new run.

  Raises:
    ValueError: If resume expected but no log directory specified.
    RuntimeError: If no resume expected but a checkpoint was found.
  """
  sess.run(tf.group(
      tf.local_variables_initializer(),
      tf.global_variables_initializer()))
  if resume and not (logdir or checkpoint):
    raise ValueError('Need to specify logdir to resume a checkpoint.')
  if logdir:
    state = tf.train.get_checkpoint_state(logdir)
    if checkpoint:
      checkpoint = os.path.join(logdir, checkpoint)
    if not checkpoint and state and state.model_checkpoint_path:
      checkpoint = state.model_checkpoint_path
    if checkpoint and resume is False:
      message = 'Found unexpected checkpoint when starting a new run.'
      raise RuntimeError(message)
    if checkpoint:
      saver.restore(sess, checkpoint) 
Example #28
Source File: model_test.py    From FC-DenseNet-TensorFlow with MIT License
def test_iou_all_wrong(self):
        logits = tf.constant([[[15.0], [1.0]], [[-1], [-10]]])
        labels = tf.constant([[1], [1]])
        iou, update_op = self.tiramisu.calculate_iou(labels, logits)

        with self.test_session() as sess:
            sess.run(tf.local_variables_initializer())
            self.assertEqual(iou.eval(), 0.0)
            update_op.eval()
            self.assertEqual(iou.eval(), 0.0) 
Example #29
Source File: model_test.py    From FC-DenseNet-TensorFlow with MIT License
def test_iou_all_correct(self):
        logits = tf.constant([[[15.0], [1.0]], [[-1], [-10]]])
        labels = tf.constant([[0], [0]])
        iou, update_op = self.tiramisu.calculate_iou(labels, logits)

        with self.test_session() as sess:
            sess.run(tf.local_variables_initializer())
            self.assertEqual(iou.eval(), 0.0)
            update_op.eval()
            self.assertEqual(iou.eval(), 1.0) 
Example #30
Source File: freeze_graph.py    From TNT with GNU General Public License v3.0
def main(args):
    with tf.Graph().as_default():
        with tf.Session() as sess:
            # Load the model metagraph and checkpoint
            print('Model directory: %s' % args.model_dir)
            meta_file, ckpt_file = facenet.get_model_filenames(os.path.expanduser(args.model_dir))
            
            print('Metagraph file: %s' % meta_file)
            print('Checkpoint file: %s' % ckpt_file)

            model_dir_exp = os.path.expanduser(args.model_dir)
            saver = tf.train.import_meta_graph(os.path.join(model_dir_exp, meta_file), clear_devices=True)
            tf.get_default_session().run(tf.global_variables_initializer())
            tf.get_default_session().run(tf.local_variables_initializer())
            saver.restore(tf.get_default_session(), os.path.join(model_dir_exp, ckpt_file))
            
            # Retrieve the protobuf graph definition and fix the batch norm nodes
            input_graph_def = sess.graph.as_graph_def()
            
            # Freeze the graph def
            output_graph_def = freeze_graph_def(sess, input_graph_def, 'embeddings,label_batch')

        # Serialize and dump the output graph to the filesystem
        with tf.gfile.GFile(args.output_file, 'wb') as f:
            f.write(output_graph_def.SerializeToString())
        print("%d ops in the final graph: %s" % (len(output_graph_def.node), args.output_file))