Python tensorflow.python.ops.variables.local_variables() Examples

The following are 14 code examples of tensorflow.python.ops.variables.local_variables(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module tensorflow.python.ops.variables, or try the search function.
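
Before the project examples, here is a minimal sketch of what local_variables() reports: it lists only variables placed in the LOCAL_VARIABLES collection, not ordinary global variables. The snippet assumes TF 1.x graph mode via tf.compat.v1; the variable names ('counter', 'weights') are purely illustrative.

import tensorflow.compat.v1 as tf
from tensorflow.python.ops import variables

tf.disable_eager_execution()

# 'counter' is registered only in the LOCAL_VARIABLES collection;
# 'weights' is a regular global variable.
counter = tf.get_variable(
    'counter', shape=[], initializer=tf.zeros_initializer(),
    collections=[tf.GraphKeys.LOCAL_VARIABLES])
weights = tf.get_variable('weights', shape=[3])

print([v.op.name for v in variables.local_variables()])   # ['counter']
print([v.op.name for v in variables.global_variables()])  # ['weights']
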
Example #1
Source File: tensor_forest.py    From lambda-packs with MIT License
def get_epoch_variable():
  """Returns the epoch variable, or [0] if not defined."""
  # Grab epoch variable defined in
  # //third_party/tensorflow/python/training/input.py::limit_epochs
  for v in tf_variables.local_variables():
    if 'limit_epochs/epoch' in v.op.name:
      return array_ops.reshape(v, [1])
  # TODO(thomaswc): Access epoch from the data feeder.
  return [0]


# A simple container to hold the training variables for a single tree. 
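
The lookup above relies on tf.train.limit_epochs, which creates its epoch counter as a local variable under a 'limit_epochs' name scope; scanning local_variables() for that name recovers it. A hedged sketch of that relationship (TF 1.x graph mode; the input tensor is illustrative):

import tensorflow.compat.v1 as tf
from tensorflow.python.ops import variables as tf_variables

tf.disable_eager_execution()

# limit_epochs registers its epoch counter in the LOCAL_VARIABLES collection.
limited = tf.train.limit_epochs(tf.constant([1, 2, 3]), num_epochs=5)

epoch_vars = [v for v in tf_variables.local_variables()
              if 'limit_epochs/epoch' in v.op.name]
print([v.op.name for v in epoch_vars])  # e.g. ['limit_epochs/epochs']
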
Example #2
Source File: tensor_forest.py    From auto-alt-text-lambda-api with MIT License
def get_epoch_variable():
  """Returns the epoch variable, or [0] if not defined."""
  # Grab epoch variable defined in
  # //third_party/tensorflow/python/training/input.py::limit_epochs
  for v in tf_variables.local_variables():
    if 'limit_epochs/epoch' in v.op.name:
      return array_ops.reshape(v, [1])
  # TODO(thomaswc): Access epoch from the data feeder.
  return [0]


# A simple container to hold the training variables for a single tree. 
Example #3
Source File: classification_test.py    From tf-slim with Apache License 2.0
def testVars(self):
    classification.f1_score(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        num_thresholds=3)
    expected = {'f1/true_positives:0', 'f1/false_positives:0',
                'f1/false_negatives:0'}
    self.assertEqual(
        set(expected), set(v.name for v in variables.local_variables()))
    self.assertEqual(
        set(expected),
        set(v.name for v in ops.get_collection(ops.GraphKeys.METRIC_VARIABLES))) 
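
The same pattern holds for core tf.metrics: their accumulator state is registered as local variables (and in the METRIC_VARIABLES collection), which is why they must be initialized with tf.local_variables_initializer() before use. A hedged sketch, not taken from tf-slim:

import tensorflow.compat.v1 as tf
from tensorflow.python.ops import variables

tf.disable_eager_execution()

mean_value, update_op = tf.metrics.mean(tf.constant([1.0, 2.0, 3.0]))
print([v.name for v in variables.local_variables()])
# e.g. ['mean/total:0', 'mean/count:0']

with tf.Session() as sess:
    sess.run(tf.local_variables_initializer())
    sess.run(update_op)
    print(sess.run(mean_value))  # 2.0
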
Example #4
Source File: variables_test.py    From tf-slim with Apache License 2.0
def test_local_variable(self):
    with self.cached_session() as sess:
      self.assertEqual([], variables_lib.local_variables())
      value0 = 42
      variables_lib2.local_variable(value0)
      value1 = 43
      variables_lib2.local_variable(value1)
      variables = variables_lib.local_variables()
      self.assertEqual(2, len(variables))
      self.assertRaises(errors_impl.OpError, sess.run, variables)
      variables_lib.variables_initializer(variables).run()
      self.assertAllEqual(set([value0, value1]), set(sess.run(variables))) 
Example #5
Source File: variables_test.py    From tf-slim with Apache License 2.0
def testLocalVariableNotInAllVariables(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.local_variable(0)
        self.assertNotIn(a, variables_lib.global_variables())
        self.assertIn(a, variables_lib.local_variables()) 
Example #6
Source File: variables_test.py    From tf-slim with Apache License 2.0
def testLocalVariableNotInVariablesToRestore(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.local_variable(0)
        self.assertNotIn(a, variables_lib2.get_variables_to_restore())
        self.assertIn(a, variables_lib.local_variables()) 
Example #7
Source File: variables_test.py    From tf-slim with Apache License 2.0
def testGlobalVariableNotInLocalVariables(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.global_variable(0)
        self.assertNotIn(a, variables_lib.local_variables())
        self.assertIn(a, variables_lib.global_variables()) 
Example #8
Source File: variables_test.py    From tf-slim with Apache License 2.0
def testCreateVariable(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [5])
        self.assertEqual(a.op.name, 'A/a')
        self.assertListEqual(a.get_shape().as_list(), [5])
        self.assertIn(a, ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES))
        self.assertNotIn(a, ops.get_collection(ops.GraphKeys.MODEL_VARIABLES))
        self.assertNotIn(a, variables_lib.local_variables()) 
Example #9
Source File: variables_test.py    From tf-slim with Apache License 2.0
def testNotInLocalVariables(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.model_variable('a', [5])
        self.assertIn(a, variables_lib.global_variables())
        self.assertIn(a, ops.get_collection(ops.GraphKeys.MODEL_VARIABLES))
        self.assertNotIn(a, variables_lib.local_variables()) 
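
Examples #5 through #9 all probe the same collection rules: tf-slim's local_variable() lands only in LOCAL_VARIABLES, global_variable() and variable() land in GLOBAL_VARIABLES, and model_variable() additionally joins MODEL_VARIABLES. A hedged sketch of the same checks using only core collections (the variable names are illustrative):

import tensorflow.compat.v1 as tf
from tensorflow.python.ops import variables as variables_lib

tf.disable_eager_execution()

# Passing collections= replaces the default GLOBAL_VARIABLES membership.
local_var = tf.get_variable(
    'local_var', shape=[2], collections=[tf.GraphKeys.LOCAL_VARIABLES])
global_var = tf.get_variable('global_var', shape=[2])

local_names = [v.op.name for v in variables_lib.local_variables()]
global_names = [v.op.name for v in variables_lib.global_variables()]
assert 'local_var' in local_names and 'local_var' not in global_names
assert 'global_var' in global_names and 'global_var' not in local_names
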
Example #10
Source File: tensor_forest.py    From keras-lambda with MIT License
def get_epoch_variable():
  """Returns the epoch variable, or [0] if not defined."""
  # Grab epoch variable defined in
  # //third_party/tensorflow/python/training/input.py::limit_epochs
  for v in tf_variables.local_variables():
    if 'limit_epochs/epoch' in v.op.name:
      return array_ops.reshape(v, [1])
  # TODO(thomaswc): Access epoch from the data feeder.
  return [0]


# A simple container to hold the training variables for a single tree. 
Example #11
Source File: tensorflow_dataframe.py    From lambda-packs with MIT License
def run(self,
          num_batches=None,
          graph=None,
          session=None,
          start_queues=True,
          initialize_variables=True,
          **kwargs):
    """Builds and runs the columns of the `DataFrame` and yields batches.

    This is a generator that yields a dictionary mapping column names to
    evaluated columns.

    Args:
      num_batches: the maximum number of batches to produce. If not specified,
        the generator yields batches indefinitely.
      graph: the `Graph` in which the `DataFrame` should be built.
      session: the `Session` in which to run the columns of the `DataFrame`.
      start_queues: if true, queues will be started before running and halted
        after producing `num_batches` batches.
      initialize_variables: if true, variables will be initialized.
      **kwargs: Additional keyword arguments e.g. `num_epochs`.

    Yields:
      A dictionary, mapping column names to the values resulting from running
      each column for a single batch.
    """
    if graph is None:
      graph = ops.get_default_graph()
    with graph.as_default():
      if session is None:
        session = sess.Session()
      self_built = self.build(**kwargs)
      keys = list(self_built.keys())
      cols = list(self_built.values())
      if initialize_variables:
        if variables.local_variables():
          session.run(variables.local_variables_initializer())
        if variables.global_variables():
          session.run(variables.global_variables_initializer())
      if start_queues:
        coord = coordinator.Coordinator()
        threads = qr.start_queue_runners(sess=session, coord=coord)
      i = 0
      while num_batches is None or i < num_batches:
        i += 1
        try:
          values = session.run(cols)
          yield collections.OrderedDict(zip(keys, values))
        except errors.OutOfRangeError:
          break
      if start_queues:
        coord.request_stop()
        coord.join(threads) 
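
The run() method above (repeated in Examples #12 through #14) only runs the local and global initializers when the corresponding collections are non-empty, then drives the input queues under a Coordinator. A hedged, stand-alone sketch of that pattern using core TF 1.x APIs; the limit_epochs input here is illustrative:

import tensorflow.compat.v1 as tf

tf.disable_eager_execution()

# Illustrative input: limit_epochs adds a local epoch counter that must be
# initialized before the first session.run().
batch = tf.train.limit_epochs(tf.constant([1.0, 2.0, 3.0]), num_epochs=2)

with tf.Session() as sess:
    if tf.local_variables():
        sess.run(tf.local_variables_initializer())
    if tf.global_variables():
        sess.run(tf.global_variables_initializer())

    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)
    try:
        while True:
            print(sess.run(batch))
    except tf.errors.OutOfRangeError:
        pass  # raised once num_epochs evaluations have happened
    finally:
        coord.request_stop()
        coord.join(threads)
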
Example #12
Source File: tensorflow_dataframe.py    From auto-alt-text-lambda-api with MIT License
def run(self,
          num_batches=None,
          graph=None,
          session=None,
          start_queues=True,
          initialize_variables=True,
          **kwargs):
    """Builds and runs the columns of the `DataFrame` and yields batches.

    This is a generator that yields a dictionary mapping column names to
    evaluated columns.

    Args:
      num_batches: the maximum number of batches to produce. If not specified,
        the generator yields batches indefinitely.
      graph: the `Graph` in which the `DataFrame` should be built.
      session: the `Session` in which to run the columns of the `DataFrame`.
      start_queues: if true, queues will be started before running and halted
        after producing `num_batches` batches.
      initialize_variables: if true, variables will be initialized.
      **kwargs: Additional keyword arguments e.g. `num_epochs`.

    Yields:
      A dictionary, mapping column names to the values resulting from running
      each column for a single batch.
    """
    if graph is None:
      graph = ops.get_default_graph()
    with graph.as_default():
      if session is None:
        session = sess.Session()
      self_built = self.build(**kwargs)
      keys = list(self_built.keys())
      cols = list(self_built.values())
      if initialize_variables:
        if variables.local_variables():
          session.run(variables.local_variables_initializer())
        if variables.global_variables():
          session.run(variables.global_variables_initializer())
      if start_queues:
        coord = coordinator.Coordinator()
        threads = qr.start_queue_runners(sess=session, coord=coord)
      i = 0
      while num_batches is None or i < num_batches:
        i += 1
        try:
          values = session.run(cols)
          yield collections.OrderedDict(zip(keys, values))
        except errors.OutOfRangeError:
          break
      if start_queues:
        coord.request_stop()
        coord.join(threads) 
Example #13
Source File: tensorflow_dataframe.py    From deep_image_model with Apache License 2.0
def run(self,
          num_batches=None,
          graph=None,
          session=None,
          start_queues=True,
          initialize_variables=True,
          **kwargs):
    """Builds and runs the columns of the `DataFrame` and yields batches.

    This is a generator that yields a dictionary mapping column names to
    evaluated columns.

    Args:
      num_batches: the maximum number of batches to produce. If not specified,
        the generator yields batches indefinitely.
      graph: the `Graph` in which the `DataFrame` should be built.
      session: the `Session` in which to run the columns of the `DataFrame`.
      start_queues: if true, queues will be started before running and halted
        after producing `num_batches` batches.
      initialize_variables: if true, variables will be initialized.
      **kwargs: Additional keyword arguments e.g. `num_epochs`.

    Yields:
      A dictionary, mapping column names to the values resulting from running
      each column for a single batch.
    """
    if graph is None:
      graph = ops.get_default_graph()
    with graph.as_default():
      if session is None:
        session = sess.Session()
      self_built = self.build(**kwargs)
      keys = list(self_built.keys())
      cols = list(self_built.values())
      if initialize_variables:
        if variables.local_variables():
          session.run(variables.local_variables_initializer())
        if variables.global_variables():
          session.run(variables.global_variables_initializer())
      if start_queues:
        coord = coordinator.Coordinator()
        threads = qr.start_queue_runners(sess=session, coord=coord)
      i = 0
      while num_batches is None or i < num_batches:
        i += 1
        try:
          values = session.run(cols)
          yield collections.OrderedDict(zip(keys, values))
        except errors.OutOfRangeError:
          break
      if start_queues:
        coord.request_stop()
        coord.join(threads) 
Example #14
Source File: tensorflow_dataframe.py    From keras-lambda with MIT License
def run(self,
          num_batches=None,
          graph=None,
          session=None,
          start_queues=True,
          initialize_variables=True,
          **kwargs):
    """Builds and runs the columns of the `DataFrame` and yields batches.

    This is a generator that yields a dictionary mapping column names to
    evaluated columns.

    Args:
      num_batches: the maximum number of batches to produce. If not specified,
        the generator yields batches indefinitely.
      graph: the `Graph` in which the `DataFrame` should be built.
      session: the `Session` in which to run the columns of the `DataFrame`.
      start_queues: if true, queues will be started before running and halted
        after producing `num_batches` batches.
      initialize_variables: if true, variables will be initialized.
      **kwargs: Additional keyword arguments e.g. `num_epochs`.

    Yields:
      A dictionary, mapping column names to the values resulting from running
      each column for a single batch.
    """
    if graph is None:
      graph = ops.get_default_graph()
    with graph.as_default():
      if session is None:
        session = sess.Session()
      self_built = self.build(**kwargs)
      keys = list(self_built.keys())
      cols = list(self_built.values())
      if initialize_variables:
        if variables.local_variables():
          session.run(variables.local_variables_initializer())
        if variables.global_variables():
          session.run(variables.global_variables_initializer())
      if start_queues:
        coord = coordinator.Coordinator()
        threads = qr.start_queue_runners(sess=session, coord=coord)
      i = 0
      while num_batches is None or i < num_batches:
        i += 1
        try:
          values = session.run(cols)
          yield collections.OrderedDict(zip(keys, values))
        except errors.OutOfRangeError:
          break
      if start_queues:
        coord.request_stop()
        coord.join(threads)