Python tensorflow.python.framework.ops.add_to_collection() Examples

The following are 30 code examples of tensorflow.python.framework.ops.add_to_collection(), drawn from open-source projects. Each example lists the source file it was taken from and the project and license it comes from. You may also want to check out the other functions and classes available in the tensorflow.python.framework.ops module.
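
Before diving into the project examples, here is a minimal, self-contained sketch of the round trip every example below relies on: ops.add_to_collection() stores an arbitrary value under a string key in the default graph, and ops.get_collection() returns a list of everything stored under that key. The collection names "my_tensors" and "unknown_key" are purely illustrative and do not come from any of the projects below.

from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops

with ops.Graph().as_default():
  # Store two tensors under an arbitrary string key in the default graph.
  a = constant_op.constant(1.0, name="a")
  b = constant_op.constant(2.0, name="b")
  ops.add_to_collection("my_tensors", a)
  ops.add_to_collection("my_tensors", b)

  # get_collection returns everything added under that key, in insertion
  # order; a key that was never used simply yields an empty list.
  collected = ops.get_collection("my_tensors")
  assert collected[0] is a and collected[1] is b
  assert ops.get_collection("unknown_key") == []
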
Example #1
Source File: supervisor.py    From lambda-packs with MIT License
def _init_init_op(self, init_op=USE_DEFAULT, init_feed_dict=None):
    """Initializes init_op.

    Args:
      init_op: `Operation` to initialize the variables. If set to USE_DEFAULT,
        create an op that initializes all variables and tables.
      init_feed_dict: A dictionary that maps `Tensor` objects to feed values.
        This feed dictionary will be used when `init_op` is evaluated.
    """
    if init_op is Supervisor.USE_DEFAULT:
      init_op = self._get_first_op_from_collection(ops.GraphKeys.INIT_OP)
      if init_op is None:
        init_op = variables.global_variables_initializer()
        ops.add_to_collection(ops.GraphKeys.INIT_OP, init_op)
    self._init_op = init_op
    self._init_feed_dict = init_feed_dict 
Example #2
Source File: session_bundle_test.py    From auto-alt-text-lambda-api with MIT License
def setUp(self):
    self.base_path = os.path.join(test.get_temp_dir(), "no_vars")
    if not os.path.exists(self.base_path):
      os.mkdir(self.base_path)

    # Create a simple graph with a variable, then convert variables to
    # constants and export the graph.
    with ops.Graph().as_default() as g:
      x = array_ops.placeholder(dtypes.float32, name="x")
      w = variables.Variable(3.0)
      y = math_ops.subtract(w * x, 7.0, name="y")  # pylint: disable=unused-variable
      ops.add_to_collection("meta", "this is meta")

      with self.test_session(graph=g) as session:
        variables.global_variables_initializer().run()
        new_graph_def = graph_util.convert_variables_to_constants(
            session, g.as_graph_def(), ["y"])

      filename = os.path.join(self.base_path, constants.META_GRAPH_DEF_FILENAME)
      saver.export_meta_graph(
          filename, graph_def=new_graph_def, collection_list=["meta"]) 
Example #3
Source File: rnn_cell.py    From auto-alt-text-lambda-api with MIT License
def _get_concat_variable(name, shape, dtype, num_shards):
  """Get a sharded variable concatenated into one tensor."""
  sharded_variable = _get_sharded_variable(name, shape, dtype, num_shards)
  if len(sharded_variable) == 1:
    return sharded_variable[0]

  concat_name = name + "/concat"
  concat_full_name = vs.get_variable_scope().name + "/" + concat_name + ":0"
  for value in ops.get_collection(ops.GraphKeys.CONCATENATED_VARIABLES):
    if value.name == concat_full_name:
      return value

  concat_variable = array_ops.concat(sharded_variable, 0, name=concat_name)
  ops.add_to_collection(ops.GraphKeys.CONCATENATED_VARIABLES,
                        concat_variable)
  return concat_variable 
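
Example #3 above uses a collection as a per-graph cache: it looks for an already-concatenated variable by name before creating and registering a new one. The sketch below distills that get-or-create pattern in a generic, hedged form; the helper name get_or_create and the collection key "cached_values" are hypothetical, and the cached objects are assumed to expose a .name attribute, as tensors and variables do.

from tensorflow.python.framework import ops

_CACHE_KEY = "cached_values"  # hypothetical collection name

def get_or_create(name, build_fn):
  """Returns the cached entry whose .name matches, building and caching it if absent."""
  for value in ops.get_collection(_CACHE_KEY):
    if value.name == name:
      return value
  value = build_fn()
  ops.add_to_collection(_CACHE_KEY, value)
  return value
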
Example #4
Source File: lookup_ops.py    From lambda-packs with MIT License
def initialize(self, table):
    """Initializes the given `table` with `keys` and `values` tensors.

    Args:
      table: The table to initialize.

    Returns:
      The operation that initializes the table.

    Raises:
      TypeError: when the keys and values data types do not match the table
      key and value data types.
    """
    _check_table_dtypes(table, self._keys.dtype, self._values.dtype)
    with ops.name_scope(
        self._name, values=(table.table_ref, self._keys,
                            self._values)) as scope:
      # pylint: disable=protected-access
      init_op = gen_lookup_ops._initialize_table_v2(
          table.table_ref, self._keys, self._values, name=scope)
      # pylint: enable=protected-access
    ops.add_to_collection(ops.GraphKeys.TABLE_INITIALIZERS, init_op)
    return init_op 
Example #5
Source File: logging_ops.py    From lambda-packs with MIT License
def get_summary_op():
  """Returns a single Summary op that would run all summaries.

  Either returns an existing op from the `SUMMARY_OP` collection or merges all
  existing summaries into a new one.

  Returns:
    If no summaries were collected, returns None. Otherwise returns a scalar
    `Tensor` of type `string` containing the serialized `Summary` protocol
    buffer resulting from the merging.
  """
  summary_op = ops.get_collection(ops.GraphKeys.SUMMARY_OP)
  if summary_op is not None:
    if summary_op:
      summary_op = summary_op[0]
    else:
      summary_op = None
  if summary_op is None:
    summary_op = merge_all_summaries()
    if summary_op is not None:
      ops.add_to_collection(ops.GraphKeys.SUMMARY_OP, summary_op)
  return summary_op 
Example #6
Source File: saver.py    From lambda-packs with MIT License
def _get_saver_or_default():
  """Returns the saver from SAVERS collection, or creates a default one.

  This method is used by other members of the training module, such as
  `Scaffold`, or `CheckpointSaverHook`.

  Returns:
    `Saver`.

  Raises:
    RuntimeError: If the SAVERS collection already has more than one item.
  """
  collection_key = ops.GraphKeys.SAVERS
  savers = ops.get_collection(collection_key)
  if savers:
    if len(savers) > 1:
      raise RuntimeError(
          "More than one item in collection {}. "
          "Please indicate which one to use by passing it to the constructor.".
          format(collection_key))
    return savers[0]
  saver = Saver(sharded=True, allow_empty=True)
  if saver is not None:
    ops.add_to_collection(collection_key, saver)
  return saver 
Example #7
Source File: logging_ops.py    From auto-alt-text-lambda-api with MIT License
def get_summary_op():
  """Returns a single Summary op that would run all summaries.

  Either returns an existing op from the `SUMMARY_OP` collection or merges all
  existing summaries into a new one.

  Returns:
    If no summaries were collected, returns None. Otherwise returns a scalar
    `Tensor` of type `string` containing the serialized `Summary` protocol
    buffer resulting from the merging.
  """
  summary_op = ops.get_collection(ops.GraphKeys.SUMMARY_OP)
  if summary_op is not None:
    if summary_op:
      summary_op = summary_op[0]
    else:
      summary_op = None
  if summary_op is None:
    summary_op = merge_all_summaries()
    if summary_op is not None:
      ops.add_to_collection(ops.GraphKeys.SUMMARY_OP, summary_op)
  return summary_op 
Example #8
Source File: resources.py    From lambda-packs with MIT License
def register_resource(handle, create_op, is_initialized_op, is_shared=True):
  """Registers a resource into the appropriate collections.

  This makes the resource findable in either the shared or local resources
  collection.

  Args:
   handle: op which returns a handle for the resource.
   create_op: op which initializes the resource.
   is_initialized_op: op which returns a scalar boolean tensor of whether
    the resource has been initialized.
   is_shared: if True, the resource gets added to the shared resource
    collection; otherwise it gets added to the local resource collection.

  """
  resource = _Resource(handle, create_op, is_initialized_op)
  if is_shared:
    ops.add_to_collection(ops.GraphKeys.RESOURCES, resource)
  else:
    ops.add_to_collection(ops.GraphKeys.LOCAL_RESOURCES, resource) 
Example #9
Source File: queue_runner_impl.py    From auto-alt-text-lambda-api with MIT License
def add_queue_runner(qr, collection=ops.GraphKeys.QUEUE_RUNNERS):
  """Adds a `QueueRunner` to a collection in the graph.

  When building a complex model that uses many queues it is often difficult to
  gather all the queue runners that need to be run.  This convenience function
  allows you to add a queue runner to a well known collection in the graph.

  The companion method `start_queue_runners()` can be used to start threads for
  all the collected queue runners.

  Args:
    qr: A `QueueRunner`.
    collection: A `GraphKey` specifying the graph collection to add
      the queue runner to.  Defaults to `GraphKeys.QUEUE_RUNNERS`.
  """
  ops.add_to_collection(collection, qr) 
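
The docstring above notes that start_queue_runners() later walks the same collection. As a rough usage sketch (assuming the TF 1.x FIFOQueue and QueueRunner APIs; the queue and enqueue op here are illustrative), registering a runner and reading the collection back might look like this:

from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.training import queue_runner_impl

with ops.Graph().as_default():
  queue = data_flow_ops.FIFOQueue(capacity=10, dtypes=[dtypes.float32])
  enqueue_op = queue.enqueue(constant_op.constant(1.0))
  qr = queue_runner_impl.QueueRunner(queue, [enqueue_op])

  # Defaults to GraphKeys.QUEUE_RUNNERS, where start_queue_runners() looks.
  queue_runner_impl.add_queue_runner(qr)
  runners = ops.get_collection(ops.GraphKeys.QUEUE_RUNNERS)
  assert qr in runners
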
Example #10
Source File: lookup_ops.py    From lambda-packs with MIT License
def initialize(self, table):
    """Initializes the given `table` with `keys` and `values` tensors.

    Args:
      table: The table to initialize.

    Returns:
      The operation that initializes the table.

    Raises:
      TypeError: when the keys and values data types do not match the table
      key and value data types.
    """
    table.check_table_dtypes(self._keys.dtype, self._values.dtype)
    with ops.name_scope(
        self._name,
        values=(table.table_ref, self._keys, self._values)) as scope:
      # pylint: disable=protected-access
      init_op = gen_lookup_ops._initialize_table(
          table.table_ref, self._keys, self._values, name=scope)
      # pylint: enable=protected-access
    ops.add_to_collection(ops.GraphKeys.TABLE_INITIALIZERS, init_op)
    return init_op 
Example #11
Source File: lookup_ops.py    From auto-alt-text-lambda-api with MIT License
def initialize(self, table):
    """Initializes the given `table` with `keys` and `values` tensors.

    Args:
      table: The table to initialize.

    Returns:
      The operation that initializes the table.

    Raises:
      TypeError: when the keys and values data types do not match the table
      key and value data types.
    """
    table.check_table_dtypes(self._keys.dtype, self._values.dtype)
    with ops.name_scope(self._name, values=[table]) as scope:
      # pylint: disable=protected-access
      init_op = gen_data_flow_ops._initialize_table(table.table_ref,
                                                    self._keys,
                                                    self._values,
                                                    name=scope)
      # pylint: enable=protected-access
    ops.add_to_collection(ops.GraphKeys.TABLE_INITIALIZERS, init_op)
    return init_op 
Example #12
Source File: resources.py    From auto-alt-text-lambda-api with MIT License
def register_resource(handle, create_op, is_initialized_op, is_shared=True):
  """Registers a resource into the appropriate collections.

  This makes the resource findable in either the shared or local resources
  collection.

  Args:
   handle: op which returns a handle for the resource.
   create_op: op which initializes the resource.
   is_initialized_op: op which returns a scalar boolean tensor of whether
    the resource has been initialized.
   is_shared: if True, the resource gets added to the shared resource
    collection; otherwise it gets added to the local resource collection.

  """
  resource = _Resource(handle, create_op, is_initialized_op)
  if is_shared:
    ops.add_to_collection(ops.GraphKeys.RESOURCES, resource)
  else:
    ops.add_to_collection(ops.GraphKeys.LOCAL_RESOURCES, resource) 
Example #13
Source File: variational_inference.py    From auto-alt-text-lambda-api with MIT License
def register_prior(variational, prior):
  """Associate a variational `StochasticTensor` with a `Distribution` prior.

  This is a helper function used in conjunction with `elbo` that allows users
  to specify the mapping between variational distributions and their priors
  without having to pass in `variational_with_prior` explicitly.

  Args:
    variational: `StochasticTensor` q(Z). Approximating distribution.
    prior: `Distribution` p(Z). Prior distribution.

  Returns:
    None

  Raises:
    TypeError: if variational is not a `StochasticTensor` or `prior` is not
      a `Distribution`.
  """
  if not isinstance(variational, st.StochasticTensor):
    raise TypeError("variational must be a StochasticTensor")
  if not isinstance(prior, distribution.Distribution):
    raise TypeError("prior must be a Distribution")
  ops.add_to_collection(VI_PRIORS, (variational, prior)) 
Example #14
Source File: supervisor.py    From auto-alt-text-lambda-api with MIT License
def _init_local_init_op(self, local_init_op=USE_DEFAULT):
    """Initializes local_init_op.

    Args:
      local_init_op: `Operation` run for every new supervisor instance. If set
      to USE_DEFAULT, use the first op from the GraphKeys.LOCAL_INIT_OP
      collection. If the collection is empty, create an op that initializes
      all local variables and all tables.
    """
    if local_init_op is Supervisor.USE_DEFAULT:
      local_init_op = self._get_first_op_from_collection(
          ops.GraphKeys.LOCAL_INIT_OP)
      if local_init_op is None:
        op_list = [variables.local_variables_initializer(),
                   data_flow_ops.tables_initializer()]
        if op_list:
          local_init_op = control_flow_ops.group(*op_list)
          ops.add_to_collection(ops.GraphKeys.LOCAL_INIT_OP, local_init_op)
    self._local_init_op = local_init_op 
Example #15
Source File: queue_runner_impl.py    From lambda-packs with MIT License
def add_queue_runner(qr, collection=ops.GraphKeys.QUEUE_RUNNERS):
  """Adds a `QueueRunner` to a collection in the graph.

  When building a complex model that uses many queues it is often difficult to
  gather all the queue runners that need to be run.  This convenience function
  allows you to add a queue runner to a well known collection in the graph.

  The companion method `start_queue_runners()` can be used to start threads for
  all the collected queue runners.

  Args:
    qr: A `QueueRunner`.
    collection: A `GraphKey` specifying the graph collection to add
      the queue runner to.  Defaults to `GraphKeys.QUEUE_RUNNERS`.
  """
  ops.add_to_collection(collection, qr) 
Example #16
Source File: supervisor.py    From ctw-baseline with MIT License
def _init_local_init_op(self, local_init_op=USE_DEFAULT):
    """Initializes local_init_op.

    Args:
      local_init_op: `Operation` run for every new supervisor instance. If set
      to USE_DEFAULT, use the first op from the GraphKeys.LOCAL_INIT_OP
      collection. If the collection is empty, create an op that initializes
      all local variables and all tables.
    """
    if local_init_op is Supervisor.USE_DEFAULT:
      local_init_op = self._get_first_op_from_collection(
          ops.GraphKeys.LOCAL_INIT_OP)
      if local_init_op is None:
        op_list = [
            variables.local_variables_initializer(),
            lookup_ops.tables_initializer()
        ]
        if op_list:
          local_init_op = control_flow_ops.group(*op_list)
          ops.add_to_collection(ops.GraphKeys.LOCAL_INIT_OP, local_init_op)
    self._local_init_op = local_init_op 
Example #17
Source File: supervisor.py    From ctw-baseline with MIT License
def _init_init_op(self, init_op=USE_DEFAULT, init_feed_dict=None):
    """Initializes init_op.

    Args:
      init_op: `Operation` to initialize the variables. If set to USE_DEFAULT,
        create an op that initializes all variables and tables.
      init_feed_dict: A dictionary that maps `Tensor` objects to feed values.
        This feed dictionary will be used when `init_op` is evaluated.
    """
    if init_op is Supervisor.USE_DEFAULT:
      init_op = self._get_first_op_from_collection(ops.GraphKeys.INIT_OP)
      if init_op is None:
        init_op = variables.global_variables_initializer()
        ops.add_to_collection(ops.GraphKeys.INIT_OP, init_op)
    self._init_op = init_op
    self._init_feed_dict = init_feed_dict 
Example #18
Source File: variables.py    From auto-alt-text-lambda-api with MIT License
def add_model_variable(var):
  """Adds a variable to the `GraphKeys.MODEL_VARIABLES` collection.

  Args:
    var: a variable.
  """
  if var not in ops.get_collection(ops.GraphKeys.MODEL_VARIABLES):
    ops.add_to_collection(ops.GraphKeys.MODEL_VARIABLES, var) 
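
A brief usage sketch for the add_model_variable helper shown above, assuming the TF 1.x variables API; the variable name is illustrative. It also exercises the membership check, since calling the helper twice with the same variable leaves only one entry in the collection.

from tensorflow.python.framework import ops
from tensorflow.python.ops import variables

with ops.Graph().as_default():
  var = variables.Variable(0.0, name="running_mean")
  add_model_variable(var)
  add_model_variable(var)  # second call is a no-op thanks to the membership check

  model_vars = ops.get_collection(ops.GraphKeys.MODEL_VARIABLES)
  assert len(model_vars) == 1 and model_vars[0] is var
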
Example #19
Source File: supervisor.py    From auto-alt-text-lambda-api with MIT License
def _init_ready_op(self,
                     ready_op=USE_DEFAULT,
                     ready_for_local_init_op=USE_DEFAULT):
    """Initializes ready_op.

    Args:
      ready_op: `Tensor` to check if the model is initialized.
        If it's set to USE_DEFAULT, creates an op that checks all
        the variables are initialized.
      ready_for_local_init_op: `Tensor` to check if the model is ready to run
        local_init_op.
        If it's set to USE_DEFAULT, creates an op that checks all
        the global variables are initialized.
    """
    if ready_op is Supervisor.USE_DEFAULT:
      ready_op = self._get_first_op_from_collection(ops.GraphKeys.READY_OP)
      if ready_op is None:
        ready_op = variables.report_uninitialized_variables()
        ops.add_to_collection(ops.GraphKeys.READY_OP, ready_op)
    self._ready_op = ready_op

    # ready_for_local_init_op defaults to None for backward compatibility
    if ready_for_local_init_op is Supervisor.USE_DEFAULT:
      ready_for_local_init_op = self._get_first_op_from_collection(
          ops.GraphKeys.READY_FOR_LOCAL_INIT_OP)
    self._ready_for_local_init_op = ready_for_local_init_op 
Example #20
Source File: stochastic_tensor.py    From auto-alt-text-lambda-api with MIT License
def __init__(self):
    # Add self to this graph's Stochastic Tensor collection for
    # purposes of later performing correct surrogate loss calculation.
    ops.add_to_collection(STOCHASTIC_TENSOR_COLLECTION, self) 
Example #21
Source File: builder_impl.py    From auto-alt-text-lambda-api with MIT License
def _maybe_add_legacy_init_op(self, legacy_init_op=None):
    """Add legacy init op to the SavedModel.

    Args:
      legacy_init_op: Optional legacy init op to support backward compatibility.

    Raises:
      TypeError: if legacy init op is not of type `Operation`.
    """
    if legacy_init_op is not None:
      if not isinstance(legacy_init_op, ops.Operation):
        raise TypeError("legacy_init_op needs to be an Operation: %r" %
                        legacy_init_op)
      ops.add_to_collection(constants.LEGACY_INIT_OP_KEY, legacy_init_op) 
Example #22
Source File: monitored_session.py    From auto-alt-text-lambda-api with MIT License
def get_or_default(arg_name, collection_key, default_constructor):
    """Get from cache or create a default operation."""
    elements = ops.get_collection(collection_key)
    if elements:
      if len(elements) > 1:
        raise RuntimeError('More than one item in the collection "%s". '
                           'Please indicate which one to use by passing it to '
                           'the tf.Scaffold constructor as: '
                           'tf.Scaffold(%s=item to use)' %
                           (collection_key, arg_name))
      return elements[0]
    op = default_constructor()
    if op is not None:
      ops.add_to_collection(collection_key, op)
    return op 
Example #23
Source File: util.py    From auto-alt-text-lambda-api with MIT License
def add_loss(loss, loss_collection=ops.GraphKeys.LOSSES):
  """Adds a externally defined loss to the collection of losses.

  Args:
    loss: A loss `Tensor`.
    loss_collection: Optional collection to add the loss to.
  """
  if loss_collection:
    ops.add_to_collection(loss_collection, loss) 
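
A short sketch of how the losses collection is typically consumed afterwards, using the add_loss helper shown above and the TF 1.x ops already imported elsewhere in these examples; the loss values here are purely illustrative constants.

from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops

with ops.Graph().as_default():
  data_loss = constant_op.constant(0.25, name="data_loss")
  reg_loss = constant_op.constant(0.01, name="reg_loss")
  add_loss(data_loss)  # goes to GraphKeys.LOSSES by default
  add_loss(reg_loss)

  # Downstream code usually sums whatever has accumulated in the collection.
  total_loss = math_ops.add_n(ops.get_collection(ops.GraphKeys.LOSSES))
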
Example #24
Source File: logging_ops.py    From auto-alt-text-lambda-api with MIT License
def _Collect(val, collections, default_collections):
  if collections is None:
    collections = default_collections
  for key in collections:
    ops.add_to_collection(key, val) 
Example #25
Source File: variable_scope.py    From auto-alt-text-lambda-api with MIT License
def get_variable_scope():
  """Returns the current variable scope."""
  scope = ops.get_collection(_VARSCOPE_KEY)
  if scope:  # This collection has at most 1 element, the default scope at [0].
    return scope[0]
  scope = VariableScope(False)
  ops.add_to_collection(_VARSCOPE_KEY, scope)
  return scope 
Example #26
Source File: summary_ops.py    From auto-alt-text-lambda-api with MIT License
def _Collect(val, collections, default_collections):
  if collections is None:
    collections = default_collections
  for key in collections:
    ops.add_to_collection(key, val) 
Example #27
Source File: saved_model_test.py    From auto-alt-text-lambda-api with MIT License
def testLegacyInitOp(self):
    export_dir = os.path.join(test.get_temp_dir(), "test_legacy_init_op")
    builder = saved_model_builder.SavedModelBuilder(export_dir)

    with self.test_session(graph=ops.Graph()) as sess:
      # Add `v1` and `v2` variables to the graph.
      v1 = variables.Variable(1, name="v1")
      ops.add_to_collection("v", v1)
      v2 = variables.Variable(2, name="v2")
      ops.add_to_collection("v", v2)

      # Initialize another variable `v3` to 42.
      v3 = variables.Variable(42, name="v3", trainable=False, collections=[])
      ops.add_to_collection("v", v3)

      # Set up an assignment op to be run as part of the legacy_init_op.
      assign_v3 = state_ops.assign(v3, math_ops.add(v1, v2))
      legacy_init_op = control_flow_ops.group(assign_v3, name="legacy_init_op")

      sess.run(variables.global_variables_initializer())
      builder.add_meta_graph_and_variables(
          sess, ["foo"], legacy_init_op=legacy_init_op)

    # Save the SavedModel to disk.
    builder.save()

    with self.test_session(graph=ops.Graph()) as sess:
      loader.load(sess, ["foo"], export_dir)
      self.assertEqual(1, ops.get_collection("v")[0].eval())
      self.assertEqual(2, ops.get_collection("v")[1].eval())
      # Evaluates to the sum of the first two variables and assigned as part of
      # the legacy_init_op, following a restore.
      self.assertEqual(3, ops.get_collection("v")[2].eval()) 
Example #28
Source File: saved_model_test.py    From auto-alt-text-lambda-api with MIT License
def _build_asset_collection(self, asset_file_name, asset_file_contents,
                              asset_file_tensor_name):
    asset_filepath = os.path.join(
        compat.as_bytes(test.get_temp_dir()), compat.as_bytes(asset_file_name))
    file_io.write_string_to_file(asset_filepath, asset_file_contents)
    asset_file_tensor = constant_op.constant(
        asset_filepath, name=asset_file_tensor_name)
    ops.add_to_collection(ops.GraphKeys.ASSET_FILEPATHS, asset_file_tensor)
    asset_collection = ops.get_collection(ops.GraphKeys.ASSET_FILEPATHS)
    return asset_collection 
Example #29
Source File: builder_impl.py    From auto-alt-text-lambda-api with MIT License
def _add_main_op(self, main_op):
    """Add main op to the SavedModel.

    Args:
      main_op: Main op to run as part of graph initialization.

    Raises:
      TypeError: if main op is not of type `Operation`.
    """
    if main_op is not None:
      if not isinstance(main_op, ops.Operation):
        raise TypeError("main_op needs to be an Operation: %r" % main_op)
      ops.add_to_collection(constants.MAIN_OP_KEY, main_op) 
Example #30
Source File: variables.py    From lambda-packs with MIT License
def add_model_variable(var):
  """Adds a variable to the `GraphKeys.MODEL_VARIABLES` collection.

  Args:
    var: a variable.
  """
  if var not in ops.get_collection(ops.GraphKeys.MODEL_VARIABLES):
    ops.add_to_collection(ops.GraphKeys.MODEL_VARIABLES, var)