Python tensorflow.python.ops.variables._all_saveable_objects() Examples

The following are 5 code examples of tensorflow.python.ops.variables._all_saveable_objects(), a private TensorFlow helper that returns the variables and other saveable objects registered in the current default graph. You can go to the original project or source file by following the link above each example, or check out all available functions/classes of the module tensorflow.python.ops.variables.
Example #1
Source File: model.py    From ebonite with Apache License 2.0
def _is_graph_frozen() -> bool:
    """
    Checks whether the current default graph is frozen,
    i.e. contains no saveable objects.

    :return: `True` if the graph is frozen, `False` otherwise
    """
    from tensorflow.python.ops import variables
    # _all_saveable_objects() lists every variable/saveable in the default graph
    return not bool(variables._all_saveable_objects())
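For illustration, a minimal sketch of what this check observes (not taken from ebonite; it assumes TensorFlow 1.x graph-mode semantics via tensorflow.compat.v1): a graph that contains only constants, such as an imported frozen GraphDef, has no saveable objects, while a graph with variables does.

# Hedged sketch: the frozen check is equivalent to asking whether the default
# graph contains any saveable objects. Assumes TF 1.x graph mode.
import tensorflow.compat.v1 as tf
from tensorflow.python.ops import variables

with tf.Graph().as_default():
    tf.constant([1.0, 2.0], name="frozen_weights")       # constants only
    print(not bool(variables._all_saveable_objects()))   # True: graph is "frozen"

with tf.Graph().as_default():
    tf.get_variable("w", shape=[2])                       # a real variable
    print(not bool(variables._all_saveable_objects()))   # False: something to save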
Example #2
Source File: tf_model.py    From DeepPavlov with Apache License 2.0
from tensorflow.python.ops import variables


def _get_saveable_variables(exclude_scopes=tuple()):
    # noinspection PyProtectedMember
    all_vars = variables._all_saveable_objects()
    # Keep only variables whose names do not mention any excluded scope.
    vars_to_train = [var for var in all_vars
                     if all(sc not in var.name for sc in exclude_scopes)]
    return vars_to_train
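A hedged usage sketch of the helper above; the scope name "Optimizer", the variable names, and the checkpoint path are illustrative assumptions, not taken from DeepPavlov.

# Hedged sketch: collect all weights except variables created under an (assumed)
# "Optimizer" scope, e.g. to restore a model without its optimizer state.
# Uses _get_saveable_variables() as defined above; assumes TF 1.x graph mode.
import tensorflow.compat.v1 as tf

with tf.Graph().as_default():
    tf.get_variable("kernel", shape=[4, 2])
    with tf.variable_scope("Optimizer"):
        tf.get_variable("kernel_momentum", shape=[4, 2])

    vars_to_restore = _get_saveable_variables(exclude_scopes=("Optimizer",))
    print([v.name for v in vars_to_restore])              # ['kernel:0']
    restoring_saver = tf.train.Saver(var_list=vars_to_restore)
    # restoring_saver.restore(sess, "/path/to/checkpoint")  # hypothetical path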
Example #3
Source File: builder_impl.py    From lambda-packs with MIT License
def add_meta_graph(self,
                     tags,
                     signature_def_map=None,
                     assets_collection=None,
                     legacy_init_op=None,
                     clear_devices=False,
                     main_op=None):
    """Adds the current meta graph to the SavedModel.

    Creates a Saver in the current scope and uses the Saver to export the meta
    graph def. Invoking this API requires the `add_meta_graph_and_variables()`
    API to have been invoked before.

    Args:
      tags: The set of tags to annotate the meta graph def with.
      signature_def_map: The map of signature defs to be added to the meta graph
          def.
      assets_collection: Assets collection to be saved with SavedModel. Note
          that this collection should be a subset of the assets saved as part of
          the first meta graph in the SavedModel.
      legacy_init_op: Legacy support for op or group of ops to execute after the
          restore op upon a load.
      clear_devices: Set to true if the device info on the default graph should
          be cleared.
      main_op: Op or group of ops to execute when the graph is loaded.

    Raises:
      AssertionError: If the variables for the SavedModel have not been saved
          yet.
    """
    if not self._has_saved_variables:
      raise AssertionError(
          "Graph state including variables and assets has not been saved yet. "
          "Please invoke `add_meta_graph_and_variables()` first.")

    # Validate the signature def map to ensure all included TensorInfos are
    # properly populated.
    self._validate_signature_def_map(signature_def_map)

    # Save asset files and write them to disk, if any.
    self._save_and_write_assets(assets_collection)

    if main_op is None:
      # Add legacy init op to the SavedModel.
      self._maybe_add_legacy_init_op(legacy_init_op)
    else:
      self._add_main_op(main_op)

    # Initialize a saver to generate a sharded output for all saveables in the
    # current scope.
    saver = tf_saver.Saver(
        variables._all_saveable_objects(),  # pylint: disable=protected-access
        sharded=True,
        write_version=saver_pb2.SaverDef.V2,
        allow_empty=True)

    meta_graph_def = saver.export_meta_graph(clear_devices=clear_devices)

    # Tag the meta graph def and add it to the SavedModel.
    self._tag_and_add_meta_graph(meta_graph_def, tags, signature_def_map) 
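For context, a hedged sketch of the calling sequence this method expects; the export directory, the variable, and the tag sets are illustrative and assume the TF 1.x SavedModel builder API.

# Hedged sketch: add_meta_graph() may only be called after
# add_meta_graph_and_variables() has written the variables once; the second
# meta graph reuses the already-saved variables.
import tempfile
import tensorflow.compat.v1 as tf

export_dir = tempfile.mkdtemp() + "/saved_model"   # must not already exist
builder = tf.saved_model.builder.SavedModelBuilder(export_dir)

with tf.Graph().as_default():
    tf.get_variable("w", shape=[1])
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        builder.add_meta_graph_and_variables(sess, ["serve"])   # writes the variables
    builder.add_meta_graph(["serve", "gpu"])                    # reuses them
builder.save()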
Example #4
Source File: saver.py    From lambda-packs with MIT License
def build(self):
    """Builds saver_def."""
    if self._is_built:
      return
    self._is_built = True
    if not self.saver_def:
      if self._builder is None:
        self._builder = BaseSaverBuilder(self._write_version)
      if self._var_list is None:
        # pylint: disable=protected-access
        self._var_list = variables._all_saveable_objects()
      if not self._var_list:
        if self._allow_empty:
          self._is_empty = True
          return
        else:
          raise ValueError("No variables to save")
      self._is_empty = False
      self.saver_def = self._builder.build(
          self._var_list,
          reshape=self._reshape,
          sharded=self._sharded,
          max_to_keep=self._max_to_keep,
          keep_checkpoint_every_n_hours=self._keep_checkpoint_every_n_hours,
          name=self._name,
          restore_sequentially=self._restore_sequentially)
    elif self.saver_def and self._name:
      # Since self._name is used as a name_scope by builder(), we are
      # overloading the use of this field to represent the "import_scope" as
      # well.
      self.saver_def.filename_tensor_name = ops.prepend_name_scope(
          self.saver_def.filename_tensor_name, self._name)
      self.saver_def.save_tensor_name = ops.prepend_name_scope(
          self.saver_def.save_tensor_name, self._name)
      self.saver_def.restore_op_name = ops.prepend_name_scope(
          self.saver_def.restore_op_name, self._name)

    self._check_saver_def()
    # Updates next checkpoint time.
    self._next_checkpoint_time = (
        time.time() + self.saver_def.keep_checkpoint_every_n_hours * 3600)
    self._last_checkpoints = [] 
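A hedged sketch of the two branches above (the allow_empty flag and the var_list=None fallback), assuming TF 1.x graph mode; the variable name is illustrative.

# Hedged sketch: with var_list=None the Saver falls back to
# variables._all_saveable_objects(); allow_empty decides what an empty graph does.
import tensorflow.compat.v1 as tf

with tf.Graph().as_default():
    empty_saver = tf.train.Saver(allow_empty=True)   # no saveables: saver stays empty
    # tf.train.Saver() here would raise ValueError("No variables to save")

with tf.Graph().as_default():
    tf.get_variable("w", shape=[3])
    saver = tf.train.Saver()   # picks up "w" through _all_saveable_objects()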
Example #5
Source File: saver.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _build(self, checkpoint_path, build_save, build_restore):
    """Builds saver_def."""
    if context.in_graph_mode():
      if self._is_built:
        return
      self._is_built = True

    if not self.saver_def or context.in_eager_mode():
      if self._builder is None:
        self._builder = BaseSaverBuilder(self._write_version)
      if self._var_list is None:
        # pylint: disable=protected-access
        self._var_list = variables._all_saveable_objects()
      if not self._var_list:
        if self._allow_empty:
          self._is_empty = True
          return
        else:
          raise ValueError("No variables to save")
      self._is_empty = False

      self.saver_def = self._builder._build_internal(  # pylint: disable=protected-access
          self._var_list,
          reshape=self._reshape,
          sharded=self._sharded,
          max_to_keep=self._max_to_keep,
          keep_checkpoint_every_n_hours=self._keep_checkpoint_every_n_hours,
          name=self._name,
          restore_sequentially=self._restore_sequentially,
          filename=checkpoint_path,
          build_save=build_save, build_restore=build_restore)
    elif self.saver_def and self._name:
      # Since self._name is used as a name_scope by builder(), we are
      # overloading the use of this field to represent the "import_scope" as
      # well.
      self.saver_def.filename_tensor_name = ops.prepend_name_scope(
          self.saver_def.filename_tensor_name, self._name)
      self.saver_def.save_tensor_name = ops.prepend_name_scope(
          self.saver_def.save_tensor_name, self._name)
      self.saver_def.restore_op_name = ops.prepend_name_scope(
          self.saver_def.restore_op_name, self._name)

    self._check_saver_def()
    # Updates next checkpoint time.
    self._next_checkpoint_time = (
        time.time() + self.saver_def.keep_checkpoint_every_n_hours * 3600)
    self._last_checkpoints = []
    self._checkpoints_to_be_deleted = [] 
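A hedged round-trip sketch that exercises the graph-mode branch of this builder; the checkpoint location and the variable are illustrative.

# Hedged sketch: constructing and using a Saver drives the lazy _build() path
# above; with var_list=None it falls back to _all_saveable_objects().
import os
import tempfile
import tensorflow.compat.v1 as tf

ckpt_path = os.path.join(tempfile.mkdtemp(), "model.ckpt")

with tf.Graph().as_default():
    w = tf.get_variable("w", initializer=[1.0, 2.0])
    saver = tf.train.Saver()
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        saver.save(sess, ckpt_path)

with tf.Graph().as_default():
    w = tf.get_variable("w", shape=[2])
    saver = tf.train.Saver()
    with tf.Session() as sess:
        saver.restore(sess, ckpt_path)
        print(sess.run(w))   # [1. 2.]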