Python tensorflow.python.ops.io_ops.restore_v2() Examples

The following are 19 code examples of tensorflow.python.ops.io_ops.restore_v2(), drawn from open-source projects. The source file, project, and license for each example are listed above it. You may also want to check out all available functions and classes of the module tensorflow.python.ops.io_ops.
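
In each example, io_ops.restore_v2 is called with a checkpoint filename prefix, a list of tensor names, a matching list of shape-and-slice strings (an empty string restores the whole tensor), and a matching list of dtypes; it returns one restored tensor per name. For reference, here is a minimal sketch of a direct call, structured like Example #1 below. The checkpoint prefix "/tmp/model.ckpt", the tensor name "w", and the float32 dtype are placeholder assumptions for illustration, not taken from any of the projects listed here.

import tensorflow.compat.v1 as tf
from tensorflow.python.ops import io_ops

g = tf.Graph()
with g.as_default():
  # restore_v2 returns one tensor per requested name; "" restores the full tensor.
  restored = io_ops.restore_v2(
      prefix="/tmp/model.ckpt",   # placeholder checkpoint prefix (assumption)
      tensor_names=["w"],         # placeholder tensor name (assumption)
      shape_and_slices=[""],
      dtypes=[tf.float32])[0]

with tf.Session(graph=g) as sess:
  value = sess.run(restored)      # numpy array holding the checkpointed value
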
Example #1
Source File: saver.py    From lingvo with Apache License 2.0
def ReadNpArrays(file_prefix, nmap):
  """Reads from a tf checkpoint to fill in values of a NesteMap.

  Args:
    file_prefix: A TF checkpoint filename prefix.
    nmap: A NestedMap of numpy dtypes.

  Returns:
    A NestedMap with numpy arrays compatible w/ nmap.
  """
  g = tf.Graph()
  with g.as_default():
    reads = []
    for name, dtype in nmap.FlattenItems():
      reads.append(
          io_ops.restore_v2(
              prefix=file_prefix,
              tensor_names=[name],
              shape_and_slices=[""],
              dtypes=[dtype])[0])

  with tf.Session(graph=g) as sess:
    vals = sess.run(reads)

  return nmap.Pack(vals) 
Example #2
Source File: checkpoint_utils.py    From lambda-packs with MIT License
def _set_checkpoint_initializer(variable,
                                ckpt_file,
                                tensor_name,
                                slice_spec,
                                name="checkpoint_initializer"):
  """Overrides given variable's initialization op.

  Sets the variable initializer to an assign op that initializes the variable
  from the tensor's value in the checkpoint.

  Args:
    variable: `tf.Variable` object.
    ckpt_file: string, full path of the checkpoint.
    tensor_name: Name of the tensor to load from the checkpoint.
    slice_spec: Slice specification for loading partitioned tensors.
    name: Name of the operation.
  """
  base_type = variable.dtype.base_dtype
  restore_op = io_ops.restore_v2(
      ckpt_file, [tensor_name], [slice_spec], [base_type], name=name)[0]
  variable._initializer_op = state_ops.assign(variable, restore_op)  # pylint:disable=protected-access 
Example #3
Source File: checkpoint_utils.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _set_checkpoint_initializer(variable,
                                ckpt_file,
                                tensor_name,
                                slice_spec,
                                name="checkpoint_initializer"):
  """Overrides given variable's initialization op.

  Sets the variable initializer to an assign op that initializes the variable
  from the tensor's value in the checkpoint.

  Args:
    variable: `tf.Variable` object.
    ckpt_file: string, full path of the checkpoint.
    tensor_name: Name of the tensor to load from the checkpoint.
    slice_spec: Slice specification for loading partitioned tensors.
    name: Name of the operation.
  """
  base_type = variable.dtype.base_dtype
  with ops.colocate_with(variable):
    restore_op = io_ops.restore_v2(
        ckpt_file, [tensor_name], [slice_spec], [base_type], name=name)[0]
    variable._initializer_op = state_ops.assign(variable, restore_op)  # pylint:disable=protected-access 
Example #4
Source File: saver.py    From TF_Face_Toolbox with Apache License 2.0
def restore_op(self, filename_tensor, saveable, preferred_shard):
    tensors = []
    for spec in saveable.specs:
      # Ignore the moving_mean and moving_variance in other towers.
      if spec.name.startswith('replicated_'):
        if not spec.name.startswith('replicated_0') and 'BatchNorm/moving_' in spec.name:
          continue
        tensors.append(
              io_ops.restore_v2(
                filename_tensor,
                ['/'.join(spec.name.split('/')[1:])],
                [spec.slice_spec],
                [spec.tensor.dtype])[0])
      else:
        tensors.append(
              io_ops.restore_v2(
                  filename_tensor,
                  [spec.name],
                  [spec.slice_spec],
                  [spec.tensor.dtype])[0])

    return tensors 
Example #5
Source File: saver.py    From deep_image_model with Apache License 2.0
def restore_op(self, filename_tensor, saveable, preferred_shard):
    """Create ops to restore 'saveable'.

    This is intended to be overridden by subclasses that want to generate
    different Ops.

    Args:
      filename_tensor: String Tensor.
      saveable: A BaseSaverBuilder.SaveableObject object.
      preferred_shard: Int.  Shard to open first when loading a sharded file.

    Returns:
      A list of Tensors resulting from reading 'saveable' from
        'filename'.
    """
    # pylint: disable=protected-access
    tensors = []
    for spec in saveable.specs:
      tensors.append(
          io_ops.restore_v2(
              filename_tensor,
              [spec.name],
              [spec.slice_spec],
              [spec.tensor.dtype])[0])

    return tensors
  # pylint: enable=unused-argument 
Example #6
Source File: checkpoint_utils.py    From keras-lambda with MIT License
def _set_checkpoint_initializer(variable, file_pattern, tensor_name, slice_spec,
                                name="checkpoint_initializer"):
  """Sets variable initializer to assign op form value in checkpoint's tensor.

  Args:
    variable: `Variable` object.
    file_pattern: string, where to load checkpoints from.
    tensor_name: Name of the `Tensor` to load from checkpoint reader.
    slice_spec: Slice specification for loading partitioned variables.
    name: Name of the operation.
  """
  base_type = variable.dtype.base_dtype
  restore_op = io_ops.restore_v2(
      file_pattern, [tensor_name], [slice_spec], [base_type], name=name)[0]
  variable._initializer_op = state_ops.assign(variable, restore_op) 
Example #7
Source File: saver.py    From keras-lambda with MIT License
def restore_op(self, filename_tensor, saveable, preferred_shard):
    """Create ops to restore 'saveable'.

    This is intended to be overridden by subclasses that want to generate
    different Ops.

    Args:
      filename_tensor: String Tensor.
      saveable: A BaseSaverBuilder.SaveableObject object.
      preferred_shard: Int.  Shard to open first when loading a sharded file.

    Returns:
      A list of Tensors resulting from reading 'saveable' from
        'filename'.
    """
    # pylint: disable=protected-access
    tensors = []
    for spec in saveable.specs:
      tensors.append(
          io_ops.restore_v2(
              filename_tensor,
              [spec.name],
              [spec.slice_spec],
              [spec.tensor.dtype])[0])

    return tensors
  # pylint: enable=unused-argument 
Example #8
Source File: saver.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def restore_op(self, filename_tensor, saveable, preferred_shard):
    """Create ops to restore 'saveable'.

    This is intended to be overridden by subclasses that want to generate
    different Ops.

    Args:
      filename_tensor: String Tensor.
      saveable: A BaseSaverBuilder.SaveableObject object.
      preferred_shard: Int.  Shard to open first when loading a sharded file.

    Returns:
      A list of Tensors resulting from reading 'saveable' from
        'filename'.
    """
    # pylint: disable=protected-access
    tensors = []
    for spec in saveable.specs:
      tensors.append(
          io_ops.restore_v2(
              filename_tensor,
              [spec.name],
              [spec.slice_spec],
              [spec.tensor.dtype])[0])

    return tensors
  # pylint: enable=unused-argument 
Example #9
Source File: checkpoint_utils.py    From human-rl with MIT License
def _set_checkpoint_initializer(variable, file_pattern, tensor_name, slice_spec,
                                name="checkpoint_initializer"):
  """Sets variable initializer to assign op form value in checkpoint's tensor.

  Args:
    variable: `Variable` object.
    file_pattern: string, where to load checkpoints from.
    tensor_name: Name of the `Tensor` to load from checkpoint reader.
    slice_spec: Slice specification for loading partitioned variables.
    name: Name of the operation.
  """
  base_type = variable.dtype.base_dtype
  restore_op = io_ops.restore_v2(
      file_pattern, [tensor_name], [slice_spec], [base_type], name=name)[0]
  variable._initializer_op = state_ops.assign(variable, restore_op) 
Example #10
Source File: checkpoint_utils.py    From human-rl with MIT License
def _set_checkpoint_initializer(variable, file_pattern, tensor_name, slice_spec,
                                name="checkpoint_initializer"):
  """Sets variable initializer to assign op form value in checkpoint's tensor.

  Args:
    variable: `Variable` object.
    file_pattern: string, where to load checkpoints from.
    tensor_name: Name of the `Tensor` to load from checkpoint reader.
    slice_spec: Slice specification for loading partitioned variables.
    name: Name of the operation.
  """
  base_type = variable.dtype.base_dtype
  restore_op = io_ops.restore_v2(
      file_pattern, [tensor_name], [slice_spec], [base_type], name=name)[0]
  variable._initializer_op = state_ops.assign(variable, restore_op) 
Example #11
Source File: saver.py    From TF_Face_Toolbox with Apache License 2.0
def restore_op(self, filename_tensor, saveable, preferred_shard):
    tensors = []
    for spec in saveable.specs:
      print(spec.name)
      tensors.append(
            io_ops.restore_v2(
                filename_tensor,
                [spec.name],
                [spec.slice_spec],
                [spec.tensor.dtype])[0])

    return tensors 
Example #12
Source File: checkpoint_utils.py    From deep_image_model with Apache License 2.0
def _set_checkpoint_initializer(variable, file_pattern, tensor_name, slice_spec,
                                name="checkpoint_initializer"):
  """Sets variable initializer to assign op form value in checkpoint's tensor.

  Args:
    variable: `Variable` object.
    file_pattern: string, where to load checkpoints from.
    tensor_name: Name of the `Tensor` to load from checkpoint reader.
    slice_spec: Slice specification for loading partitioned variables.
    name: Name of the operation.
  """
  base_type = variable.dtype.base_dtype
  restore_op = io_ops.restore_v2(
      file_pattern, [tensor_name], [slice_spec], [base_type], name=name)[0]
  variable._initializer_op = state_ops.assign(variable, restore_op) 
Example #13
Source File: base_input_generator.py    From lingvo with Apache License 2.0
def _InputBatch(self):
    p = self.params

    @tf.function
    def ReadData():
      x, y = io_ops.restore_v2(p.ckpt, [p.data, p.label], [''] * 2,
                               [p.data_dtype, p.label_dtype])
      # Always convert to float32.
      return tf.cast(x, tf.float32), tf.cast(y, tf.float32)

    # Loads data and label into memory and keeps them around.
    data, label = ops.cached_call(
        f=ReadData.get_concrete_function(), T=[tf.float32, tf.float32])
    b, shape = self.InfeedBatchSize(), list(p.data_shape)
    data = tf.reshape(data, [-1] + shape)
    label = tf.reshape(label, [-1])
    label = py_utils.HasShape(label, [tf.shape(data)[0]])
    sample_ids = ops.random_permutation_sequence(
        num=p.num_samples,
        batch=b,
        repeat=p.repeat,
        seed=p.random_seed if p.random_seed else 0)
    n = tf.shape(sample_ids)[0]
    raw = py_utils.PadOrTrimTo(tf.gather(data, sample_ids), [b] + shape)
    ret = py_utils.NestedMap(
        raw=raw,
        data=self._Preprocess(raw),
        label=py_utils.PadOrTrimTo(tf.gather(label, sample_ids), [b]),
        weight=py_utils.PadOrTrimTo(tf.ones([n], dtype=tf.float32), [b]))
    if not py_utils.use_tpu():
      ret['sample_ids'] = sample_ids
    return ret 
Example #14
Source File: checkpoint_utils.py    From tpu_models with Apache License 2.0
def _set_checkpoint_initializer(variable,
                                ckpt_file,
                                tensor_name,
                                slice_spec,
                                name="checkpoint_initializer"):
  """Overrides given variable's initialization op.

  Sets the variable initializer to an assign op that initializes the variable
  from the tensor's value in the checkpoint.

  Args:
    variable: `tf.Variable` object.
    ckpt_file: string, full path of the checkpoint.
    tensor_name: Name of the tensor to load from the checkpoint.
    slice_spec: Slice specification for loading partitioned tensors.
    name: Name of the operation.
  """
  base_type = variable.dtype.base_dtype
  # Do not colocate with variable since RestoreV2 op only runs on CPU and
  # colocation will force variable (and other ops that colocate with variable)
  # to be on CPU as well. It is okay to place the variable's initializer op on
  # CPU since it will only be run once at the start.
  with ops.device(variable.device), ops.device("/cpu:0"):
    restore_op = io_ops.restore_v2(
        ckpt_file, [tensor_name], [slice_spec], [base_type], name=name)[0]

    names_to_saveables = saveable_object_util.op_list_to_dict([variable])
    saveable_objects = []
    for name, op in names_to_saveables.items():
      for s in saveable_object_util.saveable_objects_for_op(op, name):
        saveable_objects.append(s)

    assert len(saveable_objects) == 1  # Should be only one variable.
  init_op = saveable_objects[0].restore([restore_op], restored_shapes=None)

  # pylint:disable=protected-access
  variable._initializer_op = init_op
  restore_op.set_shape(variable.shape)
  variable._initial_value = restore_op
  # pylint:enable=protected-access 
Example #15
Source File: checkpoint_utils.py    From auto-alt-text-lambda-api with MIT License
def _set_checkpoint_initializer(variable, file_pattern, tensor_name, slice_spec,
                                name="checkpoint_initializer"):
  """Sets variable initializer to assign op form value in checkpoint's tensor.

  Args:
    variable: `Variable` object.
    file_pattern: string, where to load checkpoints from.
    tensor_name: Name of the `Tensor` to load from checkpoint reader.
    slice_spec: Slice specification for loading partitioned variables.
    name: Name of the operation.
  """
  base_type = variable.dtype.base_dtype
  restore_op = io_ops.restore_v2(
      file_pattern, [tensor_name], [slice_spec], [base_type], name=name)[0]
  variable._initializer_op = state_ops.assign(variable, restore_op) 
Example #16
Source File: saver.py    From auto-alt-text-lambda-api with MIT License
def restore_op(self, filename_tensor, saveable, preferred_shard):
    """Create ops to restore 'saveable'.

    This is intended to be overridden by subclasses that want to generate
    different Ops.

    Args:
      filename_tensor: String Tensor.
      saveable: A BaseSaverBuilder.SaveableObject object.
      preferred_shard: Int.  Shard to open first when loading a sharded file.

    Returns:
      A list of Tensors resulting from reading 'saveable' from
        'filename'.
    """
    # pylint: disable=protected-access
    tensors = []
    for spec in saveable.specs:
      tensors.append(
          io_ops.restore_v2(
              filename_tensor,
              [spec.name],
              [spec.slice_spec],
              [spec.tensor.dtype])[0])

    return tensors
  # pylint: enable=unused-argument 
Example #17
Source File: checkpoint_utils.py    From lambda-packs with MIT License
def _set_checkpoint_initializer(variable, file_pattern, tensor_name, slice_spec,
                                name="checkpoint_initializer"):
  """Sets variable initializer to assign op form value in checkpoint's tensor.

  Args:
    variable: `Variable` object.
    file_pattern: string, where to load checkpoints from.
    tensor_name: Name of the `Tensor` to load from checkpoint reader.
    slice_spec: Slice specification for loading partitioned variables.
    name: Name of the operation.
  """
  base_type = variable.dtype.base_dtype
  restore_op = io_ops.restore_v2(
      file_pattern, [tensor_name], [slice_spec], [base_type], name=name)[0]
  variable._initializer_op = state_ops.assign(variable, restore_op) 
Example #18
Source File: saver.py    From lambda-packs with MIT License
def restore_op(self, filename_tensor, saveable, preferred_shard):
    """Create ops to restore 'saveable'.

    This is intended to be overridden by subclasses that want to generate
    different Ops.

    Args:
      filename_tensor: String Tensor.
      saveable: A BaseSaverBuilder.SaveableObject object.
      preferred_shard: Int.  Shard to open first when loading a sharded file.

    Returns:
      A list of Tensors resulting from reading 'saveable' from
        'filename'.
    """
    # pylint: disable=protected-access
    tensors = []
    for spec in saveable.specs:
      tensors.append(
          io_ops.restore_v2(
              filename_tensor,
              [spec.name],
              [spec.slice_spec],
              [spec.tensor.dtype])[0])

    return tensors
  # pylint: enable=unused-argument 
Example #19
Source File: saver.py    From lingvo with Apache License 2.0
def _BuildRestore(self):
    """Builds restore ops."""
    assign_ops = []
    for var in self._vars:
      val, = io_ops.restore_v2(
          prefix=self._restore_prefix_ph,
          tensor_names=[_VarKey(var)],
          shape_and_slices=[""],
          dtypes=[var.dtype])
      assign_ops.append(var.assign(val))
    self._restore_op = tf.group(*assign_ops)