Python tensorflow.python.framework.dtypes.string() Examples

The following are 30 code examples of `tensorflow.python.framework.dtypes.string`, drawn from open-source projects. You can go to the original project or source file by following the link above each example. You may also want to check out all available functions/classes of the module `tensorflow.python.framework.dtypes`, or try the search function.
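As a quick orientation: despite the `()` in the title, `dtypes.string` is a `DType` constant, not a function. A minimal sketch (expected output shown in comments):

```python
from tensorflow.python.framework import dtypes

print(dtypes.string)                              # <dtype: 'string'>
print(dtypes.string.is_integer)                   # False
print(dtypes.string.base_dtype == dtypes.string)  # True
```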
Example #1
Source File: feature_column.py    From lambda-packs with MIT License
def _parse_example_spec(self):
    """Returns a `tf.Example` parsing spec as dict.

    It is used for get_parsing_spec for `tf.parse_example`. Returned spec is a
    dict from keys ('string') to `VarLenFeature`, `FixedLenFeature`, and other
    supported objects. Please check documentation of ${tf.parse_example} for all
    supported spec objects.

    Let's say a Feature column depends on raw feature ('raw') and another
    `_FeatureColumn` (input_fc). One possible implementation of
    _parse_example_spec is as follows:

    ```python
    spec = {'raw': tf.FixedLenFeature(...)}
    spec.update(input_fc._parse_example_spec)
    return spec
    ```
    """
    pass 
Example #2
Source File: export_output.py    From lambda-packs with MIT License
def __init__(self, outputs):
    """Constructor for PredictOutput.

    Args:
      outputs: A dict of string to `Tensor` representing the predictions.

    Raises:
      ValueError: If `outputs` is not a dict, any of its keys is not a
          string, or any of its values is not a `Tensor`.
    """
    if not isinstance(outputs, dict):
      raise ValueError(
          'Prediction outputs must be given as a dict of string to Tensor; '
          'got {}'.format(outputs))
    for key, value in outputs.items():
      if not isinstance(key, six.string_types):
        raise ValueError(
            'Prediction output key must be a string; got {}.'.format(key))
      if not isinstance(value, ops.Tensor):
        raise ValueError(
            'Prediction output value must be a Tensor; got {}.'.format(value))
    self._outputs = outputs 
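A brief usage sketch of the constructor above. The import path is an assumption based on where this class lives in TF 1.x; a graph-mode session is assumed:

```python
import tensorflow as tf
from tensorflow.python.estimator.export import export_output  # assumed path

scores = tf.constant([[0.1, 0.9]])
out = export_output.PredictOutput({'scores': scores})  # string key -> Tensor: OK

try:
    export_output.PredictOutput({'scores': [0.1, 0.9]})  # plain list, not a Tensor
except ValueError as err:
    print(err)  # Prediction output value must be a Tensor; got ...
```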
Example #3
Source File: export.py    From lambda-packs with MIT License
def get_timestamped_export_dir(export_dir_base):
  """Builds a path to a new subdirectory within the base directory.

  Each export is written into a new subdirectory named using the
  current time.  This guarantees monotonically increasing version
  numbers even across multiple runs of the pipeline.
  The timestamp used is the number of seconds since the epoch (UTC).

  Args:
    export_dir_base: A string containing a directory to write the exported
        graph and checkpoints.
  Returns:
    The full path of the new subdirectory (which is not actually created yet).
  """
  export_timestamp = int(time.time())

  export_dir = os.path.join(
      compat.as_bytes(export_dir_base),
      compat.as_bytes(str(export_timestamp)))
  return export_dir 
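For example, assuming the function above is in scope (the exact subdirectory name depends on the clock):

```python
export_dir = get_timestamped_export_dir('/tmp/my_model')
print(export_dir)  # e.g. b'/tmp/my_model/1500000000' -- bytes, via compat.as_bytes
# The subdirectory is not created here; the caller creates it when exporting.
```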
Example #4
Source File: saved_model_export_utils.py    From lambda-packs with MIT License
def make_parsing_export_strategy(feature_columns, exports_to_keep=5):
  """Create an ExportStrategy for use with Experiment, using `FeatureColumn`s.

  Creates a SavedModel export that expects to be fed with a single string
  Tensor containing serialized tf.Examples.  At serving time, incoming
  tf.Examples will be parsed according to the provided `FeatureColumn`s.

  Args:
    feature_columns: An iterable of `FeatureColumn`s representing the features
      that must be provided at serving time (excluding labels!).
    exports_to_keep: Number of exports to keep.  Older exports will be
      garbage-collected.  Defaults to 5.  Set to None to disable garbage
      collection.

  Returns:
    An ExportStrategy that can be passed to the Experiment constructor.
  """
  feature_spec = feature_column.create_feature_spec_for_parsing(feature_columns)
  serving_input_fn = input_fn_utils.build_parsing_serving_input_fn(feature_spec)
  return make_export_strategy(serving_input_fn, exports_to_keep=exports_to_keep) 
Example #5
Source File: builder_impl.py    From lambda-packs with MIT License
def _asset_path_from_tensor(path_tensor):
  """Returns the filepath value stored in constant `path_tensor`.

  Args:
    path_tensor: Tensor of a file-path.

  Returns:
    The string value, i.e. the path of the tensor, if valid.

  Raises:
    TypeError: If the tensor does not match the expected op type, dtype,
      or value.
  """
  if not isinstance(path_tensor, ops.Tensor):
    raise TypeError("Asset path tensor must be a Tensor.")
  if path_tensor.op.type != "Const":
    raise TypeError("Asset path tensor must be of type constant.")
  if path_tensor.dtype != dtypes.string:
    raise TypeError("Asset path tensor must be of dtype string.")
  str_values = path_tensor.op.get_attr("value").string_val
  if len(str_values) != 1:
    raise TypeError("Asset path tensor must be a scalar.")
  return str_values[0] 
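A usage sketch, assuming graph mode (so the constant is backed by a `Const` op) and that the helper above is in scope:

```python
import tensorflow as tf

path = tf.constant('vocab.txt')       # scalar string Const op
print(_asset_path_from_tensor(path))  # b'vocab.txt'

# Each guard above can be triggered:
# _asset_path_from_tensor(tf.constant(3))           -> TypeError (wrong dtype)
# _asset_path_from_tensor(tf.constant(['a', 'b']))  -> TypeError (not a scalar)
```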
Example #6
Source File: sparse_ops.py    From lambda-packs with MIT License
def serialize_sparse(sp_input, name=None):
  """Serialize a `SparseTensor` into a string 3-vector (1-D `Tensor`) object.

  Args:
    sp_input: The input `SparseTensor`.
    name: A name prefix for the returned tensors (optional).

  Returns:
    A string 3-vector (1-D `Tensor`), with its elements representing the
    serialized `SparseTensor`'s indices, values, and shape, respectively.

  Raises:
    TypeError: If `sp_input` is not a `SparseTensor`.
  """
  sp_input = _convert_to_sparse_tensor(sp_input)

  return gen_sparse_ops._serialize_sparse(
      sp_input.indices, sp_input.values, sp_input.dense_shape, name=name) 
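A round-trip sketch using the public TF 1.x wrappers (`tf.serialize_sparse` / `tf.deserialize_many_sparse`):

```python
import tensorflow as tf

sp = tf.SparseTensor(indices=[[0, 0], [1, 2]],
                     values=['a', 'b'],
                     dense_shape=[2, 3])
serialized = tf.serialize_sparse(sp)  # 1-D string Tensor of shape [3]

# deserialize_many_sparse expects a batch of serialized entries, so add a
# leading batch dimension before restoring.
restored = tf.deserialize_many_sparse(tf.stack([serialized]), dtype=tf.string)
```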
Example #7
Source File: sparse_ops.py    From lambda-packs with MIT License
def _add_sparse_to_tensors_map(sp_input, container=None,
                               shared_name=None, name=None):
  """Add a `SparseTensor` to a `SparseTensorsMap` and return its handle.

  Args:
    sp_input: The input `SparseTensor`.
    container: The container for the underlying `SparseTensorsMap` (optional).
    shared_name: The shared name for the underlying `SparseTensorsMap`
      (optional, defaults to the name of the newly created op).
    name: A name prefix for the returned tensors (optional).

  Returns:
    A string 1-vector (1-D `Tensor`), with its single element representing a
    unique handle to a `SparseTensor` stored in the `SparseTensorsMap`
    underlying this op.

  Raises:
    TypeError: If `sp_input` is not a `SparseTensor`.
  """
  sp_input = _convert_to_sparse_tensor(sp_input)

  return gen_sparse_ops._add_sparse_to_tensors_map(
      sp_input.indices, sp_input.values, sp_input.dense_shape,
      container=container, shared_name=shared_name, name=name) 
Example #8
Source File: variables.py    From lambda-packs with MIT License
def to_proto(self, export_scope=None):
    """Converts a `Variable` to a `VariableDef` protocol buffer.

    Args:
      export_scope: Optional `string`. Name scope to remove.

    Returns:
      A `VariableDef` protocol buffer, or `None` if the `Variable` is not
      in the specified name scope.
    """
    if (export_scope is None or
        self._variable.name.startswith(export_scope)):
      var_def = variable_pb2.VariableDef()
      var_def.variable_name = ops.strip_name_scope(
          self._variable.name, export_scope)
      var_def.initializer_name = ops.strip_name_scope(
          self.initializer.name, export_scope)
      var_def.snapshot_name = ops.strip_name_scope(
          self._snapshot.name, export_scope)
      if self._save_slice_info:
        var_def.save_slice_info_def.MergeFrom(self._save_slice_info.to_proto(
            export_scope=export_scope))
      return var_def
    else:
      return None 
Example #9
Source File: variables.py    From lambda-packs with MIT License
def to_proto(self, export_scope=None):
      """Returns a SaveSliceInfoDef() proto.

      Args:
        export_scope: Optional `string`. Name scope to remove.

      Returns:
        A `SaveSliceInfoDef` protocol buffer, or None if the `Variable` is not
        in the specified name scope.
      """
      if (export_scope is None or
          self.full_name.startswith(export_scope)):
        save_slice_info_def = variable_pb2.SaveSliceInfoDef()
        save_slice_info_def.full_name = ops.strip_name_scope(
            self.full_name, export_scope)
        for i in self.full_shape:
          save_slice_info_def.full_shape.append(i)
        for i in self.var_offset:
          save_slice_info_def.var_offset.append(i)
        for i in self.var_shape:
          save_slice_info_def.var_shape.append(i)
        return save_slice_info_def
      else:
        return None 
Example #10
Source File: control_flow_ops.py    From lambda-packs with MIT License
def _to_proto(self, export_scope=None):
    """Converts the values to a `ValuesDef` protocol buffer.

    Args:
      export_scope: Optional `string`. Name scope to remove.

    Returns:
      A `ValuesDef` protocol buffer.
    """
    values_def = control_flow_pb2.ValuesDef()
    values_def.values.extend(
        [ops.strip_name_scope(v, export_scope)
         for v in sorted(self._values)])
    for k, v in self._external_values.items():
      values_def.external_values[k] = ops.strip_name_scope(
          v.name, export_scope)
    return values_def 
Example #11
Source File: control_flow_ops.py    From lambda-packs with MIT License
def _init_from_proto(self, context_def, import_scope=None):
    """Creates a new `CondContext` from protocol buffer.

    Args:
      context_def: `CondContextDef` protocol buffer.
      import_scope: Optional `string`. Name scope to add.
    """
    assert isinstance(context_def, control_flow_pb2.CondContextDef)
    # Create from context_def.
    g = ops.get_default_graph()
    self._name = ops.prepend_name_scope(
        context_def.context_name, import_scope)
    self._pred = g.as_graph_element(ops.prepend_name_scope(
        context_def.pred_name, import_scope))
    self._pivot = g.as_graph_element(ops.prepend_name_scope(
        context_def.pivot_name, import_scope))
    self._branch = context_def.branch
    super(CondContext, self).__init__(values_def=context_def.values_def,
                                      import_scope=import_scope) 
Example #12
Source File: control_flow_ops.py    From lambda-packs with MIT License
def to_proto(self, export_scope=None):
    """Converts a `CondContext` to a `CondContextDef` protocol buffer.

    Args:
      export_scope: Optional `string`. Name scope to remove.

    Returns:
      A `CondContextDef` protocol buffer.
    """
    if (export_scope is None or
        self.name.startswith(export_scope)):
      context_def = control_flow_pb2.CondContextDef()
      context_def.context_name = ops.strip_name_scope(
          self.name, export_scope)
      context_def.pred_name = ops.strip_name_scope(
          self._pred.name, export_scope)
      context_def.pivot_name = ops.strip_name_scope(
          self._pivot.name, export_scope)
      context_def.branch = self._branch
      context_def.values_def.MergeFrom(super(CondContext, self)._to_proto(
          export_scope))

      return context_def
    else:
      return None 
Example #13
Source File: control_flow_ops.py    From lambda-packs with MIT License
def __init__(self, parallel_iterations=10, back_prop=True, swap_memory=False,
               name="while_context", grad_state=None, context_def=None,
               import_scope=None):
    """"Creates a `WhileContext`.

    Args:
      parallel_iterations: The number of iterations allowed to run in parallel.
      back_prop: Whether backprop is enabled for this while loop.
      swap_memory: Whether GPU-CPU memory swap is enabled for this loop.
      name: Optional name prefix for the returned tensors.
      grad_state: The gradient loop state.
      context_def: Optional `WhileContextDef` protocol buffer to initialize
        the `WhileContext` python object from.
      import_scope: Optional `string`. Name scope to add. Only used when
        initializing from protocol buffer.
    """
    if context_def:
      self._init_from_proto(context_def, import_scope=import_scope)
    else:
      ControlFlowContext.__init__(self)
      self._init_from_args(parallel_iterations, back_prop, swap_memory,
                           name)
    # The gradient loop state.
    self._grad_state = grad_state 
Example #14
Source File: session_ops.py    From lambda-packs with MIT License
def __init__(self, handle, dtype, session):
    """Constructs a new tensor handle.

    A tensor handle for a persistent tensor is a Python string
    of the form "tensor_name;unique_id;device_name".

    Args:
      handle: A tensor handle.
      dtype: The data type of the tensor represented by `handle`.
      session: The session in which the tensor is produced.
    """
    self._handle = compat.as_str_any(handle)
    self._resource_handle = None
    self._dtype = dtype
    self._session = session
    self._auto_gc_enabled = True 
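Illustrative only (the handle value below is made up): the documented layout can be split on `;`:

```python
handle = 'MatMul:0;1;/job:local/replica:0/task:0/device:CPU:0'  # made-up value
tensor_name, unique_id, device_name = handle.split(';', 2)
```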
Example #15
Source File: session_ops.py    From lambda-packs with MIT License
def delete_session_tensor(handle, name=None):
  """Delete the tensor for the given tensor handle.

  This is EXPERIMENTAL and subject to change.

  Deletes the tensor for a given tensor handle. The tensor is produced
  in a previous `run()` and stored in the state of the session.

  Args:
    handle: The string representation of a persistent tensor handle.
    name: Optional name prefix for the return tensor.

  Returns:
    A pair of graph elements. The first is a placeholder for feeding a
    tensor handle and the second is a deletion operation.
  """
  handle_device = TensorHandle._get_device_name(handle)
  with ops.device(handle_device):
    holder = array_ops.placeholder(dtypes.string)
    deleter = gen_data_flow_ops._delete_session_tensor(holder, name=name)
  return (holder, deleter) 
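The documented TF 1.x usage pairs this with `tf.get_session_handle`; a minimal sketch:

```python
import tensorflow as tf

with tf.Session() as sess:
    a = tf.constant(10)
    h = sess.run(tf.get_session_handle(a))  # persistent tensor handle
    holder, deleter = tf.delete_session_tensor(h.handle)
    sess.run(deleter, feed_dict={holder: h.handle})
```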
Example #16
Source File: tensorflow_dataframe.py    From lambda-packs with MIT License
def _dtype_to_nan(dtype):
  # Maps a dtype to the placeholder value used for missing data.
  if dtype is dtypes.string:
    return b""
  elif dtype.is_integer:
    return np.nan
  elif dtype.is_floating:
    return np.nan
  elif dtype is dtypes.bool:
    return np.nan
  else:
    raise ValueError("Can't parse type without NaN into sparse tensor: %s" %
                     dtype) 
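Assuming the helper above is in scope, the mapping looks like this:

```python
from tensorflow.python.framework import dtypes

print(_dtype_to_nan(dtypes.string))   # b''
print(_dtype_to_nan(dtypes.float32))  # nan
print(_dtype_to_nan(dtypes.bool))     # nan
```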
Example #17
Source File: data_ops.py    From lambda-packs with MIT License
def ParseLabelTensorOrDict(labels):
  """Return a tensor to use for input labels to tensor_forest.

  The incoming targets can be a dict where keys are the string names of the
  columns, which we turn into a single 1-D tensor for classification or
  2-D tensor for regression.

  Converts sparse tensors to dense ones.

  Args:
    labels: `Tensor` or `dict` of `Tensor` objects.

  Returns:
    A 2-D tensor for labels/outputs.
  """
  if isinstance(labels, dict):
    return math_ops.to_float(
        array_ops.concat(
            [
                sparse_ops.sparse_tensor_to_dense(
                    labels[k], default_value=-1) if isinstance(
                        labels[k], sparse_tensor.SparseTensor) else labels[k]
                for k in sorted(labels.keys())
            ],
            1))
  else:
    if isinstance(labels, sparse_tensor.SparseTensor):
      return math_ops.to_float(sparse_ops.sparse_tensor_to_dense(
          labels, default_value=-1))
    else:
      return math_ops.to_float(labels) 
Example #18
Source File: input_fn_utils.py    From lambda-packs with MIT License
def build_parsing_serving_input_fn(feature_spec, default_batch_size=None):
  """Build an input_fn appropriate for serving, expecting fed tf.Examples.

  Creates an input_fn that expects a serialized tf.Example fed into a string
  placeholder.  The function parses the tf.Example according to the provided
  feature_spec, and returns all parsed Tensors as features.  This input_fn is
  for use at serving time, so the labels return value is always None.

  Args:
    feature_spec: a dict of string to `VarLenFeature`/`FixedLenFeature`.
    default_batch_size: the number of query examples expected per batch.
        Leave unset for variable batch size (recommended).

  Returns:
    An input_fn suitable for use in serving.
  """
  def input_fn():
    """An input_fn that expects a serialized tf.Example."""
    serialized_tf_example = array_ops.placeholder(dtype=dtypes.string,
                                                  shape=[default_batch_size],
                                                  name='input_example_tensor')
    inputs = {'examples': serialized_tf_example}
    features = parsing_ops.parse_example(serialized_tf_example, feature_spec)
    labels = None  # these are not known in serving!
    return InputFnOps(features, labels, inputs)
  return input_fn 
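A usage sketch (the feature names are made up); the returned `InputFnOps` carries the parsed features, a `None` label, and the raw string placeholder:

```python
import tensorflow as tf

feature_spec = {
    'age': tf.FixedLenFeature(shape=[1], dtype=tf.int64),  # made-up feature
    'query': tf.VarLenFeature(dtype=tf.string),            # made-up feature
}
serving_input_fn = build_parsing_serving_input_fn(feature_spec)
input_ops = serving_input_fn()
# input_ops.features       -> dict of parsed Tensors/SparseTensors
# input_ops.labels         -> None
# input_ops.default_inputs -> {'examples': <string placeholder>}
```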
Example #19
Source File: saver_test_utils.py    From lambda-packs with MIT License
def _export(self):
    # Exports the table's contents as (string keys, float32 values) tensors.
    return gen_lookup_ops._lookup_table_export(self.table_ref, dtypes.string,
                                               dtypes.float32)
Example #20
Source File: dataset_ops.py    From lambda-packs with MIT License
def __init__(self, filenames, compression_type=None):
    """Creates a `TFRecordDataset`.

    Args:
      filenames: A `tf.string` tensor containing one or more filenames.
      compression_type: A `tf.string` scalar evaluating to one of `""` (no
        compression), `"ZLIB"`, or `"GZIP"`.
    """
    super(TFRecordDataset, self).__init__()
    self._filenames = ops.convert_to_tensor(filenames, name="filenames")
    if compression_type is not None:
      self._compression_type = ops.convert_to_tensor(
          compression_type, dtype=dtypes.string, name="compression_type")
    else:
      self._compression_type = constant_op.constant("", name="compression_type") 
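Because `filenames` is itself a `tf.string` tensor, it can be a Python list or a placeholder fed later; a sketch against the public TF 1.x `tf.data` API (file paths are made up):

```python
import tensorflow as tf

# Literal filenames:
dataset = tf.data.TFRecordDataset(['train-0.tfrecord', 'train-1.tfrecord'],
                                  compression_type='GZIP')

# Or a string placeholder, fed when the iterator is initialized:
filenames = tf.placeholder(tf.string, shape=[None])
dataset = tf.data.TFRecordDataset(filenames)
```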
Example #21
Source File: saver_test_utils.py    From lambda-packs with MIT License
def __init__(self, name, table_ref=None):
    # Creates (or reuses) a string -> float32 mutable hash table and registers
    # a custom saveable so the table is written to checkpoints.
    if table_ref is None:
      self.table_ref = gen_lookup_ops._mutable_hash_table(
          key_dtype=dtypes.string, value_dtype=dtypes.float32, name=name)
    else:
      self.table_ref = table_ref
    self._name = name
    self._saveable = CheckpointedOp.CustomSaveable(self, name)
    ops_lib.add_to_collection(ops_lib.GraphKeys.SAVEABLE_OBJECTS,
                              self._saveable) 
Example #22
Source File: input_fn_utils.py    From lambda-packs with MIT License
def build_default_serving_input_fn(features, default_batch_size=None):
  """Build an input_fn appropriate for serving, expecting feature Tensors.

  Creates an input_fn that expects all features to be fed directly.
  This input_fn is for use at serving time, so the labels return value is always
  None.

  Args:
    features: a dict of string to `Tensor`.
    default_batch_size: the number of query examples expected per batch.
        Leave unset for variable batch size (recommended).

  Returns:
    An input_fn suitable for use in serving.
  """
  def input_fn():
    """an input_fn that expects all features to be fed directly."""
    features_placeholders = {}
    for name, t in features.items():
      shape_list = t.get_shape().as_list()
      shape_list[0] = default_batch_size
      shape = tensor_shape.TensorShape(shape_list)

      # t.name (e.g. 'foo:0') is not a valid op name; use t.op.name instead.
      features_placeholders[name] = array_ops.placeholder(dtype=t.dtype,
                                                          shape=shape,
                                                          name=t.op.name)
    labels = None  # these are not known in serving!
    return InputFnOps(features_placeholders, labels, features_placeholders)
  return input_fn 
Example #23
Source File: feature_column.py    From lambda-packs with MIT License
def _parse_example_spec(self):
    # Merges the parsing specs of all keys; plain string keys are parsed as
    # variable-length string features.
    config = {}
    for key in self.keys:
      if isinstance(key, _FeatureColumn):
        config.update(key._parse_example_spec)  # pylint: disable=protected-access
      else:  # key must be a string
        config.update({key: parsing_ops.VarLenFeature(dtypes.string)})
    return config 
Example #24
Source File: feature_column.py    From lambda-packs with MIT License
def name(self):
    # Joins the sorted leaf-level key names with '_X_'.
    feature_names = []
    for key in _collect_leaf_level_keys(self):
      if isinstance(key, _FeatureColumn):
        feature_names.append(key.name)
      else:  # key must be a string
        feature_names.append(key)
    return '_X_'.join(sorted(feature_names)) 
Example #25
Source File: summaries.py    From tensornets with MIT License
def summarize_tensor(tensor, tag=None):
  """Summarize a tensor using a suitable summary type.

  This function adds a summary op for `tensor`. The type of summary depends on
  the shape of `tensor`. For scalars, a `scalar_summary` is created, for all
  other tensors, `histogram_summary` is used.

  Args:
    tensor: The tensor to summarize
    tag: The tag to use, if None then use tensor's op's name.

  Returns:
    The summary op created or None for string tensors.
  """
  # Skips string tensors and boolean tensors (not handled by the summaries).
  if (tensor.dtype.is_compatible_with(dtypes.string) or
      tensor.dtype.base_dtype == dtypes.bool):
    return None

  if tensor.get_shape().ndims == 0:
    # For scalars, use a scalar summary.
    return _add_scalar_summary(tensor, tag)
  else:
    # We may land in here if the rank is still unknown. The histogram won't
    # hurt if this ends up being a scalar.
    return _add_histogram_summary(tensor, tag) 
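Assuming the function above is in scope, the dispatch looks like:

```python
import tensorflow as tf

summarize_tensor(tf.constant(0.5), tag='loss')   # scalar summary op
summarize_tensor(tf.constant([1.0, 2.0, 3.0]))   # histogram summary op
print(summarize_tensor(tf.constant('skipped')))  # None: string dtype is skipped
```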
Example #26
Source File: feature_column.py    From lambda-packs with MIT License
def __init__(self, features):
    """Creates a `_LazyBuilder`.

    Args:
      features: A mapping from feature column to objects that are `Tensor` or
        `SparseTensor`, or can be converted to same via
        `sparse_tensor.convert_to_tensor_or_sparse_tensor`. A `string` key
        signifies a base feature (not-transformed). A `_FeatureColumn` key
        means that this `Tensor` is the output of an existing `_FeatureColumn`
        which can be reused.
    """
    self._features = features.copy()
    self._feature_tensors = {} 
Example #27
Source File: sequence_queueing_state_saver.py    From lambda-packs with MIT License
def __init__(self, length, key, sequences, context):
    length = ops.convert_to_tensor(length, name="length")
    key = ops.convert_to_tensor(key, name="key")
    if not isinstance(sequences, dict):
      raise TypeError("sequences must be a dict")
    if not isinstance(context, dict):
      raise TypeError("context must be a dict")
    if not sequences:
      raise ValueError("must have at least one sequence tensor")
    for k in sequences.keys():
      if not isinstance(k, six.string_types):
        raise TypeError("sequence key must be string: %s" % k)
      if ":" in k:
        raise ValueError("sequence key may not have a colon: '%s'" % k)
    for k in context.keys():
      if not isinstance(k, six.string_types):
        raise TypeError("context key must be string: %s" % k)
      if ":" in k:
        raise ValueError("context key may not have a colon: '%s'" % k)
    sequences = dict((k, ops.convert_to_tensor(
        v, name="sequence_%s" % k)) for k, v in sequences.items())
    context = dict((k, ops.convert_to_tensor(
        v, name="context_%s" % k)) for k, v in context.items())
    self._length = length
    self._key = key
    self._sequences = sequences
    self._context = context 
Example #28
Source File: feature_column.py    From lambda-packs with MIT License
def name(self):
    """Returns string. used for variable_scope and naming."""
    pass 
Example #29
Source File: lookup_ops.py    From lambda-packs with MIT License
def _as_string(tensor):
  # String tensors pass through unchanged; all others are converted.
  if dtypes.string == tensor.dtype.base_dtype:
    return tensor
  return string_ops.as_string(tensor) 
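Assuming the helper above is in scope:

```python
import tensorflow as tf

print(_as_string(tf.constant('abc')).dtype)      # string -- passed through
print(_as_string(tf.constant([1, 2, 3])).dtype)  # string -- converted
```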
Example #30
Source File: saved_model_export_utils.py    From lambda-packs with MIT License
def _get_classification_classes(output_tensors):
  classes = output_tensors.get(prediction_key.PredictionKey.CLASSES)
  if classes is not None and classes.dtype != dtypes.string:
    # Servo classification can only serve string classes.
    return None
  return classes