Python tensorflow.python.platform.gfile.Stat() Examples

The following are five code examples of tensorflow.python.platform.gfile.Stat(), drawn from open-source projects. You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module tensorflow.python.platform.gfile, or try the search function.
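Before the project examples, a minimal sketch of the call itself: gfile.Stat(path) returns a FileStatistics object whose length attribute holds the file size in bytes, which is the attribute every example below reads. The path used here is purely illustrative.

from tensorflow.python.platform import gfile

path = '/tmp/example.txt'  # hypothetical path, for illustration only

if gfile.Exists(path):
  # Stat() returns a FileStatistics object; its `length` attribute
  # is the file size in bytes.
  file_stat = gfile.Stat(path)
  print('Size of %s: %d bytes' % (path, file_stat.length))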
Example #1
Source File: directory_watcher.py    From lambda-packs with MIT License; identical code also appears in auto-alt-text-lambda-api (MIT License), deep_image_model (Apache License 2.0), and keras-lambda (MIT License)
def _SetPath(self, path):
    """Sets the current path to watch for new events.

    This also records the size of the old path, if any. If the size can't be
    found, an error is logged.

    Args:
      path: The full path of the file to watch.
    """
    old_path = self._path
    if old_path and not io_wrapper.IsGCSPath(old_path):
      try:
        # We're done with the path, so store its size.
        size = gfile.Stat(old_path).length
        logging.debug('Setting latest size of %s to %d', old_path, size)
        self._finalized_sizes[old_path] = size
      except errors.OpError as e:
        logging.error('Unable to get size of %s: %s', old_path, e)

    self._path = path
    self._loader = self._loader_factory(path) 
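A note on the pattern above: when the watcher moves on to a new file, it stats the old one a final time and records the size, so later checks can detect files that change after being finalized. Below is a minimal, self-contained sketch of the same idea, assuming an illustrative finalized_sizes dict in place of the watcher's internal state:

from tensorflow.python.framework import errors
from tensorflow.python.platform import gfile

finalized_sizes = {}  # illustrative stand-in for self._finalized_sizes

def record_finalized_size(old_path):
  """Record the final size of a file we are done reading."""
  try:
    finalized_sizes[old_path] = gfile.Stat(old_path).length
  except errors.OpError:
    # The file may be gone or unreadable; the watcher logs this
    # and continues rather than failing the whole run.
    pass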
Example #2
Source File: directory_watcher.py    From lambda-packs with MIT License; identical code also appears in auto-alt-text-lambda-api (MIT License), deep_image_model (Apache License 2.0), and keras-lambda (MIT License)
def _HasOOOWrite(self, path):
    """Returns whether the path has had an out-of-order write."""
    # Check the sizes of each path before the current one.
    size = gfile.Stat(path).length
    old_size = self._finalized_sizes.get(path, None)
    if size != old_size:
      if old_size is None:
        logging.error('File %s created after file %s even though it\'s '
                      'lexicographically earlier', path, self._path)
      else:
        logging.error('File %s updated even though the current file is %s',
                      path, self._path)
      return True
    else:
      return False 
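This check pairs with the size bookkeeping from Example #1: once a file's size has been finalized, any later change (or the late appearance of a lexicographically earlier file that was never finalized) is reported as an out-of-order write. A hedged standalone sketch, reusing the illustrative finalized_sizes dict from the earlier sketch:

from tensorflow.python.platform import gfile

def has_out_of_order_write(path, finalized_sizes):
  """Return True if `path` changed after its size was finalized."""
  size = gfile.Stat(path).length
  old_size = finalized_sizes.get(path)
  # old_size is None when the file was never finalized, i.e. it
  # appeared after later files were already read; a size mismatch
  # means it grew or shrank afterwards. Both count as out-of-order.
  return size != old_size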
Example #3
Source File: debug_data.py    From lambda-packs with MIT License
def __init__(self, dump_root, debug_dump_rel_path):
    """`DebugTensorDatum` constructor.

    Args:
      dump_root: (`str`) Debug dump root directory.
      debug_dump_rel_path: (`str`) Path to a debug dump file, relative to the
          `dump_root`. For example, suppose the debug dump root
          directory is `/tmp/tfdbg_1` and the dump file is at
          `/tmp/tfdbg_1/ns_1/node_a_0_DebugIdentity_123456789`, then
          the value of the debug_dump_rel_path should be
          `ns_1/node_a_0_DebugIdentity_123456789`.

    Raises:
      ValueError: If the base file name of the dump file does not conform to
        the dump file naming pattern:
        `node_name`_`output_slot`_`debug_op`_`timestamp`
    """

    base = os.path.basename(debug_dump_rel_path)

    if base.count("_") < 3:
      raise ValueError(
          "Dump file path does not conform to the naming pattern: %s" % base)

    # TODO(cais): Add hostname and pid to support dumps from distributed
    #             sessions.

    self._extended_timestamp = base.split("_")[-1]
    # It may include an index suffix at the end if file path collision happened
    # due to identical timestamps.
    if "-" in self._extended_timestamp:
      self._timestamp = int(
          self._extended_timestamp[:self._extended_timestamp.find("-")])
    else:
      self._timestamp = int(self._extended_timestamp)

    self._debug_op = base.split("_")[-2]
    self._output_slot = int(base.split("_")[-3])

    namespace = os.path.dirname(debug_dump_rel_path).replace("\\", "/")
    node_base_name = "_".join(base.split("_")[:-3])
    if not namespace or namespace == ".":
      self._node_name = node_base_name
    else:
      self._node_name = namespace + "/" + node_base_name

    self._file_path = os.path.join(dump_root, debug_dump_rel_path)
    self._dump_size_bytes = (gfile.Stat(self._file_path).length if
                             gfile.Exists(self._file_path) else None)

    self._run_fetches_info = None
    self._run_feed_keys_info = None 
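The constructor above recovers the node name, output slot, debug op, and timestamp from the dump file's base name by splitting on underscores from the right, so underscores inside the node name itself are preserved. A standalone sketch of that parse, with parse_dump_base_name as an illustrative helper name (it is not part of tfdbg's API):

def parse_dump_base_name(base):
  """Parse 'node_name_outputslot_debugop_timestamp[-suffix]'."""
  if base.count("_") < 3:
    raise ValueError(
        "Dump file path does not conform to the naming pattern: %s" % base)
  parts = base.split("_")
  node_base_name = "_".join(parts[:-3])
  output_slot = int(parts[-3])
  debug_op = parts[-2]
  # A trailing '-N' disambiguates dump files with identical timestamps.
  timestamp = int(parts[-1].split("-")[0])
  return node_base_name, output_slot, debug_op, timestamp

# parse_dump_base_name("node_a_0_DebugIdentity_123456789-1")
# -> ("node_a", 0, "DebugIdentity", 123456789)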
Example #4
Source File: debug_data.py    From auto-alt-text-lambda-api with MIT License; identical code also appears in keras-lambda (MIT License)
def __init__(self, dump_root, debug_dump_rel_path):
    """`DebugTensorDatum` constructor.

    Args:
      dump_root: (`str`) Debug dump root directory.
      debug_dump_rel_path: (`str`) Path to a debug dump file, relative to the
          `dump_root`. For example, suppose the debug dump root
          directory is `/tmp/tfdbg_1` and the dump file is at
          `/tmp/tfdbg_1/ns_1/node_a_0_DebugIdentity_123456789`, then
          the value of the debug_dump_rel_path should be
          `ns_1/node_a_0_DebugIdentity_123456789`.

    Raises:
      ValueError: If the base file name of the dump file does not conform to
        the dump file naming pattern:
        `node_name`_`output_slot`_`debug_op`_`timestamp`
    """

    base = os.path.basename(debug_dump_rel_path)

    if base.count("_") < 3:
      raise ValueError(
          "Dump file path does not conform to the naming pattern: %s" % base)

    # TODO(cais): Add hostname and pid to support dumps from distributed
    #             sessions.

    self._timestamp = int(base.split("_")[-1])
    self._debug_op = base.split("_")[-2]
    self._output_slot = int(base.split("_")[-3])

    namespace = os.path.dirname(debug_dump_rel_path).replace("\\", "/")
    node_base_name = "_".join(base.split("_")[:-3])
    if not namespace or namespace == ".":
      self._node_name = node_base_name
    else:
      self._node_name = namespace + "/" + node_base_name

    self._file_path = os.path.join(dump_root, debug_dump_rel_path)
    self._dump_size_bytes = (gfile.Stat(self._file_path).length if
                             gfile.Exists(self._file_path) else None)

    self._run_fetches_info = None
    self._run_feed_keys_info = None 
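Note the one functional difference from Example #3: this copy parses the timestamp with int(base.split("_")[-1]) directly, so a base name carrying a collision suffix such as ..._123456789-1 would raise ValueError here. The extended-timestamp handling in Example #3 exists to cover exactly that case.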
Example #5
Source File: debug_data.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def __init__(self, dump_root, debug_dump_rel_path):
    """`DebugTensorDatum` constructor.

    Args:
      dump_root: (`str`) Debug dump root directory. This path should not include
        the path component that represents the device name (see also below).
      debug_dump_rel_path: (`str`) Path to a debug dump file, relative to the
        `dump_root`. The first item of this relative path is assumed to be
        a path representing the name of the device that the Tensor belongs to.
        See `device_path_to_device_name` for more details on the device path.
        For example, suppose the debug dump root
        directory is `/tmp/tfdbg_1` and the dump file is at
        `/tmp/tfdbg_1/<device_path>/ns_1/node_a_0_DebugIdentity_123456789`,
        then the value of the debug_dump_rel_path should be
        `<device_path>/ns_1/node_a_0_DebugIdentity_123456789`.

    Raises:
      ValueError: If the base file name of the dump file does not conform to
        the dump file naming pattern:
        `node_name`_`output_slot`_`debug_op`_`timestamp`
    """

    path_components = os.path.normpath(debug_dump_rel_path).split(os.sep)
    self._device_name = device_path_to_device_name(path_components[0])
    base = path_components[-1]
    if base.count("_") < 3:
      raise ValueError(
          "Dump file path does not conform to the naming pattern: %s" % base)

    self._extended_timestamp = base.split("_")[-1]
    # It may include an index suffix at the end if file path collision happened
    # due to identical timestamps.
    if "-" in self._extended_timestamp:
      self._timestamp = int(
          self._extended_timestamp[:self._extended_timestamp.find("-")])
    else:
      self._timestamp = int(self._extended_timestamp)

    self._debug_op = base.split("_")[-2]
    self._output_slot = int(base.split("_")[-3])

    node_base_name = "_".join(base.split("_")[:-3])
    self._node_name = "/".join(path_components[1:-1] + [node_base_name])

    self._file_path = os.path.join(dump_root, debug_dump_rel_path)
    self._dump_size_bytes = (gfile.Stat(self._file_path).length if
                             gfile.Exists(self._file_path) else None) 
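This newer variant additionally treats the first component of debug_dump_rel_path as a device directory and folds any intermediate directories into the node name as name scopes. A hedged sketch of just that path handling; device_path_to_device_name is defined elsewhere in debug_data.py, so the sketch keeps the raw device component instead of converting it:

import os

def split_dump_rel_path(debug_dump_rel_path):
  """Separate the device component from the node-name components."""
  components = os.path.normpath(debug_dump_rel_path).split(os.sep)
  device_component = components[0]  # encoded device-name directory
  base = components[-1]             # the dump file's base name
  node_base_name = "_".join(base.split("_")[:-3])
  # Intermediate directories become name scopes of the node name.
  node_name = "/".join(components[1:-1] + [node_base_name])
  return device_component, node_name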