Python tensorflow.python.platform.gfile.ListDirectory() Examples

The following are 28 code examples of tensorflow.python.platform.gfile.ListDirectory(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module tensorflow.python.platform.gfile, or try the search function.
Example #1
Source File: event_file_inspector.py    From lambda-packs with MIT License 6 votes vote down vote up
def generators_from_logdir(logdir):
  """Builds one chained event generator per subdirectory with event files.

  The number of generators returned equals the number of directories within
  logdir that contain event files. If only logdir itself contains event
  files, the returned list has length one.

  Args:
    logdir: A log directory that contains event files.

  Returns:
    List of event generators, one per subdirectory with event files.
  """
  generators = []
  for subdir in event_multiplexer.GetLogdirSubdirectories(logdir):
    event_paths = [
        os.path.join(subdir, name)
        for name in gfile.ListDirectory(subdir)
        if event_accumulator.IsTensorFlowEventsFile(os.path.join(subdir, name))
    ]
    file_generators = [generator_from_event_file(path) for path in event_paths]
    generators.append(itertools.chain.from_iterable(file_generators))
  return generators
Example #2
Source File: plugin_asset_util.py    From lambda-packs with MIT License 6 votes vote down vote up
def ListPlugins(logdir):
  """List all the plugins that have registered assets in logdir.

  If the plugins directory does not exist, an empty list is returned; this
  keeps compatibility with old log directories that never wrote plugin
  assets.

  Args:
    logdir: A directory that was created by a TensorFlow events writer.

  Returns:
    a list of plugin names, as strings
  """
  plugins_dir = os.path.join(logdir, _PLUGINS_DIR)
  if not gfile.IsDirectory(plugins_dir):
    return []
  # Only subdirectories of plugins_dir count as registered plugins.
  return [
      entry for entry in gfile.ListDirectory(plugins_dir)
      if _IsDirectory(plugins_dir, entry)
  ]
Example #3
Source File: event_file_inspector.py    From keras-lambda with MIT License 6 votes vote down vote up
def generators_from_logdir(logdir):
  """Builds one chained event generator per subdirectory with event files.

  The number of generators returned equals the number of directories within
  logdir that contain event files. If only logdir itself contains event
  files, the returned list has length one.

  Args:
    logdir: A log directory that contains event files.

  Returns:
    List of event generators, one per subdirectory with event files.
  """
  generators = []
  for subdir in event_multiplexer.GetLogdirSubdirectories(logdir):
    event_paths = [
        os.path.join(subdir, name)
        for name in gfile.ListDirectory(subdir)
        if event_accumulator.IsTensorFlowEventsFile(os.path.join(subdir, name))
    ]
    file_generators = [generator_from_event_file(path) for path in event_paths]
    generators.append(itertools.chain.from_iterable(file_generators))
  return generators
Example #4
Source File: event_file_inspector.py    From auto-alt-text-lambda-api with MIT License 6 votes vote down vote up
def generators_from_logdir(logdir):
  """Builds one chained event generator per subdirectory with event files.

  The number of generators returned equals the number of directories within
  logdir that contain event files. If only logdir itself contains event
  files, the returned list has length one.

  Args:
    logdir: A log directory that contains event files.

  Returns:
    List of event generators, one per subdirectory with event files.
  """
  generators = []
  for subdir in event_multiplexer.GetLogdirSubdirectories(logdir):
    event_paths = [
        os.path.join(subdir, name)
        for name in gfile.ListDirectory(subdir)
        if event_accumulator.IsTensorFlowEventsFile(os.path.join(subdir, name))
    ]
    file_generators = [generator_from_event_file(path) for path in event_paths]
    generators.append(itertools.chain.from_iterable(file_generators))
  return generators
Example #5
Source File: event_file_inspector.py    From deep_image_model with Apache License 2.0 6 votes vote down vote up
def generators_from_logdir(logdir):
  """Builds one chained event generator per subdirectory with event files.

  The number of generators returned equals the number of directories within
  logdir that contain event files. If only logdir itself contains event
  files, the returned list has length one.

  Args:
    logdir: A log directory that contains event files.

  Returns:
    List of event generators, one per subdirectory with event files.
  """
  generators = []
  for subdir in event_multiplexer.GetLogdirSubdirectories(logdir):
    event_paths = [
        os.path.join(subdir, name)
        for name in gfile.ListDirectory(subdir)
        if event_accumulator.IsTensorFlowEventsFile(os.path.join(subdir, name))
    ]
    file_generators = [generator_from_event_file(path) for path in event_paths]
    generators.append(itertools.chain.from_iterable(file_generators))
  return generators
Example #6
Source File: io_wrapper.py    From lambda-packs with MIT License 5 votes vote down vote up
def ListDirectoryAbsolute(directory):
  """Yields all files in the given directory. The paths are absolute."""
  entries = gfile.ListDirectory(directory)
  return (os.path.join(directory, entry) for entry in entries)
Example #7
Source File: gc.py    From keras-lambda with MIT License 5 votes vote down vote up
def get_paths(base_dir, parser):
  """Collects and parses the Paths found directly under a directory.

  Args:
    base_dir: directory to scan.
    parser: callable receiving a raw Path (only Path.path populated); it may
      return the Path augmented with information such as export_version, or
      None to skip the entry. For example, a parser may extract the export
      version from a path such as "/tmp/exports/100", while another may
      extract it from a full file name such as "/tmp/checkpoint-99.out".

  Returns:
    A sorted list of the Paths accepted by the parser. By default only
    Path.path is populated; the parser is responsible for populating
    Path.export_version.
  """
  candidates = (parser(Path(os.path.join(base_dir, entry), None))
                for entry in gfile.ListDirectory(base_dir))
  # Drop entries the parser rejected (returned a falsy value).
  return sorted(p for p in candidates if p)
Example #8
Source File: gc.py    From keras-lambda with MIT License 5 votes vote down vote up
def get_paths(base_dir, parser):
  """Collects and parses the Paths found directly under a directory.

  Args:
    base_dir: directory to scan.
    parser: callable receiving a raw Path (only Path.path populated); it may
      return the Path augmented with information such as export_version, or
      None to skip the entry. For example, a parser may extract the export
      version from a path such as "/tmp/exports/100", while another may
      extract it from a full file name such as "/tmp/checkpoint-99.out".

  Returns:
    A sorted list of the Paths accepted by the parser. By default only
    Path.path is populated; the parser is responsible for populating
    Path.export_version.
  """
  candidates = (parser(Path(os.path.join(base_dir, entry), None))
                for entry in gfile.ListDirectory(base_dir))
  # Drop entries the parser rejected (returned a falsy value).
  return sorted(p for p in candidates if p)
Example #9
Source File: io_wrapper.py    From keras-lambda with MIT License 5 votes vote down vote up
def ListDirectoryAbsolute(directory):
  """Yields all files in the given directory. The paths are absolute."""
  entries = gfile.ListDirectory(directory)
  return (os.path.join(directory, entry) for entry in entries)
Example #10
Source File: dumping_wrapper.py    From keras-lambda with MIT License 5 votes vote down vote up
def __init__(self, sess, session_root, watch_fn=None, log_usage=True):
    """Constructor of DumpingDebugWrapperSession.

    Args:
      sess: The TensorFlow `Session` object being wrapped.
      session_root: (`str`) Path to the session root directory. Must be a
        directory that does not exist or an empty directory. If the directory
        does not exist, it will be created by the debugger core during debug
        [`Session.run()`](../../../g3doc/api_docs/python/client.md#session.run)
        calls.
        As the `run()` calls occur, subdirectories will be added to
        `session_root`. The subdirectories' names have the following pattern:
          run_<epoch_time_stamp>_<uuid>
        E.g., run_1480734393835964_ad4c953a85444900ae79fc1b652fb324
      watch_fn: (`Callable`) A Callable that can be used to define per-run
        debug ops and watched tensors. See the doc of
        `NonInteractiveDebugWrapperSession.__init__()` for details.
      log_usage: (`bool`) whether the usage of this class is to be logged.

    Raises:
       ValueError: If `session_root` is an existing and non-empty directory or
       if `session_root` is a file.
    """

    if log_usage:
      pass  # No logging for open-source.

    # Delegate the common non-interactive wrapper setup (session + watch_fn)
    # to the base class before validating session_root.
    framework.NonInteractiveDebugWrapperSession.__init__(
        self, sess, watch_fn=watch_fn)

    # session_root must be either a nonexistent path or an empty directory;
    # if it does not exist it is not created here (per the docstring, the
    # debugger core creates it lazily during run() calls).
    if gfile.Exists(session_root):
      if not gfile.IsDirectory(session_root):
        raise ValueError(
            "session_root path points to a file: %s" % session_root)
      elif gfile.ListDirectory(session_root):
        raise ValueError(
            "session_root path points to a non-empty directory: %s" %
            session_root)
    self._session_root = session_root 
Example #11
Source File: gc.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License 5 votes vote down vote up
def _get_paths(base_dir, parser):
  """Collects and parses the Paths found directly under a directory.

  Args:
    base_dir: directory to scan.
    parser: callable receiving a raw Path (only Path.path populated); it may
      return the Path augmented with information such as export_version, or
      None to skip the entry. For example, a parser may extract the export
      version from a path such as "/tmp/exports/100", while another may
      extract it from a full file name such as "/tmp/checkpoint-99.out".

  Returns:
    A sorted list of the Paths accepted by the parser. By default only
    Path.path is populated; the parser is responsible for populating
    Path.export_version.
  """
  base = compat.as_str_any(base_dir)
  candidates = (
      parser(Path(os.path.join(base, compat.as_str_any(entry)), None))
      for entry in gfile.ListDirectory(base_dir))
  # Drop entries the parser rejected (returned a falsy value).
  return sorted(p for p in candidates if p)
Example #12
Source File: gc_test.py    From estimator with Apache License 2.0 5 votes vote down vote up
def testGcsDirWithSeparator(self):
    # For GCS base directories, gfile.ListDirectory yields names with a
    # trailing "/"; _get_paths must strip it so the resulting gc.Path
    # entries use clean names joined onto base_dir.
    base_dir = "gs://bucket/foo"
    with tf.compat.v1.test.mock.patch.object(
        gfile, "ListDirectory") as mock_list_directory:
      # gfile.ListDirectory returns directory names with separator '/'
      mock_list_directory.return_value = ["0/", "1/"]
      self.assertEqual(
          gc._get_paths(base_dir, _create_parser(base_dir)), [
              gc.Path(os.path.join(base_dir, "0"), 0),
              gc.Path(os.path.join(base_dir, "1"), 1)
          ]) 
Example #13
Source File: gc.py    From estimator with Apache License 2.0 5 votes vote down vote up
def _get_paths(base_dir, parser):
  """Collects and parses the Paths found directly under a directory.

  Args:
    base_dir: directory to scan.
    parser: callable receiving a raw Path (only Path.path populated); it may
      return the Path augmented with information such as export_version, or
      None to skip the entry. For example, a parser may extract the export
      version from a path such as "/tmp/exports/100", while another may
      extract it from a full file name such as "/tmp/checkpoint-99.out".

  Returns:
    A sorted list of the Paths accepted by the parser. By default only
    Path.path is populated; the parser is responsible for populating
    Path.export_version.
  """
  # Tests mock gfile.ListDirectory directly, so keep using the non-public API.
  paths = []
  for raw_name in gfile.ListDirectory(base_dir):
    name = tf.compat.as_str_any(raw_name)
    # When base_dir is a GCS URL, ListDirectory() returns names with a
    # trailing "/"; drop it before joining.
    if name[-1] == '/':
      name = name[:-1]
    parsed = parser(Path(os.path.join(tf.compat.as_str_any(base_dir), name),
                         None))
    if parsed:
      paths.append(parsed)
  return sorted(paths)
Example #14
Source File: exporter_test.py    From deep_image_model with Apache License 2.0 5 votes vote down vote up
def testGC(self):
    # Export at three successive global steps into the same path; the
    # assertions show each export is named by the zero-padded step and that
    # after the third export only the two newest directories remain (i.e.
    # older exports are garbage-collected).
    export_path = os.path.join(tf.test.get_temp_dir(), "gc")
    self.doBasicsOneExportPath(export_path, global_step=100)
    self.assertEquals(gfile.ListDirectory(export_path), ["00000100"])
    self.doBasicsOneExportPath(export_path, global_step=101)
    self.assertEquals(
        sorted(gfile.ListDirectory(export_path)), ["00000100", "00000101"])
    self.doBasicsOneExportPath(export_path, global_step=102)
    self.assertEquals(
        sorted(gfile.ListDirectory(export_path)), ["00000101", "00000102"]) 
Example #15
Source File: gc.py    From deep_image_model with Apache License 2.0 5 votes vote down vote up
def get_paths(base_dir, parser):
  """Collects and parses the Paths found directly under a directory.

  Args:
    base_dir: directory to scan.
    parser: callable receiving a raw Path (only Path.path populated); it may
      return the Path augmented with information such as export_version, or
      None to skip the entry. For example, a parser may extract the export
      version from a path such as "/tmp/exports/100", while another may
      extract it from a full file name such as "/tmp/checkpoint-99.out".

  Returns:
    A sorted list of the Paths accepted by the parser. By default only
    Path.path is populated; the parser is responsible for populating
    Path.export_version.
  """
  candidates = (parser(Path(os.path.join(base_dir, entry), None))
                for entry in gfile.ListDirectory(base_dir))
  # Drop entries the parser rejected (returned a falsy value).
  return sorted(p for p in candidates if p)
Example #16
Source File: io_wrapper.py    From deep_image_model with Apache License 2.0 5 votes vote down vote up
def ListDirectoryAbsolute(directory):
  """Yields all files in the given directory. The paths are absolute."""
  entries = gfile.ListDirectory(directory)
  return (os.path.join(directory, entry) for entry in entries)
Example #17
Source File: gc.py    From auto-alt-text-lambda-api with MIT License 5 votes vote down vote up
def get_paths(base_dir, parser):
  """Collects and parses the Paths found directly under a directory.

  Args:
    base_dir: directory to scan.
    parser: callable receiving a raw Path (only Path.path populated); it may
      return the Path augmented with information such as export_version, or
      None to skip the entry. For example, a parser may extract the export
      version from a path such as "/tmp/exports/100", while another may
      extract it from a full file name such as "/tmp/checkpoint-99.out".

  Returns:
    A sorted list of the Paths accepted by the parser. By default only
    Path.path is populated; the parser is responsible for populating
    Path.export_version.
  """
  candidates = (parser(Path(os.path.join(base_dir, entry), None))
                for entry in gfile.ListDirectory(base_dir))
  # Drop entries the parser rejected (returned a falsy value).
  return sorted(p for p in candidates if p)
Example #18
Source File: gc.py    From auto-alt-text-lambda-api with MIT License 5 votes vote down vote up
def get_paths(base_dir, parser):
  """Collects and parses the Paths found directly under a directory.

  Args:
    base_dir: directory to scan.
    parser: callable receiving a raw Path (only Path.path populated); it may
      return the Path augmented with information such as export_version, or
      None to skip the entry. For example, a parser may extract the export
      version from a path such as "/tmp/exports/100", while another may
      extract it from a full file name such as "/tmp/checkpoint-99.out".

  Returns:
    A sorted list of the Paths accepted by the parser. By default only
    Path.path is populated; the parser is responsible for populating
    Path.export_version.
  """
  candidates = (parser(Path(os.path.join(base_dir, entry), None))
                for entry in gfile.ListDirectory(base_dir))
  # Drop entries the parser rejected (returned a falsy value).
  return sorted(p for p in candidates if p)
Example #19
Source File: io_wrapper.py    From auto-alt-text-lambda-api with MIT License 5 votes vote down vote up
def ListDirectoryAbsolute(directory):
  """Yields all files in the given directory. The paths are absolute."""
  entries = gfile.ListDirectory(directory)
  return (os.path.join(directory, entry) for entry in entries)
Example #20
Source File: dumping_wrapper.py    From auto-alt-text-lambda-api with MIT License 5 votes vote down vote up
def __init__(self, sess, session_root, watch_fn=None, log_usage=True):
    """Constructor of DumpingDebugWrapperSession.

    Args:
      sess: The TensorFlow `Session` object being wrapped.
      session_root: (`str`) Path to the session root directory. Must be a
        directory that does not exist or an empty directory. If the directory
        does not exist, it will be created by the debugger core during debug
        [`Session.run()`](../../../g3doc/api_docs/python/client.md#session.run)
        calls.
        As the `run()` calls occur, subdirectories will be added to
        `session_root`. The subdirectories' names have the following pattern:
          run_<epoch_time_stamp>_<uuid>
        E.g., run_1480734393835964_ad4c953a85444900ae79fc1b652fb324
      watch_fn: (`Callable`) A Callable that can be used to define per-run
        debug ops and watched tensors. See the doc of
        `NonInteractiveDebugWrapperSession.__init__()` for details.
      log_usage: (`bool`) whether the usage of this class is to be logged.

    Raises:
       ValueError: If `session_root` is an existing and non-empty directory or
       if `session_root` is a file.
    """

    if log_usage:
      pass  # No logging for open-source.

    # Delegate the common non-interactive wrapper setup (session + watch_fn)
    # to the base class before validating session_root.
    framework.NonInteractiveDebugWrapperSession.__init__(
        self, sess, watch_fn=watch_fn)

    # session_root must be either a nonexistent path or an empty directory;
    # if it does not exist it is not created here (per the docstring, the
    # debugger core creates it lazily during run() calls).
    if gfile.Exists(session_root):
      if not gfile.IsDirectory(session_root):
        raise ValueError(
            "session_root path points to a file: %s" % session_root)
      elif gfile.ListDirectory(session_root):
        raise ValueError(
            "session_root path points to a non-empty directory: %s" %
            session_root)
    self._session_root = session_root 
Example #21
Source File: gc.py    From lambda-packs with MIT License 5 votes vote down vote up
def get_paths(base_dir, parser):
  """Collects and parses the Paths found directly under a directory.

  Args:
    base_dir: directory to scan.
    parser: callable receiving a raw Path (only Path.path populated); it may
      return the Path augmented with information such as export_version, or
      None to skip the entry. For example, a parser may extract the export
      version from a path such as "/tmp/exports/100", while another may
      extract it from a full file name such as "/tmp/checkpoint-99.out".

  Returns:
    A sorted list of the Paths accepted by the parser. By default only
    Path.path is populated; the parser is responsible for populating
    Path.export_version.
  """
  candidates = (parser(Path(os.path.join(base_dir, entry), None))
                for entry in gfile.ListDirectory(base_dir))
  # Drop entries the parser rejected (returned a falsy value).
  return sorted(p for p in candidates if p)
Example #22
Source File: gc.py    From lambda-packs with MIT License 5 votes vote down vote up
def get_paths(base_dir, parser):
  """Collects and parses the Paths found directly under a directory.

  Args:
    base_dir: directory to scan.
    parser: callable receiving a raw Path (only Path.path populated); it may
      return the Path augmented with information such as export_version, or
      None to skip the entry. For example, a parser may extract the export
      version from a path such as "/tmp/exports/100", while another may
      extract it from a full file name such as "/tmp/checkpoint-99.out".

  Returns:
    A sorted list of the Paths accepted by the parser. By default only
    Path.path is populated; the parser is responsible for populating
    Path.export_version.
  """
  candidates = (parser(Path(os.path.join(base_dir, entry), None))
                for entry in gfile.ListDirectory(base_dir))
  # Drop entries the parser rejected (returned a falsy value).
  return sorted(p for p in candidates if p)
Example #23
Source File: event_file_inspector.py    From deep_image_model with Apache License 2.0 4 votes vote down vote up
def get_inspection_units(logdir='', event_file='', tag=''):
  """Returns a list of InspectionUnit objects given either logdir or event_file.

  If logdir is given, one InspectionUnit is built per directory or
  subdirectory that contains event files. If event_file is given instead,
  exactly one InspectionUnit is returned.

  Args:
    logdir: A log directory that contains event files.
    event_file: Or, a particular event file path.
    tag: An optional tag name to query for.

  Returns:
    A list of InspectionUnit objects.
  """
  if logdir:
    units = []
    for subdir in event_multiplexer.GetLogdirSubdirectories(logdir):
      event_paths = [
          os.path.join(subdir, name)
          for name in gfile.ListDirectory(subdir)
          if event_accumulator.IsTensorFlowEventsFile(os.path.join(subdir, name))
      ]
      file_generators = [generator_from_event_file(p) for p in event_paths]
      generator = itertools.chain.from_iterable(file_generators)
      units.append(InspectionUnit(
          name=subdir,
          generator=generator,
          field_to_obs=get_field_to_observations_map(generator, tag)))
    if units:
      print('Found event files in:\n{}\n'.format('\n'.join(
          [unit.name for unit in units])))
    elif event_accumulator.IsTensorFlowEventsFile(logdir):
      # The user likely passed an event file where a logdir was expected.
      print(
          'It seems that {} may be an event file instead of a logdir. If this '
          'is the case, use --event_file instead of --logdir to pass '
          'it in.'.format(logdir))
    else:
      print('No event files found within logdir {}'.format(logdir))
    return units
  elif event_file:
    generator = generator_from_event_file(event_file)
    return [InspectionUnit(
        name=event_file,
        generator=generator,
        field_to_obs=get_field_to_observations_map(generator, tag))]
Example #24
Source File: dumping_wrapper.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License 4 votes vote down vote up
def __init__(self,
               sess,
               session_root,
               watch_fn=None,
               thread_name_filter=None,
               pass_through_operrors=None,
               log_usage=True):
    """Constructor of DumpingDebugWrapperSession.

    Args:
      sess: The TensorFlow `Session` object being wrapped.
      session_root: (`str`) Path to the session root directory. Must be a
        directory that does not exist or an empty directory. If the directory
        does not exist, it will be created by the debugger core during debug
        @{tf.Session.run}
        calls.
        As the `run()` calls occur, subdirectories will be added to
        `session_root`. The subdirectories' names has the following pattern:
          run_<epoch_time_stamp>_<zero_based_run_counter>
        E.g., run_1480734393835964_ad4c953a85444900ae79fc1b652fb324
      watch_fn: (`Callable`) A Callable that can be used to define per-run
        debug ops and watched tensors. See the doc of
        `NonInteractiveDebugWrapperSession.__init__()` for details.
      thread_name_filter: Regular-expression white list for threads on which the
        wrapper session will be active. See doc of `BaseDebugWrapperSession` for
        more details.
      pass_through_operrors: If true, all captured OpErrors will be
        propagated. By default this captures all OpErrors.
      log_usage: (`bool`) whether the usage of this class is to be logged.

    Raises:
       ValueError: If `session_root` is an existing and non-empty directory or
       if `session_root` is a file.
    """

    if log_usage:
      pass  # No logging for open-source.

    # Delegate the common non-interactive wrapper setup (session, watch_fn,
    # thread filtering and OpError handling) to the base class before
    # validating session_root.
    framework.NonInteractiveDebugWrapperSession.__init__(
        self, sess, watch_fn=watch_fn, thread_name_filter=thread_name_filter,
        pass_through_operrors=pass_through_operrors)

    # session_root must be either a nonexistent path (created eagerly below,
    # unlike other variants of this constructor) or an empty directory.
    if gfile.Exists(session_root):
      if not gfile.IsDirectory(session_root):
        raise ValueError(
            "session_root path points to a file: %s" % session_root)
      elif gfile.ListDirectory(session_root):
        raise ValueError(
            "session_root path points to a non-empty directory: %s" %
            session_root)
    else:
      gfile.MakeDirs(session_root)
    self._session_root = session_root

    # Run counter and its lock back the per-run dump-directory naming
    # (run_<timestamp>_<zero_based_run_counter>).
    self._run_counter = 0
    self._run_counter_lock = threading.Lock() 
Example #25
Source File: event_file_inspector.py    From auto-alt-text-lambda-api with MIT License 4 votes vote down vote up
def get_inspection_units(logdir='', event_file='', tag=''):
  """Returns a list of InspectionUnit objects given either logdir or event_file.

  If logdir is given, one InspectionUnit is built per directory or
  subdirectory that contains event files. If event_file is given instead,
  exactly one InspectionUnit is returned.

  Args:
    logdir: A log directory that contains event files.
    event_file: Or, a particular event file path.
    tag: An optional tag name to query for.

  Returns:
    A list of InspectionUnit objects.
  """
  if logdir:
    units = []
    for subdir in event_multiplexer.GetLogdirSubdirectories(logdir):
      event_paths = [
          os.path.join(subdir, name)
          for name in gfile.ListDirectory(subdir)
          if event_accumulator.IsTensorFlowEventsFile(os.path.join(subdir, name))
      ]
      file_generators = [generator_from_event_file(p) for p in event_paths]
      generator = itertools.chain.from_iterable(file_generators)
      units.append(InspectionUnit(
          name=subdir,
          generator=generator,
          field_to_obs=get_field_to_observations_map(generator, tag)))
    if units:
      print('Found event files in:\n{}\n'.format('\n'.join(
          [unit.name for unit in units])))
    elif event_accumulator.IsTensorFlowEventsFile(logdir):
      # The user likely passed an event file where a logdir was expected.
      print(
          'It seems that {} may be an event file instead of a logdir. If this '
          'is the case, use --event_file instead of --logdir to pass '
          'it in.'.format(logdir))
    else:
      print('No event files found within logdir {}'.format(logdir))
    return units
  elif event_file:
    generator = generator_from_event_file(event_file)
    return [InspectionUnit(
        name=event_file,
        generator=generator,
        field_to_obs=get_field_to_observations_map(generator, tag))]
Example #26
Source File: event_file_inspector.py    From keras-lambda with MIT License 4 votes vote down vote up
def get_inspection_units(logdir='', event_file='', tag=''):
  """Returns a list of InspectionUnit objects given either logdir or event_file.

  If logdir is given, one InspectionUnit is built per directory or
  subdirectory that contains event files. If event_file is given instead,
  exactly one InspectionUnit is returned.

  Args:
    logdir: A log directory that contains event files.
    event_file: Or, a particular event file path.
    tag: An optional tag name to query for.

  Returns:
    A list of InspectionUnit objects.
  """
  if logdir:
    units = []
    for subdir in event_multiplexer.GetLogdirSubdirectories(logdir):
      event_paths = [
          os.path.join(subdir, name)
          for name in gfile.ListDirectory(subdir)
          if event_accumulator.IsTensorFlowEventsFile(os.path.join(subdir, name))
      ]
      file_generators = [generator_from_event_file(p) for p in event_paths]
      generator = itertools.chain.from_iterable(file_generators)
      units.append(InspectionUnit(
          name=subdir,
          generator=generator,
          field_to_obs=get_field_to_observations_map(generator, tag)))
    if units:
      print('Found event files in:\n{}\n'.format('\n'.join(
          [unit.name for unit in units])))
    elif event_accumulator.IsTensorFlowEventsFile(logdir):
      # The user likely passed an event file where a logdir was expected.
      print(
          'It seems that {} may be an event file instead of a logdir. If this '
          'is the case, use --event_file instead of --logdir to pass '
          'it in.'.format(logdir))
    else:
      print('No event files found within logdir {}'.format(logdir))
    return units
  elif event_file:
    generator = generator_from_event_file(event_file)
    return [InspectionUnit(
        name=event_file,
        generator=generator,
        field_to_obs=get_field_to_observations_map(generator, tag))]
Example #27
Source File: event_file_inspector.py    From lambda-packs with MIT License 4 votes vote down vote up
def get_inspection_units(logdir='', event_file='', tag=''):
  """Returns a list of InspectionUnit objects given either logdir or event_file.

  If logdir is given, one InspectionUnit is built per directory or
  subdirectory that contains event files. If event_file is given instead,
  exactly one InspectionUnit is returned.

  Args:
    logdir: A log directory that contains event files.
    event_file: Or, a particular event file path.
    tag: An optional tag name to query for.

  Returns:
    A list of InspectionUnit objects.
  """
  if logdir:
    units = []
    for subdir in event_multiplexer.GetLogdirSubdirectories(logdir):
      event_paths = [
          os.path.join(subdir, name)
          for name in gfile.ListDirectory(subdir)
          if event_accumulator.IsTensorFlowEventsFile(os.path.join(subdir, name))
      ]
      file_generators = [generator_from_event_file(p) for p in event_paths]
      generator = itertools.chain.from_iterable(file_generators)
      units.append(InspectionUnit(
          name=subdir,
          generator=generator,
          field_to_obs=get_field_to_observations_map(generator, tag)))
    if units:
      print('Found event files in:\n{}\n'.format('\n'.join(
          [unit.name for unit in units])))
    elif event_accumulator.IsTensorFlowEventsFile(logdir):
      # The user likely passed an event file where a logdir was expected.
      print(
          'It seems that {} may be an event file instead of a logdir. If this '
          'is the case, use --event_file instead of --logdir to pass '
          'it in.'.format(logdir))
    else:
      print('No event files found within logdir {}'.format(logdir))
    return units
  elif event_file:
    generator = generator_from_event_file(event_file)
    return [InspectionUnit(
        name=event_file,
        generator=generator,
        field_to_obs=get_field_to_observations_map(generator, tag))]
Example #28
Source File: dumping_wrapper.py    From lambda-packs with MIT License 4 votes vote down vote up
def __init__(self,
               sess,
               session_root,
               watch_fn=None,
               thread_name_filter=None,
               log_usage=True):
    """Constructor of DumpingDebugWrapperSession.

    Args:
      sess: The TensorFlow `Session` object being wrapped.
      session_root: (`str`) Path to the session root directory. Must be a
        directory that does not exist or an empty directory. If the directory
        does not exist, it will be created by the debugger core during debug
        @{tf.Session.run}
        calls.
        As the `run()` calls occur, subdirectories will be added to
        `session_root`. The subdirectories' names has the following pattern:
          run_<epoch_time_stamp>_<zero_based_run_counter>
        E.g., run_1480734393835964_ad4c953a85444900ae79fc1b652fb324
      watch_fn: (`Callable`) A Callable that can be used to define per-run
        debug ops and watched tensors. See the doc of
        `NonInteractiveDebugWrapperSession.__init__()` for details.
      thread_name_filter: Regular-expression white list for threads on which the
        wrapper session will be active. See doc of `BaseDebugWrapperSession` for
        more details.
      log_usage: (`bool`) whether the usage of this class is to be logged.

    Raises:
       ValueError: If `session_root` is an existing and non-empty directory or
       if `session_root` is a file.
    """

    if log_usage:
      pass  # No logging for open-source.

    # Delegate the common non-interactive wrapper setup (session, watch_fn,
    # thread filtering) to the base class before validating session_root.
    framework.NonInteractiveDebugWrapperSession.__init__(
        self, sess, watch_fn=watch_fn, thread_name_filter=thread_name_filter)

    # session_root must be either a nonexistent path or an empty directory;
    # if it does not exist it is not created here (per the docstring, the
    # debugger core creates it lazily during run() calls).
    if gfile.Exists(session_root):
      if not gfile.IsDirectory(session_root):
        raise ValueError(
            "session_root path points to a file: %s" % session_root)
      elif gfile.ListDirectory(session_root):
        raise ValueError(
            "session_root path points to a non-empty directory: %s" %
            session_root)
    self._session_root = session_root

    # Run counter and its lock back the per-run dump-directory naming
    # (run_<timestamp>_<zero_based_run_counter>).
    self._run_counter = 0
    self._run_counter_lock = threading.Lock() 