Python tensorflow.python.platform.gfile.MakeDirs() Examples

The following are 30 code examples of tensorflow.python.platform.gfile.MakeDirs(), drawn from open-source projects. You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module tensorflow.python.platform.gfile, or try the search function.
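gfile.MakeDirs(path) creates path together with any missing parent directories, and in recent TensorFlow releases it succeeds without raising if the directory already exists (unlike gfile.MkDir, which creates a single level). Below is a minimal sketch of the guard-then-create pattern that recurs throughout the examples; the path is a placeholder:

import os

from tensorflow.python.platform import gfile

log_dir = os.path.join('/tmp', 'my_experiment', 'logs')  # placeholder path
if not gfile.Exists(log_dir):  # optional guard; MakeDirs tolerates existing dirs
  gfile.MakeDirs(log_dir)      # creates intermediate directories as needed
assert gfile.IsDirectory(log_dir)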
Example #1
Source File: evaluation_test.py    From deep_image_model with Apache License 2.0
def testFinalOpsOnEvaluationLoop(self):
    value_op, update_op = slim.metrics.streaming_accuracy(
        self._predictions, self._labels)
    init_op = tf.group(tf.global_variables_initializer(),
                       tf.local_variables_initializer())
    # Create checkpoint and log directories.
    chkpt_dir = os.path.join(self.get_temp_dir(), 'tmp_logs/')
    gfile.MakeDirs(chkpt_dir)
    logdir = os.path.join(self.get_temp_dir(), 'tmp_logs2/')
    gfile.MakeDirs(logdir)

    # Save initialized variables to checkpoint directory
    saver = tf.train.Saver()
    with self.test_session() as sess:
      init_op.run()
      saver.save(sess, os.path.join(chkpt_dir, 'chkpt'))

    # Now, run the evaluation loop:
    accuracy_value = slim.evaluation.evaluation_loop(
        '', chkpt_dir, logdir, eval_op=update_op, final_op=value_op,
        max_number_of_evaluations=1)
    self.assertAlmostEqual(accuracy_value, self._expected_accuracy) 
Example #2
Source File: gc_test.py    From deep_image_model with Apache License 2.0
def testPathsWithParse(self):
    base_dir = os.path.join(tf.test.get_temp_dir(), "paths_parse")
    self.assertFalse(gfile.Exists(base_dir))
    for p in xrange(3):
      gfile.MakeDirs(os.path.join(base_dir, "%d" % p))
    # add a base_directory to ignore
    gfile.MakeDirs(os.path.join(base_dir, "ignore"))

    # create a simple parser that pulls the export_version from the directory.
    def parser(path):
      match = re.match("^" + base_dir + "/(\\d+)$", path.path)
      if not match:
        return None
      return path._replace(export_version=int(match.group(1)))

    self.assertEquals(
        gc.get_paths(base_dir, parser=parser),
        [gc.Path(os.path.join(base_dir, "0"), 0),
         gc.Path(os.path.join(base_dir, "1"), 1),
         gc.Path(os.path.join(base_dir, "2"), 2)]) 
Example #3
Source File: base.py    From keras-lambda with MIT License
def maybe_download(filename, work_directory, source_url):
  """Download the data from source url, unless it's already here.

  Args:
      filename: string, name of the file in the directory.
      work_directory: string, path to working directory.
      source_url: url to download from if file doesn't exist.

  Returns:
      Path to resulting file.
  """
  if not gfile.Exists(work_directory):
    gfile.MakeDirs(work_directory)
  filepath = os.path.join(work_directory, filename)
  if not gfile.Exists(filepath):
    temp_file_name, _ = urlretrieve_with_retry(source_url)
    gfile.Copy(temp_file_name, filepath)
    with gfile.GFile(filepath) as f:
      size = f.size()
    print('Successfully downloaded', filename, size, 'bytes.')
  return filepath 
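A hypothetical call to the helper above; the filename, directory, and URL are placeholders rather than values from the original project:

data_file = maybe_download('train-images-idx3-ubyte.gz', '/tmp/mnist_data',
                           'https://example.com/mnist/train-images-idx3-ubyte.gz')
print('Data available at', data_file)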
Example #4
Source File: task.py    From solutions-vision-search with Apache License 2.0
def maybe_download_and_extract(filename, data_dir, source_url):
  """Maybe download and extract a file."""
  if not gfile.Exists(data_dir):
    gfile.MakeDirs(data_dir)

  filepath = os.path.join(data_dir, filename)

  if not gfile.Exists(filepath):
    print('Downloading from {}'.format(source_url))
    temp_file_name, _ = urllib.request.urlretrieve(source_url)
    gfile.Copy(temp_file_name, filepath)
    with gfile.GFile(filepath) as f:
      size = f.size()
    print('Successfully downloaded \'{}\' of {} bytes'.format(filename, size))

  if filename.endswith('.zip'):
    print('Extracting {}'.format(filename))
    zipfile.ZipFile(file=filepath, mode='r').extractall(data_dir) 
Example #5
Source File: base.py    From lambda-packs with MIT License
def maybe_download(filename, work_directory, source_url):
  """Download the data from source url, unless it's already here.

  Args:
      filename: string, name of the file in the directory.
      work_directory: string, path to working directory.
      source_url: url to download from if file doesn't exist.

  Returns:
      Path to resulting file.
  """
  if not gfile.Exists(work_directory):
    gfile.MakeDirs(work_directory)
  filepath = os.path.join(work_directory, filename)
  if not gfile.Exists(filepath):
    temp_file_name, _ = urlretrieve_with_retry(source_url)
    gfile.Copy(temp_file_name, filepath)
    with gfile.GFile(filepath) as f:
      size = f.size()
    print('Successfully downloaded', filename, size, 'bytes.')
  return filepath 
Example #6
Source File: graph_assets.py    From tensorlang with Apache License 2.0
def maybe_download(filepath, source_url):
  """Download the data from source url, unless it's already here.
  Args:
      basename: string, name of the file in the directory.
      target_dir: string, path to working directory.
      source_url: url to download from if file doesn't exist.
  Returns:
      Path to resulting file.
  """
  target_dir = path.dirname(filepath)
  if not gfile.Exists(target_dir):
    gfile.MakeDirs(target_dir)
  if not gfile.Exists(filepath):
    print('Downloading', source_url, 'to', filepath)
    temp_file_name, _ = _urlretrieve_with_retry(source_url)
    gfile.Copy(temp_file_name, filepath)
    with gfile.GFile(filepath) as f:
      size = f.size()
    print('Successfully downloaded', filepath, size, 'bytes.')
  return filepath 
Example #7
Source File: base.py    From auto-alt-text-lambda-api with MIT License
def maybe_download(filename, work_directory, source_url):
  """Download the data from source url, unless it's already here.

  Args:
      filename: string, name of the file in the directory.
      work_directory: string, path to working directory.
      source_url: url to download from if file doesn't exist.

  Returns:
      Path to resulting file.
  """
  if not gfile.Exists(work_directory):
    gfile.MakeDirs(work_directory)
  filepath = os.path.join(work_directory, filename)
  if not gfile.Exists(filepath):
    temp_file_name, _ = urlretrieve_with_retry(source_url)
    gfile.Copy(temp_file_name, filepath)
    with gfile.GFile(filepath) as f:
      size = f.size()
    print('Successfully downloaded', filename, size, 'bytes.')
  return filepath 
Example #8
Source File: evaluation_test.py    From tf-slim with Apache License 2.0
def testReturnsSingleCheckpointIfOneCheckpointFound(self):
    checkpoint_dir = tempfile.mkdtemp('one_checkpoint_found')
    if not gfile.Exists(checkpoint_dir):
      gfile.MakeDirs(checkpoint_dir)

    global_step = variables.get_or_create_global_step()
    saver = saver_lib.Saver()  # Saves the global step.

    with self.cached_session() as session:
      session.run(variables_lib.global_variables_initializer())
      save_path = os.path.join(checkpoint_dir, 'model.ckpt')
      saver.save(session, save_path, global_step=global_step)

    num_found = 0
    for _ in evaluation.checkpoints_iterator(checkpoint_dir, timeout=0):
      num_found += 1
    self.assertEqual(num_found, 1) 
Example #9
Source File: profile_context.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _maybe_dump(self):
    if not (self._step in self._dump_steps or self._dump_next_step):
      return
    if not gfile.Exists(self._profiler_dir):
      gfile.MakeDirs(self._profiler_dir)
    print_mdl.WriteProfile(
        os.path.join(compat.as_bytes(self._profiler_dir),
                     compat.as_bytes('profile_%d' % self._step))) 
Example #10
Source File: trainer_lib.py    From object_detection_with_tensorflow with MIT License
def get_summary_writer(tensorboard_dir):
  """Creates a directory for writing summaries and returns a writer."""
  tf.logging.info('TensorBoard directory: %s', tensorboard_dir)
  tf.logging.info('Deleting prior data if exists...')
  try:
    gfile.DeleteRecursively(tensorboard_dir)
  except errors.OpError as err:
    tf.logging.error('Directory did not exist? Error: %s', err)
  tf.logging.info('Deleted! Creating the directory again...')
  gfile.MakeDirs(tensorboard_dir)
  tf.logging.info('Created! Instantiating SummaryWriter...')
  summary_writer = tf.summary.FileWriter(tensorboard_dir)
  return summary_writer 
Example #11
Source File: trainer_lib.py    From g-tensorflow-models with Apache License 2.0
def get_summary_writer(tensorboard_dir):
  """Creates a directory for writing summaries and returns a writer."""
  tf.logging.info('TensorBoard directory: %s', tensorboard_dir)
  tf.logging.info('Deleting prior data if exists...')
  try:
    gfile.DeleteRecursively(tensorboard_dir)
  except errors.OpError as err:
    tf.logging.error('Directory did not exist? Error: %s', err)
  tf.logging.info('Deleted! Creating the directory again...')
  gfile.MakeDirs(tensorboard_dir)
  tf.logging.info('Created! Instantiating SummaryWriter...')
  summary_writer = tf.summary.FileWriter(tensorboard_dir)
  return summary_writer 
Example #12
Source File: trainer_lib.py    From object_detection_kitti with Apache License 2.0
def get_summary_writer(tensorboard_dir):
  """Creates a directory for writing summaries and returns a writer."""
  tf.logging.info('TensorBoard directory: %s', tensorboard_dir)
  tf.logging.info('Deleting prior data if exists...')
  try:
    gfile.DeleteRecursively(tensorboard_dir)
  except errors.OpError as err:
    tf.logging.error('Directory did not exist? Error: %s', err)
  tf.logging.info('Deleted! Creating the directory again...')
  gfile.MakeDirs(tensorboard_dir)
  tf.logging.info('Created! Instantiating SummaryWriter...')
  summary_writer = tf.summary.FileWriter(tensorboard_dir)
  return summary_writer 
Example #13
Source File: trainer_lib.py    From hands-detection with MIT License
def get_summary_writer(tensorboard_dir):
  """Creates a directory for writing summaries and returns a writer."""
  tf.logging.info('TensorBoard directory: %s', tensorboard_dir)
  tf.logging.info('Deleting prior data if exists...')
  try:
    gfile.DeleteRecursively(tensorboard_dir)
  except errors.OpError as err:
    tf.logging.error('Directory did not exist? Error: %s', err)
  tf.logging.info('Deleted! Creating the directory again...')
  gfile.MakeDirs(tensorboard_dir)
  tf.logging.info('Created! Instantiating SummaryWriter...')
  summary_writer = tf.summary.FileWriter(tensorboard_dir)
  return summary_writer 
Example #14
Source File: cifar10_train.py    From TensorFlow-Playground with MIT License
def main(argv=None):  # pylint: disable=unused-argument
    cifar10.maybe_download_and_extract()
    if not gfile.Exists(FLAGS.train_dir):
        # gfile.DeleteRecursively(FLAGS.train_dir)
        gfile.MakeDirs(FLAGS.train_dir)
    train() 
Example #15
Source File: exporter.py    From deep_image_model with Apache License 2.0
def gfile_copy_callback(files_to_copy, export_dir_path):
  """Callback to copy files using `gfile.Copy` to an export directory.

  This method is used as the default `assets_callback` in `Exporter.init` to
  copy assets from the `assets_collection`. It can also be invoked directly to
  copy additional supplementary files into the export directory (in which case
  it is not a callback).

  Args:
    files_to_copy: A dictionary that maps original file paths to desired
      basename in the export directory.
    export_dir_path: Directory to copy the files to.
  """
  logging.info("Write assets into: %s using gfile_copy.", export_dir_path)
  gfile.MakeDirs(export_dir_path)
  for source_filepath, basename in files_to_copy.items():
    new_path = os.path.join(
        compat.as_bytes(export_dir_path), compat.as_bytes(basename))
    logging.info("Copying asset %s to path %s.", source_filepath, new_path)

    if gfile.Exists(new_path):
      # Guard against being restarted while copying assets, and the file
      # existing and being in an unknown state.
      # TODO(b/28676216): Do some file checks before deleting.
      logging.info("Removing file %s.", new_path)
      gfile.Remove(new_path)
    gfile.Copy(source_filepath, new_path) 
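A hypothetical direct invocation of the callback above, copying one extra vocabulary file into an export directory; both paths are placeholders:

gfile_copy_callback({'/tmp/assets/vocab.txt': 'vocab.txt'},
                    '/tmp/export/assets')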
Example #16
Source File: event_file_writer.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def __init__(self, logdir, max_queue=10, flush_secs=120,
               filename_suffix=None):
    """Creates a `EventFileWriter` and an event file to write to.

    On construction the summary writer creates a new event file in `logdir`.
    This event file will contain `Event` protocol buffers, which are written to
    disk via the add_event method.

    The other arguments to the constructor control the asynchronous writes to
    the event file:

    *  `flush_secs`: How often, in seconds, to flush the added summaries
       and events to disk.
    *  `max_queue`: Maximum number of summaries or events pending to be
       written to disk before one of the 'add' calls blocks.

    Args:
      logdir: A string. Directory where event file will be written.
      max_queue: Integer. Size of the queue for pending events and summaries.
      flush_secs: Number. How often, in seconds, to flush the
        pending events and summaries to disk.
      filename_suffix: A string. Every event file's name is suffixed with
        `filename_suffix`.
    """
    self._logdir = logdir
    if not gfile.IsDirectory(self._logdir):
      gfile.MakeDirs(self._logdir)
    self._event_queue = six.moves.queue.Queue(max_queue)
    self._ev_writer = pywrap_tensorflow.EventsWriter(
        compat.as_bytes(os.path.join(self._logdir, "events")))
    self._flush_secs = flush_secs
    self._sentinel_event = self._get_sentinel_event()
    if filename_suffix:
      self._ev_writer.InitWithSuffix(compat.as_bytes(filename_suffix))
    self._closed = False
    self._worker = _EventLoggerThread(self._event_queue, self._ev_writer,
                                      self._flush_secs, self._sentinel_event)

    self._worker.start() 
Example #17
Source File: evaluation_test.py    From deep_image_model with Apache License 2.0
def testEvaluationLoopTimeout(self):
    _, update_op = slim.metrics.streaming_accuracy(
        self._predictions, self._labels)
    init_op = tf.group(tf.global_variables_initializer(),
                       tf.local_variables_initializer())

    # Create checkpoint and log directories.
    chkpt_dir = os.path.join(self.get_temp_dir(), 'tmp_logs/')
    gfile.MakeDirs(chkpt_dir)
    logdir = os.path.join(self.get_temp_dir(), 'tmp_logs2/')
    gfile.MakeDirs(logdir)

    # Save initialized variables to checkpoint directory.
    saver = tf.train.Saver()
    with self.test_session() as sess:
      init_op.run()
      saver.save(sess, os.path.join(chkpt_dir, 'chkpt'))

    # Run the evaluation loop with a timeout.
    with self.test_session() as sess:
      start = time.time()
      slim.evaluation.evaluation_loop(
          '', chkpt_dir, logdir, eval_op=update_op,
          eval_interval_secs=2.0, timeout=6.0)
      end = time.time()
      # Check we've waited for the timeout.
      self.assertGreater(end - start, 6.0)
      # The timeout then kicked in and stopped the loop.
      self.assertLess(end - start, 7.5) 
Example #18
Source File: cifar10_eval.py    From TensorFlow-Playground with MIT License
def main(argv=None):  # pylint: disable=unused-argument
    cifar10.maybe_download_and_extract()
    if gfile.Exists(FLAGS.eval_dir):
        gfile.DeleteRecursively(FLAGS.eval_dir)
    gfile.MakeDirs(FLAGS.eval_dir)
    evaluate() 
Example #19
Source File: session_manager_test.py    From deep_image_model with Apache License 2.0
def testRecoverSession(self):
    # Create a checkpoint.
    checkpoint_dir = os.path.join(self.get_temp_dir(), "recover_session")
    try:
      gfile.DeleteRecursively(checkpoint_dir)
    except errors.OpError:
      pass                      # Ignore
    gfile.MakeDirs(checkpoint_dir)

    with tf.Graph().as_default():
      v = tf.Variable(1, name="v")
      sm = tf.train.SessionManager(ready_op=tf.report_uninitialized_variables())
      saver = tf.train.Saver({"v": v})
      sess, initialized = sm.recover_session("", saver=saver,
                                             checkpoint_dir=checkpoint_dir)
      self.assertFalse(initialized)
      sess.run(v.initializer)
      self.assertEquals(1, sess.run(v))
      saver.save(sess, os.path.join(checkpoint_dir,
                                    "recover_session_checkpoint"))
    # Create a new Graph and SessionManager and recover.
    with tf.Graph().as_default():
      v = tf.Variable(2, name="v")
      with self.test_session():
        self.assertEqual(False, tf.is_variable_initialized(v).eval())
      sm2 = tf.train.SessionManager(
          ready_op=tf.report_uninitialized_variables())
      saver = tf.train.Saver({"v": v})
      sess, initialized = sm2.recover_session("", saver=saver,
                                              checkpoint_dir=checkpoint_dir)
      self.assertTrue(initialized)
      self.assertEqual(
          True, tf.is_variable_initialized(
              sess.graph.get_tensor_by_name("v:0")).eval(session=sess))
      self.assertEquals(1, sess.run(v)) 
Example #20
Source File: trainer_lib.py    From HumanRecognition with MIT License
def get_summary_writer(tensorboard_dir):
  """Creates a directory for writing summaries and returns a writer."""
  tf.logging.info('TensorBoard directory: %s', tensorboard_dir)
  tf.logging.info('Deleting prior data if exists...')
  try:
    gfile.DeleteRecursively(tensorboard_dir)
  except errors.OpError as err:
    tf.logging.error('Directory did not exist? Error: %s', err)
  tf.logging.info('Deleted! Creating the directory again...')
  gfile.MakeDirs(tensorboard_dir)
  tf.logging.info('Created! Instantiating SummaryWriter...')
  summary_writer = tf.summary.FileWriter(tensorboard_dir)
  return summary_writer 
Example #21
Source File: cifar10_multi_gpu_train.py    From TensorFlow-Playground with MIT License
def main(argv=None):  # pylint: disable=unused-argument
  cifar10.maybe_download_and_extract()
  if gfile.Exists(FLAGS.train_dir):
    gfile.DeleteRecursively(FLAGS.train_dir)
  gfile.MakeDirs(FLAGS.train_dir)
  train() 
Example #22
Source File: meta_graph_test.py    From deep_image_model with Apache License 2.0
def _TestDir(test_name):
  test_dir = os.path.join(tf.test.get_temp_dir(), test_name)
  if os.path.exists(test_dir):
    shutil.rmtree(test_dir)
  gfile.MakeDirs(test_dir)
  return test_dir
# pylint: enable=invalid-name 
Example #23
Source File: writer.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _write_plugin_assets(self, graph):
    plugin_assets = plugin_asset.get_all_plugin_assets(graph)
    logdir = self.event_writer.get_logdir()
    for asset_container in plugin_assets:
      plugin_name = asset_container.plugin_name
      plugin_dir = os.path.join(logdir, _PLUGINS_DIR, plugin_name)
      gfile.MakeDirs(plugin_dir)
      assets = asset_container.assets()
      for (asset_name, content) in assets.items():
        asset_path = os.path.join(plugin_dir, asset_name)
        with gfile.Open(asset_path, "w") as f:
          f.write(content) 
Example #24
Source File: event_file_writer.py    From deep_image_model with Apache License 2.0
def __init__(self, logdir, max_queue=10, flush_secs=120):
    """Creates a `EventFileWriter` and an event file to write to.

    On construction the summary writer creates a new event file in `logdir`.
    This event file will contain `Event` protocol buffers, which are written to
    disk via the add_event method.

    The other arguments to the constructor control the asynchronous writes to
    the event file:

    *  `flush_secs`: How often, in seconds, to flush the added summaries
       and events to disk.
    *  `max_queue`: Maximum number of summaries or events pending to be
       written to disk before one of the 'add' calls blocks.

    Args:
      logdir: A string. Directory where event file will be written.
      max_queue: Integer. Size of the queue for pending events and summaries.
      flush_secs: Number. How often, in seconds, to flush the
        pending events and summaries to disk.
    """
    self._logdir = logdir
    if not gfile.IsDirectory(self._logdir):
      gfile.MakeDirs(self._logdir)
    self._event_queue = six.moves.queue.Queue(max_queue)
    self._ev_writer = pywrap_tensorflow.EventsWriter(
        compat.as_bytes(os.path.join(self._logdir, "events")))
    self._closed = False
    self._worker = _EventLoggerThread(self._event_queue, self._ev_writer,
                                      flush_secs)

    self._worker.start() 
Example #25
Source File: saver_test.py    From deep_image_model with Apache License 2.0
def setUp(self):
    self._base_dir = os.path.join(self.get_temp_dir(), "saver_utils_test")
    gfile.MakeDirs(self._base_dir) 
Example #26
Source File: saver_test.py    From deep_image_model with Apache License 2.0
def _TestDir(test_name):
  test_dir = os.path.join(tf.test.get_temp_dir(), test_name)
  if os.path.exists(test_dir):
    shutil.rmtree(test_dir)
  gfile.MakeDirs(test_dir)
  return test_dir
# pylint: enable=invalid-name 
Example #27
Source File: session_manager_test.py    From deep_image_model with Apache License 2.0
def testRecoverSession(self):
    # Create a checkpoint.
    checkpoint_dir = os.path.join(self.get_temp_dir(), "recover_session")
    try:
      gfile.DeleteRecursively(checkpoint_dir)
    except errors.OpError:
      pass                      # Ignore
    gfile.MakeDirs(checkpoint_dir)

    with tf.Graph().as_default():
      v = tf.Variable(1, name="v")
      sm = tf.train.SessionManager(ready_op=tf.assert_variables_initialized())
      saver = tf.train.Saver({"v": v})
      sess, initialized = sm.recover_session("", saver=saver,
                                             checkpoint_dir=checkpoint_dir)
      self.assertFalse(initialized)
      sess.run(v.initializer)
      self.assertEquals(1, sess.run(v))
      saver.save(sess, os.path.join(checkpoint_dir,
                                    "recover_session_checkpoint"))
    # Create a new Graph and SessionManager and recover.
    with tf.Graph().as_default():
      v = tf.Variable(2, name="v")
      with self.test_session():
        self.assertEqual(False, tf.is_variable_initialized(v).eval())
      sm2 = tf.train.SessionManager(ready_op=tf.assert_variables_initialized())
      saver = tf.train.Saver({"v": v})
      sess, initialized = sm2.recover_session("", saver=saver,
                                              checkpoint_dir=checkpoint_dir)
      self.assertTrue(initialized)
      self.assertEqual(
          True, tf.is_variable_initialized(
              sess.graph.get_tensor_by_name("v:0")).eval(session=sess))
      self.assertEquals(1, sess.run(v)) 
Example #28
Source File: mscnn_train.py    From mscnn with GNU General Public License v3.0
def main(argv=None):
    if gfile.Exists(FLAGS.train_log):
        gfile.DeleteRecursively(FLAGS.train_log)
    gfile.MakeDirs(FLAGS.train_log)

    if not gfile.Exists(FLAGS.model_dir):
        gfile.MakeDirs(FLAGS.model_dir)

    if not gfile.Exists(FLAGS.output_dir):
        gfile.MakeDirs(FLAGS.output_dir)

    train() 
Example #29
Source File: mscnn_eval.py    From mscnn with GNU General Public License v3.0
def main(argv=None):
    if gfile.Exists(FLAGS.eval_dir):
        gfile.DeleteRecursively(FLAGS.eval_dir)
    gfile.MakeDirs(FLAGS.eval_dir)

    evaluate() 
Example #30
Source File: trainer_lib.py    From multilabel-image-classification-tensorflow with MIT License
def get_summary_writer(tensorboard_dir):
  """Creates a directory for writing summaries and returns a writer."""
  tf.logging.info('TensorBoard directory: %s', tensorboard_dir)
  tf.logging.info('Deleting prior data if exists...')
  try:
    gfile.DeleteRecursively(tensorboard_dir)
  except errors.OpError as err:
    tf.logging.error('Directory did not exist? Error: %s', err)
  tf.logging.info('Deleted! Creating the directory again...')
  gfile.MakeDirs(tensorboard_dir)
  tf.logging.info('Created! Instantiating SummaryWriter...')
  summary_writer = tf.summary.FileWriter(tensorboard_dir)
  return summary_writer
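Note that tensorflow.python.platform.gfile is an internal module. In TensorFlow 2.x the same file-system API is exposed publicly as tf.io.gfile, where MakeDirs corresponds to tf.io.gfile.makedirs; a minimal equivalent of the pattern above (the path is a placeholder):

import tensorflow as tf

tf.io.gfile.makedirs('/tmp/my_experiment/logs')  # creates parents; ok if dir exists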