Python tensorflow.python.tools.freeze_graph.freeze_graph() Examples

The following are 27 code examples of tensorflow.python.tools.freeze_graph.freeze_graph(), collected from open-source projects. Each example notes its source file, originating project, and license.
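For orientation, here is a minimal sketch of the calling pattern that recurs in the examples below: freezing a TF 1.x checkpoint plus a GraphDef written by tf.train.write_graph into a single .pb file. The file paths and the output node name ("output") are hypothetical placeholders, not taken from any particular example.

from tensorflow.python.tools import freeze_graph

freeze_graph.freeze_graph(
    input_graph="./graph.pbtxt",          # GraphDef saved with tf.train.write_graph
    input_saver="",                       # no separate SaverDef file
    input_binary=False,                   # the GraphDef above is in text format
    input_checkpoint="./model.ckpt",      # checkpoint holding the variable values
    output_node_names="output",           # comma-separated output node names
    restore_op_name="save/restore_all",
    filename_tensor_name="save/Const:0",
    output_graph="./frozen_graph.pb",     # destination for the frozen GraphDef
    clear_devices=True,                   # strip device placements from the graph
    initializer_nodes="")

To freeze directly from a SavedModel instead of a checkpoint, pass input_saved_model_dir and saved_model_tags and leave input_graph and input_checkpoint unset, as in Examples #2 through #4 and #10.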
Example #1
Source File: tensorflow_parser.py    From incubator-tvm with Apache License 2.0
def _load_saved_model(self):
        """Load the tensorflow saved model."""
        try:
            from tensorflow.python.tools import freeze_graph
            from tensorflow.python.framework import ops
            from tensorflow.python.framework import graph_util
            from tensorflow.core.framework import graph_pb2
        except ImportError:
            raise ImportError(
                "InputConfiguration: Unable to import tensorflow which is "
                "required to restore from saved model.")

        saved_model_dir = self._model_dir
        output_graph_filename = self._tmp_dir.relpath("tf_frozen_model.pb")
        input_saved_model_dir = saved_model_dir
        output_node_names = self._get_output_names()

        input_binary = False
        input_saver_def_path = False
        restore_op_name = None
        filename_tensor_name = None
        clear_devices = True
        input_meta_graph = False
        checkpoint_path = None
        input_graph_filename = None
        saved_model_tags = ",".join(self._get_tag_set())

        freeze_graph.freeze_graph(input_graph_filename, input_saver_def_path,
                                  input_binary, checkpoint_path, output_node_names,
                                  restore_op_name, filename_tensor_name,
                                  output_graph_filename, clear_devices, "", "", "",
                                  input_meta_graph, input_saved_model_dir,
                                  saved_model_tags)

        with ops.Graph().as_default():
            output_graph_def = graph_pb2.GraphDef()
            with open(output_graph_filename, "rb") as f:
                output_graph_def.ParseFromString(f.read())
            output_graph_def = graph_util.remove_training_nodes(output_graph_def,
                                                                protected_nodes=self._outputs)
            return output_graph_def 
Example #2
Source File: 02_freeze_the_saved_model_v2.py    From PINTO_model_zoo with MIT License
def freeze_model(saved_model_dir, output_node_names, output_filename):
  output_graph_filename = os.path.join(saved_model_dir, output_filename)
  initializer_nodes = ''
  freeze_graph.freeze_graph(
      input_saved_model_dir=saved_model_dir,
      output_graph=output_graph_filename,
      saved_model_tags = tag_constants.SERVING,
      output_node_names=output_node_names,
      initializer_nodes=initializer_nodes,
      input_graph=None,
      input_saver=False,
      input_binary=False,
      input_checkpoint=None,
      restore_op_name=None,
      filename_tensor_name=None,
      clear_devices=True,
      input_meta_graph=False,
  ) 
Example #3
Source File: 02_freeze_the_saved_model_v1.py    From PINTO_model_zoo with MIT License
def freeze_model(saved_model_dir, output_node_names, output_filename):
  output_graph_filename = os.path.join(saved_model_dir, output_filename)
  initializer_nodes = ''
  freeze_graph.freeze_graph(
      input_saved_model_dir=saved_model_dir,
      output_graph=output_graph_filename,
      saved_model_tags = tag_constants.SERVING,
      output_node_names=output_node_names,
      initializer_nodes=initializer_nodes,
      input_graph=None,
      input_saver=False,
      input_binary=False,
      input_checkpoint=None,
      restore_op_name=None,
      filename_tensor_name=None,
      clear_devices=True,
      input_meta_graph=False,
  ) 
Example #4
Source File: 01_freeze_the_saved_model_v1.py    From PINTO_model_zoo with MIT License
def freeze_model(saved_model_dir, output_node_names, output_filename):
  output_graph_filename = os.path.join(saved_model_dir, output_filename)
  initializer_nodes = ''
  freeze_graph.freeze_graph(
      input_saved_model_dir=saved_model_dir,
      output_graph=output_graph_filename,
      saved_model_tags = tag_constants.SERVING,
      output_node_names=output_node_names,
      initializer_nodes=initializer_nodes,
      input_graph=None,
      input_saver=False,
      input_binary=False,
      input_checkpoint=None,
      restore_op_name=None,
      filename_tensor_name=None,
      clear_devices=True,
      input_meta_graph=False,
  ) 
Example #5
Source File: exportPb.py    From R2CNN_Faster-RCNN_Tensorflow with MIT License
def export_frozenPB():

    tf.reset_default_graph()

    dets = build_detection_graph()

    saver = tf.train.Saver()

    with tf.Session() as sess:
        print("we have restred the weights from =====>>\n", CKPT_PATH)
        saver.restore(sess, CKPT_PATH)

        tf.train.write_graph(sess.graph_def, OUT_DIR, PB_NAME)
        freeze_graph.freeze_graph(input_graph=os.path.join(OUT_DIR, PB_NAME),
                                  input_saver='',
                                  input_binary=False,
                                  input_checkpoint=CKPT_PATH,
                                  output_node_names="DetResults",
                                  restore_op_name="save/restore_all",
                                  filename_tensor_name='save/Const:0',
                                  output_graph=os.path.join(OUT_DIR, PB_NAME.replace('.pb', '_Frozen.pb')),
                                  clear_devices=False,
                                  initializer_nodes='') 
Example #6
Source File: models.py    From Unity_ML_Agents with Apache License 2.0
def export_graph(model_path, env_name="env", target_nodes="action,value_estimate,action_probs"):
    """
    Exports latest saved model to .bytes format for Unity embedding.
    :param model_path: path of model checkpoints.
    :param env_name: Name of associated Learning Environment.
    :param target_nodes: Comma separated string of needed output nodes for embedded graph.
    """
    ckpt = tf.train.get_checkpoint_state(model_path)
    freeze_graph.freeze_graph(input_graph=model_path + '/raw_graph_def.pb',
                              input_binary=True,
                              input_checkpoint=ckpt.model_checkpoint_path,
                              output_node_names=target_nodes,
                              output_graph=model_path + '/' + env_name + '.bytes',
                              clear_devices=True, initializer_nodes="", input_saver="",
                              restore_op_name="save/restore_all", filename_tensor_name="save/Const:0") 
Example #7
Source File: tf_utils.py    From fritz-models with MIT License
def freeze_keras_model_graph(model, basename, output_dir):
    """Extract and freeze the tensorflow graph from a Keras model.

    Args:
        model (keras.models.Model): A Keras model.
        basename (str): the basename of the Keras model. E.g. starry_night.h5
        output_dir (str): a directory to output the frozen graph
    
    Returns:
        output_graph_filename (str): a path to the saved frozen graph.
    """
    name, _ = os.path.splitext(basename)

    saver = tf.train.Saver()

    with keras.backend.get_session() as sess:
        checkpoint_filename = os.path.join(output_dir, '%s.ckpt' % name)
        output_graph_filename = os.path.join(output_dir, '%s_frozen.pb' % name)
        saver.save(sess, checkpoint_filename)
        tf.train.write_graph(
            sess.graph_def, output_dir, '%s_graph_def.pbtext' % name
        )

        freeze_graph.freeze_graph(
            input_graph=os.path.join(output_dir, '%s_graph_def.pbtext' % name),
            input_saver='',
            input_binary=False,
            input_checkpoint=checkpoint_filename,
            output_graph=output_graph_filename,
            output_node_names='deprocess_stylized_image_1/mul',
            restore_op_name="save/restore_all",
            filename_tensor_name="save/Const:0",
            clear_devices=True,
            initializer_nodes=None
        )
        logger.info('Saved frozen graph to: %s' % output_graph_filename)
    return output_graph_filename 
Example #8
Source File: convert_to_tfmobile.py    From fritz-models with MIT License
def _freeze_graph(model, basename, output_dir):
    name, _ = os.path.splitext(basename)

    saver = tf.train.Saver()

    with keras.backend.get_session() as sess:
        checkpoint_filename = os.path.join(output_dir, '%s.ckpt' % name)
        output_graph_filename = os.path.join(output_dir, '%s_frozen.pb' % name)
        saver.save(sess, checkpoint_filename)
        tf.train.write_graph(
            sess.graph_def, output_dir, '%s_graph_def.pbtext' % name
        )

        freeze_graph.freeze_graph(
            input_graph=os.path.join(output_dir, '%s_graph_def.pbtext' % name),
            input_saver='',
            input_binary=False,
            input_checkpoint=checkpoint_filename,
            output_graph=output_graph_filename,
            output_node_names='deprocess_stylized_image_1/mul',
            restore_op_name="save/restore_all",
            filename_tensor_name="save/Const:0",
            clear_devices=True,
            initializer_nodes=None
        )
        logger.info('Saved frozen graph to: %s' % output_graph_filename) 
Example #9
Source File: convert_to_tfmobile.py    From fritz-models with MIT License
def _freeze_graph(model, basename, output_dir):
    name, _ = os.path.splitext(basename)

    saver = tf.train.Saver()

    with keras.backend.get_session() as sess:
        checkpoint_filename = os.path.join(output_dir, '%s.ckpt' % name)
        output_graph_filename = os.path.join(output_dir, '%s_frozen.pb' % name)
        saver.save(sess, checkpoint_filename)
        tf.train.write_graph(
            sess.graph_def, output_dir, '%s_graph_def.pbtext' % name
        )

        freeze_graph.freeze_graph(
            input_graph=os.path.join(output_dir, '%s_graph_def.pbtext' % name),
            input_saver='',
            input_binary=False,
            input_checkpoint=checkpoint_filename,
            output_graph=output_graph_filename,
            output_node_names='conv6_interp/ResizeBilinear',
            restore_op_name="save/restore_all",
            filename_tensor_name="save/Const:0",
            clear_devices=True,
            initializer_nodes=None
        )
        logger.info('Saved frozen graph to: %s' % output_graph_filename) 
Example #10
Source File: freeze.py    From mobile-deeplab-v3-plus with MIT License
def freeze_graph_func(model_dir, output_node_names, output_dir):
    """Extract the sub graph defined by the output nodes and convert 
    all its variables into constant 

    Args:
        model_dir: the root folder containing the checkpoint state file
        output_node_names: a string, containing all the output node's names, 
                            comma separated
    """
    if not tf.gfile.Exists(model_dir):
        raise AssertionError(
            "Export directory doesn't exists. Please specify an export "
            "directory: %s" % model_dir)

    if not output_node_names:
        print("You need to supply the name of a node to --output_node_names.")
        return -1

    sub_dirs = [name for name in os.listdir(model_dir)
         if os.path.isdir(os.path.join(model_dir, name))]
    model_dir = os.path.join(model_dir, sub_dirs[0])

    output_graph_filename = os.path.join(output_dir, 'frozen_model.pb')
    initializer_nodes = ''
    freeze_graph(
        input_graph=None,
        input_saver=False,
        input_binary=False,
        input_checkpoint=None,
        output_node_names=output_node_names,
        restore_op_name=None,
        filename_tensor_name=None,
        output_graph=output_graph_filename,
        clear_devices=True,
        initializer_nodes=initializer_nodes,
        input_meta_graph=False,
        input_saved_model_dir=model_dir,
        saved_model_tags=tag_constants.SERVING)
    print('model has been frozen!') 
Example #11
Source File: save_model.py    From Machine-Learning-with-TensorFlow-1.x with MIT License
def save(name, data_input_path):
    def getpardir(path): return osp.split(path)[0]
    sys.path.append(getpardir(getpardir(getpardir(osp.realpath(__file__)))))
    # Import the converted model's class
    caffe_net_module = __import__(name)
    with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess:
        image_input = tf.placeholder(tf.float32, shape=[1, 227, 227, 3], name="data")
        net = caffe_net_module.CaffeNet({'data': image_input})

        # Save protocol buffer
        pb_name = name + '.pb'
        tf.train.write_graph(sess.graph_def, '.', pb_name + 'txt', True)
        tf.train.write_graph(sess.graph_def, '.', pb_name, False)

        if data_input_path is not None:
            # Load the data
            sess.run(tf.global_variables_initializer())
            net.load(data_input_path, sess)
            # Save the data
            saver = saver_lib.Saver(tf.global_variables())
            checkpoint_prefix = osp.join(osp.curdir, name + '.ckpt')
            checkpoint_path = saver.save(sess, checkpoint_prefix)

            # Freeze the graph
            freeze_graph.freeze_graph(pb_name, "",
                                      True, checkpoint_path, 'fc8/fc8',
                                      'save/restore_all', 'save/Const:0',
                                      name + '_frozen.pb', False, "") 
Example #12
Source File: trainer_controller.py    From DQN-using-PyTorch-and-ML-Agents with GNU General Public License v3.0
def _export_graph(self):
        """
        Exports latest saved model to .bytes format for Unity embedding.
        """
        target_nodes = ','.join(self._process_graph())
        ckpt = tf.train.get_checkpoint_state(self.model_path)
        freeze_graph.freeze_graph(input_graph=self.model_path + '/raw_graph_def.pb',
                                  input_binary=True,
                                  input_checkpoint=ckpt.model_checkpoint_path,
                                  output_node_names=target_nodes,
                                  output_graph=self.model_path + '/' + self.env_name + "_" + self.run_id + '.bytes',
                                  clear_devices=True, initializer_nodes="", input_saver="",
                                  restore_op_name="save/restore_all", filename_tensor_name="save/Const:0") 
Example #13
Source File: hangul_model.py    From tensorflow-hangul-recognition with Apache License 2.0
def export_model(model_output_dir, input_node_names, output_node_name):
    """Export the model so we can use it later.

    This will create two Protocol Buffer files in the model output directory.
    These files represent a serialized version of our model with all the
    learned weights and biases. One of the ProtoBuf files is a version
    optimized for inference-only usage.
    """

    name_base = os.path.join(model_output_dir, MODEL_NAME)
    frozen_graph_file = os.path.join(model_output_dir,
                                     'frozen_' + MODEL_NAME + '.pb')
    freeze_graph.freeze_graph(
        name_base + '.pbtxt', None, False, name_base + '.chkp',
        output_node_name, "save/restore_all", "save/Const:0",
        frozen_graph_file, True, ""
    )

    input_graph_def = tf.GraphDef()
    with tf.gfile.Open(frozen_graph_file, "rb") as f:
        input_graph_def.ParseFromString(f.read())

    output_graph_def = optimize_for_inference_lib.optimize_for_inference(
            input_graph_def, input_node_names, [output_node_name],
            tf.float32.as_datatype_enum)

    optimized_graph_file = os.path.join(model_output_dir,
                                        'optimized_' + MODEL_NAME + '.pb')
    with tf.gfile.GFile(optimized_graph_file, "wb") as f:
        f.write(output_graph_def.SerializeToString())

    print("Inference optimized graph saved at: " + optimized_graph_file) 
Example #14
Source File: mnist_cnn1.py    From Unity-MNIST with Apache License 2.0
def export_model(saver, model, input_node_names, output_node_name):
    if not path.exists('out'):
        os.mkdir('out')

    tf.train.write_graph(K.get_session().graph_def, 'out', model_name + '_graph.pbtxt')

    saver.save(K.get_session(), 'out/' + model_name + '.chkp')

    freeze_graph.freeze_graph('out/' + model_name + '_graph.pbtxt', None, False,
                              'out/' + model_name + '.chkp', output_node_name,
                              "save/restore_all", "save/Const:0",
                              'out/frozen_' + model_name + '.bytes', True, "")

    input_graph_def = tf.GraphDef()
    with tf.gfile.Open('out/frozen_' + model_name + '.bytes', "rb") as f:
        input_graph_def.ParseFromString(f.read())

    output_graph_def = optimize_for_inference_lib.optimize_for_inference(
            input_graph_def, input_node_names, [output_node_name],
            tf.float32.as_datatype_enum)

    with tf.gfile.FastGFile('out/opt_' + model_name + '.bytes', "wb") as f:
        f.write(output_graph_def.SerializeToString())

    print("graph saved!")

########################################################################################################################
# Main program 
Example #15
Source File: freeze_code.py    From Intelligent-Projects-Using-Python with MIT License
def model_freeze(path,MODEL_NAME='model'):

    # Freeze the graph

    input_graph_path = path + MODEL_NAME+'.pbtxt'
    checkpoint_path = path + 'model_ckpt'
    input_saver_def_path = ""
    input_binary = False
    output_node_names = 'positive_sentiment_probability'
    restore_op_name = "save/restore_all"
    filename_tensor_name = "save/Const:0"
    output_frozen_graph_name = path + 'frozen_'+MODEL_NAME+'.pb'
    output_optimized_graph_name = path + 'optimized_'+MODEL_NAME+'.pb'
    clear_devices = True


    freeze_graph.freeze_graph(input_graph_path, input_saver_def_path,
                            input_binary, checkpoint_path, output_node_names,
                            restore_op_name, filename_tensor_name,
                            output_frozen_graph_name, clear_devices, "")

    input_graph_def = tf.GraphDef()

    with tf.gfile.Open(output_frozen_graph_name, "rb") as f:
        data = f.read()
        input_graph_def.ParseFromString(data)

    output_graph_def = optimize_for_inference_lib.optimize_for_inference(
            input_graph_def,
            ["inputs/X" ],#an array of the input node(s)
            ["positive_sentiment_probability"],
            tf.int32.as_datatype_enum # an array of output nodes
            )

    # Save the optimized graph

    f = tf.gfile.FastGFile(output_optimized_graph_name, "wb")
    f.write(output_graph_def.SerializeToString()) 
Example #16
Source File: trainer_controller.py    From DRL_DeliveryDuel with MIT License
def _export_graph(self):
        """
        Exports latest saved model to .bytes format for Unity embedding.
        """
        target_nodes = ','.join(self._process_graph())
        ckpt = tf.train.get_checkpoint_state(self.model_path)
        freeze_graph.freeze_graph(input_graph=self.model_path + '/raw_graph_def.pb',
                                  input_binary=True,
                                  input_checkpoint=ckpt.model_checkpoint_path,
                                  output_node_names=target_nodes,
                                  output_graph=self.model_path + '/' + self.env_name + "_" + self.run_id + '.bytes',
                                  clear_devices=True, initializer_nodes="", input_saver="",
                                  restore_op_name="save/restore_all", filename_tensor_name="save/Const:0") 
Example #17
Source File: freeze_graph_tool.py    From SketchCNN with MIT License
def convert_model():
    input_checkpoint_path = tf.train.latest_checkpoint(hyper_parameters['ckpt_dir'])
    input_graph_path = os.path.join(hyper_parameters['output_dir'], hyper_parameters['input_graph_name'])
    input_saver_def_path = ""
    input_binary = False
    output_node_names = hyper_parameters[net_type]
    restore_op_name = "save/restore_all"
    filename_tensor_name = "save/Const:0"
    output_graph_path = os.path.join(hyper_parameters['output_dir'], hyper_parameters['output_graph_name'])
    clear_devices = False

    freeze_graph.freeze_graph(input_graph_path, input_saver_def_path, input_binary, input_checkpoint_path,
                              output_node_names, restore_op_name, filename_tensor_name, output_graph_path,
                              clear_devices, initializer_nodes='', variable_names_blacklist='') 
Example #18
Source File: cnn.py    From Frozen_Graph_TensorFlow with MIT License
def save_as_pb(self, directory, filename):

        if not os.path.exists(directory):
            os.makedirs(directory)

        # Save check point for graph frozen later
        ckpt_filepath = self.save(directory=directory, filename=filename)
        pbtxt_filename = filename + '.pbtxt'
        pbtxt_filepath = os.path.join(directory, pbtxt_filename)
        pb_filepath = os.path.join(directory, filename + '.pb')
        # This will only save the graph but the variables will not be saved.
        # You have to freeze your model first.
        tf.train.write_graph(graph_or_graph_def=self.sess.graph_def,
                             logdir=directory,
                             name=pbtxt_filename,
                             as_text=True)

        # Freeze graph
        # Method 1
        freeze_graph.freeze_graph(input_graph=pbtxt_filepath,
                                  input_saver='',
                                  input_binary=False,
                                  input_checkpoint=ckpt_filepath,
                                  output_node_names='cnn/output',
                                  restore_op_name='save/restore_all',
                                  filename_tensor_name='save/Const:0',
                                  output_graph=pb_filepath,
                                  clear_devices=True,
                                  initializer_nodes='')

        # Method 2
        '''
        graph = tf.get_default_graph()
        input_graph_def = graph.as_graph_def()
        output_node_names = ['cnn/output']

        output_graph_def = graph_util.convert_variables_to_constants(self.sess, input_graph_def, output_node_names)

        with tf.gfile.GFile(pb_filepath, 'wb') as f:
            f.write(output_graph_def.SerializeToString())
        '''

        return pb_filepath 
Example #19
Source File: punctuator.py    From keras-punctuator with MIT License
def freeze():
    checkpoint_prefix = os.path.join(TMP_DIR, "saved_checkpoint")
    checkpoint_state_name = "checkpoint_state"
    input_graph_name = "input_graph.pb"
    output_graph_name = "freezed.pb"
    saver_write_version = 1

    # We'll create an input graph that has a single variable containing 1.0,
    # and that then multiplies it by 2.
    from tensorflow.python.framework import ops
    with ops.Graph().as_default():
        from keras import backend as K
        K.set_learning_phase(0)
        model = createModel()
        model.load_weights(KERAS_WEIGHTS_FILE)

        sess = K.get_session()
        from tensorflow.python.framework.graph_util_impl import convert_variables_to_constants
        # convert_variables_to_constants(sess, sess.graph.as_graph_def(), [model.output.name.split(':')[0]])
        testGraph(sess, '')

        from tensorflow.python.training import saver as saver_lib
        saver = saver_lib.Saver(write_version=saver_write_version)
        checkpoint_path = saver.save(
            sess,
            checkpoint_prefix,
            global_step=0,
            latest_filename=checkpoint_state_name)
        from tensorflow.python.framework import graph_io
        graph_io.write_graph(sess.graph, TMP_DIR, input_graph_name)
        sess.close()


    # We save out the graph to disk, and then call the const conversion
    # routine.
    input_graph_path = os.path.join(TMP_DIR, input_graph_name)
    input_saver_def_path = ""
    input_binary = False
    output_node_names = model.output.name.split(':')[0]
    restore_op_name = "save/restore_all"
    filename_tensor_name = "save/Const:0"
    output_graph_path = os.path.join(MODEL_DATA_DIR, output_graph_name)
    clear_devices = False

    from tensorflow.python.tools import freeze_graph
    freeze_graph.freeze_graph(input_graph_path, input_saver_def_path,
                              input_binary, checkpoint_path, output_node_names,
                              restore_op_name, filename_tensor_name,
                              output_graph_path, clear_devices, "")

    exportWordIndex(loadWordIndex()) 
Example #20
Source File: freeze_graph_test.py    From TensorFlow_DCIGN with MIT License
def testFreezeGraph(self):

    checkpoint_prefix = os.path.join(self.get_temp_dir(), "saved_checkpoint")
    checkpoint_state_name = "checkpoint_state"
    input_graph_name = "input_graph.pb"
    output_graph_name = "output_graph.pb"

    # We'll create an input graph that has a single variable containing 1.0,
    # and that then multiplies it by 2.
    with tf.Graph().as_default():
      variable_node = tf.Variable(1.0, name="variable_node")
      output_node = tf.mul(variable_node, 2.0, name="output_node")
      sess = tf.Session()
      init = tf.initialize_all_variables()
      sess.run(init)
      output = sess.run(output_node)
      self.assertNear(2.0, output, 0.00001)
      saver = tf.train.Saver()
      saver.save(sess, checkpoint_prefix, global_step=0,
                 latest_filename=checkpoint_state_name)
      tf.train.write_graph(sess.graph.as_graph_def(), self.get_temp_dir(),
                           input_graph_name)

    # We save out the graph to disk, and then call the const conversion
    # routine.
    input_graph_path = os.path.join(self.get_temp_dir(), input_graph_name)
    input_saver_def_path = ""
    input_binary = False
    input_checkpoint_path = checkpoint_prefix + "-0"
    output_node_names = "output_node"
    restore_op_name = "save/restore_all"
    filename_tensor_name = "save/Const:0"
    output_graph_path = os.path.join(self.get_temp_dir(), output_graph_name)
    clear_devices = False

    freeze_graph.freeze_graph(input_graph_path, input_saver_def_path,
                              input_binary, input_checkpoint_path,
                              output_node_names, restore_op_name,
                              filename_tensor_name, output_graph_path,
                              clear_devices, "")

    # Now we make sure the variable is now a constant, and that the graph still
    # produces the expected result.
    with tf.Graph().as_default():
      output_graph_def = tf.GraphDef()
      with open(output_graph_path, "rb") as f:
        output_graph_def.ParseFromString(f.read())
        _ = tf.import_graph_def(output_graph_def, name="")

      self.assertEqual(4, len(output_graph_def.node))
      for node in output_graph_def.node:
        self.assertNotEqual("Variable", node.op)

      with tf.Session() as sess:
        output_node = sess.graph.get_tensor_by_name("output_node:0")
        output = sess.run(output_node)
        self.assertNear(2.0, output, 0.00001) 
Example #21
Source File: export.py    From DeepLabCut with GNU Lesser General Public License v3.0
def tf_to_pb(sess, checkpoint, output, output_dir=None):
    """

    Saves a frozen tensorflow graph (a protobuf file).
    See also https://leimao.github.io/blog/Save-Load-Inference-From-TF-Frozen-Graph/

    Parameters
    ----------
    sess : tensorflow session
        session with graph to be saved

    checkpoint : string
        checkpoint of tensorflow model to be converted to protobuf (output will be <checkpoint>.pb)

    output : list of strings
        list of the names of output nodes (is returned by load_models)

    output_dir : string, optional
        path to the directory that exported models should be saved to.
        If None, will export to the directory of the checkpoint file.
    """

    output_dir = (
        os.path.expanduser(output_dir) if output_dir else os.path.dirname(checkpoint)
    )
    ckpt_base = os.path.basename(checkpoint)

    # save graph to pbtxt file
    pbtxt_file = os.path.normpath(output_dir + "/" + ckpt_base + ".pbtxt")
    tf.train.write_graph(sess.graph.as_graph_def(), "", pbtxt_file, as_text=True)

    # create frozen graph from pbtxt file
    pb_file = os.path.normpath(output_dir + "/" + ckpt_base + ".pb")

    freeze_graph.freeze_graph(
        input_graph=pbtxt_file,
        input_saver="",
        input_binary=False,
        input_checkpoint=checkpoint,
        output_node_names=",".join(output),
        restore_op_name="save/restore_all",
        filename_tensor_name="save/Const:0",
        output_graph=pb_file,
        clear_devices=True,
        initializer_nodes="",
    ) 
Example #22
Source File: load.py    From coremltools with BSD 3-Clause "New" or "Revised" License
def _from_saved_model(saved_model_dir):
        from tensorflow.python.tools import freeze_graph

        # must import here as tf.contrib is only available on TF 1.x
        from tensorflow.contrib.saved_model.python.saved_model import reader

        saved_model_tags = reader.get_saved_model_tag_sets(saved_model_dir)[0]
        if not saved_model_tags:
            msg = "Unsupported SavedModel directory format: no tag_sets available"
            raise NotImplementedError(msg)

        # get model outputs
        output_node_names = []
        with tf.compat.v1.Session() as sess:
            metagraph = tf.saved_model.loader.load(
                sess, saved_model_tags, saved_model_dir
            )
            for sd in metagraph.signature_def.values():
                output_node_names += [o.name.split(":")[0] for o in sd.outputs.values()]

        # get frozen graph
        output_graph = mktemp()
        tf.compat.v1.reset_default_graph()
        freeze_graph.freeze_graph(
            input_graph=None,
            input_saver=None,
            input_binary=None,
            input_checkpoint=None,
            output_node_names=",".join(output_node_names),
            restore_op_name=None,
            filename_tensor_name=None,
            output_graph=output_graph,
            clear_devices=True,
            initializer_nodes="",
            variable_names_whitelist="",
            variable_names_blacklist="",
            input_meta_graph=None,
            input_saved_model_dir=saved_model_dir,
            saved_model_tags=",".join(saved_model_tags),
        )

        graph_def = tf.compat.v1.GraphDef()
        with open(output_graph, "rb") as f:
            graph_def.ParseFromString(f.read())
        graph_def = tf.compat.v1.graph_util.remove_training_nodes(graph_def)
        with tf.Graph().as_default() as graph:
            tf.graph_util.import_graph_def(graph_def, name="")
        return graph.as_graph_def(add_shapes=True) 
Example #23
Source File: freeze_graph_test.py    From deep_image_model with Apache License 2.0
def _testFreezeGraph(self, saver_write_version):

    checkpoint_prefix = os.path.join(self.get_temp_dir(), "saved_checkpoint")
    checkpoint_state_name = "checkpoint_state"
    input_graph_name = "input_graph.pb"
    output_graph_name = "output_graph.pb"

    # We'll create an input graph that has a single variable containing 1.0,
    # and that then multiplies it by 2.
    with tf.Graph().as_default():
      variable_node = tf.Variable(1.0, name="variable_node")
      output_node = tf.mul(variable_node, 2.0, name="output_node")
      sess = tf.Session()
      init = tf.global_variables_initializer()
      sess.run(init)
      output = sess.run(output_node)
      self.assertNear(2.0, output, 0.00001)
      saver = tf.train.Saver(write_version=saver_write_version)
      checkpoint_path = saver.save(sess, checkpoint_prefix, global_step=0,
                                   latest_filename=checkpoint_state_name)
      tf.train.write_graph(sess.graph, self.get_temp_dir(), input_graph_name)

    # We save out the graph to disk, and then call the const conversion
    # routine.
    input_graph_path = os.path.join(self.get_temp_dir(), input_graph_name)
    input_saver_def_path = ""
    input_binary = False
    output_node_names = "output_node"
    restore_op_name = "save/restore_all"
    filename_tensor_name = "save/Const:0"
    output_graph_path = os.path.join(self.get_temp_dir(), output_graph_name)
    clear_devices = False

    freeze_graph.freeze_graph(input_graph_path, input_saver_def_path,
                              input_binary, checkpoint_path,
                              output_node_names, restore_op_name,
                              filename_tensor_name, output_graph_path,
                              clear_devices, "")

    # Now we make sure the variable is now a constant, and that the graph still
    # produces the expected result.
    with tf.Graph().as_default():
      output_graph_def = tf.GraphDef()
      with open(output_graph_path, "rb") as f:
        output_graph_def.ParseFromString(f.read())
        _ = tf.import_graph_def(output_graph_def, name="")

      self.assertEqual(4, len(output_graph_def.node))
      for node in output_graph_def.node:
        self.assertNotEqual("Variable", node.op)

      with tf.Session() as sess:
        output_node = sess.graph.get_tensor_by_name("output_node:0")
        output = sess.run(output_node)
        self.assertNear(2.0, output, 0.00001) 
Example #24
Source File: model.py    From saliency with MIT License
def optimize(self, sess, dataset, path, device):
        """The best performing model is frozen, optimized for inference
           by removing unneeded training operations, and written to disk.

        Args:
            sess (object): The current TF training session.
            dataset (str): Name of the dataset, used in the exported model's filename.
            path (str): The path used for saving the model.
            device (str): Represents either "cpu" or "gpu".

        .. seealso:: https://bit.ly/2VBBdqQ and https://bit.ly/2W7YqBa
        """

        model_name = "model_%s_%s" % (dataset, device)
        model_path = path + model_name

        tf.train.write_graph(sess.graph.as_graph_def(),
                             path, model_name + ".pbtxt")

        freeze_graph.freeze_graph(model_path + ".pbtxt", "", False,
                                  model_path + ".ckpt", "output",
                                  "save/restore_all", "save/Const:0",
                                  model_path + ".pb", True, "")

        os.remove(model_path + ".pbtxt")

        graph_def = tf.GraphDef()

        with tf.gfile.Open(model_path + ".pb", "rb") as file:
            graph_def.ParseFromString(file.read())

        transforms = ["remove_nodes(op=Identity)",
                      "merge_duplicate_nodes",
                      "strip_unused_nodes",
                      "fold_constants(ignore_errors=true)"]

        optimized_graph_def = TransformGraph(graph_def,
                                             ["input"],
                                             ["output"],
                                             transforms)

        tf.train.write_graph(optimized_graph_def,
                             logdir=path,
                             as_text=False,
                             name=model_name + ".pb") 
Example #25
Source File: training_pipeline.py    From paraphraser with MIT License
def compress_graph(sess, args, model):
    """After training has completed, this function can be called to compress
    the model.  The computation graph is frozen turning the checkpoint
    variables into constants.  Finally, optimization is done by stripping
    away all unnecessary nodes from the graph if they are not used at
    inference time.

    Args:
        sess: Tensorflow session
        args: ArgumentParser config object
        model: model dictionary containing tensors of interest

    """
    from tensorflow.python.tools import freeze_graph 
    from tensorflow.python.tools import optimize_for_inference_lib

    tf.train.write_graph(sess.graph_def, '/media/sdb/models/paraphraser', 'model.pb', as_text=False)

    freeze_graph.freeze_graph(
        #input_graph='/tmp/model.pbtxt', 
        input_graph='/media/sdb/models/paraphraser/model.pb',
        input_saver='',
        input_binary=True, 
        input_checkpoint=args.checkpoint,
        output_node_names='predictions',
        restore_op_name='save/restore_all', 
        filename_tensor_name='save/Const:0',
        output_graph='/media/sdb/models/paraphraser/frozen_model.pb', 
        clear_devices=True, 
        initializer_nodes='')

    '''
    input_graph_def = tf.GraphDef()
    #with tf.gfile.Open('/media/sdb/models/paraphraser/frozen_model.pb', 'rb') as f:
    with tf.gfile.Open('/tmp/frozen_model.pb', 'rb') as f:
        data = f.read()
        input_graph_def.ParseFromString(data)
        with tf.Graph().as_default() as graph:
            tf.import_graph_def(input_graph_def)
            print(dir(graph))
            print(graph.find_tensor_by_name('placeholders/sampling_temperature'))

    output_graph_def = optimize_for_inference_lib.optimize_for_inference(
        input_graph_def,
        ['placeholders/source_ids', 'placeholders/sequence_source_lengths'],
        ['predictions'],
        tf.float32.as_datatype_enum)
    
    f = tf.gfile.FastGFile('/tmp/optimized_model.pb', "w")
    f.write(output_graph_def.SerializeToString())
    ''' 
Example #26
Source File: freeze_graph_test.py    From auto-alt-text-lambda-api with MIT License
def _testFreezeGraph(self, saver_write_version):

    checkpoint_prefix = os.path.join(self.get_temp_dir(), "saved_checkpoint")
    checkpoint_state_name = "checkpoint_state"
    input_graph_name = "input_graph.pb"
    output_graph_name = "output_graph.pb"

    # We'll create an input graph that has a single variable containing 1.0,
    # and that then multiplies it by 2.
    with ops.Graph().as_default():
      variable_node = variables.Variable(1.0, name="variable_node")
      output_node = math_ops.multiply(variable_node, 2.0, name="output_node")
      sess = session.Session()
      init = variables.global_variables_initializer()
      sess.run(init)
      output = sess.run(output_node)
      self.assertNear(2.0, output, 0.00001)
      saver = saver_lib.Saver(write_version=saver_write_version)
      checkpoint_path = saver.save(
          sess,
          checkpoint_prefix,
          global_step=0,
          latest_filename=checkpoint_state_name)
      graph_io.write_graph(sess.graph, self.get_temp_dir(), input_graph_name)

    # We save out the graph to disk, and then call the const conversion
    # routine.
    input_graph_path = os.path.join(self.get_temp_dir(), input_graph_name)
    input_saver_def_path = ""
    input_binary = False
    output_node_names = "output_node"
    restore_op_name = "save/restore_all"
    filename_tensor_name = "save/Const:0"
    output_graph_path = os.path.join(self.get_temp_dir(), output_graph_name)
    clear_devices = False

    freeze_graph.freeze_graph(input_graph_path, input_saver_def_path,
                              input_binary, checkpoint_path, output_node_names,
                              restore_op_name, filename_tensor_name,
                              output_graph_path, clear_devices, "")

    # Now we make sure the variable is now a constant, and that the graph still
    # produces the expected result.
    with ops.Graph().as_default():
      output_graph_def = graph_pb2.GraphDef()
      with open(output_graph_path, "rb") as f:
        output_graph_def.ParseFromString(f.read())
        _ = importer.import_graph_def(output_graph_def, name="")

      self.assertEqual(4, len(output_graph_def.node))
      for node in output_graph_def.node:
        self.assertNotEqual("VariableV2", node.op)
        self.assertNotEqual("Variable", node.op)

      with session.Session() as sess:
        output_node = sess.graph.get_tensor_by_name("output_node:0")
        output = sess.run(output_node)
        self.assertNear(2.0, output, 0.00001) 
Example #27
Source File: freeze_graph_test.py    From keras-lambda with MIT License
def _testFreezeGraph(self, saver_write_version):

    checkpoint_prefix = os.path.join(self.get_temp_dir(), "saved_checkpoint")
    checkpoint_state_name = "checkpoint_state"
    input_graph_name = "input_graph.pb"
    output_graph_name = "output_graph.pb"

    # We'll create an input graph that has a single variable containing 1.0,
    # and that then multiplies it by 2.
    with ops.Graph().as_default():
      variable_node = variables.Variable(1.0, name="variable_node")
      output_node = math_ops.multiply(variable_node, 2.0, name="output_node")
      sess = session.Session()
      init = variables.global_variables_initializer()
      sess.run(init)
      output = sess.run(output_node)
      self.assertNear(2.0, output, 0.00001)
      saver = saver_lib.Saver(write_version=saver_write_version)
      checkpoint_path = saver.save(
          sess,
          checkpoint_prefix,
          global_step=0,
          latest_filename=checkpoint_state_name)
      graph_io.write_graph(sess.graph, self.get_temp_dir(), input_graph_name)

    # We save out the graph to disk, and then call the const conversion
    # routine.
    input_graph_path = os.path.join(self.get_temp_dir(), input_graph_name)
    input_saver_def_path = ""
    input_binary = False
    output_node_names = "output_node"
    restore_op_name = "save/restore_all"
    filename_tensor_name = "save/Const:0"
    output_graph_path = os.path.join(self.get_temp_dir(), output_graph_name)
    clear_devices = False

    freeze_graph.freeze_graph(input_graph_path, input_saver_def_path,
                              input_binary, checkpoint_path, output_node_names,
                              restore_op_name, filename_tensor_name,
                              output_graph_path, clear_devices, "")

    # Now we make sure the variable is now a constant, and that the graph still
    # produces the expected result.
    with ops.Graph().as_default():
      output_graph_def = graph_pb2.GraphDef()
      with open(output_graph_path, "rb") as f:
        output_graph_def.ParseFromString(f.read())
        _ = importer.import_graph_def(output_graph_def, name="")

      self.assertEqual(4, len(output_graph_def.node))
      for node in output_graph_def.node:
        self.assertNotEqual("VariableV2", node.op)
        self.assertNotEqual("Variable", node.op)

      with session.Session() as sess:
        output_node = sess.graph.get_tensor_by_name("output_node:0")
        output = sess.run(output_node)
        self.assertNear(2.0, output, 0.00001)