Python tensorflow.python.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY Examples

The following are 30 code examples of tensorflow.python.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY. Each example is taken from the project and source file named in its header. You may also want to check out all other available functions and classes of the module tensorflow.python.saved_model.signature_constants.
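
Before the examples, a minimal sketch (TF 1.x style, matching the loader pattern used in several examples below) of how the constant is typically used; the export directory is a placeholder. DEFAULT_SERVING_SIGNATURE_DEF_KEY is the string 'serving_default', the key under which TensorFlow Serving looks up the default SignatureDef of a SavedModel.

import tensorflow as tf
from tensorflow.python.saved_model import signature_constants, tag_constants

export_dir = '/path/to/saved_model'  # placeholder

with tf.Session(graph=tf.Graph()) as sess:
    # Load the SavedModel and get back its MetaGraphDef.
    meta_graph_def = tf.saved_model.loader.load(
        sess, [tag_constants.SERVING], export_dir)
    # Look up the default serving signature ('serving_default').
    signature = meta_graph_def.signature_def[
        signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
    print(signature.inputs)   # TensorInfo protos keyed by input alias
    print(signature.outputs)  # TensorInfo protos keyed by output alias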
Example #1
Source File: saved_model_export_utils.py    From keras-lambda with MIT License
def build_all_signature_defs(input_alternatives, output_alternatives,
                             actual_default_output_alternative_key):
  """Build `SignatureDef`s from all pairs of input and output alternatives."""

  signature_def_map = {
      ('%s:%s' % (input_key, output_key or 'None')):
      build_standardized_signature_def(
          inputs, outputs, problem_type)
      for input_key, inputs in input_alternatives.items()
      for output_key, (problem_type, outputs)
      in output_alternatives.items()}

  # Add the default SignatureDef
  default_inputs = input_alternatives.get(DEFAULT_INPUT_ALTERNATIVE_KEY)
  if not default_inputs:
    raise ValueError('A default input_alternative must be provided.')
    # default_inputs = input_alternatives[FEATURES_INPUT_ALTERNATIVE_KEY]
  # default outputs are guaranteed to exist above
  (default_problem_type, default_outputs) = (
      output_alternatives[actual_default_output_alternative_key])
  signature_def_map[signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = (
      build_standardized_signature_def(
          default_inputs, default_outputs, default_problem_type))

  return signature_def_map 
Example #2
Source File: saved_model_export_utils.py    From lambda-packs with MIT License
def build_all_signature_defs(input_alternatives, output_alternatives,
                             actual_default_output_alternative_key):
  """Build `SignatureDef`s from all pairs of input and output alternatives."""

  signature_def_map = {
      ('%s:%s' % (input_key, output_key or 'None')):
      build_standardized_signature_def(
          inputs, outputs, problem_type)
      for input_key, inputs in input_alternatives.items()
      for output_key, (problem_type, outputs)
      in output_alternatives.items()}

  # Add the default SignatureDef
  default_inputs = input_alternatives.get(DEFAULT_INPUT_ALTERNATIVE_KEY)
  if not default_inputs:
    raise ValueError('A default input_alternative must be provided.')
    # default_inputs = input_alternatives[FEATURES_INPUT_ALTERNATIVE_KEY]
  # default outputs are guaranteed to exist above
  (default_problem_type, default_outputs) = (
      output_alternatives[actual_default_output_alternative_key])
  signature_def_map[signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = (
      build_standardized_signature_def(
          default_inputs, default_outputs, default_problem_type))

  return signature_def_map 
Example #3
Source File: saved_model_export_utils.py    From auto-alt-text-lambda-api with MIT License
def build_all_signature_defs(input_alternatives, output_alternatives,
                             actual_default_output_alternative_key):
  """Build `SignatureDef`s from all pairs of input and output alternatives."""

  signature_def_map = {
      ('%s:%s' % (input_key, output_key or 'None')):
      build_standardized_signature_def(
          inputs, outputs, problem_type)
      for input_key, inputs in input_alternatives.items()
      for output_key, (problem_type, outputs)
      in output_alternatives.items()}

  # Add the default SignatureDef
  default_inputs = input_alternatives.get(DEFAULT_INPUT_ALTERNATIVE_KEY)
  if not default_inputs:
    raise ValueError('A default input_alternative must be provided.')
    # default_inputs = input_alternatives[FEATURES_INPUT_ALTERNATIVE_KEY]
  # default outputs are guaranteed to exist above
  (default_problem_type, default_outputs) = (
      output_alternatives[actual_default_output_alternative_key])
  signature_def_map[signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = (
      build_standardized_signature_def(
          default_inputs, default_outputs, default_problem_type))

  return signature_def_map 
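
Illustrative only: a small sketch of the key layout that the dict comprehension above produces, using hypothetical alternative names and omitting the actual SignatureDef values. The alternative names ('default_input_alternative', 'head-0') are assumptions for illustration, not taken from the original projects.

input_alternatives = {'default_input_alternative': {'x': '<tensor>'}}
output_alternatives = {'head-0': ('classification', {'scores': '<tensor>'})}

keys = ['%s:%s' % (input_key, output_key or 'None')
        for input_key in input_alternatives
        for output_key in output_alternatives]
keys.append('serving_default')  # signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
print(keys)  # ['default_input_alternative:head-0', 'serving_default']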
Example #4
Source File: tf_savedmodel_artifact.py    From BentoML with Apache License 2.0
def _load_tf_saved_model(path):
    try:
        import tensorflow as tf
        from tensorflow.python.training.tracking.tracking import AutoTrackable

        TF2 = tf.__version__.startswith('2')
    except ImportError:
        raise MissingDependencyException(
            "Tensorflow package is required to use TfSavedModelArtifact"
        )

    if TF2:
        return tf.saved_model.load(path)
    else:
        loaded = tf.compat.v2.saved_model.load(path)
        if isinstance(loaded, AutoTrackable) and not hasattr(loaded, "__call__"):
            logger.warning(
                '''Importing SavedModels from TensorFlow 1.x.
                `outputs = imported(inputs)` is not supported in a BentoML service due to
                TensorFlow API limitations.

                Recommended usage:

                ```python
                from tensorflow.python.saved_model import signature_constants

                imported = tf.saved_model.load(path_to_v1_saved_model)
                wrapped_function = imported.signatures[
                    signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
                wrapped_function(tf.ones([]))
                ```

                See https://www.tensorflow.org/api_docs/python/tf/saved_model/load for
                details.
                '''
            )
        return loaded 
Example #5
Source File: export_model.py    From youtube8mchallenge with Apache License 2.0
def export_model(self, model_dir, global_step_val, last_checkpoint):
    """Exports the model so that it can used for batch predictions."""

    with self.graph.as_default():
      with tf.Session() as session:
        session.run(tf.global_variables_initializer())
        self.saver.restore(session, last_checkpoint)

        signature = signature_def_utils.build_signature_def(
            inputs=self.inputs,
            outputs=self.outputs,
            method_name=signature_constants.PREDICT_METHOD_NAME)

        signature_map = {signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                         signature}

        model_builder = saved_model_builder.SavedModelBuilder(model_dir)
        model_builder.add_meta_graph_and_variables(session,
            tags=[tag_constants.SERVING],
            signature_def_map=signature_map,
            clear_devices=True)
        model_builder.save() 
Example #6
Source File: punctuator.py    From keras-punctuator with MIT License
def saveWithSavedModel():
    # K.set_learning_phase(0)  # all new operations will be in test mode from now on

    # wordIndex = loadWordIndex()
    model = createModel()
    model.load_weights(KERAS_WEIGHTS_FILE)


    export_path = os.path.join(PUNCTUATOR_DIR, 'graph') # where to save the exported graph

    shutil.rmtree(export_path, True)
    export_version = 1 # version number (integer)

    import tensorflow as tf
    sess = tf.Session()

    saver = tf.train.Saver(sharded=True)
    from tensorflow.contrib.session_bundle import exporter
    model_exporter = exporter.Exporter(saver)
    signature = exporter.classification_signature(input_tensor=model.input,scores_tensor=model.output)
    # model_exporter.init(sess.graph.as_graph_def(),default_graph_signature=signature)
    tf.initialize_all_variables().run(session=sess)
    # model_exporter.export(export_path, tf.constant(export_version), sess)
    from tensorflow.python.saved_model import builder as saved_model_builder
    builder = saved_model_builder.SavedModelBuilder(export_path)
    from tensorflow.python.saved_model import signature_constants
    from tensorflow.python.saved_model import tag_constants
    legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')
    from tensorflow.python.saved_model.signature_def_utils_impl import predict_signature_def
    signature_def = predict_signature_def(
        {signature_constants.PREDICT_INPUTS: model.input},
        {signature_constants.PREDICT_OUTPUTS: model.output})
    builder.add_meta_graph_and_variables(
        sess, [tag_constants.SERVING],
        signature_def_map={
            signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                signature_def
        },
        legacy_init_op=legacy_init_op)
    builder.save() 
Example #7
Source File: cnn.py    From Frozen_Graph_TensorFlow with MIT License
def save_signature(self, directory):

        signature = signature_def_utils.build_signature_def(
            inputs={
                'input':
                saved_model_utils.build_tensor_info(self.input),
                'dropout_rate':
                saved_model_utils.build_tensor_info(self.dropout_rate)
            },
            outputs={
                'output': saved_model_utils.build_tensor_info(self.output)
            },
            method_name=signature_constants.PREDICT_METHOD_NAME)
        signature_map = {
            signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature
        }
        model_builder = saved_model_builder.SavedModelBuilder(directory)
        model_builder.add_meta_graph_and_variables(
            self.sess,
            tags=[tag_constants.SERVING],
            signature_def_map=signature_map,
            clear_devices=True)
        model_builder.save(as_text=False) 
Example #8
Source File: export_model.py    From youtube-8m with Apache License 2.0
def export_model(self, model_dir, global_step_val, last_checkpoint):
    """Exports the model so that it can used for batch predictions."""

    with self.graph.as_default():
      with tf.Session() as session:
        session.run(tf.global_variables_initializer())
        self.saver.restore(session, last_checkpoint)

        signature = signature_def_utils.build_signature_def(
            inputs=self.inputs,
            outputs=self.outputs,
            method_name=signature_constants.PREDICT_METHOD_NAME)

        signature_map = {
            signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature
        }

        model_builder = saved_model_builder.SavedModelBuilder(model_dir)
        model_builder.add_meta_graph_and_variables(
            session,
            tags=[tag_constants.SERVING],
            signature_def_map=signature_map,
            clear_devices=True)
        model_builder.save() 
Example #9
Source File: _local_predict.py    From pydatalab with Apache License 2.0
def _tf_load_model(sess, model_dir):
  """Load a tf model from model_dir, and return input/output alias maps."""

  meta_graph_pb = tf.saved_model.loader.load(
      sess=sess,
      tags=[tf.saved_model.tag_constants.SERVING],
      export_dir=model_dir)

  signature = meta_graph_pb.signature_def[signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
  input_alias_map = {friendly_name: tensor_info_proto.name
                     for (friendly_name, tensor_info_proto) in signature.inputs.items()}
  output_alias_map = {friendly_name: tensor_info_proto.name
                      for (friendly_name, tensor_info_proto) in signature.outputs.items()}

  return input_alias_map, output_alias_map 
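
A hedged follow-up sketch (not part of the original pydatalab code) showing how the returned alias maps might drive a single prediction. The input alias 'csv_line' and the sample row are assumptions, and _tf_load_model from the example above is assumed to be in scope.

import tensorflow as tf

def _predict_once(model_dir, batch):
    """Hypothetical helper: run one batch through the default serving signature."""
    with tf.Session(graph=tf.Graph()) as sess:
        input_alias_map, output_alias_map = _tf_load_model(sess, model_dir)
        output_keys = sorted(output_alias_map.keys())
        fetches = [output_alias_map[k] for k in output_keys]
        # 'csv_line' is an assumed input alias; real models define their own.
        feed_dict = {input_alias_map['csv_line']: batch}
        results = sess.run(fetches, feed_dict=feed_dict)
        return dict(zip(output_keys, results))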
Example #10
Source File: tensorflow_save_and_load_using_model_builder.py    From Mastering-OpenCV-4-with-Python with MIT License
def export_model():
    """Exports the model"""

    trained_checkpoint_prefix = 'linear_regression'

    loaded_graph = tf.Graph()
    with tf.Session(graph=loaded_graph) as sess:
        sess.run(tf.global_variables_initializer())

        # Restore from checkpoint:
        loader = tf.train.import_meta_graph(trained_checkpoint_prefix + '.meta')
        loader.restore(sess, trained_checkpoint_prefix)

        # Add signature:
        graph = tf.get_default_graph()
        inputs = tf.saved_model.utils.build_tensor_info(graph.get_tensor_by_name('X:0'))
        outputs = tf.saved_model.utils.build_tensor_info(graph.get_tensor_by_name('y_model:0'))

        signature = signature_def_utils.build_signature_def(inputs={'X': inputs},
                                                            outputs={'y_model': outputs},
                                                            method_name=signature_constants.PREDICT_METHOD_NAME)

        signature_map = {signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature}

        # Export model:
        builder = tf.saved_model.builder.SavedModelBuilder('./my_model')
        builder.add_meta_graph_and_variables(sess, signature_def_map=signature_map,
                                             tags=[tf.saved_model.tag_constants.SERVING])
        builder.save()


# Export the model: 
Example #11
Source File: mnist_tensorflow_save_and_load_model_builder.py    From Mastering-OpenCV-4-with-Python with MIT License
def export_model():
    """Exports the model"""

    trained_checkpoint_prefix = 'softmax_regression_model_mnist'

    loaded_graph = tf.Graph()
    with tf.Session(graph=loaded_graph) as sess:
        sess.run(tf.global_variables_initializer())

        # Restore from checkpoint
        loader = tf.train.import_meta_graph(trained_checkpoint_prefix + '.meta')
        loader.restore(sess, trained_checkpoint_prefix)

        # Add signature:
        graph = tf.get_default_graph()
        inputs = tf.saved_model.utils.build_tensor_info(graph.get_tensor_by_name('myInput:0'))
        outputs = tf.saved_model.utils.build_tensor_info(graph.get_tensor_by_name('myOutput:0'))

        signature = signature_def_utils.build_signature_def(inputs={'myInput': inputs},
                                                            outputs={'myOutput': outputs},
                                                            method_name=signature_constants.PREDICT_METHOD_NAME)

        signature_map = {signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature}

        # Export model:
        builder = tf.saved_model.builder.SavedModelBuilder('./my_model')
        builder.add_meta_graph_and_variables(sess, signature_def_map=signature_map,
                                             tags=[tf.saved_model.tag_constants.SERVING])
        builder.save()


# Export the model: 
Example #12
Source File: export_model.py    From AttentionCluster with Apache License 2.0
def export_model(self, model_dir, global_step_val, last_checkpoint):
        """Exports the model so that it can used for batch predictions."""
        with self.graph.as_default():
            with tf.Session() as session:
                session.run(tf.global_variables_initializer())
                self.saver.restore(session, last_checkpoint)

                signature = signature_def_utils.build_signature_def(
                    inputs=self.inputs,
                    outputs=self.outputs,
                    method_name=signature_constants.PREDICT_METHOD_NAME)

                signature_map = {signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                                     signature}

                model_builder = saved_model_builder.SavedModelBuilder(model_dir)
                model_builder.add_meta_graph_and_variables(session,
                                                           tags=[tag_constants.SERVING],
                                                           signature_def_map=signature_map,
                                                           clear_devices=True)
                model_builder.save() 
Example #13
Source File: export_model.py    From Y8M with Apache License 2.0
def export_model(self, model_dir, global_step_val, last_checkpoint):
    """Exports the model so that it can used for batch predictions."""

    with self.graph.as_default():
      with tf.Session(config=self.config) as session:
        session.run(tf.global_variables_initializer())
        self.saver.restore(session, last_checkpoint)

        signature = signature_def_utils.build_signature_def(
            inputs=self.inputs,
            outputs=self.outputs,
            method_name=signature_constants.PREDICT_METHOD_NAME)

        signature_map = {signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                         signature}

        model_builder = saved_model_builder.SavedModelBuilder(model_dir)
        model_builder.add_meta_graph_and_variables(session,
            tags=[tag_constants.SERVING],
            signature_def_map=signature_map,
            clear_devices=True)
        model_builder.save() 
Example #14
Source File: mlflow_utils.py    From nucleus7 with Mozilla Public License 2.0
def log_saved_model(saved_model_path: Union[bytes, str],
                    global_step: int,
                    saved_model_load_fn: Callable):
    """
    Log all the saved models to mlflow

    Parameters
    ----------
    saved_model_path
        path to saved model
    global_step
        global step for saved model
    """
    # pylint: disable=unused-argument
    # saved_model_load_fn is coming from patch
    if mlflow.active_run() is None:
        _warn_about_no_run()
        return
    if isinstance(saved_model_path, bytes):
        saved_model_path = saved_model_path.decode()
    saved_model_tag = os.path.split(saved_model_path)[-1]
    artifact_path = os.path.join("models", saved_model_tag)
    mlflow_tf.log_model(
        tf_saved_model_dir=saved_model_path,
        tf_meta_graph_tags=[tag_constants.SERVING],
        tf_signature_def_key=
        signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY,
        artifact_path=artifact_path)
    mlflow_artifacts_path = mlflow.get_artifact_uri()
    saved_model_artifact_path = os.path.join(
        mlflow_artifacts_path, artifact_path)
    project_utils.log_exported_model_info(
        saved_model_artifact_path, global_step) 
Example #15
Source File: mlp_classifier.py    From text-antispam with MIT License
def export(model_version, model_dir, sess, inputs, y_op):
    """导出tensorflow_serving可用的模型(Saved Model方式)(推荐)
    prediction_signature必备的三个参数分别是输入inputs、输出outputs和方法名method_name,如果缺失方法名将会报错:“grpc.framework.interfaces.face.face.AbortionError: AbortionError(code=StatusCode.INTERNAL, details="Expected prediction signature method_name to be one of {tensorflow/serving/predict, tensorflow/serving/classify, tensorflow/serving/regress}. Was: ")”。每一个SavedModel关联着一个独立的checkpoint。每一个图元都绑定一个或多个标签,这些标签用来明确图元被加载的方式。标签只接受两种类型:serve或者train,保存时可以同时包含两个标签。其中tag_constants.SERVING = "serve",tag_constants.TRAINING = "train"。模型用于TensorFlow Serving时,标签必须包含serve类型。如果标签只包含train类型,TensorFlow Serving加载模型时会报错:“E tensorflow_serving/core/aspired_versions_manager.cc:351] Servable {name: default version: 2} cannot be loaded: Not found: Could not find meta graph def matching supplied tags.”。定义signature_def_map时注意定义默认服务签名键,如果缺少则会报错:“grpc.framework.interfaces.face.face.AbortionError: AbortionError(code=StatusCode.FAILED_PRECONDITION, details="Default serving signature key not found.")”。
    """
    if model_version <= 0:
        print('Please specify a positive value for version number.')
        sys.exit()

    path = os.path.dirname(os.path.abspath(model_dir))
    if not os.path.isdir(path):
        logging.warning('Path (%s) not exists, making directories...', path)
        os.makedirs(path)

    export_path = os.path.join(
        compat.as_bytes(model_dir),
        compat.as_bytes(str(model_version)))

    if os.path.isdir(export_path):
        logging.warning('Path (%s) exists, removing directories...', export_path)
        shutil.rmtree(export_path)

    builder = saved_model_builder.SavedModelBuilder(export_path)
    tensor_info_x = utils.build_tensor_info(inputs)
    tensor_info_y = utils.build_tensor_info(y_op)

    prediction_signature = signature_def_utils.build_signature_def(
        inputs={'x': tensor_info_x},
        outputs={'y': tensor_info_y},
        method_name=signature_constants.PREDICT_METHOD_NAME)

    builder.add_meta_graph_and_variables(
        sess,
        [tag_constants.SERVING],
        signature_def_map={
            'predict_text': prediction_signature,
            signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: prediction_signature
        })

    builder.save() 
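
For completeness, a hedged sketch of checking the export above through the default serving signature key, assuming TF 1.x with tf.contrib available. The 'x' input alias matches the prediction_signature built in export(); the directory and the toy batch are placeholders.

import tensorflow as tf
from tensorflow.python.saved_model import signature_constants

# Placeholder path: model_dir/model_version as laid out by export() above.
predict_fn = tf.contrib.predictor.from_saved_model(
    export_dir='./model/1',
    signature_def_key=signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY)

# Feed-dict keys are the signature's input aliases ('x'); the result dict is
# keyed by the output aliases ('y'). The toy batch shape is an assumption.
print(predict_fn({'x': [[0.1, 0.2, 0.3]]}))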
Example #16
Source File: export_model.py    From Y8M with Apache License 2.0
def export_model(self, model_dir, global_step_val, last_checkpoint):
    """Exports the model so that it can used for batch predictions."""

    with self.graph.as_default():
      with tf.Session(config=self.config) as session:
        session.run(tf.global_variables_initializer())
        self.saver.restore(session, last_checkpoint)

        signature = signature_def_utils.build_signature_def(
            inputs=self.inputs,
            outputs=self.outputs,
            method_name=signature_constants.PREDICT_METHOD_NAME)

        signature_map = {signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                         signature}

        model_builder = saved_model_builder.SavedModelBuilder(model_dir)
        model_builder.add_meta_graph_and_variables(session,
            tags=[tag_constants.SERVING],
            signature_def_map=signature_map,
            clear_devices=True)
        model_builder.save() 
Example #17
Source File: export_saved_model_tpu_lib.py    From models with Apache License 2.0
def run_inference_from_saved_model(inputs,
                                   saved_model_dir,
                                   input_placeholder_name='placeholder_tensor',
                                   repeat=1):
  """Loads saved model and run inference on TPU.

  Args:
    inputs: Input image with the same type as `input_type`
    saved_model_dir: The directory SavedModel being exported to.
    input_placeholder_name: input placeholder's name in SavedModel signature.
    repeat: Number of times to repeat running the provided input for profiling.

  Returns:
    A dict of resulting tensors.
  """
  with tf.Graph().as_default(), tf.Session() as sess:
    meta_graph = loader.load(sess, [tag_constants.SERVING, tag_constants.TPU],
                             saved_model_dir)

    sess.run(tpu.initialize_system())

    key_prediction = signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY

    tensor_name_input = (
        meta_graph.signature_def[key_prediction].inputs[input_placeholder_name]
        .name)
    tensor_name_output = {
        k: v.name
        for k, v in (meta_graph.signature_def[key_prediction].outputs.items())
    }

    for _ in range(repeat):
      tensor_dict_out = sess.run(
          tensor_name_output, feed_dict={tensor_name_input: [inputs]})

    sess.run(tpu.shutdown_system())

    return tensor_dict_out 
Example #18
Source File: infer.py    From cloudml-edge-automation with Apache License 2.0
def __init__(self):

        model_path = os.environ.get('MODEL_PATH', '/model')

        self.sess = tf.Session(graph=tf.Graph())
        saved_metagraphdef = tf.saved_model.loader.load(self.sess,
                [tag_constants.SERVING], model_path)

        self.inputs_tensor_info = signature_def_utils.get_signature_def_by_key(
                saved_metagraphdef,
                signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY).inputs
        outputs_tensor_info = signature_def_utils.get_signature_def_by_key(
                saved_metagraphdef,
                signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY).outputs

        self.output_tensor_keys_sorted = sorted(outputs_tensor_info.keys())
        self.output_tensor_names_sorted = [
           outputs_tensor_info[tensor_key].name
           for tensor_key in self.output_tensor_keys_sorted
           ] 
Example #19
Source File: model.py    From cloudml-edge-automation with Apache License 2.0
def export(self, last_checkpoint, output_dir):
    """Builds a prediction graph and xports the model.

    Args:
      last_checkpoint: Path to the latest checkpoint file from training.
      output_dir: Path to the folder to be used to output the model.
    """
    logging.info('Exporting prediction graph to %s', output_dir)
    with tf.Session(graph=tf.Graph()) as sess:
      # Build and save prediction meta graph and trained variable values.
      inputs, outputs = self.build_prediction_graph()
      init_op = tf.global_variables_initializer()
      sess.run(init_op)
      self.restore_from_checkpoint(sess, self.inception_checkpoint_file,
                                   last_checkpoint)
      signature_def = build_signature(inputs=inputs, outputs=outputs)
      signature_def_map = {
          signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature_def
      }
      builder = saved_model_builder.SavedModelBuilder(output_dir)
      builder.add_meta_graph_and_variables(
          sess,
          tags=[tag_constants.SERVING],
          signature_def_map=signature_def_map)
      builder.save() 
Example #20
Source File: export_model.py    From Youtube-8M-WILLOW with Apache License 2.0
def export_model(self, model_dir, global_step_val, last_checkpoint):
    """Exports the model so that it can used for batch predictions."""

    with self.graph.as_default():
      with tf.Session() as session:
        session.run(tf.global_variables_initializer())
        self.saver.restore(session, last_checkpoint)

        signature = signature_def_utils.build_signature_def(
            inputs=self.inputs,
            outputs=self.outputs,
            method_name=signature_constants.PREDICT_METHOD_NAME)

        signature_map = {signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: 
                         signature}

        model_builder = saved_model_builder.SavedModelBuilder(model_dir)
        model_builder.add_meta_graph_and_variables(session,
            tags=[tag_constants.SERVING],
            signature_def_map=signature_map,
            clear_devices=True)
        model_builder.save() 
Example #21
Source File: model.py    From cloudml-samples with Apache License 2.0
def to_savedmodel(model, export_path):
    """Convert the Keras HDF5 model into TensorFlow SavedModel."""

    builder = saved_model_builder.SavedModelBuilder(export_path)

    signature = predict_signature_def(
        inputs={'input': model.inputs[0]}, outputs={'income': model.outputs[0]})

    with K.get_session() as sess:
        builder.add_meta_graph_and_variables(
            sess=sess,
            tags=[tag_constants.SERVING],
            signature_def_map={
                signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature
            })
        builder.save() 
Example #22
Source File: model.py    From cloudml-samples with Apache License 2.0
def export(self, last_checkpoint, output_dir):
    """Builds a prediction graph and xports the model.

    Args:
      last_checkpoint: Path to the latest checkpoint file from training.
      output_dir: Path to the folder to be used to output the model.
    """
    logging.info('Exporting prediction graph to %s', output_dir)
    with tf.Session(graph=tf.Graph()) as sess:
      # Build and save prediction meta graph and trained variable values.
      inputs, outputs = self.build_prediction_graph()
      init_op = tf.global_variables_initializer()
      sess.run(init_op)
      self.restore_from_checkpoint(sess, self.inception_checkpoint_file,
                                   last_checkpoint)
      signature_def = build_signature(inputs=inputs, outputs=outputs)
      signature_def_map = {
          signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature_def
      }
      builder = saved_model_builder.SavedModelBuilder(output_dir)
      builder.add_meta_graph_and_variables(
          sess,
          tags=[tag_constants.SERVING],
          signature_def_map=signature_def_map)
      builder.save() 
Example #23
Source File: export_model.py    From Y8M with Apache License 2.0
def export_model(self, model_dir, global_step_val, last_checkpoint):
    """Exports the model so that it can used for batch predictions."""

    with self.graph.as_default():
      with tf.Session() as session:
        session.run(tf.global_variables_initializer())
        self.saver.restore(session, last_checkpoint)

        signature = signature_def_utils.build_signature_def(
            inputs=self.inputs,
            outputs=self.outputs,
            method_name=signature_constants.PREDICT_METHOD_NAME)

        signature_map = {signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                         signature}

        model_builder = saved_model_builder.SavedModelBuilder(model_dir)
        model_builder.add_meta_graph_and_variables(session,
            tags=[tag_constants.SERVING],
            signature_def_map=signature_map,
            clear_devices=True)
        model_builder.save() 
Example #24
Source File: exporter.py    From Elphas with Apache License 2.0
def _write_saved_model(saved_model_path,
                       frozen_graph_def,
                       inputs,
                       outputs):
    """Writes SavedModel to disk.

    If checkpoint_path is not None bakes the weights into the graph thereby
    eliminating the need of checkpoint files during inference. If the model
    was trained with moving averages, setting use_moving_averages to true
    restores the moving averages, otherwise the original set of variables
    is restored.

    Args:
      saved_model_path: Path to write SavedModel.
      frozen_graph_def: tf.GraphDef holding frozen graph.
      inputs: The input image tensor to use for detection.
      outputs: A tensor dictionary containing the outputs of a DetectionModel.
    """
    with tf.Graph().as_default():
        with session.Session() as sess:

            tf.import_graph_def(frozen_graph_def, name='')

            builder = tf.saved_model.builder.SavedModelBuilder(
                saved_model_path)

            tensor_info_inputs = {
                'inputs': tf.saved_model.utils.build_tensor_info(inputs)}
            tensor_info_outputs = {}
            for k, v in outputs.items():
                tensor_info_outputs[k] = tf.saved_model.utils.build_tensor_info(
                    v)

            detection_signature = (
                tf.saved_model.signature_def_utils.build_signature_def(
                    inputs=tensor_info_inputs,
                    outputs=tensor_info_outputs,
                    method_name=signature_constants.PREDICT_METHOD_NAME))

            builder.add_meta_graph_and_variables(
                sess, [tf.saved_model.tag_constants.SERVING],
                signature_def_map={
                    signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                        detection_signature,
                },
            )
            builder.save() 
Example #25
Source File: exporter.py    From Gun-Detector with Apache License 2.0
def write_saved_model(saved_model_path,
                      frozen_graph_def,
                      inputs,
                      outputs):
  """Writes SavedModel to disk.

  If checkpoint_path is not None bakes the weights into the graph thereby
  eliminating the need of checkpoint files during inference. If the model
  was trained with moving averages, setting use_moving_averages to true
  restores the moving averages, otherwise the original set of variables
  is restored.

  Args:
    saved_model_path: Path to write SavedModel.
    frozen_graph_def: tf.GraphDef holding frozen graph.
    inputs: The input image tensor to use for detection.
    outputs: A tensor dictionary containing the outputs of a DetectionModel.
  """
  with tf.Graph().as_default():
    with session.Session() as sess:

      tf.import_graph_def(frozen_graph_def, name='')

      builder = tf.saved_model.builder.SavedModelBuilder(saved_model_path)

      tensor_info_inputs = {
          'inputs': tf.saved_model.utils.build_tensor_info(inputs)}
      tensor_info_outputs = {}
      for k, v in outputs.items():
        tensor_info_outputs[k] = tf.saved_model.utils.build_tensor_info(v)

      detection_signature = (
          tf.saved_model.signature_def_utils.build_signature_def(
              inputs=tensor_info_inputs,
              outputs=tensor_info_outputs,
              method_name=signature_constants.PREDICT_METHOD_NAME))

      builder.add_meta_graph_and_variables(
          sess, [tf.saved_model.tag_constants.SERVING],
          signature_def_map={
              signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                  detection_signature,
          },
      )
      builder.save() 
Example #26
Source File: decode_image.py    From StegaStamp with MIT License
def main():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('model', type=str)
    parser.add_argument('--image', type=str, default=None)
    parser.add_argument('--images_dir', type=str, default=None)
    parser.add_argument('--secret_size', type=int, default=100)
    args = parser.parse_args()

    if args.image is not None:
        files_list = [args.image]
    elif args.images_dir is not None:
        files_list = glob.glob(args.images_dir + '/*')
    else:
        print('Missing input image')
        return

    sess = tf.InteractiveSession(graph=tf.Graph())

    model = tf.saved_model.loader.load(sess, [tag_constants.SERVING], args.model)

    input_image_name = model.signature_def[signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY].inputs['image'].name
    input_image = tf.get_default_graph().get_tensor_by_name(input_image_name)

    output_secret_name = model.signature_def[signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY].outputs['decoded'].name
    output_secret = tf.get_default_graph().get_tensor_by_name(output_secret_name)

    bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_BITS)

    for filename in files_list:
        image = Image.open(filename).convert("RGB")
        image = np.array(ImageOps.fit(image,(400, 400)),dtype=np.float32)
        image /= 255.

        feed_dict = {input_image:[image]}

        secret = sess.run([output_secret],feed_dict=feed_dict)[0][0]

        packet_binary = "".join([str(int(bit)) for bit in secret[:96]])
        packet = bytes(int(packet_binary[i : i + 8], 2) for i in range(0, len(packet_binary), 8))
        packet = bytearray(packet)

        data, ecc = packet[:-bch.ecc_bytes], packet[-bch.ecc_bytes:]

        bitflips = bch.decode_inplace(data, ecc)

        if bitflips != -1:
            try:
                code = data.decode("utf-8")
                print(filename, code)
                continue
            except:
                continue
        print(filename, 'Failed to decode') 
Example #27
Source File: exporter.py    From hands-detection with MIT License
def _write_saved_model(inference_graph_path, inputs, outputs,
                       checkpoint_path=None, use_moving_averages=False):
  """Writes SavedModel to disk.

  If checkpoint_path is not None bakes the weights into the graph thereby
  eliminating the need of checkpoint files during inference. If the model
  was trained with moving averages, setting use_moving_averages to true
  restores the moving averages, otherwise the original set of variables
  is restored.

  Args:
    inference_graph_path: Path to write inference graph.
    inputs: The input image tensor to use for detection.
    outputs: A tensor dictionary containing the outputs of a DetectionModel.
    checkpoint_path: Optional path to the checkpoint file.
    use_moving_averages: Whether to export the original or the moving averages
      of the trainable variables from the checkpoint.
  """
  inference_graph_def = tf.get_default_graph().as_graph_def()
  checkpoint_graph_def = None
  if checkpoint_path:
    output_node_names = ','.join(outputs.keys())
    checkpoint_graph_def = get_frozen_graph_def(
        inference_graph_def=inference_graph_def,
        use_moving_averages=use_moving_averages,
        input_checkpoint=checkpoint_path,
        output_node_names=output_node_names
    )

  with tf.Graph().as_default():
    with session.Session() as sess:

      tf.import_graph_def(checkpoint_graph_def)

      builder = tf.saved_model.builder.SavedModelBuilder(inference_graph_path)

      tensor_info_inputs = {
          'inputs': tf.saved_model.utils.build_tensor_info(inputs)}
      tensor_info_outputs = {}
      for k, v in outputs.items():
        tensor_info_outputs[k] = tf.saved_model.utils.build_tensor_info(v)

      detection_signature = (
          tf.saved_model.signature_def_utils.build_signature_def(
              inputs=tensor_info_inputs,
              outputs=tensor_info_outputs,
              method_name=signature_constants.PREDICT_METHOD_NAME))

      builder.add_meta_graph_and_variables(
          sess, [tf.saved_model.tag_constants.SERVING],
          signature_def_map={
              signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                  detection_signature,
          },
      )
      builder.save() 
Example #28
Source File: bundle_shim.py    From keras-lambda with MIT License
def _load_saved_model_from_session_bundle_path(export_dir, target, config):
  """Load legacy TF Exporter/SessionBundle checkpoint.

  Args:
    export_dir: the directory that contains files exported by exporter.
    target: The execution engine to connect to. See target in tf.Session()
    config: A ConfigProto proto with configuration options. See config in
    tf.Session()

  Returns:
    session: a tensorflow session created from the variable files.
    metagraph_def: The `MetaGraphDef` protocol buffer loaded in the provided
    session. This can be used to further extract signature-defs,
    collection-defs, etc.
    This model is up-converted to SavedModel format. Specifically, the
    metagraph_def SignatureDef field is populated with Signatures converted from
    legacy signatures contained within CollectionDef.

  Raises:
    RuntimeError: If metagraph already contains signature_def and cannot be
    up-converted.
  """

  meta_graph_filename = os.path.join(export_dir,
                                     legacy_constants.META_GRAPH_DEF_FILENAME)

  metagraph_def = meta_graph.read_meta_graph_file(meta_graph_filename)
  if metagraph_def.signature_def:
    raise RuntimeError("Legacy graph contains signature def, unable to "
                       "up-convert.")

  # Add SignatureDef to metagraph.
  default_signature_def, named_signature_def = (
      _convert_signatures_to_signature_defs(metagraph_def))
  if default_signature_def:
    metagraph_def.signature_def[
        signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY].CopyFrom(
            default_signature_def)
  if named_signature_def:
    signature_def_key = signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
    if default_signature_def:
      signature_def_key += "_from_named"
    metagraph_def.signature_def[signature_def_key].CopyFrom(named_signature_def)

  # We cannot just output session we loaded with older metagraph_def and
  # up-converted metagraph definition because Session has an internal object of
  # type Graph which is populated from meta_graph_def. If we do not create
  # session with our new meta_graph_def, then Graph will be out of sync with
  # meta_graph_def.
  sess, metagraph_def = session_bundle.load_session_bundle_from_path(
      export_dir, target, config, meta_graph_def=metagraph_def)
  return sess, metagraph_def 
Example #29
Source File: exporter.py    From object_detection_kitti with Apache License 2.0
def _write_saved_model(saved_model_path,
                       frozen_graph_def,
                       inputs,
                       outputs):
  """Writes SavedModel to disk.

  If checkpoint_path is not None bakes the weights into the graph thereby
  eliminating the need of checkpoint files during inference. If the model
  was trained with moving averages, setting use_moving_averages to true
  restores the moving averages, otherwise the original set of variables
  is restored.

  Args:
    saved_model_path: Path to write SavedModel.
    frozen_graph_def: tf.GraphDef holding frozen graph.
    inputs: The input image tensor to use for detection.
    outputs: A tensor dictionary containing the outputs of a DetectionModel.
  """
  with tf.Graph().as_default():
    with session.Session() as sess:

      tf.import_graph_def(frozen_graph_def, name='')

      builder = tf.saved_model.builder.SavedModelBuilder(saved_model_path)

      tensor_info_inputs = {
          'inputs': tf.saved_model.utils.build_tensor_info(inputs)}
      tensor_info_outputs = {}
      for k, v in outputs.items():
        tensor_info_outputs[k] = tf.saved_model.utils.build_tensor_info(v)

      detection_signature = (
          tf.saved_model.signature_def_utils.build_signature_def(
              inputs=tensor_info_inputs,
              outputs=tensor_info_outputs,
              method_name=signature_constants.PREDICT_METHOD_NAME))

      builder.add_meta_graph_and_variables(
          sess, [tf.saved_model.tag_constants.SERVING],
          signature_def_map={
              signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                  detection_signature,
          },
      )
      builder.save() 
Example #30
Source File: exporter.py    From MBMD with MIT License
def _write_saved_model(saved_model_path,
                       frozen_graph_def,
                       inputs,
                       outputs):
  """Writes SavedModel to disk.

  If checkpoint_path is not None bakes the weights into the graph thereby
  eliminating the need of checkpoint files during inference. If the model
  was trained with moving averages, setting use_moving_averages to true
  restores the moving averages, otherwise the original set of variables
  is restored.

  Args:
    saved_model_path: Path to write SavedModel.
    frozen_graph_def: tf.GraphDef holding frozen graph.
    inputs: The input image tensor to use for detection.
    outputs: A tensor dictionary containing the outputs of a DetectionModel.
  """
  with tf.Graph().as_default():
    with session.Session() as sess:

      tf.import_graph_def(frozen_graph_def, name='')

      builder = tf.saved_model.builder.SavedModelBuilder(saved_model_path)

      tensor_info_inputs = {
          'inputs': tf.saved_model.utils.build_tensor_info(inputs)}
      tensor_info_outputs = {}
      for k, v in outputs.items():
        tensor_info_outputs[k] = tf.saved_model.utils.build_tensor_info(v)

      detection_signature = (
          tf.saved_model.signature_def_utils.build_signature_def(
              inputs=tensor_info_inputs,
              outputs=tensor_info_outputs,
              method_name=signature_constants.PREDICT_METHOD_NAME))

      builder.add_meta_graph_and_variables(
          sess, [tf.saved_model.tag_constants.SERVING],
          signature_def_map={
              signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                  detection_signature,
          },
      )
      builder.save()