Python tensorflow.keras.models.model_from_json() Examples

The following are 8 code examples of tensorflow.keras.models.model_from_json(), drawn from open-source projects. The originating project and source file are noted above each example.
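model_from_json() rebuilds a Keras model from the JSON string produced by model.to_json(); the JSON carries only the architecture, so weights must be restored separately. A minimal round-trip sketch (the layer sizes here are arbitrary and purely illustrative):

from tensorflow.keras import layers, models
from tensorflow.keras.models import model_from_json

# Build a small model and serialize its architecture to a JSON string.
model = models.Sequential([
    layers.Dense(16, activation='relu', input_shape=(8,)),
    layers.Dense(1, activation='sigmoid'),
])
json_string = model.to_json()

# Rebuild a fresh, untrained copy of the same architecture from the JSON.
rebuilt = model_from_json(json_string)
rebuilt.summary()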
Example #1
Source File: model.py    From kryptoflow with GNU General Public License v3.0
def load(self, run_number: Union[str, int]='last', name: str='sklearn'):
        """
        Load a keras/tf.txt model from pickled instance

        Args:
            run_number: 'last' or integer value representing the run number
            name: name of the model

        Returns: scikit learn representation of the model

        """
        if run_number != 'last':  # compare by value; 'is not' checks identity
            self.number = str(run_number)

        json_model_file = open(os.path.join(self.model_path, name + '.json'), "r").read()
        loaded_model = model_from_json(json_model_file)
        loaded_model.load_weights(os.path.join(self.model_path, name + '.h5'))
        return loaded_model 
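The .json/.h5 pair that this loader expects is typically written by the saving counterpart: the architecture via model.to_json() and the weights via model.save_weights(). A minimal sketch of that save side, with an illustrative directory and model name (not taken from the kryptoflow code):

import os
from tensorflow.keras import layers, models

# Hypothetical location and name mirroring what load() reads back.
model_path = 'saved_models'
name = 'sklearn'
os.makedirs(model_path, exist_ok=True)

model = models.Sequential([layers.Dense(1, input_shape=(4,))])

# Architecture to <name>.json, weights to <name>.h5.
with open(os.path.join(model_path, name + '.json'), 'w') as f:
    f.write(model.to_json())
model.save_weights(os.path.join(model_path, name + '.h5'))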
Example #2
Source File: model.py    From kryptoflow with GNU General Public License v3.0
def _store_tf(self, name, session):

        json_model_file = open(os.path.join(self.model_path, name + '.json'), "r").read()
        loaded_model = model_from_json(json_model_file)
        loaded_model.load_weights(os.path.join(self.model_path, name + '.h5'))

        builder = saved_model_builder.SavedModelBuilder(os.path.join(self.model_path, 'tf.txt'))
        signature = predict_signature_def(inputs={'states': loaded_model.input},
                                          outputs={'price': loaded_model.output})

        builder.add_meta_graph_and_variables(sess=session,
                                             tags=[tag_constants.SERVING],
                                             signature_def_map={'helpers': signature})
        builder.save()

        _logger.info("Saved tf.txt model to disk") 
Example #3
Source File: utils.py    From qkeras with Apache License 2.0
def quantized_model_from_json(json_string, custom_objects=None):
  if not custom_objects:
    custom_objects = {}

  # let's make a deep copy to make sure our objects are not shared elsewhere
  custom_objects = copy.deepcopy(custom_objects)

  _add_supported_quantized_objects(custom_objects)

  qmodel = model_from_json(json_string, custom_objects=custom_objects)

  return qmodel 
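The custom_objects mapping is how model_from_json() resolves class names that are not built into Keras; without it, deserializing a model that contains a custom layer fails with an "Unknown layer" error. A minimal sketch with a hypothetical custom layer (not one of the qkeras quantized objects):

from tensorflow.keras import layers, models
from tensorflow.keras.models import model_from_json

class DoubledDense(layers.Dense):
    """Hypothetical custom layer: a Dense layer whose output is doubled."""
    def call(self, inputs):
        return 2.0 * super().call(inputs)

model = models.Sequential([DoubledDense(4, input_shape=(3,))])
json_string = model.to_json()

# The class name stored in the JSON must be mapped back to the class object.
rebuilt = model_from_json(json_string,
                          custom_objects={'DoubledDense': DoubledDense})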
Example #4
Source File: shapelets.py    From tslearn with BSD 2-Clause "Simplified" License
def _organize_model(cls, model):
        """
        Instantiate the model with all hyper-parameters,
        set all model parameters and then return the model.
        Do not use directly. Use the designated classmethod to load a model.
        Parameters
        ----------
        cls : instance of model that inherits from `BaseModelPackage`
            a model instance
        model : dict
            Model dict containing hyper-parameters and model-parameters
        Returns
        -------
        model: instance of model that inherits from `BaseModelPackage`
            instance of the model class with hyper-parameters and
            model parameters set from the passed model dict
        """

        model_params = model.pop('model_params')
        hyper_params = model.pop('hyper_params')  # hyper-params

        # instantiate with hyper-parameters
        inst = cls(**hyper_params)

        if "model_" in model_params.keys():
            # set all model params
            inst.model_ = model_from_json(
                model_params.pop("model_"),
                custom_objects={
                    "LocalSquaredDistanceLayer": LocalSquaredDistanceLayer,
                    "GlobalMinPooling1D": GlobalMinPooling1D
                }
            )
            inst.set_weights(model_params.pop("model_weights_"))
        for p in model_params.keys():
            setattr(inst, p, model_params[p])
        inst._X_fit_dims = tuple(inst._X_fit_dims)
        inst._build_auxiliary_models()

        return inst 
Example #5
Source File: keras_input_lib.py    From snn_toolbox with MIT License
def load(path, filename, **kwargs):
    """Load network from file.

    Parameters
    ----------

    path: str
        Path to directory where to load model from.

    filename: str
        Name of file to load model from.

    Returns
    -------

    : dict[str, Union[keras.models.Sequential, function]]
        A dictionary of objects that constitute the input model. It must
        contain the following two keys:

        - 'model': keras.models.Sequential
            Keras model instance of the network.
        - 'val_fn': function
            Function that allows evaluating the original model.
    """

    filepath = str(os.path.join(path, filename))

    if os.path.exists(filepath + '.json'):
        model = models.model_from_json(open(filepath + '.json').read())
        try:
            model.load_weights(filepath + '.h5')
        except OSError:
            # Allows h5 files without a .h5 extension to be loaded.
            model.load_weights(filepath)
        # With this loading method, optimizer and loss cannot be recovered.
        # Could be specified by user, but since they are not really needed
        # at inference time, set them to the most common choice.
        # TODO: Proper reinstantiation should be doable since Keras2
        model.compile('sgd', 'categorical_crossentropy',
                      ['accuracy', metrics.top_k_categorical_accuracy])
    else:
        filepath_custom_objects = kwargs.get('filepath_custom_objects', None)
        if filepath_custom_objects is not None:
            filepath_custom_objects = str(filepath_custom_objects)  # python 2

        custom_dicts = assemble_custom_dict(
            get_custom_activations_dict(filepath_custom_objects),
            get_custom_layers_dict())
        try:
            model = models.load_model(filepath + '.h5', custom_dicts)
        except OSError as e:
            print(e)
            print("Trying to load without '.h5' extension.")
            model = models.load_model(filepath, custom_dicts)
        model.compile(model.optimizer, model.loss,
                      ['accuracy', metrics.top_k_categorical_accuracy])

    model.summary()
    return {'model': model, 'val_fn': model.evaluate} 
Example #6
Source File: 02_keras_to_tensorflow.py    From PINTO_model_zoo with MIT License
def load_model(input_model_path, input_json_path=None, input_yaml_path=None):
    if not Path(input_model_path).exists():
        raise FileNotFoundError(
            'Model file `{}` does not exist.'.format(input_model_path))
    try:
        model = keras.models.load_model(input_model_path)
        return model
    except FileNotFoundError as err:
        logging.error('Input model file (%s) does not exist.', FLAGS.input_model)
        raise err
    except ValueError as wrong_file_err:
        if input_json_path:
            if not Path(input_json_path).exists():
                raise FileNotFoundError(
                    'Model description json file `{}` does not exist.'.format(
                        input_json_path))
            try:
                model = model_from_json(open(str(input_json_path)).read())
                model.load_weights(input_model_path)
                return model
            except Exception as err:
                logging.error("Couldn't load model from json.")
                raise err
        elif input_yaml_path:
            if not Path(input_yaml_path).exists():
                raise FileNotFoundError(
                    'Model description yaml file `{}` does not exist.'.format(
                        input_yaml_path))
            try:
                model = model_from_yaml(open(str(input_yaml_path)).read())
                model.load_weights(input_model_path)
                return model
            except Exception as err:
                logging.error("Couldn't load model from yaml.")
                raise err
        else:
            logging.error(
                'Input file specified only holds the weights, and not '
                'the model definition. Save the model using '
                'model.save(filename.h5) which will contain the network '
                'architecture as well as its weights. '
                'If the model is saved using the '
                'model.save_weights(filename) function, either '
                'input_model_json or input_model_yaml flags should be set '
                'to import the network architecture prior to loading the '
                'weights. \n'
                'Check the keras documentation for more details '
                '(https://keras.io/getting-started/faq/)')
            raise wrong_file_err 
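The error branch above hinges on the difference between the two Keras saving styles: model.save() writes architecture, weights and optimizer state into one file that load_model() can read directly, while model.save_weights() writes weights only and needs the JSON (or YAML) architecture kept on the side. A minimal sketch of both, with illustrative file names:

from tensorflow.keras import layers, models

model = models.Sequential([layers.Dense(1, input_shape=(2,))])
model.compile('sgd', 'mse')

# Full save: loadable on its own with keras.models.load_model('full_model.h5').
model.save('full_model.h5')

# Weights-only save: load_model() cannot rebuild this by itself; keep the
# architecture alongside, e.g. as JSON for model_from_json().
model.save_weights('weights_only.h5')
with open('architecture.json', 'w') as f:
    f.write(model.to_json())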
Example #7
Source File: prediction_denoise.py    From Speech-enhancement with MIT License
def prediction(weights_path, name_model, audio_dir_prediction, dir_save_prediction, audio_input_prediction,
               audio_output_prediction, sample_rate, min_duration, frame_length, hop_length_frame, n_fft, hop_length_fft):
    """Take pretrained weights and a noisy voice recording, predict the denoised
    sound and save it to disk.
    """

    # load json and create model
    json_file = open(weights_path+'/'+name_model+'.json', 'r')
    loaded_model_json = json_file.read()
    json_file.close()
    loaded_model = model_from_json(loaded_model_json)
    # load weights into new model
    loaded_model.load_weights(weights_path+'/'+name_model+'.h5')
    print("Loaded model from disk")

    # Extracting noise and voice from folder and convert to numpy
    audio = audio_files_to_numpy(audio_dir_prediction, audio_input_prediction, sample_rate,
                                 frame_length, hop_length_frame, min_duration)

    # Dimensions of squared spectrogram
    dim_square_spec = int(n_fft / 2) + 1
    print(dim_square_spec)

    # Create amplitude and phase of the sounds
    m_amp_db_audio, m_pha_audio = numpy_audio_to_matrix_spectrogram(
        audio, dim_square_spec, n_fft, hop_length_fft)

    # Global scaling to have distribution -1/1
    X_in = scaled_in(m_amp_db_audio)
    # Reshape for prediction
    X_in = X_in.reshape(X_in.shape[0], X_in.shape[1], X_in.shape[2], 1)
    # Prediction using loaded network
    X_pred = loaded_model.predict(X_in)
    # Rescale back the noise model
    inv_sca_X_pred = inv_scaled_ou(X_pred)
    # Remove noise model from noisy speech
    X_denoise = m_amp_db_audio - inv_sca_X_pred[:, :, :, 0]
    # Reconstruct audio from denoised spectrogram and phase
    print(X_denoise.shape)
    print(m_pha_audio.shape)
    print(frame_length)
    print(hop_length_fft)
    audio_denoise_recons = matrix_spectrogram_to_numpy_audio(X_denoise, m_pha_audio, frame_length, hop_length_fft)
    # Number of frames
    nb_samples = audio_denoise_recons.shape[0]
    # Save all frames in one file
    denoise_long = audio_denoise_recons.reshape(1, nb_samples * frame_length) * 10
    librosa.output.write_wav(dir_save_prediction + audio_output_prediction, denoise_long[0, :], sample_rate) 
Example #8
Source File: keras_to_tensorflow.py    From keras-YOLOv3-model-set with MIT License
def load_input_model(input_model_path, input_json_path=None, input_yaml_path=None, custom_objects=None):
    if not Path(input_model_path).exists():
        raise FileNotFoundError(
            'Model file `{}` does not exist.'.format(input_model_path))
    try:
        model = load_model(input_model_path, custom_objects=custom_objects)
        return model
    except FileNotFoundError as err:
        logging.error('Input model file (%s) does not exist.', FLAGS.input_model)
        raise err
    except ValueError as wrong_file_err:
        if input_json_path:
            if not Path(input_json_path).exists():
                raise FileNotFoundError(
                    'Model description json file `{}` does not exist.'.format(
                        input_json_path))
            try:
                model = model_from_json(open(str(input_json_path)).read())
                model.load_weights(input_model_path)
                return model
            except Exception as err:
                logging.error("Couldn't load model from json.")
                raise err
        elif input_yaml_path:
            if not Path(input_yaml_path).exists():
                raise FileNotFoundError(
                    'Model description yaml file `{}` does not exist.'.format(
                        input_yaml_path))
            try:
                model = model_from_yaml(open(str(input_yaml_path)).read())
                model.load_weights(input_model_path)
                return model
            except Exception as err:
                logging.error("Couldn't load model from yaml.")
                raise err
        else:
            logging.error(
                'Input file specified only holds the weights, and not '
                'the model definition. Save the model using '
                'model.save(filename.h5) which will contain the network '
                'architecture as well as its weights. '
                'If the model is saved using the '
                'model.save_weights(filename) function, either '
                'input_model_json or input_model_yaml flags should be set '
                'to import the network architecture prior to loading the '
                'weights. \n'
                'Check the keras documentation for more details '
                '(https://keras.io/getting-started/faq/)')
            raise wrong_file_err