Python keras.models.model_from_json() Examples

The following are 30 code examples showing how to use keras.models.model_from_json(). They are extracted from open source projects; each example is preceded by the project, author, source file, and license it comes from.

You may also want to check out the other available functions and classes of the keras.models module.
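Most of the examples below follow the same round trip: the model architecture is serialized with model.to_json(), the weights are saved separately (usually as HDF5), and the model is later rebuilt with model_from_json() followed by load_weights(). The sketch below is a minimal, self-contained illustration of that pattern, not taken from any of the projects; the file names model.json and model.h5 are arbitrary placeholders. Note that compile settings are not stored in the architecture JSON, so the reloaded model must be compiled again, and models containing custom layers additionally need the custom_objects argument (see Examples 4, 9 and 17).

from keras.models import Sequential, model_from_json
from keras.layers import Dense

# Build a small model and save its architecture (JSON) and weights (HDF5) separately.
model = Sequential([Dense(16, activation='relu', input_dim=8), Dense(1)])
with open('model.json', 'w') as f:
    f.write(model.to_json())
model.save_weights('model.h5')

# Rebuild the architecture from JSON, then restore the weights.
with open('model.json', 'r') as f:
    restored = model_from_json(f.read())
restored.load_weights('model.h5')

# Loss, optimizer and metrics are not part of the JSON, so compile again before use.
restored.compile(loss='mse', optimizer='adam')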

Example 1
Project: reading-text-in-the-wild   Author: mathDR   File: use_charnet.py    License: GNU General Public License v3.0
def __init__(self, architecture_file=None, weight_file=None, optimizer=None):
        # Generate mapping for softmax layer to characters
        output_str = '0123456789abcdefghijklmnopqrstuvwxyz '
        self.output = [x for x in output_str]
        self.L = len(self.output)

        # Load model and saved weights
        from keras.models import model_from_json
        if architecture_file is None:
            self.model = model_from_json(open('char2_architecture.json').read())
        else:
            self.model = model_from_json(open(architecture_file).read())

        if weight_file is None:
            self.model.load_weights('char2_weights.h5')
        else:
            self.model.load_weights(weight_file)

        if optimizer is None:
            from keras.optimizers import SGD
            optimizer = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
        self.model.compile(loss='categorical_crossentropy', optimizer=optimizer) 
Example 2
Project: deep-smoke-machine   Author: CMU-CREATE-Lab   File: keras_utils.py    License: BSD 3-Clause "New" or "Revised" License
def load_model(json_path, weight_path, metrics=None, loss=None, optimizer=None, custom_objects=None, is_compile=True):
    with open(json_path, 'r') as f:
        model_json_string = json.load(f)
    model_json_dict = json.loads(model_json_string)
    model = model_from_json(model_json_string, custom_objects=custom_objects)
    model.load_weights(weight_path)

    if is_compile:
        if optimizer is None:
            optimizer = model_json_dict['optimizer']['name']

        if loss is None:
            loss = model_json_dict['loss']

        if metrics is None:
            model.compile(loss=loss, optimizer=optimizer)
        else:
            model.compile(loss=loss, optimizer=optimizer, metrics=metrics)

    return model 
Example 3
Project: timeception   Author: noureldien   File: keras_utils.py    License: GNU General Public License v3.0
def load_model(json_path, weight_path, metrics=None, loss=None, optimizer=None, custom_objects=None, is_compile=True):
    with open(json_path, 'r') as f:
        model_json_string = json.load(f)
    model_json_dict = json.loads(model_json_string)
    model = model_from_json(model_json_string, custom_objects=custom_objects)
    model.load_weights(weight_path)

    if is_compile:
        if optimizer is None:
            optimizer = model_json_dict['optimizer']['name']

        if loss is None:
            loss = model_json_dict['loss']

        if metrics is None:
            model.compile(loss=loss, optimizer=optimizer)
        else:
            model.compile(loss=loss, optimizer=optimizer, metrics=metrics)

    return model 
Example 4
Project: Document-Classifier-LSTM   Author: AlexGidiotis   File: classifier.py    License: MIT License
def load_model(stamp):
	"""
	"""

	json_file = open(stamp+'.json', 'r')
	loaded_model_json = json_file.read()
	json_file.close()
	model = model_from_json(loaded_model_json, {'AttentionWithContext': AttentionWithContext})

	model.load_weights(stamp+'.h5')
	print("Loaded model from disk")

	model.summary()


	adam = Adam(lr=0.001)
	model.compile(loss='binary_crossentropy',
		optimizer=adam,
		metrics=[f1_score])


	return model 
Example 5
Project: kits19.MIScnn   Author: muellerdo   File: neural_network.py    License: GNU General Public License v3.0
def load(self, name):
        # Create model input path
        inpath_model = os.path.join(self.config["model_path"],
                                    name + ".model.json")
        inpath_weights = os.path.join(self.config["model_path"],
                                      name + ".weights.h5")
        # Load json and create model
        json_file = open(inpath_model, 'r')
        loaded_model_json = json_file.read()
        json_file.close()
        self.model = model_from_json(loaded_model_json)
        # Load weights into new model
        self.model.load_weights(inpath_weights)
        # Compile model
        self.model.compile(optimizer=Adam(lr=self.config["learninig_rate"]),
                           loss=tversky_loss,
                           metrics=self.metrics) 
Example 6
Project: robotreviewer   Author: ijmarshall   File: punchline_extractor.py    License: GNU General Public License v3.0
def __init__(self, architecture_path=None, weights_path=None):
        self.bc = None
        try: 
            self.bc = BertClient() 
        except:
            raise Exception("PunchlineExtractor: Cannot instantiate BertClient. Is it running???")

        # check if we're loading in a pre-trained model
        if architecture_path is not None:
            assert(weights_path is not None)
            
            with open(architecture_path) as model_arch:
                model_arch_str = model_arch.read()
                self.model = model_from_json(model_arch_str)

            self.model.load_weights(weights_path)
        else:
            self.build_model() 
Example 7
Project: robotreviewer   Author: ijmarshall   File: punchline_extractor.py    License: GNU General Public License v3.0
def __init__(self, architecture_path=None, weights_path=None):
        self.bc = None
        try: 
            self.bc = BertClient() 
        except:
            raise Exception("PunchlineExtractor: Cannot instantiate BertClient. Is it running???")

        # check if we're loading in a pre-trained model
        if architecture_path is not None:
            assert(weights_path is not None)
            
            with open(architecture_path) as model_arch:
                model_arch_str = model_arch.read()
                self.model = model_from_json(model_arch_str)

            self.model.load_weights(weights_path)
        else:
            self.build_model() 
Example 8
Project: PSPNet-Keras-tensorflow   Author: Vladkryvoruchko   File: pspnet-video.py    License: MIT License
def __init__(self, nb_classes, resnet_layers, input_shape, weights):
        """Instanciate a PSPNet."""
        self.input_shape = input_shape
        json_path = join("weights", "keras", weights + ".json")
        h5_path = join("weights", "keras", weights + ".h5")
        if isfile(json_path) and isfile(h5_path):
            print("Keras model & weights found, loading...")
            with open(json_path, 'r') as file_handle:
                self.model = model_from_json(file_handle.read())
            self.model.load_weights(h5_path)
        else:
            print("No Keras model & weights found, import from npy weights.")
            self.model = layers.build_pspnet(nb_classes=nb_classes,
                                             resnet_layers=resnet_layers,
                                             input_shape=self.input_shape)
            self.set_npy_weights(weights) 
Example 9
Project: PSPNet-Keras-tensorflow   Author: Vladkryvoruchko   File: pspnet.py    License: MIT License
def __init__(self, nb_classes, resnet_layers, input_shape, weights):
        self.input_shape = input_shape
        self.num_classes = nb_classes

        json_path = join("weights", "keras", weights + ".json")
        h5_path = join("weights", "keras", weights + ".h5")
        if 'pspnet' in weights:
            if os.path.isfile(json_path) and os.path.isfile(h5_path):
                print("Keras model & weights found, loading...")
                with CustomObjectScope({'Interp': layers.Interp}):
                    with open(json_path) as file_handle:
                        self.model = model_from_json(file_handle.read())
                self.model.load_weights(h5_path)
            else:
                print("No Keras model & weights found, import from npy weights.")
                self.model = layers.build_pspnet(nb_classes=nb_classes,
                                                 resnet_layers=resnet_layers,
                                                 input_shape=self.input_shape)
                self.set_npy_weights(weights)
        else:
            print('Load pre-trained weights')
            self.model = load_model(weights) 
Example 10
Project: GEM-Benchmark   Author: palash1992   File: gcn.py    License: BSD 3-Clause "New" or "Revised" License
def get_reconst_from_embed(self, embed, node_l=None, filesuffix=None):
        if filesuffix is None:
            if node_l is not None:
                return self._decoder.predict(
                    embed,
                    batch_size=self._n_batch)[:, node_l]
            else:
                return self._decoder.predict(embed, batch_size=self._n_batch)
        else:
            try:
                decoder = model_from_json(
                    open('decoder_model_' + filesuffix + '.json').read()
                )
            except:
                print('Error reading file: {0}. Cannot load previous model'.format('decoder_model_'+filesuffix+'.json'))
                exit()
            try:
                decoder.load_weights('decoder_weights_' + filesuffix + '.hdf5')
            except:
                print('Error reading file: {0}. Cannot load previous weights'.format('decoder_weights_'+filesuffix+'.hdf5'))
                exit()
            if node_l is not None:
                return decoder.predict(embed, batch_size=self._n_batch)[:, node_l]
            else:
                return decoder.predict(embed, batch_size=self._n_batch) 
Example 11
Project: GEM-Benchmark   Author: palash1992   File: vae.py    License: BSD 3-Clause "New" or "Revised" License
def get_reconst_from_embed(self, embed, node_l=None, filesuffix=None):
        if filesuffix is None:
            if node_l is not None:
                return self._decoder.predict(
                    embed,
                    batch_size=self._n_batch
                )[:, node_l]
            else:
                return self._decoder.predict(embed, batch_size=self._n_batch)
        else:
            try:
                decoder = model_from_json(
                    open('decoder_model_' + filesuffix + '.json').read())
            except:
                print('Error reading file: {0}. Cannot load previous model'.format('decoder_model_'+filesuffix+'.json'))
                exit()
            try:
                decoder.load_weights('decoder_weights_'+filesuffix+'.hdf5')
            except:
                print('Error reading file: {0}. Cannot load previous weights'.format('decoder_weights_'+filesuffix+'.hdf5'))
                exit()
            if node_l is not None:
                return decoder.predict(embed, batch_size=self._n_batch)[:, node_l]
            else:
                return decoder.predict(embed, batch_size=self._n_batch) 
Example 12
Project: GEM-Benchmark   Author: palash1992   File: sdne.py    License: BSD 3-Clause "New" or "Revised" License
def get_reconst_from_embed(self, embed, node_l=None, filesuffix=None):
        if filesuffix is None:
            if node_l is not None:
                return self._decoder.predict(
                    embed,
                    batch_size=self._n_batch)[:, node_l]
            else:
                return self._decoder.predict(embed, batch_size=self._n_batch)
        else:
            try:
                decoder = model_from_json(
                    open('decoder_model_' + filesuffix + '.json').read()
                )
            except:
                print('Error reading file: {0}. Cannot load previous model'.format('decoder_model_'+filesuffix+'.json'))
                exit()
            try:
                decoder.load_weights('decoder_weights_' + filesuffix + '.hdf5')
            except:
                print('Error reading file: {0}. Cannot load previous weights'.format('decoder_weights_'+filesuffix+'.hdf5'))
                exit()
            if node_l is not None:
                return decoder.predict(embed, batch_size=self._n_batch)[:, node_l]
            else:
                return decoder.predict(embed, batch_size=self._n_batch) 
Example 13
Project: GEM-Benchmark   Author: palash1992   File: ae_static.py    License: BSD 3-Clause "New" or "Revised" License
def get_reconst_from_embed(self, embed, node_l=None, filesuffix=None):
        if filesuffix is None:
            if node_l is not None:
                return self._decoder.predict(
                    embed,
                    batch_size=self._n_batch
                )[:, node_l]
            else:
                return self._decoder.predict(embed, batch_size=self._n_batch)
        else:
            try:
                decoder = model_from_json(
                    open('decoder_model_' + filesuffix + '.json').read())
            except:
                print('Error reading file: {0}. Cannot load previous model'.format('decoder_model_'+filesuffix+'.json'))
                exit()
            try:
                decoder.load_weights('decoder_weights_'+filesuffix+'.hdf5')
            except:
                print('Error reading file: {0}. Cannot load previous weights'.format('decoder_weights_'+filesuffix+'.hdf5'))
                exit()
            if node_l is not None:
                return decoder.predict(embed, batch_size=self._n_batch)[:, node_l]
            else:
                return decoder.predict(embed, batch_size=self._n_batch) 
Example 14
Project: sia-cog   Author: tech-quantum   File: pipelinecomponents.py    License: MIT License
def model_predict(X, pipeline):
    if model_type == "mlp":
        json_file = open(projectfolder + '/model.json', 'r')
        loaded_model_json = json_file.read()
        json_file.close()
        model = model_from_json(loaded_model_json)
        model.load_weights(projectfolder + "/weights.hdf5")
        model.compile(loss=pipeline['options']['loss'], optimizer=pipeline['options']['optimizer'],
                         metrics=pipeline['options']['scoring'])
        if type(X) is pandas.DataFrame:
            X = X.values
        Y = model.predict(X)
    else:
        picklefile = projectfolder + "/model.out"
        with open(picklefile, "rb") as f:
            model = pickle.load(f)
        Y = model.predict(X)

    return Y 
Example 15
Project: reversi_ai   Author: andysalerno   File: q_learning_agent.py    License: MIT License
def get_model(self, filename=None):
        """Given a filename, load that model file; otherwise, generate a new model."""
        model = None
        if filename:
            info('attempting to load model {}'.format(filename))
            try:
                model = model_from_json(open(filename).read())
            except FileNotFoundError:
                print('could not load file {}'.format(filename))
                quit()
            print('loaded model file {}'.format(filename))
        else:
            print('no model file loaded, generating new model.')
            size = self.reversi.size ** 2
            model = Sequential()
            model.add(Dense(HIDDEN_SIZE, activation='relu', input_dim=size))
            # model.add(Dense(HIDDEN_SIZE, activation='relu'))
            model.add(Dense(size))

        model.compile(loss='mse', optimizer=optimizer)
        return model 
Example 16
Project: sonic_contest   Author: flyyufelix   File: atari_wrappers.py    License: MIT License
def __init__(self, env):
        """Warp frames to 84x84 as done in the Nature paper and later work."""
        gym.ObservationWrapper.__init__(self, env)
        self.width = 84
        self.height = 84
        self.observation_space = spaces.Box(low=0, high=255,
            shape=(self.height, self.width, 1), dtype=np.uint8)
        #print("Load Keras Model!!!")
        # Load Keras model
        #self.json_name = './retro-movies/architecture_level_classifier_v5.json'
        #self.weight_name = './retro-movies/model_weights_level_classifier_v5.h5'
        #self.levelcls_model = model_from_json(open(self.json_name).read())
        #self.levelcls_model.load_weights(self.weight_name, by_name=True)
        ##self.levelcls_model.load_weights(self.weight_name)
        #print("Done Loading Keras Model!!!")
        #self.mean_pixel = [103.939, 116.779, 123.68]
        #self.warmup = 1000
        #self.interval = 500
        #self.counter = 0
        #self.num_inference = 0
        #self.max_inference = 5
        self.level_pred = [] 
Example 17
Project: Contrastive-Explanation-Method   Author: IBM   File: Utils.py    License: Apache License 2.0
def load_AE(codec_prefix, print_summary=False):

    saveFilePrefix = "models/AE_codec/" + codec_prefix + "_"

    decoder_model_filename = saveFilePrefix + "decoder.json"
    decoder_weight_filename = saveFilePrefix + "decoder.h5"

    if not os.path.isfile(decoder_model_filename):
        raise Exception("The file for decoder model does not exist:{}".format(decoder_model_filename))
    json_file = open(decoder_model_filename, 'r')
    decoder = model_from_json(json_file.read(), custom_objects={"tf": tf})
    json_file.close()

    if not os.path.isfile(decoder_weight_filename):
        raise Exception("The file for decoder weights does not exist:{}".format(decoder_weight_filename))
    decoder.load_weights(decoder_weight_filename)

    if print_summary:
        print("Decoder summaries")
        decoder.summary()

    return decoder 
Example 18
Project: DLSCORE   Author: sirimullalab   File: NNScore2.01.02.py    License: MIT License
def getout(self):
        #get and denormalize output units
    
        for k in range(1,len(self.outno)+1):
            self.output[k] = self.deo[k][1] * self.units[self.outno[k]] + self.deo[k][2]
			
			
#def dlscore():
    # Load the model
#	with open("model.json", "r") as json_file:
#	    loaded_model = model_from_json(json_file.read())

	# Load weights
#	loaded_model.load_weights("model.h5")

	# Compile the model
#	loaded_model.compile(
#		loss='mean_squared_error',
#		optimizer=keras.optimizers.Adam(lr=0.001),
#		metrics=[metrics.MSE])
	
#	return loaded_model 
Example 19
Project: DLSCORE   Author: sirimullalab   File: NNScore2.01.03.py    License: MIT License
def getout(self):
        #get and denormalize output units
    
        for k in range(1,len(self.outno)+1):
            self.output[k] = self.deo[k][1] * self.units[self.outno[k]] + self.deo[k][2]
			
			
#def dlscore():
    # Load the model
#	with open("model.json", "r") as json_file:
#	    loaded_model = model_from_json(json_file.read())

	# Load weights
#	loaded_model.load_weights("model.h5")

	# Compile the model
#	loaded_model.compile(
#		loss='mean_squared_error',
#		optimizer=keras.optimizers.Adam(lr=0.001),
#		metrics=[metrics.MSE])
	
#	return loaded_model 
Example 20
Project: mljar-supervised   Author: mljar   File: nn.py    License: MIT License
def __init__(self, params):
        super(NeuralNetworkAlgorithm, self).__init__(params)

        self.library_version = keras.__version__

        self.rounds = additional.get("one_step", 1)
        self.max_iters = additional.get("max_steps", 1)
        self.learner_params = {
            "dense_layers": params.get("dense_layers"),
            "dense_1_size": params.get("dense_1_size"),
            "dense_2_size": params.get("dense_2_size"),
            "dropout": params.get("dropout"),
            "learning_rate": params.get("learning_rate"),
            "momentum": params.get("momentum"),
            "decay": params.get("decay"),
        }
        self.model = None  # we need input data shape to construct model

        if "model_architecture_json" in params:
            self.model = model_from_json(
                json.loads(params.get("model_architecture_json"))
            )
            self.compile_model()

        logger.debug("NeuralNetworkAlgorithm __init__") 
Example 21
Project: models   Author: kipoi   File: model.py    License: MIT License
def __init__(self, embd_arch, embd_weights, arch, weights):
        self.embd_model = model_from_json(open(embd_arch).read())
        self.embd_model.load_weights(embd_weights)
        self.model = model_from_json(open(arch).read())
        self.model.load_weights(weights) 
Example 22
Project: keras-anomaly-detection   Author: chen0040   File: feedforward.py    License: MIT License
def load_model(self, model_dir_path):
        config_file_path = FeedForwardAutoEncoder.get_config_file_path(model_dir_path)
        self.config = np.load(config_file_path).item()
        self.input_dim = self.config['input_dim']
        self.threshold = self.config['threshold']

        architecture_file_path = FeedForwardAutoEncoder.get_architecture_file_path(model_dir_path)
        self.model = model_from_json(open(architecture_file_path, 'r').read())
        weight_file_path = FeedForwardAutoEncoder.get_weight_file_path(model_dir_path)
        self.model.load_weights(weight_file_path) 
Example 23
Project: visual_turing_test-tutorial   Author: mateuszmalinowski   File: read_write.py    License: MIT License
def json_to_model(path):
    """
    Loads a model from the json file.
    """
    import json
    from keras.models import model_from_json
    with open(path, 'r') as f:
        json_model = json.load(f)
    model = model_from_json(json_model)
    return model 
Example 24
Project: tartarus   Author: sergiooramas   File: predict.py    License: MIT License
def load_model(model_file):
    """Loads the model from the given model file."""
    with open(model_file) as f:
        json_str = json.load(f)
    return model_from_json(json_str) 
Example 25
Project: Keras-BiGAN   Author: manicman1999   File: bigan.py    License: MIT License
def __init__(self, steps = 1, lr = 0.0001, decay = 0.00001):

        #Models
        self.D = None
        self.E = None
        self.G = None

        self.GE = None
        self.EE = None

        self.DM = None
        self.AM = None

        #Config
        self.LR = lr
        self.steps = steps
        self.beta = 0.999

        #Init Models
        self.discriminator()
        self.generator()
        self.encoder()

        self.EE = model_from_json(self.E.to_json())
        self.EE.set_weights(self.E.get_weights())

        self.GE = model_from_json(self.G.to_json())
        self.GE.set_weights(self.G.get_weights()) 
Example 26
Project: Keras-BiGAN   Author: manicman1999   File: bigan.py    License: MIT License
def loadModel(self, name, num):

        file = open("Models/"+name+".json", 'r')
        json = file.read()
        file.close()

        mod = model_from_json(json)
        mod.load_weights("Models/"+name+"_"+str(num)+".h5")

        return mod 
Example 27
Project: Keras-BiGAN   Author: manicman1999   File: guess.py    License: MIT License
def loadModel(name, num):

    file = open("Models/"+name+".json", 'r')
    json = file.read()
    file.close()

    mod = model_from_json(json)
    mod.load_weights("Models/"+name+"_"+str(num)+".h5")

    return mod 
Example 28
Project: blackbox-attacks   Author: sunblaze-ucb   File: mnist.py    License: MIT License
def load_model(model_path, type=1):

    try:
        with open(model_path+'.json', 'r') as f:
            json_string = f.read()
            model = model_from_json(json_string)
    except IOError:
        model = model_mnist(type=type)

    model.load_weights(model_path)
    return model 
Example 29
Project: Document-Classifier-LSTM   Author: AlexGidiotis   File: hatt_classifier.py    License: MIT License
def load_model(stamp,
	multilabel=True):
	"""
	"""

	json_file = open(stamp+'.json', 'r')
	loaded_model_json = json_file.read()
	json_file.close()
	model = model_from_json(loaded_model_json)

	model.load_weights(stamp+'.h5')
	print("Loaded model from disk")

	model.summary()


	adam = Adam(lr=0.001)
	if multilabel:
		model.compile(loss='binary_crossentropy',
			optimizer=adam,
			metrics=[f1_score])
	else:
		model.compile(loss='categorical_crossentropy',
			optimizer=adam,
			metrics=['accuracy'])

	return model 
Example 30
Project: AIX360   Author: IBM   File: test_CEM.py    License: Apache License 2.0
def load_model(self, model_json_file, model_wt_file):

        # read model json file
        with open(model_json_file, 'r') as f:
            model = model_from_json(f.read())

        # read model weights file
        model.load_weights(model_wt_file)

        return model