Python keras.callbacks.ReduceLROnPlateau() Examples

The following are 26 code examples of keras.callbacks.ReduceLROnPlateau(), drawn from open-source projects. The project and source file for each example are noted above it. You may also want to check out all other available functions and classes of the keras.callbacks module.
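
Before the project examples, here is a minimal, self-contained sketch of how ReduceLROnPlateau is typically attached to model.fit(); the toy model, random data, and hyperparameter values are illustrative assumptions rather than code from any of the projects below.

# Minimal standalone sketch (illustrative toy model and random data, not from
# any project below): halve the learning rate once val_loss stops improving
# for 3 consecutive epochs.
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.callbacks import ReduceLROnPlateau

model = Sequential([Dense(32, activation='relu', input_shape=(10,)),
                    Dense(1)])
model.compile(optimizer='adam', loss='mse')

reduce_lr = ReduceLROnPlateau(monitor='val_loss',  # quantity tracked in the training logs
                              factor=0.5,          # new_lr = lr * factor
                              patience=3,          # epochs without improvement before reducing
                              min_lr=1e-6,         # lower bound on the learning rate
                              verbose=1)

x, y = np.random.rand(100, 10), np.random.rand(100, 1)
model.fit(x, y, epochs=20, validation_split=0.2, callbacks=[reduce_lr])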
Example #1
Source File: train.py    From keras-ctpn with Apache License 2.0 6 votes
def get_call_back():
    """
    定义call back
    :return:
    """
    checkpoint = ModelCheckpoint(filepath='/tmp/ctpn.{epoch:03d}.h5',
                                 monitor='val_loss',
                                 verbose=1,
                                 save_best_only=False,
                                 save_weights_only=True,
                                 period=5)

    # Reduce the learning rate when the validation loss has stopped improving
    lr_reducer = ReduceLROnPlateau(monitor='loss',
                                   factor=0.1,
                                   cooldown=0,
                                   patience=10,
                                   min_lr=1e-4)
    log = TensorBoard(log_dir='log')
    return [lr_reducer, checkpoint, log] 
Example #2
Source File: chatbot.py    From Intelligent-Projects-Using-Python with MIT License 6 votes
def train_model(self,model,X_train,X_test,y_train,y_test):
        input_y_train = self.include_start_token(y_train)
        print(input_y_train.shape)
        input_y_test = self.include_start_token(y_test)
        print(input_y_test.shape)
        early = EarlyStopping(monitor='val_loss',patience=10,mode='auto')

        checkpoint = ModelCheckpoint(self.outpath + 's2s_model_' + str(self.version) + '_.h5',monitor='val_loss',verbose=1,save_best_only=True,mode='auto')
        lr_reduce = ReduceLROnPlateau(monitor='val_loss',factor=0.5, patience=2, verbose=0, mode='auto')
        model.fit([X_train,input_y_train],y_train, 
		      epochs=self.epochs,
		      batch_size=self.batch_size, 
		      validation_data=[[X_test,input_y_test],y_test], 
		      callbacks=[early,checkpoint,lr_reduce], 
		      shuffle=True)
        return model 
Example #3
Source File: callbacks.py    From keras-bert-ner with MIT License 6 votes
def NerCallbacks(id_to_tag, best_fit_params=None, mask_tag=None, log_path=None):
    """模型训练过程中的回调函数
    """
    callbacks = [Accuracy(id_to_tag, mask_tag, log_path)]
    if best_fit_params is not None:
        early_stopping = EarlyStopping(
            monitor="val_crf_accuracy",
            patience=best_fit_params.get("early_stop_patience"))
        reduce_lr_on_plateau = ReduceLROnPlateau(
            monitor="val_crf_accuracy", verbose=1, mode="max",
            factor=best_fit_params.get("reduce_lr_factor"),
            patience=best_fit_params.get("reduce_lr_patience"))
        model_check_point = ModelCheckpoint(
            best_fit_params.get("save_path"),
            monitor="val_crf_accuracy", verbose=2, mode="max", save_best_only=True)
        callbacks.extend([early_stopping, reduce_lr_on_plateau, model_check_point])
    return callbacks 
Example #4
Source File: __init__.py    From ImageAI with MIT License 6 votes
def _create_callbacks(self, saved_weights_name, model_to_save):

        checkpoint = CustomModelCheckpoint(
            model_to_save=model_to_save,
            filepath=saved_weights_name + 'ex-{epoch:03d}--loss-{loss:08.3f}.h5',
            monitor='loss',
            verbose=0,
            save_best_only=True,
            mode='min',
            period=1
        )
        reduce_on_plateau = ReduceLROnPlateau(
            monitor='loss',
            factor=0.1,
            patience=2,
            verbose=0,
            mode='min',
            epsilon=0.01,
            cooldown=0,
            min_lr=0
        )
        tensor_board = TensorBoard(
            log_dir=self.__logs_directory
        )
        return [checkpoint, reduce_on_plateau, tensor_board] 
Example #5
Source File: vae.py    From KATE with BSD 3-Clause "New" or "Revised" License 6 votes
def fit(self, train_X, val_X, nb_epoch=50, batch_size=100):
        print('Training variational autoencoder')
        optimizer = Adadelta(lr=2.)
        self.vae.compile(optimizer=optimizer, loss=self.vae_loss)

        self.vae.fit(train_X[0], train_X[1],
                shuffle=True,
                epochs=nb_epoch,
                batch_size=batch_size,
                validation_data=(val_X[0], val_X[1]),
                callbacks=[ReduceLROnPlateau(monitor='val_loss', factor=0.2, patience=3, min_lr=0.01),
                            EarlyStopping(monitor='val_loss', min_delta=1e-5, patience=5, verbose=1, mode='auto'),
                            CustomModelCheckpoint(self.encoder, self.save_model, monitor='val_loss', save_best_only=True, mode='auto')
                        ]
                )

        return self 
Example #6
Source File: test_callbacks.py    From DeepLearning_Wavelet-LSTM with MIT License 6 votes
def test_ReduceLROnPlateau_patience():
    class DummyOptimizer(object):
        def __init__(self):
            self.lr = K.variable(1.0)

    class DummyModel(object):
        def __init__(self):
            self.optimizer = DummyOptimizer()

    reduce_on_plateau = callbacks.ReduceLROnPlateau(monitor='val_loss',
                                                    patience=2)
    reduce_on_plateau.model = DummyModel()

    losses = [0.0860, 0.1096, 0.1040]
    lrs = []

    for epoch in range(len(losses)):
        reduce_on_plateau.on_epoch_end(epoch, logs={'val_loss': losses[epoch]})
        lrs.append(K.get_value(reduce_on_plateau.model.optimizer.lr))

    # The learning rates should be 1.0 except the last one
    assert all([lr == 1.0 for lr in lrs[:-1]]) and lrs[-1] < 1.0 
Example #7
Source File: mlearn.py    From easy12306 with Artistic License 2.0 6 votes
def main():
    (train_x, train_y), (test_x, test_y) = load_data()
    model = models.Sequential([
        layers.Conv2D(64, (3, 3), padding='same', activation='relu', input_shape=(None, None, 1)),
        layers.MaxPooling2D(),  # 19 -> 9
        layers.Conv2D(64, (3, 3), padding='same', activation='relu'),
        layers.MaxPooling2D(),  # 9 -> 4
        layers.Conv2D(64, (3, 3), padding='same', activation='relu'),
        layers.MaxPooling2D(),  # 4 -> 2
        layers.GlobalAveragePooling2D(),
        layers.Dropout(0.25),
        layers.Dense(64, activation='relu'),
        layers.Dense(80, activation='softmax'),
    ])
    model.summary()
    model.compile(optimizer='rmsprop',
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    # Reduce the learning rate when the monitored metric stops improving
    reduce_lr = ReduceLROnPlateau(verbose=1)
    history = model.fit(train_x, train_y, epochs=100,
                        validation_data=(test_x, test_y),
                        callbacks=[reduce_lr])
    savefig(history, start=10)
    model.save('model.v1.0.h5', include_optimizer=False) 
Example #8
Source File: test_lr_plateau.py    From hyperas with MIT License 6 votes
def create_model(x_train, y_train, x_test, y_test):
    model = Sequential()
    model.add(Dense(44, input_shape=(784,)))
    model.add(Activation({{choice(['relu', 'sigmoid'])}}))
    model.add(Dense(44))
    model.add(Activation({{choice(['relu', 'sigmoid'])}}))
    model.add(Dense(10))

    model.compile(loss='mae', metrics=['mse'], optimizer="adam")

    es = EarlyStopping(monitor='val_loss', min_delta=1e-5, patience=10)
    rlr = ReduceLROnPlateau(factor=0.1, patience=10)
    _ = model.fit(x_train, y_train, epochs=1, verbose=0, callbacks=[es, rlr],
                  batch_size=24, validation_data=(x_test, y_test))

    mae, mse = model.evaluate(x_test, y_test, verbose=0)
    print('MAE:', mae)
    return {'loss': mae, 'status': STATUS_OK, 'model': model} 
Example #9
Source File: training.py    From 3D-CNNs-for-Liver-Classification with Apache License 2.0 6 votes
def get_callbacks(model_file, initial_learning_rate=0.0001, learning_rate_drop=0.5, learning_rate_epochs=None,
                  learning_rate_patience=50, logging_file="training.log", verbosity=1,
                  early_stopping_patience=None):
    callbacks = list()
    callbacks.append(ModelCheckpoint(model_file,monitor='val_acc', save_best_only=True,verbose=verbosity, save_weights_only=True))
    # callbacks.append(ModelCheckpoint(model_file, save_best_only=True, save_weights_only=True))
    callbacks.append(CSVLogger(logging_file, append=True))
    if learning_rate_epochs:
        callbacks.append(LearningRateScheduler(partial(step_decay, initial_lrate=initial_learning_rate,
                                                       drop=learning_rate_drop, epochs_drop=learning_rate_epochs)))
    else:
        callbacks.append(ReduceLROnPlateau(factor=learning_rate_drop, patience=learning_rate_patience,
                                           verbose=verbosity))
    if early_stopping_patience:
        callbacks.append(EarlyStopping(verbose=verbosity, patience=early_stopping_patience))
    return callbacks 
Example #10
Source File: neuralnets.py    From EmoPy with GNU Affero General Public License v3.0 6 votes
def fit(self, features, labels, validation_split, epochs=50):
        """
        Trains the neural net on the data provided.

        :param features: Numpy array of training data.
        :param labels: Numpy array of target (label) data.
        :param validation_split: Float between 0 and 1. Percentage of training data to use for validation
        :param epochs: Max number of times to train over dataset.
        """
        self.model.fit(x=features, y=labels, epochs=epochs, verbose=1,
                       callbacks=[ReduceLROnPlateau(), EarlyStopping(patience=3)], validation_split=validation_split,
                       shuffle=True)

        for layer in self.model.layers[:self._NUM_BOTTOM_LAYERS_TO_RETRAIN]:
            layer.trainable = False
        for layer in self.model.layers[self._NUM_BOTTOM_LAYERS_TO_RETRAIN:]:
            layer.trainable = True

        self.model.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['accuracy'])
        self.model.fit(x=features, y=labels, epochs=50, verbose=1,
                       callbacks=[ReduceLROnPlateau(), EarlyStopping(patience=3)], validation_split=validation_split,
                       shuffle=True) 
Example #11
Source File: core.py    From enet-keras with MIT License 6 votes
def callbacks(self):
        """
        :return:
        """
        # TODO: Add ReduceLROnPlateau callback
        cbs = []

        tb = TensorBoard(log_dir=self.log_dir,
                         write_graph=True,
                         write_images=True)
        cbs.append(tb)

        best_model_filename = self.model_name + '_best.h5'
        best_model = os.path.join(self.checkpoint_dir, best_model_filename)
        save_best = ModelCheckpoint(best_model, save_best_only=True)
        cbs.append(save_best)

        checkpoints = ModelCheckpoint(filepath=self.checkpoint_file, verbose=1)
        cbs.append(checkpoints)

        reduce_lr = ReduceLROnPlateau(patience=1, verbose=1)
        cbs.append(reduce_lr)
        return cbs 
Example #12
Source File: training.py    From Keras-Brats-Improved-Unet3d with MIT License 5 votes
def get_callbacks(model_file, initial_learning_rate=0.0001, learning_rate_drop=0.5, learning_rate_epochs=None,
                  learning_rate_patience=50, logging_file="training.log", verbosity=1,
                  early_stopping_patience=None):
    callbacks = list()
    callbacks.append(ModelCheckpoint(model_file, save_best_only=True))
    callbacks.append(CSVLogger(logging_file, append=True))
    if learning_rate_epochs:
        callbacks.append(LearningRateScheduler(partial(step_decay, initial_lrate=initial_learning_rate,
                                                       drop=learning_rate_drop, epochs_drop=learning_rate_epochs)))
    else:
        callbacks.append(ReduceLROnPlateau(factor=learning_rate_drop, patience=learning_rate_patience,
                                           verbose=verbosity))
    if early_stopping_patience:
        callbacks.append(EarlyStopping(verbose=verbosity, patience=early_stopping_patience))
    return callbacks 
Example #13
Source File: train.py    From keras-molecules with MIT License 5 votes
def main():
    args = get_arguments()
    np.random.seed(args.random_seed)

    from molecules.model import MoleculeVAE
    from molecules.utils import one_hot_array, one_hot_index, from_one_hot_array, \
        decode_smiles_from_indexes, load_dataset
    from keras.callbacks import ModelCheckpoint, ReduceLROnPlateau
    
    data_train, data_test, charset = load_dataset(args.data)
    model = MoleculeVAE()
    if os.path.isfile(args.model):
        model.load(charset, args.model, latent_rep_size = args.latent_dim)
    else:
        model.create(charset, latent_rep_size = args.latent_dim)

    checkpointer = ModelCheckpoint(filepath = args.model,
                                   verbose = 1,
                                   save_best_only = True)

    reduce_lr = ReduceLROnPlateau(monitor = 'val_loss',
                                  factor = 0.2,
                                  patience = 3,
                                  min_lr = 0.0001)

    model.autoencoder.fit(
        data_train,
        data_train,
        shuffle = True,
        nb_epoch = args.epochs,
        batch_size = args.batch_size,
        callbacks = [checkpointer, reduce_lr],
        validation_data = (data_test, data_test)
    ) 
Example #14
Source File: training.py    From 3DUnetCNN with MIT License 5 votes
def get_callbacks(model_file, initial_learning_rate=0.0001, learning_rate_drop=0.5, learning_rate_epochs=None,
                  learning_rate_patience=50, logging_file="training.log", verbosity=1,
                  early_stopping_patience=None):
    callbacks = list()
    callbacks.append(ModelCheckpoint(model_file, save_best_only=True))
    callbacks.append(CSVLogger(logging_file, append=True))
    if learning_rate_epochs:
        callbacks.append(LearningRateScheduler(partial(step_decay, initial_lrate=initial_learning_rate,
                                                       drop=learning_rate_drop, epochs_drop=learning_rate_epochs)))
    else:
        callbacks.append(ReduceLROnPlateau(factor=learning_rate_drop, patience=learning_rate_patience,
                                           verbose=verbosity))
    if early_stopping_patience:
        callbacks.append(EarlyStopping(verbose=verbosity, patience=early_stopping_patience))
    return callbacks 
Example #15
Source File: train.py    From MSCNN with MIT License 5 votes
def train(batch, epochs, size):
    """Train the model.

    Arguments:
        batch: Integer, The number of train samples per batch.
        epochs: Integer, The number of train iterations.
        size: Integer, image size.
    """
    if not os.path.exists('model'):
        os.makedirs('model')

    model = MSCNN((size, size, 3))

    opt = SGD(lr=1e-5, momentum=0.9, decay=0.0005)
    model.compile(optimizer=opt, loss='mse')

    lr = ReduceLROnPlateau(monitor='loss', min_lr=1e-7)

    indices = list(range(1500))
    train, test = train_test_split(indices, test_size=0.25)

    hist = model.fit_generator(
        generator(train, batch, size),
        validation_data=generator(test, batch, size),
        steps_per_epoch=len(train) // batch,
        validation_steps=len(test) // batch,
        epochs=epochs,
        callbacks=[lr])

    model.save_weights('model\\final_weights.h5')

    df = pd.DataFrame.from_dict(hist.history)
    df.to_csv('model\\history.csv', index=False, encoding='utf-8') 
Example #16
Source File: callbacks.py    From PSPNet-Keras-tensorflow with MIT License 5 votes
def callbacks(logdir):
  model_checkpoint = ModelCheckpoint("weights_train/weights.{epoch:02d}-{loss:.2f}.h5", monitor='loss', verbose=1, period=10) 
  tensorboard_callback = TensorBoard(log_dir=logdir, write_graph=True, write_images=True, histogram_freq=1)
  plateau_callback = ReduceLROnPlateau(monitor='loss', factor=0.99, verbose=1, patience=0, min_lr=0.00001) 
  #return [CheckPoints(), tensorboard_callback, LrReducer()]
  return [model_checkpoint, tensorboard_callback, plateau_callback, LrReducer()] 
Example #17
Source File: test_keras_helper.py    From hyperparameter_hunter with MIT License 5 votes
def test_keras_callback_to_key():
    expected_key = "ReduceLROnPlateau(cooldown=0, factor=0.1, kwargs=None, min_delta=0.0001, min_lr=0, mode='auto', monitor='val_loss', patience=32)"
    assert keras_callback_to_key(callbacks.ReduceLROnPlateau(patience=32)) == expected_key 
Example #18
Source File: test_keras.py    From hyperparameter_hunter with MIT License 5 votes
def opt_regressor():
    optimizer = DummyOptPro(iterations=1)
    optimizer.forge_experiment(
        model_initializer=KerasRegressor,
        model_init_params=_build_fn_regressor,
        model_extra_params=dict(
            callbacks=[ReduceLROnPlateau(patience=Integer(5, 10))],
            batch_size=Categorical([32, 64], transform="onehot"),
            epochs=10,
            verbose=0,
        ),
    )
    optimizer.go() 
Example #19
Source File: optimization_example.py    From hyperparameter_hunter with MIT License 5 votes
def _execute():
    #################### Environment ####################
    env = Environment(
        train_dataset=get_breast_cancer_data(target="target"),
        results_path="HyperparameterHunterAssets",
        metrics=["roc_auc_score"],
        cv_type="StratifiedKFold",
        cv_params=dict(n_splits=5, shuffle=True, random_state=32),
    )

    #################### Experimentation ####################
    experiment = CVExperiment(
        model_initializer=KerasClassifier,
        model_init_params=dict(build_fn=_build_fn_experiment),
        model_extra_params=dict(
            callbacks=[ReduceLROnPlateau(patience=5)], batch_size=32, epochs=10, verbose=0
        ),
    )

    #################### Optimization ####################
    optimizer = BayesianOptPro(iterations=10)
    optimizer.forge_experiment(
        model_initializer=KerasClassifier,
        model_init_params=dict(build_fn=_build_fn_optimization),
        model_extra_params=dict(
            callbacks=[ReduceLROnPlateau(patience=Integer(5, 10))],
            batch_size=Categorical([32, 64], transform="onehot"),
            epochs=10,
            verbose=0,
        ),
    )
    optimizer.go() 
Example #20
Source File: care_standard.py    From CSBDeep with BSD 3-Clause "New" or "Revised" License 5 votes
def prepare_for_training(self, optimizer=None, **kwargs):
        """Prepare for neural network training.

        Calls :func:`csbdeep.internals.train.prepare_model` and creates
        `Keras Callbacks <https://keras.io/callbacks/>`_ to be used for training.

        Note that this method will be implicitly called once by :func:`train`
        (with default arguments) if not done so explicitly beforehand.

        Parameters
        ----------
        optimizer : obj or None
            Instance of a `Keras Optimizer <https://keras.io/optimizers/>`_ to be used for training.
            If ``None`` (default), uses ``Adam`` with the learning rate specified in ``config``.
        kwargs : dict
            Additional arguments for :func:`csbdeep.internals.train.prepare_model`.

        """
        if optimizer is None:
            from keras.optimizers import Adam
            optimizer = Adam(lr=self.config.train_learning_rate)
        self.callbacks = train.prepare_model(self.keras_model, optimizer, self.config.train_loss, **kwargs)

        if self.basedir is not None:
            self.callbacks += self._checkpoint_callbacks()

            if self.config.train_tensorboard:
                from ..utils.tf import CARETensorBoard
                self.callbacks.append(CARETensorBoard(log_dir=str(self.logdir), prefix_with_timestamp=False, n_images=3, write_images=True, prob_out=self.config.probabilistic))

        if self.config.train_reduce_lr is not None:
            from keras.callbacks import ReduceLROnPlateau
            rlrop_params = self.config.train_reduce_lr
            if 'verbose' not in rlrop_params:
                rlrop_params['verbose'] = True
            self.callbacks.append(ReduceLROnPlateau(**rlrop_params))

        self._model_prepared = True 
Example #21
Source File: train.py    From keras-yolo3 with MIT License 5 votes
def create_callbacks(saved_weights_name, tensorboard_logs, model_to_save):
    makedirs(tensorboard_logs)
    
    early_stop = EarlyStopping(
        monitor     = 'loss', 
        min_delta   = 0.01, 
        patience    = 7, 
        mode        = 'min', 
        verbose     = 1
    )
    checkpoint = CustomModelCheckpoint(
        model_to_save   = model_to_save,
        filepath        = saved_weights_name,# + '{epoch:02d}.h5', 
        monitor         = 'loss', 
        verbose         = 1, 
        save_best_only  = True, 
        mode            = 'min', 
        period          = 1
    )
    reduce_on_plateau = ReduceLROnPlateau(
        monitor  = 'loss',
        factor   = 0.1,
        patience = 2,
        verbose  = 1,
        mode     = 'min',
        epsilon  = 0.01,
        cooldown = 0,
        min_lr   = 0
    )
    tensorboard = CustomTensorBoard(
        log_dir                = tensorboard_logs,
        write_graph            = True,
        write_images           = True,
    )    
    return [early_stop, checkpoint, reduce_on_plateau, tensorboard] 
Example #22
Source File: mlearn_for_image.py    From easy12306 with Artistic License 2.0 5 votes
def learn():
    (train_x, train_y, sample_weight), (test_x, test_y) = load_data()
    datagen = ImageDataGenerator(horizontal_flip=True,
                                 vertical_flip=True)
    train_generator = datagen.flow(train_x, train_y, sample_weight=sample_weight)
    base = VGG16(weights='imagenet', include_top=False, input_shape=(None, None, 3))
    for layer in base.layers[:-4]:
        layer.trainable = False
    model = models.Sequential([
        base,
        layers.BatchNormalization(),
        layers.Conv2D(64, (3, 3), activation='relu', padding='same'),
        layers.GlobalAveragePooling2D(),
        layers.BatchNormalization(),
        layers.Dense(64, activation='relu'),
        layers.BatchNormalization(),
        layers.Dropout(0.20),
        layers.Dense(80, activation='softmax')
    ])
    model.compile(optimizer=optimizers.RMSprop(lr=1e-5),
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    model.summary()
    reduce_lr = ReduceLROnPlateau(verbose=1)
    model.fit_generator(train_generator, epochs=400,
                        steps_per_epoch=100,
                        validation_data=(test_x[:800], test_y[:800]),
                        callbacks=[reduce_lr])
    result = model.evaluate(test_x, test_y)
    print(result)
    model.save('12306.image.model.h5', include_optimizer=False) 
Example #23
Source File: mlearn.py    From easy12306 with Artistic License 2.0 5 votes
def main_v20():
    (train_x, train_y), (test_x, test_y) = load_data()
    model = models.Sequential([
        layers.Conv2D(64, (3, 3), activation='relu', padding='same', input_shape=(None, None, 1)),
        layers.MaxPooling2D(),  # 19 -> 9
        layers.Conv2D(64, (3, 3), activation='relu', padding='same'),
        layers.MaxPooling2D(),  # 9 -> 4
        layers.Conv2D(64, (3, 3), activation='relu', padding='same'),
        layers.MaxPooling2D(),  # 4 -> 2
        layers.Conv2D(64, (3, 3), activation='relu', padding='same'),
        layers.Conv2D(64, (3, 3), activation='relu', padding='same'),
        layers.MaxPooling2D(),  # 2 -> 1
        layers.GlobalAveragePooling2D(),
        layers.Dropout(0.25),
        layers.Dense(64, activation='relu'),
        layers.Dense(80, activation='softmax'),
    ])
    model.summary()
    model.compile(optimizer='rmsprop',
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    model.fit(train_x, train_y, epochs=10,
              validation_data=(test_x, test_y))
    (train_x, train_y), (test_x, test_y) = load_data_v2()
    model.compile(optimizer='rmsprop',
                  loss='categorical_hinge',
                  metrics=[acc])
    reduce_lr = ReduceLROnPlateau(verbose=1)
    history = model.fit(train_x, train_y, epochs=100,
                        validation_data=(test_x, test_y),
                        callbacks=[reduce_lr])
    savefig(history)
    # Save the model and discard the optimizer
    model.save('model.v2.0.h5', include_optimizer=False) 
Example #24
Source File: mlearn.py    From easy12306 with Artistic License 2.0 5 votes
def main_v19():     # 1.9
    (train_x, train_y), (test_x, test_y) = load_data_v2()
    model = models.load_model('model.v1.0.h5')
    model.compile(optimizer='RMSprop',
                  loss='categorical_hinge',
                  metrics=[acc])
    reduce_lr = ReduceLROnPlateau(verbose=1)
    history = model.fit(train_x, train_y, epochs=100,
                        validation_data=(test_x, test_y),
                        callbacks=[reduce_lr])
    savefig(history)
    model.save('model.v1.9.h5', include_optimizer=False) 
Example #25
Source File: hccrn.py    From hwr-address with GNU General Public License v3.0 5 votes
def model_train(model, dataset, batch_size, weights_path, history_path, nb_epoch=200, samples_per_epoch=1000000):
	checkpointer = ModelCheckpoint(filepath=weights_path, verbose=1, save_best_only=False, save_weights_only=True)
	#lrate = LearningRateScheduler(step_decay)
	lrate = ReduceLROnPlateau(monitor='loss', factor=0.3, patience=5, verbose=0, mode='auto', epsilon=0.0001, cooldown=0, min_lr=0.00001)
	#early_stop = EarlyStopping(monitor='loss', patience=5, verbose=0, mode='auto')
	history = model.fit_generator(input_data.generate_data(dataset, batch_size), samples_per_epoch, nb_epoch, callbacks=[checkpointer, lrate])
	with open(history_path, 'w') as f:
		f.write(str(history.history))
	return model 
Example #26
Source File: liver_trainer.py    From MCF-3D-CNN with MIT License 5 votes
def init_callbacks(self):
        # self.callbacks.append(
            # ModelCheckpoint(
                # filepath=os.path.join(self.config.checkpoint_dir, '%s-{epoch:02d}-{val_loss:.2f}.hdf5' % self.config.exp_name),
                # monitor=self.config.checkpoint_monitor,
                # mode=self.config.checkpoint_mode,
                # save_best_only=self.config.checkpoint_save_best_only,
                # save_weights_only=self.config.checkpoint_save_weights_only,
                # verbose=self.config.checkpoint_verbose,
            # )
        # )

        # self.callbacks.append(
                # TensorBoard(
                    # log_dir=self.config.tensorboard_log_dir,
                    # write_graph=self.config.tensorboard_write_graph,
                # )
            # )
        # Learning rate decay
        reduce_lr = callbacks.ReduceLROnPlateau(monitor='val_loss', factor=1/math.e,
                                                verbose=1, patience=self.patience, min_lr=self.min_lr)
        self.callbacks.append(reduce_lr)  
        # if hasattr(self.config,"comet_api_key"):
            # from comet_ml import Experiment
            # experiment = Experiment(api_key=self.config.comet_api_key, project_name=self.config.exp_name)
            # experiment.disable_mp()
            # experiment.log_multiple_params(self.config)
            # self.callbacks.append(experiment.get_keras_callback())