Python keras.metrics Examples

The following are 9 code examples showing how to use the keras.metrics module. They are extracted from open source projects; the originating project, author, file, and license are listed above each example.

You may also want to check out all available functions and classes of the keras module.
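As a quick orientation (not taken from the projects below), metrics from keras.metrics can be passed to model.compile() either as strings or as the metric functions themselves. A minimal sketch, with illustrative layer sizes:

from keras import metrics
from keras.models import Sequential
from keras.layers import Dense

# Toy model; the input and output sizes are illustrative only.
model = Sequential()
model.add(Dense(10, activation='softmax', input_shape=(20,)))

# Metrics may be given as strings or as functions from keras.metrics.
model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy', metrics.categorical_accuracy])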

Example 1
Project: urgent-care-comparative   Author: illidanlab   File: deep_models.py    License: GNU General Public License v3.0
def lstm_model(input_shape, hidden=256, targets=1, learn_rate=1e-4, multiclass=False):
    model = Sequential()
    stacked = (targets > 1) and not multiclass
    # The first layer needs input_shape; it must also return full sequences
    # when a second recurrent layer is stacked on top of it.
    model.add(Bidirectional(LSTM(hidden, return_sequences=stacked),
                            merge_mode='concat', input_shape=input_shape))
    model.add(Activation('tanh'))
    model.add(Dropout(0.5))

    if stacked:
        model.add(Bidirectional(LSTM(hidden), merge_mode='concat'))
        model.add(Activation('tanh'))
        model.add(Dropout(0.5))

    model.add(Dense(targets))
    if multiclass:
        model.add(Activation('softmax'))
        model.compile(loss='categorical_crossentropy',
                      optimizer=Adam(lr=learn_rate, beta_1=0.5),
                      metrics=['categorical_accuracy'])
    else:
        model.add(Activation('sigmoid'))
        model.compile(loss='binary_crossentropy',
                      optimizer=Adam(lr=learn_rate, beta_1=0.5),
                      metrics=['accuracy'])
    return model
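A usage sketch for lstm_model; the batch size, sequence length, and feature count below are made-up placeholders, not values from the project:

import numpy as np

# Hypothetical data: 32 sequences of 48 timesteps with 76 features each.
X = np.random.rand(32, 48, 76)
y = np.random.randint(0, 2, size=(32, 1))

model = lstm_model(input_shape=(48, 76), targets=1)
model.fit(X, y, epochs=1, batch_size=8)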
Example 2
Project: urgent-care-comparative   Author: illidanlab   File: deep_models.py    License: GNU General Public License v3.0
def cnn_model(input_shape, hidden=256, targets=1, learn_rate=1e-4, multiclass=False):
    model = Sequential()
    # Keras 1-style Convolution1D arguments (nb_filter/filter_length/border_mode).
    model.add(Convolution1D(input_shape=input_shape, nb_filter=64, filter_length=3,
                            border_mode='same', activation='relu'))
    model.add(MaxPooling1D(pool_length=3))
    model.add(Bidirectional(LSTM(hidden), merge_mode='concat'))
    model.add(Activation('tanh'))
    model.add(Dropout(0.5))
    model.add(Dense(targets))
    if multiclass:
        model.add(Activation('softmax'))
        model.compile(loss='categorical_crossentropy',
                      optimizer=Adam(lr=learn_rate, beta_1=0.5),
                      metrics=['categorical_accuracy'])
    else:
        model.add(Activation('sigmoid'))
        model.compile(loss='binary_crossentropy',
                      optimizer=Adam(lr=learn_rate, beta_1=0.5),
                      metrics=['accuracy'])
    return model
Example 3
Project: urgent-care-comparative   Author: illidanlab   File: deep_models.py    License: GNU General Public License v3.0
def mlp_model(input_shape, hidden=512, targets=1, multiclass=False, learn_rate=1e-4):
    model = Sequential()
    model.add(Dense(hidden, activation='relu', input_shape=input_shape))
    model.add(Dropout(0.5))
    model.add(Dense(hidden, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(hidden, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(targets))
    if multiclass:
        model.add(Activation('softmax'))
        model.compile(loss='categorical_crossentropy',
                      optimizer=Adam(lr=learn_rate, beta_1=0.5),
                      metrics=['categorical_accuracy'])
    else:
        model.add(Activation('sigmoid'))
        model.compile(loss='binary_crossentropy',
                      optimizer=Adam(lr=learn_rate, beta_1=0.5),
                      metrics=['accuracy'])
    return model
Example 4
Project: urgent-care-comparative   Author: illidanlab   File: deep_models.py    License: GNU General Public License v3.0
def hierarchical_cnn(input_shape, aux_shape, targets=1, hidden=256, multiclass=False, learn_rate=1e-4):
    x = Input(shape=input_shape, name='x')
    xx = Convolution1D(nb_filter=64, filter_length=3, border_mode='same', activation='relu')(x)
    xx = MaxPooling1D(pool_length=3)(xx)

    xx = Bidirectional(LSTM(hidden, activation='relu'), merge_mode='concat')(xx)
    xx = Dropout(0.5)(xx)

    # Auxiliary (static) features are concatenated with the sequence encoding.
    dx = Input(shape=aux_shape, name='aux')
    xx = concatenate([xx, dx])

    if multiclass:
        y = Dense(targets, activation='softmax')(xx)
        model = Model(inputs=[x, dx], outputs=[y])
        model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=learn_rate),
                      metrics=['categorical_accuracy'])
    else:
        y = Dense(targets, activation='sigmoid')(xx)
        model = Model(inputs=[x, dx], outputs=[y])
        model.compile(loss='binary_crossentropy', optimizer=Adam(lr=learn_rate),
                      metrics=['accuracy'])
    return model
Example 5
Project: urgent-care-comparative   Author: illidanlab   File: deep_models.py    License: GNU General Public License v3.0
def hierarchical_lstm(input_shape, aux_shape, targets=1, hidden=256, multiclass=False, learn_rate=1e-4):
    x = Input(shape=input_shape, name='x')
    xx = Bidirectional(LSTM(hidden, activation='relu'), merge_mode='concat')(x)
    xx = Dropout(0.5)(xx)

    dx = Input(shape=aux_shape, name='aux')
    xx = concatenate([xx, dx])
    xx = Dense(512, activation='relu')(xx)

    if multiclass:
        y = Dense(targets, activation='softmax')(xx)
        model = Model(inputs=[x, dx], outputs=[y])
        model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=learn_rate),
                      metrics=['categorical_accuracy'])
    else:
        y = Dense(targets, activation='sigmoid')(xx)
        model = Model(inputs=[x, dx], outputs=[y])
        model.compile(loss='binary_crossentropy', optimizer=Adam(lr=learn_rate),
                      metrics=['accuracy'])
    return model
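Because the two hierarchical models take a sequence input and an auxiliary input, they are fit on a list of arrays, one per Input. A sketch with made-up shapes:

import numpy as np

# Hypothetical shapes: sequences plus a flat auxiliary feature vector.
X = np.random.rand(32, 48, 76)    # matches input_shape=(48, 76)
aux = np.random.rand(32, 10)      # matches aux_shape=(10,)
y = np.random.randint(0, 2, size=(32, 1))

model = hierarchical_lstm(input_shape=(48, 76), aux_shape=(10,), targets=1)
model.fit([X, aux], y, epochs=1, batch_size=8)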
Example 6
Project: intent_classifier   Author: deepmipt   File: multiclass.py    License: Apache License 2.0
def infer_on_batch(self, batch, labels=None):
        """
        Infers the model on the given batch.

        Args:
            batch: list of texts
            labels: list of labels

        Returns:
            loss and metric values on the given batch if labels are given;
            predictions otherwise
        """
        features = self.texts2vec(batch)
        if labels:
            onehot_labels = labels2onehot(labels, classes=self.classes)
            metrics_values = self.model.test_on_batch(
                features, onehot_labels.reshape(-1, self.n_classes))
            return metrics_values
        else:
            return self.model.predict(features)
Example 7
Project: dts   Author: albertogaspar   File: Seq2Seq.py    License: MIT License
def evaluate(self, data, fn_inverse=None, horizon=1, fn_plot=None):
        """
        Evaluate model
        :return:
        """
        encoder_input_data, decoder_input_exog, y = data

        y_hat = self.predict(encoder_inputs=encoder_input_data,
                             pred_steps=horizon,
                             decoder_input_exog=decoder_input_exog)

        if fn_inverse is not None:
            y = fn_inverse(y)
            y_hat = fn_inverse(y_hat)

        y = np.float32(y)
        y_hat = np.float32(y_hat)

        if fn_plot is not None:
            fn_plot([y,y_hat])

        results = []
        for m in self.model.metrics:
            if isinstance(m, str):
                # String metrics are resolved to functions via keras.metrics.get.
                results.append(K.eval(K.mean(get(m)(y, y_hat))))
            else:
                results.append(K.eval(K.mean(m(y, y_hat))))
        return results
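The loop above relies on keras.metrics.get(), which resolves a metric name to the corresponding function, the same lookup compile() performs for string metrics. A minimal sketch:

import numpy as np
from keras import backend as K
from keras.metrics import get

y_true = K.variable(np.array([[1.0], [2.0]]))
y_pred = K.variable(np.array([[1.5], [2.5]]))

mse = get('mse')  # resolves to keras.metrics.mean_squared_error
print(K.eval(K.mean(mse(y_true, y_pred))))  # 0.25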
Example 8
Project: dts   Author: albertogaspar   File: Seq2Seq.py    License: MIT License
def evaluate(self, data, fn_inverse=None, fn_plot=None):
        try:
            # Variant with exogenous decoder inputs.
            encoder_inputs, decoder_inputs, decoder_inputs_exog, y = data
            y_hat = self.model.predict([encoder_inputs, decoder_inputs, decoder_inputs_exog])
        except ValueError:
            # Fall back to the three-element form without exogenous inputs.
            encoder_inputs, decoder_inputs, y = data
            y_hat = self.model.predict([encoder_inputs, decoder_inputs])

        if fn_inverse is not None:
            y = fn_inverse(y)
            y_hat = fn_inverse(y_hat)

        y = np.float32(y)
        y_hat = np.float32(y_hat)

        if fn_plot is not None:
            fn_plot([y, y_hat])

        results = []
        for m in self.model.metrics:
            if isinstance(m, str):
                results.append(K.eval(K.mean(get(m)(y, y_hat))))
            else:
                results.append(K.eval(K.mean(m(y, y_hat))))
        return results 
Example 9
Project: intent_classifier   Author: deepmipt   File: multiclass.py    License: Apache License 2.0
def train_on_batch(self, batch):
        """
        Trains the intent_model on the given batch.

        Args:
            batch: list of tuples (preprocessed text, labels)

        Returns:
            loss and metric values on the given batch
        """
        texts = list(batch[0])
        labels = list(batch[1])
        features = self.texts2vec(texts)
        onehot_labels = labels2onehot(labels, classes=self.classes)
        metrics_values = self.model.train_on_batch(features, onehot_labels)
        return metrics_values
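train_on_batch returns the scalar loss followed by one value per compiled metric. A common follow-up (a sketch, not part of this project) is to pair the values with model.metrics_names:

# Hypothetical continuation: label each returned value with its metric name.
values = model.train_on_batch(features, onehot_labels)
report = dict(zip(model.metrics_names, values))
# e.g. {'loss': 0.69, 'categorical_accuracy': 0.5}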