Python matplotlib.pyplot.matshow() Examples

The following are 30 code examples of matplotlib.pyplot.matshow(), taken from open-source projects. You can go to the original project or source file by following the link above each example. You may also want to check out all available functions and classes of the matplotlib.pyplot module.
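Before the project examples, here is a minimal, hypothetical sketch (not taken from any of the projects below) of the basic call pattern they all build on: matshow() renders a 2-D array as an image, placing the origin at the top-left and the x tick labels along the top of the axes.

import numpy as np
import matplotlib.pyplot as plt

# Display a small random matrix; matshow() creates its own figure by default.
matrix = np.random.rand(5, 5)
plt.matshow(matrix, cmap='viridis')
plt.colorbar()
plt.show()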
Example #1
Source File: pearsons_filtering.py    From simba with GNU Lesser General Public License v3.0
def pearson_filter(projectPath, featuresDf, del_corr_status, del_corr_threshold, del_corr_plot_status):
    print('Reducing features. Correlation threshold: ' + str(del_corr_threshold))
    col_corr = set()
    corr_matrix = featuresDf.corr()
    for i in range(len(corr_matrix.columns)):
        for j in range(i):
            if (corr_matrix.iloc[i, j] >= del_corr_threshold) and (corr_matrix.columns[j] not in col_corr):
                colname = corr_matrix.columns[i]
                col_corr.add(colname)
                if colname in featuresDf.columns:
                    del featuresDf[colname]
    if del_corr_plot_status == 'yes':
        print('Creating feature correlation heatmap...')
        dateTime = datetime.now().strftime('%Y%m%d%H%M%S')
        plt.matshow(featuresDf.corr())
        plt.tight_layout()
        plt.savefig(os.path.join(projectPath, 'logs', 'Feature_correlations_' + dateTime + '.png'), dpi=300)
        plt.close('all')
        print('Feature correlation heatmap .png saved in project_folder/logs directory')

    return featuresDf 
Example #2
Source File: denoising_autoencoder.py    From Deep-Learning-with-TensorFlow-Second-Edition with MIT License
def plotresult(org_vec, noisy_vec, out_vec):
    plt.matshow(np.reshape(org_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Original Image")
    plt.colorbar()

    plt.matshow(np.reshape(noisy_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Input Image")
    plt.colorbar()
    
    outimg = np.reshape(out_vec, (28, 28))
    plt.matshow(outimg, cmap=plt.get_cmap('gray'))
    plt.title("Reconstructed Image")
    plt.colorbar()
    plt.show()

# NETWORK PARAMETERS 
Example #3
Source File: deconvolutional_autoencoder.py    From Deep-Learning-with-TensorFlow-Second-Edition with MIT License
def plotresult(org_vec, noisy_vec, out_vec):
    plt.matshow(np.reshape(org_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Original Image")
    plt.colorbar()

    plt.matshow(np.reshape(noisy_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Input Image")
    plt.colorbar()
    
    outimg = np.reshape(out_vec, (28, 28))
    plt.matshow(outimg, cmap=plt.get_cmap('gray'))
    plt.title("Reconstructed Image")
    plt.colorbar()
    plt.show()

# NETWORK PARAMETERS 
Example #4
Source File: utils_visualization.py    From deepwriting with MIT License
def plot_matrix_and_get_image(plot_data, fig_height=8, fig_width=12, axis_off=False, colormap="jet"):
    fig = plt.figure()
    fig.set_figheight(fig_height)
    fig.set_figwidth(fig_width)
    plt.matshow(plot_data, fig.number)

    if fig_height < fig_width:
        plt.colorbar(orientation="horizontal")
    else:
        plt.colorbar(orientation="vertical")

    plt.set_cmap(colormap)
    if axis_off:
        plt.axis('off')

    img = fig_to_img(fig)
    plt.close(fig)
    return img 
Example #5
Source File: plot_results.py    From nasbench-1shot1 with Apache License 2.0
def plot_correlation_image(single_one_shot_training_database, epoch_idx=-1):
    correlation_matrix = np.zeros((3, 5))
    for idx_cell, num_cells in enumerate([3, 6, 9]):
        for idx_ch, num_channels in enumerate([2, 4, 8, 16, 36]):
            config = single_one_shot_training_database.query(
                {'unrolled': False, 'cutout': False, 'search_space': '3', 'epochs': 50, 'init_channels': num_channels,
                 'weight_decay': 0.0003, 'warm_start_epochs': 0, 'learning_rate': 0.025, 'layers': num_cells})
            if len(config) > 0:
                correlation = extract_correlation_per_epoch(config)
                correlation_matrix[idx_cell, idx_ch] = 1 - correlation[epoch_idx]

    plt.figure()
    plt.matshow(correlation_matrix)
    plt.xticks(np.arange(5), (2, 4, 8, 16, 36))
    plt.yticks(np.arange(3), (3, 6, 9))
    plt.colorbar()
    plt.savefig('test_correlation.png')
    plt.close()
    return correlation_matrix 
Example #6
Source File: tf_input_data.py    From DeepLearning_Wavelet-LSTM with MIT License
def MyPlot(cwtmatr):
    ''' Plot '''
    
    print(type(cwtmatr))
    print(len(cwtmatr))
    print(len(cwtmatr[0]))

    # plt.plot(cwtmatr[1])
    # plt.plot(cwtmatr[10])
    # plt.plot(cwtmatr[100])
    
    plt.plot(cwtmatr[1200])
    plt.plot(cwtmatr[1210])
    plt.plot(cwtmatr[1300])
    plt.plot(cwtmatr[1400])
    plt.plot(cwtmatr[1500])

    # plt.plot(cwtmatr[1800])
    # plt.plot(cwtmatr[1900])
    # plt.plot(cwtmatr[2500])

    # plt.matshow(cwtmatr) 
    plt.show() 
Example #7
Source File: interactive-eval.py    From multiffn-nli with MIT License
def plot_attention(tokens1, tokens2, attention):
    """
    Print a colormap showing attention values from tokens 1 to
    tokens 2.
    """
    len1 = len(tokens1)
    len2 = len(tokens2)
    extent = [0, len2, 0, len1]
    pl.matshow(attention, extent=extent, aspect='auto')
    ticks1 = np.arange(len1) + 0.5
    ticks2 = np.arange(len2) + 0.5
    pl.xticks(ticks2, tokens2, rotation=45)
    pl.yticks(ticks1, reversed(tokens1))
    ax = pl.gca()
    ax.xaxis.set_ticks_position('bottom')
    pl.colorbar()
    pl.title('Alignments')
    pl.show(block=False) 
Example #8
Source File: pursuit_evade.py    From MADRL with MIT License
def render(self, plt_delay=1.0):
        plt.matshow(self.model_state[0].T, cmap=plt.get_cmap('Greys'), fignum=1)
        for i in range(self.pursuer_layer.n_agents()):
            x, y = self.pursuer_layer.get_position(i)
            plt.plot(x, y, "r*", markersize=12)
            if self.train_pursuit:
                ax = plt.gca()
                ofst = self.obs_range / 2.0
                ax.add_patch(
                    Rectangle((x - ofst, y - ofst), self.obs_range, self.obs_range, alpha=0.5,
                              facecolor="#FF9848"))
        for i in range(self.evader_layer.n_agents()):
            x, y = self.evader_layer.get_position(i)
            plt.plot(x, y, "b*", markersize=12)
            if not self.train_pursuit:
                ax = plt.gca()
                ofst = self.obs_range / 2.0
                ax.add_patch(
                    Rectangle((x - ofst, y - ofst), self.obs_range, self.obs_range, alpha=0.5,
                              facecolor="#009ACD"))
        plt.pause(plt_delay)
        plt.clf() 
Example #9
Source File: GetMLPara.py    From dr_droid with Apache License 2.0
def draw_confusion_matrix(y_test, y_pred):

    from sklearn.metrics import confusion_matrix
    cm = confusion_matrix(y_test, y_pred)
    print(cm)

    # Show confusion matrix in a separate window
    plt.matshow(cm)
    plt.title('Confusion matrix')
    plt.colorbar()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.show()


####################10 CV FALSE POSITIVE FALSE NEGATIVE################################################# 
Example #10
Source File: deconvolutional_autoencoder_1.py    From Deep-Learning-with-TensorFlow with MIT License
def plotresult(org_vec, noisy_vec, out_vec):
    plt.matshow(np.reshape(org_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Original Image")
    plt.colorbar()

    plt.matshow(np.reshape(noisy_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Input Image")
    plt.colorbar()
    
    outimg = np.reshape(out_vec, (28, 28))
    plt.matshow(outimg, cmap=plt.get_cmap('gray'))
    plt.title("Reconstructed Image")
    plt.colorbar()
    plt.show()

# NETWORK PARAMETERS 
Example #11
Source File: denoising_autoencoder_1.py    From Deep-Learning-with-TensorFlow with MIT License
def plotresult(org_vec, noisy_vec, out_vec):
    plt.matshow(np.reshape(org_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Original Image")
    plt.colorbar()

    plt.matshow(np.reshape(noisy_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Input Image")
    plt.colorbar()
    
    outimg = np.reshape(out_vec, (28, 28))
    plt.matshow(outimg, cmap=plt.get_cmap('gray'))
    plt.title("Reconstructed Image")
    plt.colorbar()
    plt.show()

# NETWORK PARAMETERS 
Example #12
Source File: deconvolutional_autoencoder_1.py    From Deep-Learning-with-TensorFlow with MIT License
def plotresult(org_vec, noisy_vec, out_vec):
    plt.matshow(np.reshape(org_vec, (28, 28)),\
                cmap=plt.get_cmap('gray'))
    plt.title("Original Image")
    plt.colorbar()

    plt.matshow(np.reshape(noisy_vec, (28, 28)),\
                cmap=plt.get_cmap('gray'))
    plt.title("Input Image")
    plt.colorbar()
    
    outimg   = np.reshape(out_vec, (28, 28))
    plt.matshow(outimg, cmap=plt.get_cmap('gray'))
    plt.title("Reconstructed Image")
    plt.colorbar()
    plt.show()

# NETWORK PARAMETERS 
Example #13
Source File: denoising_autoencoder_1.py    From Deep-Learning-with-TensorFlow with MIT License
def plotresult(org_vec, noisy_vec, out_vec):
    plt.matshow(np.reshape(org_vec, (28, 28)),\
                cmap=plt.get_cmap('gray'))
    plt.title("Original Image")
    plt.colorbar()

    plt.matshow(np.reshape(noisy_vec, (28, 28)),\
                cmap=plt.get_cmap('gray'))
    plt.title("Input Image")
    plt.colorbar()
    
    outimg   = np.reshape(out_vec, (28, 28))
    plt.matshow(outimg, cmap=plt.get_cmap('gray'))
    plt.title("Reconstructed Image")
    plt.colorbar()
    plt.show()

# NETWORK PARAMETERS 
Example #14
Source File: visualization_utils.py    From ludwig with Apache License 2.0
def confusion_matrix_plot(
        confusion_matrix,
        labels=None,
        output_feature_name=None,
        filename=None
):
    mpl.rcParams.update({'figure.autolayout': True})
    fig, ax = plt.subplots()

    ax.invert_yaxis()
    ax.xaxis.tick_top()
    ax.xaxis.set_label_position('top')

    cax = ax.matshow(confusion_matrix, cmap='viridis')

    ax.xaxis.set_major_locator(ticker.MultipleLocator(1))
    ax.yaxis.set_major_locator(ticker.MultipleLocator(1))
    ax.set_xticklabels([''] + labels, rotation=45, ha='left')
    ax.set_yticklabels([''] + labels)
    ax.grid(False)
    ax.tick_params(axis='both', which='both', length=0)
    fig.colorbar(cax, ax=ax, extend='max')
    ax.set_xlabel('Predicted {}'.format(output_feature_name))
    ax.set_ylabel('Actual {}'.format(output_feature_name))

    plt.tight_layout()
    ludwig.contrib.contrib_command("visualize_figure", plt.gcf())
    if filename:
        plt.savefig(filename)
    else:
        plt.show() 
Example #15
Source File: visualization_utils.py    From ludwig with Apache License 2.0
def plot_matrix(
        matrix,
        cmap='hot',
        filename=None
):
    plt.matshow(matrix, cmap=cmap)
    ludwig.contrib.contrib_command("visualize_figure", plt.gcf())
    if filename:
        plt.savefig(filename)
    else:
        plt.show() 
Example #16
Source File: cnn_dogs_cats.py    From Neural-Network-Programming-with-TensorFlow with MIT License
def plot_confusion_matrix(cls_pred, data):
    # cls_pred is an array of the predicted class-number for
    # all images in the test-set.

    # Get the true classifications for the test-set.
    cls_true = data.valid.cls
    
    # Get the confusion matrix using sklearn.
    cm = confusion_matrix(y_true=cls_true,
                          y_pred=cls_pred)

    # Print the confusion matrix as text.
    print(cm)

    # Plot the confusion matrix as an image.
    plt.matshow(cm)

    # Make various adjustments to the plot.
    plt.colorbar()
    tick_marks = np.arange(num_classes)
    plt.xticks(tick_marks, range(num_classes))
    plt.yticks(tick_marks, range(num_classes))
    plt.xlabel('Predicted')
    plt.ylabel('True')

    # Ensure the plot is shown correctly with multiple plots
    # in a single Notebook cell.
    plt.show() 
Example #17
Source File: notebook.py    From attention-lvcsr with MIT License
def show_alignment(weights, transcription,
                   bos_symbol=False, energies=None,
                   **kwargs):
    f = pyplot.figure(figsize=(15, 0.20 * len(transcription)))
    ax = f.gca()
    ax.matshow(weights, aspect='auto', **kwargs)
    ax.set_yticks((1 if bos_symbol else 0) + numpy.arange(len(transcription)))
    ax.set_yticklabels(transcription)
    pyplot.show()

    if energies is not None:
        pyplot.matshow(energies, **kwargs)
        pyplot.colorbar()
        pyplot.show() 
Example #18
Source File: testProject.py    From FaceDetection with MIT License
def func():
        # The assert raises immediately, so the plotting code below never runs.
        assert False
        pyplot.matshow(self.image)
        pylab.show() 
Example #19
Source File: image.py    From FaceDetection with MIT License
def show(image=None):
        # "is None" avoids the ambiguous element-wise comparison that
        # "== None" triggers on NumPy arrays.
        if image is None:
            return
        pyplot.matshow(image)
        pylab.show() 
Example #20
Source File: tensorflow_input_data.py    From DeepLearning_Wavelet-LSTM with MIT License
def MyPlot(cwtmatr):
    ''' Plot '''
    
    # print(type(cwtmatr))
    # print(len(cwtmatr))
    # print(len(cwtmatr[0]))

    # plt.plot(cwtmatr[1])
    # plt.plot(cwtmatr[10])
    # plt.plot(cwtmatr[200])
    # plt.plot(cwtmatr[300])
    # plt.plot(cwtmatr[400])
    # plt.plot(cwtmatr[500])
    # plt.plot(cwtmatr[600])
    # plt.plot(cwtmatr[700])
    
    # plt.plot(cwtmatr[1200])
    # plt.plot(cwtmatr[1210])
    # plt.plot(cwtmatr[1300])
    # plt.plot(cwtmatr[1400])
    # plt.plot(cwtmatr[1500])

    # plt.plot(cwtmatr[1800])
    # plt.plot(cwtmatr[1850])
    # plt.plot(cwtmatr[1900])
    # plt.plot(cwtmatr[1950])
    # plt.plot(cwtmatr[2000])
    # plt.plot(cwtmatr[2100])
    # plt.plot(cwtmatr[2300])
    # plt.plot(cwtmatr[2500])

    # plt.matshow(cwtmatr) 
    plt.show() 
Example #21
Source File: tf_input_data.py    From DeepLearning_Wavelet-LSTM with MIT License
def MyPlot(cwtmatr):
    ''' Plot '''
    
    print(type(cwtmatr))
    print(len(cwtmatr))
    print(len(cwtmatr[0]))

    # plt.plot(cwtmatr[1])
    # plt.plot(cwtmatr[10])
    # plt.plot(cwtmatr[200])
    # plt.plot(cwtmatr[300])
    # plt.plot(cwtmatr[400])
    # plt.plot(cwtmatr[500])
    # plt.plot(cwtmatr[600])
    # plt.plot(cwtmatr[700])
    
    # plt.plot(cwtmatr[1200])
    # plt.plot(cwtmatr[1210])
    # plt.plot(cwtmatr[1300])
    # plt.plot(cwtmatr[1400])
    # plt.plot(cwtmatr[1500])

    # plt.plot(cwtmatr[1800])
    # plt.plot(cwtmatr[1850])
    # plt.plot(cwtmatr[1900])
    # plt.plot(cwtmatr[1950])
    # plt.plot(cwtmatr[2000])
    # plt.plot(cwtmatr[2100])
    # plt.plot(cwtmatr[2300])
    # plt.plot(cwtmatr[2500])

    # plt.matshow(cwtmatr) 
    plt.show() 
Example #22
Source File: tensorflow_input_data.py    From DeepLearning_Wavelet-LSTM with MIT License
def MyPlot(cwtmatr):
    ''' Plot '''
    
    print(type(cwtmatr))
    print(len(cwtmatr))
    print(len(cwtmatr[0]))

    # plt.plot(cwtmatr[1])
    # plt.plot(cwtmatr[10])
    # plt.plot(cwtmatr[200])
    # plt.plot(cwtmatr[300])
    # plt.plot(cwtmatr[400])
    # plt.plot(cwtmatr[500])
    # plt.plot(cwtmatr[600])
    # plt.plot(cwtmatr[700])
    
    # plt.plot(cwtmatr[1200])
    # plt.plot(cwtmatr[1210])
    # plt.plot(cwtmatr[1300])
    # plt.plot(cwtmatr[1400])
    # plt.plot(cwtmatr[1500])

    # plt.plot(cwtmatr[1800])
    # plt.plot(cwtmatr[1850])
    # plt.plot(cwtmatr[1900])
    # plt.plot(cwtmatr[1950])
    # plt.plot(cwtmatr[2000])
    # plt.plot(cwtmatr[2100])
    # plt.plot(cwtmatr[2300])
    # plt.plot(cwtmatr[2500])

    # plt.matshow(cwtmatr) 
    plt.show() 
Example #23
Source File: tensorflow_input_data.py    From DeepLearning_Wavelet-LSTM with MIT License
def MyPlot(cwtmatr):
    ''' Plot '''
    
    # print(type(cwtmatr))
    # print(len(cwtmatr))
    # print(len(cwtmatr[0]))

    # plt.plot(cwtmatr[1])
    # plt.plot(cwtmatr[10])
    # plt.plot(cwtmatr[200])
    # plt.plot(cwtmatr[300])
    # plt.plot(cwtmatr[400])
    # plt.plot(cwtmatr[500])
    # plt.plot(cwtmatr[600])
    # plt.plot(cwtmatr[700])
    
    # plt.plot(cwtmatr[1200])
    # plt.plot(cwtmatr[1210])
    # plt.plot(cwtmatr[1300])
    # plt.plot(cwtmatr[1400])
    # plt.plot(cwtmatr[1500])

    # plt.plot(cwtmatr[1800])
    # plt.plot(cwtmatr[1850])
    # plt.plot(cwtmatr[1900])
    # plt.plot(cwtmatr[1950])
    # plt.plot(cwtmatr[2000])
    # plt.plot(cwtmatr[2100])
    # plt.plot(cwtmatr[2300])
    # plt.plot(cwtmatr[2500])

    # plt.matshow(cwtmatr) 
    plt.show() 
Example #24
Source File: tensorflow_input_data.py    From DeepLearning_Wavelet-LSTM with MIT License
def MyPlot(cwtmatr):
    ''' Plot '''
    
    # print(type(cwtmatr))
    # print(len(cwtmatr))
    # print(len(cwtmatr[0]))

    # plt.plot(cwtmatr[1])
    # plt.plot(cwtmatr[10])
    # plt.plot(cwtmatr[200])
    # plt.plot(cwtmatr[300])
    # plt.plot(cwtmatr[400])
    # plt.plot(cwtmatr[500])
    # plt.plot(cwtmatr[600])
    # plt.plot(cwtmatr[700])
    
    # plt.plot(cwtmatr[1200])
    # plt.plot(cwtmatr[1210])
    # plt.plot(cwtmatr[1300])
    # plt.plot(cwtmatr[1400])
    # plt.plot(cwtmatr[1500])

    # plt.plot(cwtmatr[1800])
    # plt.plot(cwtmatr[1850])
    # plt.plot(cwtmatr[1900])
    # plt.plot(cwtmatr[1950])
    # plt.plot(cwtmatr[2000])
    # plt.plot(cwtmatr[2100])
    # plt.plot(cwtmatr[2300])
    # plt.plot(cwtmatr[2500])

    # plt.matshow(cwtmatr) 
    plt.show() 
Example #25
Source File: tensorflow_input_data.py    From DeepLearning_Wavelet-LSTM with MIT License
def MyPlot(cwtmatr):
    ''' Plot '''
    
    # print(type(cwtmatr))
    # print(len(cwtmatr))
    # print(len(cwtmatr[0]))

    # plt.plot(cwtmatr[1])
    # plt.plot(cwtmatr[10])
    # plt.plot(cwtmatr[200])
    # plt.plot(cwtmatr[300])
    # plt.plot(cwtmatr[400])
    # plt.plot(cwtmatr[500])
    # plt.plot(cwtmatr[600])
    # plt.plot(cwtmatr[700])
    
    # plt.plot(cwtmatr[1200])
    # plt.plot(cwtmatr[1210])
    # plt.plot(cwtmatr[1300])
    # plt.plot(cwtmatr[1400])
    # plt.plot(cwtmatr[1500])

    # plt.plot(cwtmatr[1800])
    # plt.plot(cwtmatr[1850])
    # plt.plot(cwtmatr[1900])
    # plt.plot(cwtmatr[1950])
    # plt.plot(cwtmatr[2000])
    # plt.plot(cwtmatr[2100])
    # plt.plot(cwtmatr[2300])
    # plt.plot(cwtmatr[2500])

    # plt.matshow(cwtmatr) 
    plt.show() 
Example #26
Source File: tensorflow_input_data.py    From DeepLearning_Wavelet-LSTM with MIT License
def MyPlot(cwtmatr):
    ''' Plot '''
    
    # print(type(cwtmatr))
    # print(len(cwtmatr))
    # print(len(cwtmatr[0]))

    # plt.plot(cwtmatr[1])
    # plt.plot(cwtmatr[10])
    # plt.plot(cwtmatr[200])
    # plt.plot(cwtmatr[300])
    # plt.plot(cwtmatr[400])
    # plt.plot(cwtmatr[500])
    # plt.plot(cwtmatr[600])
    # plt.plot(cwtmatr[700])
    
    # plt.plot(cwtmatr[1200])
    # plt.plot(cwtmatr[1210])
    # plt.plot(cwtmatr[1300])
    # plt.plot(cwtmatr[1400])
    # plt.plot(cwtmatr[1500])

    # plt.plot(cwtmatr[1800])
    # plt.plot(cwtmatr[1850])
    # plt.plot(cwtmatr[1900])
    # plt.plot(cwtmatr[1950])
    # plt.plot(cwtmatr[2000])
    # plt.plot(cwtmatr[2100])
    # plt.plot(cwtmatr[2300])
    # plt.plot(cwtmatr[2500])

    # plt.matshow(cwtmatr) 
    plt.show() 
Example #27
Source File: visualization.py    From tf-image-segmentation with MIT License
def _discrete_matshow_adaptive(data, labels_names=[], title=""):
    """Displays segmentation results using colormap that is adapted
    to a number of classes. Uses labels_names to write class names
    aside the color label. Used as a helper function for 
    visualize_segmentation_adaptive() function.
    
    Parameters
    ----------
    data : 2d numpy array (width, height)
        Array with integers representing class predictions
    labels_names : list
        List with class names
    title : str, optional
        Title displayed above the plot
    """
    
    fig_size = [7, 6]
    plt.rcParams["figure.figsize"] = fig_size
    
    # get discrete colormap
    cmap = plt.get_cmap('Paired', np.max(data) - np.min(data) + 1)

    # set limits 0.5 outside the true range
    mat = plt.matshow(data,
                      cmap=cmap,
                      vmin=np.min(data) - 0.5,
                      vmax=np.max(data) + 0.5)

    # tell the colorbar to tick at integers
    cax = plt.colorbar(mat,
                       ticks=np.arange(np.min(data), np.max(data) + 1))
    
    # The names to be printed aside the colorbar
    if labels_names:
        cax.ax.set_yticklabels(labels_names)
    
    if title:
        plt.suptitle(title, fontsize=15, fontweight='bold')
    
    plt.show() 
Example #28
Source File: plot_quasar_transform.py    From 3DChromatin_ReplicateQC with MIT License
def main():
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('--transform')
    parser.add_argument('--out')
    args = parser.parse_args()
    
    infile1 = h5py.File(args.transform, 'r')
    resolutions = infile1['resolutions'][...]
    chroms = infile1['chromosomes'][...]
    data1 = load_data(infile1, chroms, resolutions)
    infile1.close()

    '''
    #for now, don't plot this
    for resolution in data1.keys():
        for chromo in chroms:
            N = data1[resolution][chromo][1].shape[0]
            full=numpy.empty((N,N))
            #full=full/0
            for i in range(100):
                temp1 = numpy.arange(N - i - 1)
                temp2 = numpy.arange(i+1, N)
                full[temp1, temp2] = data1[resolution][chromo][1][temp1, i]
                full[temp2, temp1] = full[temp1, temp2]
            x=0.8
            plt.matshow(full,cmap='seismic',vmin=-x,vmax=x)
            plt.colorbar()
            plt.show()
            plt.savefig(args.out+'.res'+str(resolution)+'.chr'+chromo+'.pdf')    
    ''' 
Example #29
Source File: CNN_DogvsCat_Classifier.py    From Practical-Convolutional-Neural-Networks with MIT License
def plot_confusion_matrix(cls_pred):
    # cls_pred is an array of the predicted class-number for
    # all images in the test-set.

    # Get the true classifications for the test-set.
    cls_true = data.valid.cls
    
    # Get the confusion matrix using sklearn.
    cm = confusion_matrix(y_true=cls_true, y_pred=cls_pred)
    
    # Compute the precision, recall and f1 score of the classification
    p, r, f, s = precision_recall_fscore_support(cls_true, cls_pred, average='weighted')
    print('Precision:', p)
    print('Recall:', r)
    print('F1-score:', f)

    # Print the confusion matrix as text.
    print(cm)

    # Plot the confusion matrix as an image.
    plt.matshow(cm)

    # Make various adjustments to the plot.
    plt.colorbar()
    tick_marks = np.arange(num_classes)
    plt.xticks(tick_marks, range(num_classes))
    plt.yticks(tick_marks, range(num_classes))
    plt.xlabel('Predicted')
    plt.ylabel('True')

    # Ensure the plot is shown correctly with multiple plots
    # in a single Notebook cell.
    plt.show() 
Example #30
Source File: pursuit_evade.py    From MADRL with MIT License
def save_image(self, file_name):
        plt.cla()
        plt.matshow(self.model_state[0].T, cmap=plt.get_cmap('Greys'), fignum=0)
        x, y = self.pursuer_layer.get_position(0)
        plt.plot(x, y, "r*", markersize=12)
        for i in range(self.pursuer_layer.n_agents()):
            x, y = self.pursuer_layer.get_position(i)
            plt.plot(x, y, "r*", markersize=12)
            if self.train_pursuit:
                ax = plt.gca()
                ofst = self.obs_range / 2.0
                ax.add_patch(
                    Rectangle((x - ofst, y - ofst), self.obs_range, self.obs_range, alpha=0.5,
                              facecolor="#FF9848"))
        for i in range(self.evader_layer.n_agents()):
            x, y = self.evader_layer.get_position(i)
            plt.plot(x, y, "b*", markersize=12)
            if not self.train_pursuit:
                ax = plt.gca()
                ofst = self.obs_range / 2.0
                ax.add_patch(
                    Rectangle((x - ofst, y - ofst), self.obs_range, self.obs_range, alpha=0.5,
                              facecolor="#009ACD"))

        xl, xh = -self.obs_offset - 1, self.xs + self.obs_offset + 1
        yl, yh = -self.obs_offset - 1, self.ys + self.obs_offset + 1
        plt.xlim([xl, xh])
        plt.ylim([yl, yh])
        plt.axis('off')
        plt.savefig(file_name, dpi=200)