Python matplotlib.pylab.clf() Examples

The following are 30 code examples of matplotlib.pylab.clf(), which clears the current figure. Each example is taken from an open-source project; the source file, project name, and license are listed above its code. You may also want to check out the other available functions and classes of the matplotlib.pylab module.
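Before the project examples, here is a minimal, self-contained sketch (not from any of the projects below) of the typical clf() pattern: clearing the current figure between iterations so a single figure can be reused when saving many plots. The output directory and file names are placeholders for illustration.

import os
import tempfile

import numpy as np
import matplotlib
matplotlib.use("Agg")  # assumption: render off-screen, no display required
from matplotlib import pylab

out_dir = tempfile.mkdtemp()  # hypothetical output directory for this sketch

x = np.linspace(0, 2 * np.pi, 200)
for i in range(3):
    pylab.clf()  # clear the current figure so it can be reused for the next plot
    pylab.plot(x, np.sin((i + 1) * x))
    pylab.title("sin(%dx)" % (i + 1))
    pylab.savefig(os.path.join(out_dir, "sine_%d.png" % i), bbox_inches="tight")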
Example #1
Source File: utils.py    From Building-Machine-Learning-Systems-With-Python-Second-Edition with MIT License
def plot_roc(auc_score, name, tpr, fpr, label=None):
    pylab.clf()
    pylab.figure(num=None, figsize=(5, 4))
    pylab.grid(True)
    pylab.plot([0, 1], [0, 1], 'k--')
    pylab.plot(fpr, tpr)
    pylab.fill_between(fpr, tpr, alpha=0.5)
    pylab.xlim([0.0, 1.0])
    pylab.ylim([0.0, 1.0])
    pylab.xlabel('False Positive Rate')
    pylab.ylabel('True Positive Rate')
    pylab.title('ROC curve (AUC = %0.2f) / %s' %
                (auc_score, label), verticalalignment="bottom")
    pylab.legend(loc="lower right")
    filename = name.replace(" ", "_")
    pylab.savefig(
        os.path.join(CHART_DIR, "roc_" + filename + ".png"), bbox_inches="tight") 
Example #2
Source File: data_augmentation.py    From ConvNetQuake with MIT License
def plot_true_and_augmented_data(sample,noised_sample,label,n_examples):
    output_dir = os.path.split(FLAGS.output)[0]
    # Save augmented data
    plt.clf()
    fig, ax = plt.subplots(3,1)
    for t in range(noised_sample.shape[1]):
        ax[t].plot(noised_sample[:,t])
        ax[t].set_xlabel('time (samples)')
        ax[t].set_ylabel('amplitude')
    ax[0].set_title('window {:03d}, cluster_id: {}'.format(n_examples,label))
    plt.savefig(os.path.join(output_dir, "augmented_data",
                            'augmented_{:03d}.pdf'.format(n_examples)))
    plt.close()

    # Save true data
    plt.clf()
    fig, ax = plt.subplots(3,1)
    for t in range(sample.shape[1]):
        ax[t].plot(sample[:,t])
        ax[t].set_xlabel('time (samples)')
        ax[t].set_ylabel('amplitude')
    ax[0].set_title('window {:03d}, cluster_id: {}'.format(n_examples,label))
    plt.savefig(os.path.join(output_dir, "true_data",
                            'true__{:03d}.pdf'.format(n_examples)))
    plt.close() 
Example #3
Source File: test_gaussianize.py    From gaussianize with MIT License
def test_normality_increase_lambert(self):
        # Generate random data and check that it is more normal after inference
        for i, y in enumerate([np.random.standard_cauchy(size=ns), experimental_data]):
            print('Distribution %d' % i)
            print('Before')
            print(('anderson: %0.3f\tshapiro: %0.3f' % (anderson(y)[0], shapiro(y)[0])).expandtabs(30))
            stats.probplot(y, dist="norm", plot=plt)
            plt.savefig(os.path.join(self.test_dir, '%d_before.png' % i))
            plt.clf()
    
            tau = g.igmm(y)
            x = g.w_t(y, tau)
            print('After')
            print(('anderson: %0.3f\tshapiro: %0.3f' % (anderson(x)[0], shapiro(x)[0])).expandtabs(30))
            stats.probplot(x, dist="norm", plot=plt)
            plt.savefig(os.path.join(self.test_dir, '%d_after.png' % i))
            plt.clf() 
Example #4
Source File: make_dataset.py    From DeepLearningImplementations with MIT License
def check_HDF5(size=64):
    """
    Plot images with landmarks to check the processing
    """

    # Get hdf5 file
    hdf5_file = os.path.join(data_dir, "CelebA_%s_data.h5" % size)

    with h5py.File(hdf5_file, "r") as hf:
        data_color = hf["data"]
        for i in range(data_color.shape[0]):
            plt.figure()
            img = data_color[i, :, :, :].transpose(1,2,0)
            plt.imshow(img)
            plt.show()
            plt.clf()
            plt.close() 
Example #5
Source File: make_dataset.py    From DeepLearningImplementations with MIT License
def check_HDF5(jpeg_dir, nb_channels):
    """
    Plot images with landmarks to check the processing
    """

    # Get hdf5 file
    file_name = os.path.basename(jpeg_dir.rstrip("/"))
    hdf5_file = os.path.join(data_dir, "%s_data.h5" % file_name)

    with h5py.File(hdf5_file, "r") as hf:
        data_full = hf["train_data_full"]
        data_sketch = hf["train_data_sketch"]
        for i in range(data_full.shape[0]):
            plt.figure()
            img = data_full[i, :, :, :].transpose(1,2,0)
            img2 = data_sketch[i, :, :, :].transpose(1,2,0)
            img = np.concatenate((img, img2), axis=1)
            if nb_channels == 1:
                plt.imshow(img[:, :, 0], cmap="gray")
            else:
                plt.imshow(img)
            plt.show()
            plt.clf()
            plt.close() 
Example #6
Source File: make_dataset.py    From DeepLearningImplementations with MIT License
def check_HDF5(size):
    """
    Plot images with landmarks to check the processing
    """

    # Get hdf5 file
    hdf5_file = os.path.join(data_dir, "lfw_%s_data.h5" % size)

    with h5py.File(hdf5_file, "r") as hf:
        data_color = hf["data"]
        label = hf["labels"]
        attrs = label.attrs["label_names"]
        for i in range(data_color.shape[0]):
            plt.figure(figsize=(20, 10))
            img = data_color[i, :, :, :].transpose(1,2,0)[:, :, ::-1]
            # Get the 10 labels with highest values
            idx = label[i].argsort()[-10:]
            plt.xlabel(",  ".join(attrs[idx]), fontsize=12)
            plt.imshow(img)
            plt.show()
            plt.clf()
            plt.close() 
Example #7
Source File: utils.py    From Building-Machine-Learning-Systems-With-Python-Second-Edition with MIT License
def plot_feat_importance(feature_names, clf, name):
    pylab.clf()
    coef_ = clf.coef_
    important = np.argsort(np.absolute(coef_.ravel()))
    f_imp = feature_names[important]
    coef = coef_.ravel()[important]
    inds = np.argsort(coef)
    f_imp = f_imp[inds]
    coef = coef[inds]
    xpos = np.array(range(len(coef)))
    pylab.bar(xpos, coef, width=1)

    pylab.title('Feature importance for %s' % (name))
    ax = pylab.gca()
    ax.set_xticks(np.arange(len(coef)))
    labels = ax.set_xticklabels(f_imp)
    for label in labels:
        label.set_rotation(90)
    filename = name.replace(" ", "_")
    pylab.savefig(os.path.join(
        CHART_DIR, "feat_imp_%s.png" % filename), bbox_inches="tight") 
Example #8
Source File: utils.py    From Building-Machine-Learning-Systems-With-Python-Second-Edition with MIT License
def plot_confusion_matrix(cm, genre_list, name, title):
    pylab.clf()
    pylab.matshow(cm, fignum=False, cmap='Blues', vmin=0, vmax=1.0)
    ax = pylab.axes()
    ax.set_xticks(range(len(genre_list)))
    ax.set_xticklabels(genre_list)
    ax.xaxis.set_ticks_position("bottom")
    ax.set_yticks(range(len(genre_list)))
    ax.set_yticklabels(genre_list)
    pylab.title(title)
    pylab.colorbar()
    pylab.grid(False)
    pylab.show()
    pylab.xlabel('Predicted class')
    pylab.ylabel('True class')
    pylab.grid(False)
    pylab.savefig(
        os.path.join(CHART_DIR, "confusion_matrix_%s.png" % name), bbox_inches="tight") 
Example #9
Source File: demo_mi.py    From Building-Machine-Learning-Systems-With-Python-Second-Edition with MIT License
def plot_entropy():
    pylab.clf()
    pylab.figure(num=None, figsize=(5, 4))

    title = "Entropy $H(X)$"
    pylab.title(title)
    pylab.xlabel("$P(X=$coin will show heads up$)$")
    pylab.ylabel("$H(X)$")

    pylab.xlim(xmin=0, xmax=1.1)
    x = np.arange(0.001, 1, 0.001)
    y = -x * np.log2(x) - (1 - x) * np.log2(1 - x)
    pylab.plot(x, y)
    # pylab.xticks([w*7*24 for w in [0,1,2,3,4]], ['week %i'%(w+1) for w in
    # [0,1,2,3,4]])

    pylab.autoscale(tight=True)
    pylab.grid(True)

    filename = "entropy_demo.png"
    pylab.savefig(os.path.join(CHART_DIR, filename), bbox_inches="tight") 
Example #10
Source File: 07_dqn_distrib.py    From Deep-Reinforcement-Learning-Hands-On with MIT License
def save_transition_images(batch_size, predicted, projected, next_distr, dones, rewards, save_prefix):
    for batch_idx in range(batch_size):
        is_done = dones[batch_idx]
        reward = rewards[batch_idx]
        plt.clf()
        p = np.arange(Vmin, Vmax + DELTA_Z, DELTA_Z)
        plt.subplot(3, 1, 1)
        plt.bar(p, predicted[batch_idx], width=0.5)
        plt.title("Predicted")
        plt.subplot(3, 1, 2)
        plt.bar(p, projected[batch_idx], width=0.5)
        plt.title("Projected")
        plt.subplot(3, 1, 3)
        plt.bar(p, next_distr[batch_idx], width=0.5)
        plt.title("Next state")
        suffix = ""
        if reward != 0.0:
            suffix = suffix + "_%.0f" % reward
        if is_done:
            suffix = suffix + "_done"
        plt.savefig("%s_%02d%s.png" % (save_prefix, batch_idx, suffix)) 
Example #11
Source File: make_dataset.py    From DeepLearningImplementations with MIT License
def check_HDF5(size=64):
    """
    Plot images with landmarks to check the processing
    """

    # Get hdf5 file
    hdf5_file = os.path.join(data_dir, "CelebA_%s_data.h5" % size)

    with h5py.File(hdf5_file, "r") as hf:
        data_color = hf["data"]
        for i in range(data_color.shape[0]):
            plt.figure()
            img = data_color[i, :, :, :].transpose(1,2,0)
            plt.imshow(img)
            plt.show()
            plt.clf()
            plt.close() 
Example #12
Source File: 07_dqn_distrib.py    From Deep-Reinforcement-Learning-Hands-On with MIT License
def save_state_images(frame_idx, states, net, device="cpu", max_states=200):
    ofs = 0
    p = np.arange(Vmin, Vmax + DELTA_Z, DELTA_Z)
    for batch in np.array_split(states, 64):
        states_v = torch.tensor(batch).to(device)
        action_prob = net.apply_softmax(net(states_v)).data.cpu().numpy()
        batch_size, num_actions, _ = action_prob.shape
        for batch_idx in range(batch_size):
            plt.clf()
            for action_idx in range(num_actions):
                plt.subplot(num_actions, 1, action_idx+1)
                plt.bar(p, action_prob[batch_idx, action_idx], width=0.5)
            plt.savefig("states/%05d_%08d.png" % (ofs + batch_idx, frame_idx))
        ofs += batch_size
        if ofs >= max_states:
            break 
Example #13
Source File: make_dataset.py    From DeepLearningImplementations with MIT License
def check_HDF5(size=64):
    """
    Plot images with landmarks to check the processing
    """

    # Get hdf5 file
    hdf5_file = os.path.join(data_dir, "CelebA_%s_data.h5" % size)

    with h5py.File(hdf5_file, "r") as hf:
        data_color = hf["data"]
        for i in range(data_color.shape[0]):
            plt.figure()
            img = data_color[i, :, :, :].transpose(1,2,0)
            plt.imshow(img)
            plt.show()
            plt.clf()
            plt.close() 
Example #14
Source File: utils.py    From Building-Machine-Learning-Systems-With-Python-Second-Edition with MIT License
def plot_feat_importance(feature_names, clf, name):
    pylab.clf()
    coef_ = clf.coef_
    important = np.argsort(np.absolute(coef_.ravel()))
    f_imp = feature_names[important]
    coef = coef_.ravel()[important]
    inds = np.argsort(coef)
    f_imp = f_imp[inds]
    coef = coef[inds]
    xpos = np.array(range(len(coef)))
    pylab.bar(xpos, coef, width=1)

    pylab.title('Feature importance for %s' % (name))
    ax = pylab.gca()
    ax.set_xticks(np.arange(len(coef)))
    labels = ax.set_xticklabels(f_imp)
    for label in labels:
        label.set_rotation(90)
    filename = name.replace(" ", "_")
    pylab.savefig(os.path.join(
        CHART_DIR, "feat_imp_%s.png" % filename), bbox_inches="tight") 
Example #15
Source File: data_utils.py    From DeepLearningImplementations with MIT License
def plot_generated_batch(X_full, X_sketch, generator_model, batch_size, image_data_format, suffix, logging_dir):

    # Generate images
    X_gen = generator_model.predict(X_sketch)

    X_sketch = inverse_normalization(X_sketch)
    X_full = inverse_normalization(X_full)
    X_gen = inverse_normalization(X_gen) 
    
    Xs = X_sketch[:8]
    Xg = X_gen[:8]
    Xr = X_full[:8]
    
    if image_data_format == "channels_last":
        X = np.concatenate((Xs, Xg, Xr), axis=0)
        list_rows = []
        for i in range(int(X.shape[0] // 4)):
            Xr = np.concatenate([X[k] for k in range(4 * i, 4 * (i + 1))], axis=1)
            list_rows.append(Xr)

        Xr = np.concatenate(list_rows, axis=0)

    if image_data_format == "channels_first":
        X = np.concatenate((Xs, Xg, Xr), axis=0)
        list_rows = []
        for i in range(int(X.shape[0] // 4)):
            Xr = np.concatenate([X[k] for k in range(4 * i, 4 * (i + 1))], axis=2)
            list_rows.append(Xr)

        Xr = np.concatenate(list_rows, axis=1)
        Xr = Xr.transpose(1,2,0)

    if Xr.shape[-1] == 1:
        plt.imshow(Xr[:, :, 0], cmap="gray")
    else:
        plt.imshow(Xr)
    plt.axis("off")
    plt.savefig(os.path.join(logging_dir, "figures/current_batch_%s.png" % suffix))
    plt.clf()
    plt.close() 
Example #16
Source File: mode_solver.py    From modesolverpy with MIT License
def _plot_n_effs(self, filename_n_effs, filename_te_fractions, xlabel, ylabel, title):
        args = {
            "titl": title,
            "xlab": xlabel,
            "ylab": ylabel,
            "filename_data": filename_n_effs,
            "filename_frac_te": filename_te_fractions,
            "filename_image": None,
            "num_modes": len(self.modes),
        }

        filename_image_prefix, _ = os.path.splitext(filename_n_effs)
        filename_image = filename_image_prefix + ".png"
        args["filename_image"] = filename_image

        if MPL:
            data = np.loadtxt(args["filename_data"], delimiter=",").T
            plt.clf()
            plt.title(title)
            plt.xlabel(args["xlab"])
            plt.ylabel(args["ylab"])
            for i in range(args["num_modes"]):
                plt.plot(data[0], data[i + 1], "-o")
            plt.savefig(args["filename_image"])
        else:
            gp.gnuplot(self._path + "n_effs.gpi", args, silent=False)
            gp.trim_pad_image(filename_image)

        return args 
Example #17
Source File: mode_solver.py    From modesolverpy with MIT License
def _plot_fraction(
        self, filename_fraction, xlabel, ylabel, title, mode_list=[]
    ):
        if not mode_list:
            mode_list = range(len(self.modes))
        gp_mode_list = " ".join(str(idx) for idx in mode_list)

        args = {
            "titl": title,
            "xlab": xlabel,
            "ylab": ylabel,
            "filename_data": filename_fraction,
            "filename_image": None,
            "mode_list": gp_mode_list,
        }

        filename_image_prefix, _ = os.path.splitext(filename_fraction)
        filename_image = filename_image_prefix + ".png"
        args["filename_image"] = filename_image

        if MPL:
            data = np.loadtxt(args["filename_data"], delimiter=",").T
            plt.clf()
            plt.title(title)
            plt.xlabel(args["xlab"])
            plt.ylabel(args["ylab"])
            for i, _ in enumerate(self.modes):
                plt.plot(data[0], data[i + 1], "-o")
            plt.savefig(args["filename_image"])
        else:
            gp.gnuplot(self._path + "fractions.gpi", args, silent=False)
            gp.trim_pad_image(filename_image)

        return args 
Example #18
Source File: gaussianize.py    From gaussianize with MIT License
def qqplot(self, x: np.ndarray, prefix: Text = 'qq', output_dir: Text = "/tmp/"):
        """Show qq plots compared to normal before and after the transform."""
        x = _update_x(x)
        y = self.transform(x)
        n_dim = y.shape[1]
        for i in range(n_dim):
            stats.probplot(x[:, i], dist="norm", plot=plt)
            plt.savefig(os.path.join(output_dir, prefix + '_%d_before.png' % i))
            plt.clf()
            stats.probplot(y[:, i], dist="norm", plot=plt)
            plt.savefig(os.path.join(output_dir, prefix + '_%d_after.png' % i))
            plt.clf() 
Example #19
Source File: monitor.py    From cortex_old with GNU General Public License v3.0
def save(self, out_path):
        '''Saves a figure for the monitor
        
        Args:
            out_path: str
        '''
        
        plt.clf()
        np.set_printoptions(precision=4)
        font = {
            'size': 7
        }
        matplotlib.rc('font', **font)
        y = 2
        x = ((len(self.d) - 1) // y) + 1
        fig, axes = plt.subplots(y, x)
        fig.set_size_inches(20, 8)

        for j, (k, v) in enumerate(self.d.iteritems()):
            ax = axes[j // x, j % x]
            ax.plot(v, label=k)
            if k in self.d_valid.keys():
                ax.plot(self.d_valid[k], label=k + '(valid)')
            ax.set_title(k)
            ax.legend()

        plt.tight_layout()
        plt.savefig(out_path, facecolor=(1, 1, 1))
        plt.close() 
Example #20
Source File: data_utils.py    From DeepLearningImplementations with MIT License
def plot_generated_batch(X_real, generator_model, batch_size, noise_dim, image_data_format, noise_scale=0.5):

    # Generate images
    X_gen = sample_noise(noise_scale, batch_size, noise_dim)
    X_gen = generator_model.predict(X_gen)

    X_real = inverse_normalization(X_real)
    X_gen = inverse_normalization(X_gen)

    Xg = X_gen[:8]
    Xr = X_real[:8]

    if image_data_format == "channels_last":
        X = np.concatenate((Xg, Xr), axis=0)
        list_rows = []
        for i in range(int(X.shape[0] / 4)):
            Xr = np.concatenate([X[k] for k in range(4 * i, 4 * (i + 1))], axis=1)
            list_rows.append(Xr)

        Xr = np.concatenate(list_rows, axis=0)

    if image_data_format == "channels_first":
        X = np.concatenate((Xg, Xr), axis=0)
        list_rows = []
        for i in range(int(X.shape[0] / 4)):
            Xr = np.concatenate([X[k] for k in range(4 * i, 4 * (i + 1))], axis=2)
            list_rows.append(Xr)

        Xr = np.concatenate(list_rows, axis=1)
        Xr = Xr.transpose(1,2,0)

    if Xr.shape[-1] == 1:
        plt.imshow(Xr[:, :, 0], cmap="gray")
    else:
        plt.imshow(Xr)
    plt.savefig("../../figures/current_batch.png")
    plt.clf()
    plt.close() 
Example #21
Source File: visualization_utils.py    From DeepLearningImplementations with MIT License
def save_image(data, data_format, e, suffix=None):
    """Saves a picture showing the current progress of the model"""

    X_G, X_real = data

    Xg = X_G[:8]
    Xr = X_real[:8]

    if data_format == "NHWC":
        X = np.concatenate((Xg, Xr), axis=0)
        list_rows = []
        for i in range(int(X.shape[0] / 4)):
            Xr = np.concatenate([X[k] for k in range(4 * i, 4 * (i + 1))], axis=1)
            list_rows.append(Xr)

        Xr = np.concatenate(list_rows, axis=0)

    if data_format == "NCHW":
        X = np.concatenate((Xg, Xr), axis=0)
        list_rows = []
        for i in range(int(X.shape[0] / 4)):
            Xr = np.concatenate([X[k] for k in range(4 * i, 4 * (i + 1))], axis=2)
            list_rows.append(Xr)

        Xr = np.concatenate(list_rows, axis=1)
        Xr = Xr.transpose(1,2,0)

    if Xr.shape[-1] == 1:
        plt.imshow(Xr[:, :, 0], cmap="gray")
    else:
        plt.imshow(Xr)
    plt.axis("off")
    if suffix is None:
        plt.savefig(os.path.join(FLAGS.fig_dir, "current_batch_%s.png" % e))
    else:
        plt.savefig(os.path.join(FLAGS.fig_dir, "current_batch_%s_%s.png" % (suffix, e)))
    plt.clf()
    plt.close() 
Example #22
Source File: data_utils.py    From Pix2Depth with GNU General Public License v3.0
def plot_generated_batch(X_full, X_sketch, generator_model, batch_size, image_data_format, suffix, show_plot=False):

    # Generate images
    X_gen = generator_model.predict(X_sketch)

    X_sketch = inverse_normalization(X_sketch)
    X_full = inverse_normalization(X_full)
    X_gen = inverse_normalization(X_gen)

    Xs = X_sketch[:8]
    Xg = X_gen[:8]
    Xr = X_full[:8]

    if image_data_format == "channels_last":
        X = np.concatenate((Xs, Xg, Xr), axis=0)
        list_rows = []
        for i in range(int(X.shape[0] // 4)):
            Xr = np.concatenate([X[k] for k in range(4 * i, 4 * (i + 1))], axis=1)
            list_rows.append(Xr)

        Xr = np.concatenate(list_rows, axis=0)

    if image_data_format == "channels_first":
        X = np.concatenate((Xs, Xg, Xr), axis=0)
        list_rows = []
        for i in range(int(X.shape[0] // 4)):
            Xr = np.concatenate([X[k] for k in range(4 * i, 4 * (i + 1))], axis=2)
            list_rows.append(Xr)

        Xr = np.concatenate(list_rows, axis=1)
        Xr = Xr.transpose(1,2,0)

    if show_plot:
        if Xr.shape[-1] == 1:
            plt.imshow(Xr[:, :, 0], cmap="gray")
        else:
            plt.imshow(Xr)
    plt.axis("off")
    plt.savefig("../../figures/current_batch_%s.png" % suffix)
    plt.clf()
    plt.close() 
Example #23
Source File: data_utils.py    From DeepLearningImplementations with MIT License
def plot_generated_batch(X_real, generator_model, batch_size, cat_dim, cont_dim, noise_dim, image_data_format, noise_scale=0.5):

    # Generate images
    y_cat = sample_cat(batch_size, cat_dim)
    y_cont = sample_noise(noise_scale, batch_size, cont_dim)
    noise_input = sample_noise(noise_scale, batch_size, noise_dim)
    # Produce an output
    X_gen = generator_model.predict([y_cat, y_cont, noise_input],batch_size=batch_size)

    X_real = inverse_normalization(X_real)
    X_gen = inverse_normalization(X_gen)

    Xg = X_gen[:8]
    Xr = X_real[:8]

    if image_data_format == "channels_last":
        X = np.concatenate((Xg, Xr), axis=0)
        list_rows = []
        for i in range(int(X.shape[0] / 4)):
            Xr = np.concatenate([X[k] for k in range(4 * i, 4 * (i + 1))], axis=1)
            list_rows.append(Xr)

        Xr = np.concatenate(list_rows, axis=0)

    if image_data_format == "channels_first":
        X = np.concatenate((Xg, Xr), axis=0)
        list_rows = []
        for i in range(int(X.shape[0] / 4)):
            Xr = np.concatenate([X[k] for k in range(4 * i, 4 * (i + 1))], axis=2)
            list_rows.append(Xr)

        Xr = np.concatenate(list_rows, axis=1)
        Xr = Xr.transpose(1,2,0)

    if Xr.shape[-1] == 1:
        plt.imshow(Xr[:, :, 0], cmap="gray")
    else:
        plt.imshow(Xr)
    plt.savefig("../../figures/current_batch.png")
    plt.clf()
    plt.close() 
Example #24
Source File: plot_results.py    From DeepLearningImplementations with MIT License
def plot_cifar10(save=True):

    with open("./log/experiment_log_cifar10.json", "r") as f:
        d = json.load(f)

    train_accuracy = 100 * (np.array(d["train_loss"])[:, 1])
    test_accuracy = 100 * (np.array(d["test_loss"])[:, 1])

    fig = plt.figure()
    ax1 = fig.add_subplot(111)
    ax1.set_ylabel('Accuracy')
    ax1.plot(train_accuracy, color="tomato", linewidth=2, label='train_acc')
    ax1.plot(test_accuracy, color="steelblue", linewidth=2, label='test_acc')
    ax1.legend(loc=0)

    train_loss = np.array(d["train_loss"])[:, 0]
    test_loss = np.array(d["test_loss"])[:, 0]

    ax2 = ax1.twinx()
    ax2.set_ylabel('Loss')
    ax2.plot(train_loss, '--', color="tomato", linewidth=2, label='train_loss')
    ax2.plot(test_loss, '--', color="steelblue", linewidth=2, label='test_loss')
    ax2.legend(loc=1)

    ax1.grid(True)

    if save:
        fig.savefig('./figures/plot_cifar10.svg')

    plt.show()
    plt.clf()
    plt.close() 
Example #25
Source File: ClimatologySpark.py    From incubator-sdap-nexus with Apache License 2.0
def histogram(vals, variable, n, outFile):
    figFile = os.path.splitext(outFile)[0] + '_hist.png'
    M.clf()
#    M.hist(vals, 47, (-2., 45.))
    M.hist(vals, 94)
    M.xlim(-5, 45)
    M.xlabel('SST in degrees Celsius')
    M.ylim(0, 300000)
    M.ylabel('Count')
    M.title('Histogram of %s %d-day Mean from %s' % (variable.upper(), n, outFile))
    M.show()
    print >>sys.stderr, 'Writing histogram plot to %s' % figFile
    M.savefig(figFile)
    return figFile 
Example #26
Source File: visualization_utils.py    From DeepLearningImplementations with MIT License
def save_image(data, data_format, e, suffix=None):
    """Saves a picture showing the current progress of the model"""

    X_G, X_real = data

    Xg = X_G[:8]
    Xr = X_real[:8]

    if data_format == "NHWC":
        X = np.concatenate((Xg, Xr), axis=0)
        list_rows = []
        for i in range(int(X.shape[0] / 4)):
            Xr = np.concatenate([X[k] for k in range(4 * i, 4 * (i + 1))], axis=1)
            list_rows.append(Xr)

        Xr = np.concatenate(list_rows, axis=0)

    if data_format == "NCHW":
        X = np.concatenate((Xg, Xr), axis=0)
        list_rows = []
        for i in range(int(X.shape[0] / 4)):
            Xr = np.concatenate([X[k] for k in range(4 * i, 4 * (i + 1))], axis=2)
            list_rows.append(Xr)

        Xr = np.concatenate(list_rows, axis=1)
        Xr = Xr.transpose(1,2,0)

    if Xr.shape[-1] == 1:
        plt.imshow(Xr[:, :, 0], cmap="gray")
    else:
        plt.imshow(Xr)
    plt.axis("off")
    if suffix is None:
        plt.savefig(os.path.join(FLAGS.fig_dir, "current_batch_%s.png" % e))
    else:
        plt.savefig(os.path.join(FLAGS.fig_dir, "current_batch_%s_%s.png" % (suffix, e)))
    plt.clf()
    plt.close() 
Example #27
Source File: make_dataset.py    From DeepLearningImplementations with MIT License
def check_HDF5(size=64):
    """
    Plot images with landmarks to check the processing
    """

    # Get hdf5 file
    hdf5_file = os.path.join(data_dir, "CelebA_%s_data.h5" % size)

    with h5py.File(hdf5_file, "r") as hf:
        data_color = hf["training_color_data"]
        data_lab = hf["training_lab_data"]
        data_black = hf["training_black_data"]
        for i in range(data_color.shape[0]):
            fig = plt.figure()
            gs = gridspec.GridSpec(3, 1)
            for k in range(3):
                ax = plt.subplot(gs[k])
                if k == 0:
                    img = data_color[i, :, :, :].transpose(1,2,0)
                    ax.imshow(img)
                elif k == 1:
                    img = data_lab[i, :, :, :].transpose(1,2,0)
                    img = color.lab2rgb(img)
                    ax.imshow(img)
                elif k == 2:
                    img = data_black[i, 0, :, :] / 255.
                    ax.imshow(img, cmap="gray")
            gs.tight_layout(fig)
            plt.show()
            plt.clf()
            plt.close() 
Example #28
Source File: data_utils.py    From DeepLearningImplementations with MIT License
def plot_generated_batch(X_real, generator_model, batch_size, noise_dim, image_data_format, noise_scale=0.5):

    # Generate images
    X_gen = sample_noise(noise_scale, batch_size, noise_dim)
    X_gen = generator_model.predict(X_gen)

    X_real = inverse_normalization(X_real)
    X_gen = inverse_normalization(X_gen)

    Xg = X_gen[:8]
    Xr = X_real[:8]

    if image_data_format == "channels_last":
        X = np.concatenate((Xg, Xr), axis=0)
        list_rows = []
        for i in range(int(X.shape[0] / 4)):
            Xr = np.concatenate([X[k] for k in range(4 * i, 4 * (i + 1))], axis=1)
            list_rows.append(Xr)

        Xr = np.concatenate(list_rows, axis=0)

    if image_data_format == "channels_first":
        X = np.concatenate((Xg, Xr), axis=0)
        list_rows = []
        for i in range(int(X.shape[0] / 4)):
            Xr = np.concatenate([X[k] for k in range(4 * i, 4 * (i + 1))], axis=2)
            list_rows.append(Xr)

        Xr = np.concatenate(list_rows, axis=1)
        Xr = Xr.transpose(1,2,0)

    if Xr.shape[-1] == 1:
        plt.imshow(Xr[:, :, 0], cmap="gray")
    else:
        plt.imshow(Xr)
    plt.savefig("../../figures/current_batch.png")
    plt.clf()
    plt.close() 
Example #29
Source File: utils.py    From Building-Machine-Learning-Systems-With-Python-Second-Edition with MIT License
def plot_feat_hist(data_name_list, filename=None):
    pylab.clf()
    num_rows = 1 + (len(data_name_list) - 1) / 2
    num_cols = 1 if len(data_name_list) == 1 else 2
    pylab.figure(figsize=(5 * num_cols, 4 * num_rows))

    for i in range(num_rows):
        for j in range(num_cols):
            pylab.subplot(num_rows, num_cols, 1 + i * num_cols + j)
            x, name = data_name_list[i * num_cols + j]
            pylab.title(name)
            pylab.xlabel('Value')
            pylab.ylabel('Density')
            # the histogram of the data
            max_val = np.max(x)
            if max_val <= 1.0:
                bins = 50
            elif max_val > 50:
                bins = 50
            else:
                bins = max_val
            n, bins, patches = pylab.hist(
                x, bins=bins, normed=1, facecolor='green', alpha=0.75)

            pylab.grid(True)

    if not filename:
        filename = "feat_hist_%s.png" % name

    pylab.savefig(os.path.join(CHART_DIR, filename), bbox_inches="tight") 
Example #30
Source File: utils.py    From Building-Machine-Learning-Systems-With-Python-Second-Edition with MIT License
def plot_pr(auc_score, name, phase, precision, recall, label=None):
    pylab.clf()
    pylab.figure(num=None, figsize=(5, 4))
    pylab.grid(True)
    pylab.fill_between(recall, precision, alpha=0.5)
    pylab.plot(recall, precision, lw=1)
    pylab.xlim([0.0, 1.0])
    pylab.ylim([0.0, 1.0])
    pylab.xlabel('Recall')
    pylab.ylabel('Precision')
    pylab.title('P/R curve (AUC=%0.2f) / %s' % (auc_score, label))
    filename = name.replace(" ", "_")
    pylab.savefig(os.path.join(CHART_DIR, "pr_%s_%s.png" %
                  (filename, phase)), bbox_inches="tight")