Python pylab.close() Examples
The following are 30 code examples of pylab.close().
You can vote up the ones you like or vote down the ones you don't like,
and you can go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module pylab,
or try the search function.
Example #1
Source File: fix_shot_times.py From nba-movement-data with MIT License | 6 votes |
def plot(t, plots, shot_ind): n = len(plots) for i in range(0,n): label, data = plots[i] plt = py.subplot(n, 1, i+1) plt.tick_params(labelsize=8) py.grid() py.xlim([t[0], t[-1]]) py.ylabel(label) py.plot(t, data, 'k-') py.scatter(t[shot_ind], data[shot_ind], marker='*', c='g') py.xlabel("Time") py.show() py.close()
Example #2
Source File: vis_topic.py From corex_topic with Apache License 2.0 | 6 votes |
def output_groups(tcs, alpha, mis, column_label, direction, thresh=0, prefix=''): f = safe_open(prefix + '/groups.txt', 'w+') h = safe_open(prefix + '/topics.txt', 'w+') m, nv = mis.shape annotate = lambda q, s: q if s >= 0 else '~' + q for j in range(m): f.write('Group num: %d, TC(X;Y_j): %0.3f\n' % (j, tcs[j])) # inds = np.where(alpha[j] * mis[j] > thresh)[0] inds = np.where(alpha[j] >= 1.)[0] inds = inds[np.argsort(-alpha[j, inds] * mis[j, inds])] for ind in inds: f.write(column_label[ind] + u', %0.3f, %0.3f, %0.3f\n' % ( mis[j, ind], alpha[j, ind], mis[j, ind] * alpha[j, ind])) #h.write(unicode(j) + u':' + u','.join([annotate(column_label[ind], direction[j,ind]) for ind in inds[:10]]) + u'\n') h.write(str(j) + u':' + u','.join( [annotate(column_label[ind], direction[j, ind]) for ind in inds[:10]]) + u'\n') f.close() h.close()
Example #3
Source File: metric.py From SceneChangeDet with MIT License | 6 votes |
def save_PTZ_metric2disk(metrics,save_path): import json #metric_dict= {} recall_ = list(metrics['metric']['recall']) precision_ = list(metrics['metric']['precision']) f_score = metrics['metric']['MaxF'] try: iu = metrics['metric']['iu'] except KeyError: iu = 0.0 cont_embedding = metrics['contrast_embedding'] metric_ = {'recall':recall_,'precision':precision_,'f-score':f_score,'iu':iu, 'contrast_embedding':cont_embedding} file_ = open(save_path + '/metric.json', 'w') file_.write(json.dumps(metric_, ensure_ascii=False, indent=2)) file_.close()
Example #4
Source File: metric.py From SceneChangeDet with MIT License | 6 votes |
def save_metric2disk(metrics,save_path): import json length = len(metrics) metric_dict= {} for i in range(length): recall_ = list(metrics[i]['metric']['recall']) name = metrics[i]['name'] precision_ = list(metrics[i]['metric']['precision']) f_score = metrics[i]['metric']['MaxF'] try: iu = metrics[i]['metric']['iu'] except KeyError: iu = 0.0 metric_ = {'name':name,'recall':recall_,'precision':precision_,'f-score':f_score,'iu':iu} metric_dict.setdefault(i,metric_) file_ = open(save_path + '/metric.json', 'w') file_.write(json.dumps(metric_dict, ensure_ascii=False, indent=2)) file_.close()
Example #5
Source File: image_ocr.py From DeepLearning_Wavelet-LSTM with MIT License | 6 votes |
def on_epoch_end(self, epoch, logs={}): self.model.save_weights(os.path.join(self.output_dir, 'weights%02d.h5' % (epoch))) self.show_edit_distance(256) word_batch = next(self.text_img_gen)[0] res = decode_batch(self.test_func, word_batch['the_input'][0:self.num_display_words]) if word_batch['the_input'][0].shape[0] < 256: cols = 2 else: cols = 1 for i in range(self.num_display_words): pylab.subplot(self.num_display_words // cols, cols, i + 1) if K.image_data_format() == 'channels_first': the_input = word_batch['the_input'][i, 0, :, :] else: the_input = word_batch['the_input'][i, :, :, 0] pylab.imshow(the_input.T, cmap='Greys_r') pylab.xlabel('Truth = \'%s\'\nDecoded = \'%s\'' % (word_batch['source_str'][i], res[i])) fig = pylab.gcf() fig.set_size_inches(10, 13) pylab.savefig(os.path.join(self.output_dir, 'e%02d.png' % (epoch))) pylab.close()
Example #6
Source File: image_ocr.py From DeepLearning_Wavelet-LSTM with MIT License | 6 votes |
def on_epoch_end(self, epoch, logs={}): self.model.save_weights(os.path.join(self.output_dir, 'weights%02d.h5' % (epoch))) self.show_edit_distance(256) word_batch = next(self.text_img_gen)[0] res = decode_batch(self.test_func, word_batch['the_input'][0:self.num_display_words]) if word_batch['the_input'][0].shape[0] < 256: cols = 2 else: cols = 1 for i in range(self.num_display_words): pylab.subplot(self.num_display_words // cols, cols, i + 1) if K.image_data_format() == 'channels_first': the_input = word_batch['the_input'][i, 0, :, :] else: the_input = word_batch['the_input'][i, :, :, 0] pylab.imshow(the_input.T, cmap='Greys_r') pylab.xlabel('Truth = \'%s\'\nDecoded = \'%s\'' % (word_batch['source_str'][i], res[i])) fig = pylab.gcf() fig.set_size_inches(10, 13) pylab.savefig(os.path.join(self.output_dir, 'e%02d.png' % (epoch))) pylab.close()
Example #7
Source File: image_ocr.py From DeepLearning_Wavelet-LSTM with MIT License | 6 votes |
def on_epoch_end(self, epoch, logs={}): self.model.save_weights(os.path.join(self.output_dir, 'weights%02d.h5' % (epoch))) self.show_edit_distance(256) word_batch = next(self.text_img_gen)[0] res = decode_batch(self.test_func, word_batch['the_input'][0:self.num_display_words]) if word_batch['the_input'][0].shape[0] < 256: cols = 2 else: cols = 1 for i in range(self.num_display_words): pylab.subplot(self.num_display_words // cols, cols, i + 1) if K.image_data_format() == 'channels_first': the_input = word_batch['the_input'][i, 0, :, :] else: the_input = word_batch['the_input'][i, :, :, 0] pylab.imshow(the_input.T, cmap='Greys_r') pylab.xlabel('Truth = \'%s\'\nDecoded = \'%s\'' % (word_batch['source_str'][i], res[i])) fig = pylab.gcf() fig.set_size_inches(10, 13) pylab.savefig(os.path.join(self.output_dir, 'e%02d.png' % (epoch))) pylab.close()
Example #8
Source File: image_ocr.py From DeepLearning_Wavelet-LSTM with MIT License | 6 votes |
def on_epoch_end(self, epoch, logs={}): self.model.save_weights(os.path.join(self.output_dir, 'weights%02d.h5' % (epoch))) self.show_edit_distance(256) word_batch = next(self.text_img_gen)[0] res = decode_batch(self.test_func, word_batch['the_input'][0:self.num_display_words]) if word_batch['the_input'][0].shape[0] < 256: cols = 2 else: cols = 1 for i in range(self.num_display_words): pylab.subplot(self.num_display_words // cols, cols, i + 1) if K.image_data_format() == 'channels_first': the_input = word_batch['the_input'][i, 0, :, :] else: the_input = word_batch['the_input'][i, :, :, 0] pylab.imshow(the_input.T, cmap='Greys_r') pylab.xlabel('Truth = \'%s\'\nDecoded = \'%s\'' % (word_batch['source_str'][i], res[i])) fig = pylab.gcf() fig.set_size_inches(10, 13) pylab.savefig(os.path.join(self.output_dir, 'e%02d.png' % (epoch))) pylab.close()
Example #9
Source File: image_ocr.py From DeepLearning_Wavelet-LSTM with MIT License | 6 votes |
def on_epoch_end(self, epoch, logs={}): self.model.save_weights(os.path.join(self.output_dir, 'weights%02d.h5' % (epoch))) self.show_edit_distance(256) word_batch = next(self.text_img_gen)[0] res = decode_batch(self.test_func, word_batch['the_input'][0:self.num_display_words]) if word_batch['the_input'][0].shape[0] < 256: cols = 2 else: cols = 1 for i in range(self.num_display_words): pylab.subplot(self.num_display_words // cols, cols, i + 1) if K.image_data_format() == 'channels_first': the_input = word_batch['the_input'][i, 0, :, :] else: the_input = word_batch['the_input'][i, :, :, 0] pylab.imshow(the_input.T, cmap='Greys_r') pylab.xlabel('Truth = \'%s\'\nDecoded = \'%s\'' % (word_batch['source_str'][i], res[i])) fig = pylab.gcf() fig.set_size_inches(10, 13) pylab.savefig(os.path.join(self.output_dir, 'e%02d.png' % (epoch))) pylab.close()
Example #10
Source File: image_ocr.py From DeepLearning_Wavelet-LSTM with MIT License | 6 votes |
def on_epoch_end(self, epoch, logs={}): self.model.save_weights(os.path.join(self.output_dir, 'weights%02d.h5' % (epoch))) self.show_edit_distance(256) word_batch = next(self.text_img_gen)[0] res = decode_batch(self.test_func, word_batch['the_input'][0:self.num_display_words]) if word_batch['the_input'][0].shape[0] < 256: cols = 2 else: cols = 1 for i in range(self.num_display_words): pylab.subplot(self.num_display_words // cols, cols, i + 1) if K.image_data_format() == 'channels_first': the_input = word_batch['the_input'][i, 0, :, :] else: the_input = word_batch['the_input'][i, :, :, 0] pylab.imshow(the_input.T, cmap='Greys_r') pylab.xlabel('Truth = \'%s\'\nDecoded = \'%s\'' % (word_batch['source_str'][i], res[i])) fig = pylab.gcf() fig.set_size_inches(10, 13) pylab.savefig(os.path.join(self.output_dir, 'e%02d.png' % (epoch))) pylab.close()
Example #11
Source File: image_ocr.py From DeepLearning_Wavelet-LSTM with MIT License | 6 votes |
def on_epoch_end(self, epoch, logs={}): self.model.save_weights(os.path.join(self.output_dir, 'weights%02d.h5' % (epoch))) self.show_edit_distance(256) word_batch = next(self.text_img_gen)[0] res = decode_batch(self.test_func, word_batch['the_input'][0:self.num_display_words]) if word_batch['the_input'][0].shape[0] < 256: cols = 2 else: cols = 1 for i in range(self.num_display_words): pylab.subplot(self.num_display_words // cols, cols, i + 1) if K.image_data_format() == 'channels_first': the_input = word_batch['the_input'][i, 0, :, :] else: the_input = word_batch['the_input'][i, :, :, 0] pylab.imshow(the_input.T, cmap='Greys_r') pylab.xlabel('Truth = \'%s\'\nDecoded = \'%s\'' % (word_batch['source_str'][i], res[i])) fig = pylab.gcf() fig.set_size_inches(10, 13) pylab.savefig(os.path.join(self.output_dir, 'e%02d.png' % (epoch))) pylab.close()
Example #12
Source File: image_ocr.py From pCVR with Apache License 2.0 | 6 votes |
def on_epoch_end(self, epoch, logs={}): self.model.save_weights(os.path.join(self.output_dir, 'weights%02d.h5' % (epoch))) self.show_edit_distance(256) word_batch = next(self.text_img_gen)[0] res = decode_batch(self.test_func, word_batch['the_input'][0:self.num_display_words]) if word_batch['the_input'][0].shape[0] < 256: cols = 2 else: cols = 1 for i in range(self.num_display_words): pylab.subplot(self.num_display_words // cols, cols, i + 1) if K.image_data_format() == 'channels_first': the_input = word_batch['the_input'][i, 0, :, :] else: the_input = word_batch['the_input'][i, :, :, 0] pylab.imshow(the_input.T, cmap='Greys_r') pylab.xlabel('Truth = \'%s\'\nDecoded = \'%s\'' % (word_batch['source_str'][i], res[i])) fig = pylab.gcf() fig.set_size_inches(10, 13) pylab.savefig(os.path.join(self.output_dir, 'e%02d.png' % (epoch))) pylab.close()
Example #13
Source File: func.py From NEUCOGAR with GNU General Public License v2.0 | 6 votes |
def save(GUI): global txtResultPath if GUI: import pylab as pl import nest.raster_plot import nest.voltage_trace logger.debug("Saving IMAGES into {0}".format(SAVE_PATH)) for key in spikedetectors: try: nest.raster_plot.from_device(spikedetectors[key], hist=True) pl.savefig(f_name_gen(SAVE_PATH, "spikes_" + key.lower()), dpi=dpi_n, format='png') pl.close() except Exception: print(" * * * from {0} is NOTHING".format(key)) txtResultPath = SAVE_PATH + 'txt/' logger.debug("Saving TEXT into {0}".format(txtResultPath)) if not os.path.exists(txtResultPath): os.mkdir(txtResultPath) for key in spikedetectors: save_spikes(spikedetectors[key], name=key) with open(txtResultPath + 'timeSimulation.txt', 'w') as f: for item in times: f.write(item)
Example #14
Source File: output.py From NEUCOGAR with GNU General Public License v2.0 | 6 votes |
def save_voltage(multimeters): import h5py print "Write to HDF5 file" filename = "voltage.hdf5" timestamp = datetime.datetime.now() with h5py.File(filename, "w") as f: f.attrs['default'] = 'entry' f.attrs['file_name'] = filename f.attrs['file_time'] = str(timestamp) f.create_dataset(key, data=nest.GetStatus(multimeters[key], "events")[0]["V_m"]) f.close() print "wrote file:", filename # title = "Membrane potential" # ev = nest.GetStatus(detec, "events")[0] # with open("{0}@voltage_{1}.txt".format(txt_result_path, name), 'w') as f: # f.write("Name: {0}, Title: {1}\n".format(name, title)) # print int(T / multimeter_param['interval']) # for line in range(0, int(T / multimeter_param['interval'])): # for index in range(0, N_volt): # print "{0} {1} ".format(ev["times"][line], ev["V_m"][line]) # #f.write("\n") # print "\n"
Example #15
Source File: output.py From NEUCOGAR with GNU General Public License v2.0 | 6 votes |
def save_voltage(multimeters): import h5py print "Write to HDF5 file" filename = "voltage.hdf5" timestamp = datetime.datetime.now() with h5py.File(filename, "w") as f: f.attrs['default'] = 'entry' f.attrs['file_name'] = filename f.attrs['file_time'] = str(timestamp) f.create_dataset(key, data=nest.GetStatus(multimeters[key], "events")[0]["V_m"]) f.close() print "wrote file:", filename # title = "Membrane potential" # ev = nest.GetStatus(detec, "events")[0] # with open("{0}@voltage_{1}.txt".format(txt_result_path, name), 'w') as f: # f.write("Name: {0}, Title: {1}\n".format(name, title)) # print int(T / multimeter_param['interval']) # for line in range(0, int(T / multimeter_param['interval'])): # for index in range(0, N_volt): # print "{0} {1} ".format(ev["times"][line], ev["V_m"][line]) # #f.write("\n") # print "\n"
Example #16
Source File: func.py From NEUCOGAR with GNU General Public License v2.0 | 6 votes |
def save_voltage(multimeters): import h5py print "Write to HDF5 file" filename = "voltage.hdf5" timestamp = datetime.datetime.now() with h5py.File(filename, "w") as f: f.attrs['default'] = 'entry' f.attrs['file_name'] = filename f.attrs['file_time'] = str(timestamp) f.create_dataset(key, data=nest.GetStatus(multimeters[key], "events")[0]["V_m"]) f.close() print "wrote file:", filename #title = "Membrane potential" #ev = nest.GetStatus(detec, "events")[0] #with open("{0}@voltage_{1}.txt".format(txt_result_path, name), 'w') as f: # f.write("Name: {0}, Title: {1}\n".format(name, title)) # print int(T / multimeter_param['interval']) # for line in range(0, int(T / multimeter_param['interval'])): # for index in range(0, N_volt): # print "{0} {1} ".format(ev["times"][line], ev["V_m"][line]) # #f.write("\n") # print "\n"
Example #17
Source File: output.py From NEUCOGAR with GNU General Public License v2.0 | 6 votes |
def save_voltage(multimeters): import h5py print "Write to HDF5 file" filename = "voltage.hdf5" timestamp = datetime.datetime.now() with h5py.File(filename, "w") as f: f.attrs['default'] = 'entry' f.attrs['file_name'] = filename f.attrs['file_time'] = str(timestamp) f.create_dataset(key, data=nest.GetStatus(multimeters[key], "events")[0]["V_m"]) f.close() print "wrote file:", filename # title = "Membrane potential" # ev = nest.GetStatus(detec, "events")[0] # with open("{0}@voltage_{1}.txt".format(txt_result_path, name), 'w') as f: # f.write("Name: {0}, Title: {1}\n".format(name, title)) # print int(T / multimeter_param['interval']) # for line in range(0, int(T / multimeter_param['interval'])): # for index in range(0, N_volt): # print "{0} {1} ".format(ev["times"][line], ev["V_m"][line]) # #f.write("\n") # print "\n"
Example #18
Source File: output.py From NEUCOGAR with GNU General Public License v2.0 | 6 votes |
def save_voltage(multimeters): import h5py print "Write to HDF5 file" filename = "voltage.hdf5" timestamp = datetime.datetime.now() with h5py.File(filename, "w") as f: f.attrs['default'] = 'entry' f.attrs['file_name'] = filename f.attrs['file_time'] = str(timestamp) f.create_dataset(key, data=nest.GetStatus(multimeters[key], "events")[0]["V_m"]) f.close() print "wrote file:", filename # title = "Membrane potential" # ev = nest.GetStatus(detec, "events")[0] # with open("{0}@voltage_{1}.txt".format(txt_result_path, name), 'w') as f: # f.write("Name: {0}, Title: {1}\n".format(name, title)) # print int(T / multimeter_param['interval']) # for line in range(0, int(T / multimeter_param['interval'])): # for index in range(0, N_volt): # print "{0} {1} ".format(ev["times"][line], ev["V_m"][line]) # #f.write("\n") # print "\n"
Example #19
Source File: vis_corex.py From bio_corex with Apache License 2.0 | 6 votes |
def output_groups(tcs, alpha, mis, column_label, thresh=0, prefix=''): f = safe_open(prefix + '/text_files/groups.txt', 'w+') g = safe_open(prefix + '/text_files/groups_no_overlaps.txt', 'w+') m, nv = mis.shape for j in range(m): f.write('Group num: %d, TC(X;Y_j): %0.3f\n' % (j, tcs[j])) g.write('Group num: %d, TC(X;Y_j): %0.3f\n' % (j, tcs[j])) inds = np.where(alpha[j] * mis[j] > thresh)[0] inds = inds[np.argsort(-alpha[j, inds] * mis[j, inds])] for ind in inds: f.write(column_label[ind] + ', %0.3f, %0.3f, %0.3f\n' % ( mis[j, ind], alpha[j, ind], mis[j, ind] * alpha[j, ind])) inds = np.where(alpha[j] == 1)[0] inds = inds[np.argsort(- mis[j, inds])] for ind in inds: g.write(column_label[ind] + ', %0.3f\n' % mis[j, ind]) f.close() g.close()
Example #20
Source File: vis_corex.py From LinearCorex with GNU Affero General Public License v3.0 | 6 votes |
def plot_convergence(history, prefix='', prefix2=''): plt.figure(figsize=(8, 5)) ax = plt.subplot(111) ax.get_xaxis().tick_bottom() ax.get_yaxis().tick_left() plt.plot(history["TC"], '-', lw=2.5, color=tableau20[0]) x = len(history["TC"]) y = np.max(history["TC"]) plt.text(0.5 * x, 0.8 * y, "TC", fontsize=18, fontweight='bold', color=tableau20[0]) if "additivity" in history: plt.plot(history["additivity"], '-', lw=2.5, color=tableau20[1]) plt.text(0.5 * x, 0.3 * y, "additivity", fontsize=18, fontweight='bold', color=tableau20[1]) plt.ylabel('TC', fontsize=12, fontweight='bold') plt.xlabel('# Iterations', fontsize=12, fontweight='bold') plt.suptitle('Convergence', fontsize=12) filename = '{}/summary/convergence{}.pdf'.format(prefix, prefix2) if not os.path.exists(os.path.dirname(filename)): os.makedirs(os.path.dirname(filename)) plt.savefig(filename, bbox_inches="tight") plt.close('all') return True
Example #21
Source File: func.py From NEUCOGAR with GNU General Public License v2.0 | 6 votes |
def save(GUI): global txtResultPath if GUI: import pylab as pl import nest.raster_plot import nest.voltage_trace logger.debug("Saving IMAGES into {0}".format(SAVE_PATH)) for key in spikedetectors: try: nest.raster_plot.from_device(spikedetectors[key], hist=True) pl.savefig(f_name_gen(SAVE_PATH, "spikes_" + key.lower()), dpi=dpi_n, format='png') pl.close() except Exception: print(" * * * from {0} is NOTHING".format(key)) txtResultPath = SAVE_PATH + 'txt/' logger.debug("Saving TEXT into {0}".format(txtResultPath)) if not os.path.exists(txtResultPath): os.mkdir(txtResultPath) for key in spikedetectors: save_spikes(spikedetectors[key], name=key) with open(txtResultPath + 'timeSimulation.txt', 'w') as f: for item in times: f.write(item)
Example #22
Source File: vis_corex.py From LinearCorex with GNU Affero General Public License v3.0 | 6 votes |
def plot_heatmaps(data, mis, column_label, cont, topk=30, prefix=''): cmap = sns.cubehelix_palette(as_cmap=True, light=.9) m, nv = mis.shape for j in range(m): inds = np.argsort(- mis[j, :])[:topk] if len(inds) >= 2: plt.clf() order = np.argsort(cont[:,j]) subdata = data[:, inds][order].T subdata -= np.nanmean(subdata, axis=1, keepdims=True) subdata /= np.nanstd(subdata, axis=1, keepdims=True) columns = [column_label[i] for i in inds] sns.heatmap(subdata, vmin=-3, vmax=3, cmap=cmap, yticklabels=columns, xticklabels=False, mask=np.isnan(subdata)) filename = '{}/heatmaps/group_num={}.png'.format(prefix, j) if not os.path.exists(os.path.dirname(filename)): os.makedirs(os.path.dirname(filename)) plt.title("Latent factor {}".format(j)) plt.yticks(rotation=0) plt.savefig(filename, bbox_inches='tight') plt.close('all') #plot_rels(data[:, inds], map(lambda q: column_label[q], inds), colors=cont[:, j], # outfile=prefix + '/relationships/group_num=' + str(j), latent=labels[:, j], alpha=0.1)
Example #23
Source File: func.py From NEUCOGAR with GNU General Public License v2.0 | 6 votes |
def save(GUI): global txtResultPath if GUI: import pylab as pl import nest.raster_plot import nest.voltage_trace logger.debug("Saving IMAGES into {0}".format(SAVE_PATH)) for key in spikedetectors: try: nest.raster_plot.from_device(spikedetectors[key], hist=True) pl.savefig(f_name_gen(SAVE_PATH, "spikes_" + key.lower()), dpi=dpi_n, format='png') pl.close() except Exception: print(" * * * from {0} is NOTHING".format(key)) txtResultPath = SAVE_PATH + 'txt/' logger.debug("Saving TEXT into {0}".format(txtResultPath)) if not os.path.exists(txtResultPath): os.mkdir(txtResultPath) for key in spikedetectors: save_spikes(spikedetectors[key], name=key) with open(txtResultPath + 'timeSimulation.txt', 'w') as f: for item in times: f.write(item)
Example #24
Source File: vis_topic.py From corex_topic with Apache License 2.0 | 5 votes |
def plot_heatmaps(data, alpha, mis, column_label, cont, topk=40, athresh=0.2, prefix=''): import seaborn as sns cmap = sns.cubehelix_palette(as_cmap=True, light=.9) import matplotlib.pyplot as plt m, nv = mis.shape for j in range(m): inds = np.where(np.logical_and(alpha[j] > athresh, mis[j] > 0.))[0] inds = inds[np.argsort(- alpha[j, inds] * mis[j, inds])][:topk] if len(inds) >= 2: plt.clf() order = np.argsort(cont[:,j]) if type(data) == np.ndarray: subdata = data[:, inds][order].T else: # assume sparse subdata = data[:, inds].toarray() subdata = subdata[order].T columns = [column_label[i] for i in inds] fig, ax = plt.subplots(figsize=(20, 10)) sns.heatmap(subdata, vmin=0, vmax=1, cmap=cmap, yticklabels=columns, xticklabels=False, ax=ax, cbar_kws={"ticks": [0, 0.5, 1]}) plt.yticks(rotation=0) filename = '{}/heatmaps/group_num={}.png'.format(prefix, j) if not os.path.exists(os.path.dirname(filename)): os.makedirs(os.path.dirname(filename)) plt.title("Latent factor {}".format(j)) plt.savefig(filename, bbox_inches='tight') plt.close('all') #plot_rels(data[:, inds], map(lambda q: column_label[q], inds), colors=cont[:, j], # outfile=prefix + '/relationships/group_num=' + str(j), latent=labels[:, j], alpha=0.1)
Example #25
Source File: output.py From NEUCOGAR with GNU General Public License v2.0 | 5 votes |
def save(images): """ Save simulation results to txt_result_path folder Args: images: if True, png images will be created Returns: None """ if images: import pylab as pl import nest.raster_plot import nest.voltage_trace N_events_gen = len(g.spike_generators) for key in g.spike_detectors: try: nest.raster_plot.from_device(g.spike_detectors[key], hist=True) pl.savefig(image_name(save_path, "spikes_" + key.lower()), dpi=dpi_n, format='png') pl.close() except Exception: print("From {0} is NOTHING".format(key)) N_events_gen -= 1 for key in g.multimeters: try: nest.voltage_trace.from_device(g.multimeters[key]) pl.savefig(image_name(save_path, "volt_" + key.lower()), dpi=dpi_n, format='png') pl.close() except Exception: print("From {0} is NOTHING".format(key)) print "Results {0}/{1}".format(N_events_gen, len(g.spike_detectors)) logger.debug("Saving TEXT into {0}".format(txt_result_path)) for key in g.spike_detectors: save_spikes(g.spike_detectors[key], name=key) # save_voltage(multimeters) with open(txt_result_path + 'timeSimulation.txt', 'w') as f: for item in g.times: f.write(item)
Example #26
Source File: test_models.py From astroNN with MIT License | 5 votes |
def test_bayesian_mnist(self): import pylab as plt # Create a astroNN neural network instance and set the basic parameter net = MNIST_BCNN() net.task = 'classification' net.callbacks = ErrorOnNaN() net.max_epochs = 1 # Train the neural network net.train(x_train, y_train) net.save('mnist_bcnn_test') net.plot_dense_stats() plt.close() # Travis-CI memory error?? net.evaluate(x_test, utils.to_categorical(y_test, 10)) pred, pred_err = net.test(x_test) test_num = y_test.shape[0] assert (np.sum(pred == y_test)) / test_num > 0.9 # assert accuracy net_reloaded = load_folder("mnist_bcnn_test") net_reloaded.mc_num = 3 # prevent memory issue on Tavis CI prediction_loaded = net_reloaded.test(x_test[:200]) net_reloaded.folder_name = None # set to None so it can be saved net_reloaded.save() load_folder(net_reloaded.folder_name) # ignore pycharm warning, its not None
Example #27
Source File: func.py From NEUCOGAR with GNU General Public License v2.0 | 5 votes |
def save(GUI): global txtResultPath SAVE_PATH = "./results/" if GUI: import pylab as pl import nest.raster_plot import nest.voltage_trace logger.debug("Saving IMAGES into {0}".format(SAVE_PATH)) if not os.path.exists(SAVE_PATH): os.mkdir(SAVE_PATH) for key in spikedetectors: try: nest.raster_plot.from_device(spikedetectors[key], hist=True) pl.savefig(f_name_gen(SAVE_PATH, "spikes_" + key.lower()), dpi=dpi_n, format='png') pl.close() except Exception: print(" * * * from {0} is NOTHING".format(key)) for key in multimeters: try: nest.voltage_trace.from_device(multimeters[key]) pl.savefig(f_name_gen(SAVE_PATH, "volt_" + key.lower()), dpi=dpi_n, format='png') pl.close() except Exception: print(" * * * from {0} is NOTHING".format(key)) txtResultPath = SAVE_PATH + 'txt/' logger.debug("Saving TEXT into {0}".format(txtResultPath)) if not os.path.exists(txtResultPath): os.mkdir(txtResultPath) for key in spikedetectors: save_spikes(spikedetectors[key], name=key) # , hist=True) # for key in multimeters: # save_voltage(multimeters[key], name=key) with open(txtResultPath + 'timeSimulation.txt', 'w') as f: for item in times: f.write(item)
Example #28
Source File: func.py From NEUCOGAR with GNU General Public License v2.0 | 5 votes |
def save(GUI): global txtResultPath SAVE_PATH = "results/output-{0}/".format(NEURONS) if GUI: import pylab as pl import nest.raster_plot import nest.voltage_trace logger.debug("Saving IMAGES into {0}".format(SAVE_PATH)) if not os.path.exists(SAVE_PATH): os.mkdir(SAVE_PATH) for key in spikedetectors: try: nest.raster_plot.from_device(spikedetectors[key], hist=True) pl.savefig(f_name_gen(SAVE_PATH, "spikes_" + key.lower()), dpi=dpi_n, format='png') pl.close() except Exception: print(" * * * from {0} is NOTHING".format(key)) for key in multimeters: try: nest.voltage_trace.from_device(multimeters[key]) pl.savefig(f_name_gen(SAVE_PATH, "volt_" + key.lower()), dpi=dpi_n, format='png') pl.close() except Exception: print(" * * * from {0} is NOTHING".format(key)) txtResultPath = SAVE_PATH + 'txt/' logger.debug("Saving TEXT into {0}".format(txtResultPath)) if not os.path.exists(txtResultPath): os.mkdir(txtResultPath) for key in spikedetectors: save_spikes(spikedetectors[key], name=key) #, hist=True) #for key in multimeters: # save_voltage(multimeters[key], name=key) with open(txtResultPath + 'timeSimulation.txt', 'w') as f: for item in times: f.write(item)
Example #29
Source File: func.py From NEUCOGAR with GNU General Public License v2.0 | 5 votes |
def save(GUI): global txt_result_path if GUI: import pylab as pl import nest.raster_plot import nest.voltage_trace logger.debug("Saving IMAGES into {0}".format(SAVE_PATH)) N_events_gen = len(spike_generators) for key in spike_detectors: try: nest.raster_plot.from_device(spike_detectors[key], hist=True) pl.savefig(f_name_gen(SAVE_PATH, "spikes_" + key.lower()), dpi=dpi_n, format='png') pl.close() except Exception: print("From {0} is NOTHING".format(key)) N_events_gen -= 1 for key in multimeters: try: nest.voltage_trace.from_device(multimeters[key]) pl.savefig(f_name_gen(SAVE_PATH, "volt_" + key.lower()), dpi=dpi_n, format='png') pl.close() except Exception: print("From {0} is NOTHING".format(key)) print "Results {0}/{1}".format(N_events_gen, len(spike_detectors)) print "Results {0}/{1}".format(N_events_gen, len(spike_detectors)) txt_result_path = SAVE_PATH + 'txt/' logger.debug("Saving TEXT into {0}".format(txt_result_path)) if not os.path.exists(txt_result_path): os.mkdir(txt_result_path) for key in spike_detectors: save_spikes(spike_detectors[key], name=key) #for key in multimeters: # save_voltage(multimeters[key], name=key) with open(txt_result_path + 'timeSimulation.txt', 'w') as f: for item in times: f.write(item)
Example #30
Source File: func.py From NEUCOGAR with GNU General Public License v2.0 | 5 votes |
def save(GUI): global txtResultPath SAVE_PATH = "/Users/komarovvitaliy/Desktop/testH/results/output-{0}/".format(NEURONS) if GUI: import pylab as pl import nest.raster_plot import nest.voltage_trace logger.debug("Saving IMAGES into {0}".format(SAVE_PATH)) if not os.path.exists(SAVE_PATH): os.mkdir(SAVE_PATH) for key in spikedetectors: try: nest.raster_plot.from_device(spikedetectors[key], hist=True) pl.savefig(f_name_gen(SAVE_PATH, "spikes_" + key.lower()), dpi=dpi_n, format='png') pl.close() except Exception: print(" * * * from {0} is NOTHING".format(key)) for key in multimeters: try: nest.voltage_trace.from_device(multimeters[key]) pl.savefig(f_name_gen(SAVE_PATH, "volt_" + key.lower()), dpi=dpi_n, format='png') pl.close() except Exception: print(" * * * from {0} is NOTHING".format(key)) txtResultPath = SAVE_PATH + 'txt/' logger.debug("Saving TEXT into {0}".format(txtResultPath)) if not os.path.exists(txtResultPath): os.mkdir(txtResultPath) for key in spikedetectors: save_spikes(spikedetectors[key], name=key) #, hist=True) #for key in multimeters: # save_voltage(multimeters[key], name=key) with open(txtResultPath + 'timeSimulation.txt', 'w') as f: for item in times: f.write(item)