Python config.G Examples
The following are 9 code examples of config.G().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module `config`, or try the search function.
Example #1
Source File: train.py From disentangling_conditional_gans with MIT License | 5 votes |
def setup_snapshot_image_grid(G, training_set,
    size    = '1080p',      # '1080p' = to be viewed on 1080p display, '4k' = to be viewed on 4k display.
    layout  = 'random'):    # 'random' = grid contents are selected randomly, 'row_per_class' = each row corresponds to one class label.
    """Choose a snapshot grid size and collect real images, labels and masks.

    The grid dimensions (gw, gh) are derived from the generator's output
    resolution so the grid roughly fills the requested display size.

    Returns:
        ((gw, gh), reals, labels, latents, masks) — grid shape, real images
        and their labels/masks drawn from `training_set`, and random latent
        vectors (one per grid cell) for the generator.
    """
    # Select size.
    gw = 1; gh = 1
    if size == '1080p':
        gw = np.clip(1920 // G.output_shape[3], 3, 32)
        gh = np.clip(1080 // G.output_shape[2], 2, 32)
    if size == '4k':
        gw = np.clip(3840 // G.output_shape[3], 7, 32)
        gh = np.clip(2160 // G.output_shape[2], 4, 32)

    # Fill in reals, labels and masks.
    cells = gw * gh
    reals = np.zeros([cells] + training_set.shape, dtype=training_set.dtype)
    labels = np.zeros([cells, training_set.label_size], dtype=training_set.label_dtype)
    masks = np.zeros([cells] + [1, training_set.shape[-1], training_set.shape[-1]], dtype=training_set.dtype)
    for idx in range(cells):
        # Fix: the original also computed `x = idx % gw`, which was never used.
        y = idx // gw
        while True:
            real, label, mask = training_set.get_minibatch_np(1)
            # In 'row_per_class' mode, resample until the drawn example's
            # one-hot label matches the class assigned to this grid row.
            if layout == 'row_per_class' and training_set.label_size > 0:
                if label[0, y % training_set.label_size] == 0.0:
                    continue
            reals[idx] = real[0]
            labels[idx] = label[0]
            masks[idx] = mask[0]
            break

    # Generate latents.
    latents = misc.random_latents(cells, G)
    return (gw, gh), reals, labels, latents, masks

#----------------------------------------------------------------------------
# Just-in-time processing of training images before feeding them to the networks.
Example #2
Source File: train.py From transparent_latent_gan with MIT License | 5 votes |
def setup_snapshot_image_grid(G, training_set,
    size    = '1080p',      # '1080p' = to be viewed on 1080p display, '4k' = to be viewed on 4k display.
    layout  = 'random'):    # 'random' = grid contents are selected randomly, 'row_per_class' = each row corresponds to one class label.
    """Pick a snapshot grid size and gather matching real images and labels.

    Grid dimensions are chosen so tiles of the generator's output resolution
    roughly fill the requested display size. Returns ((gw, gh), reals,
    labels, latents), with one random latent per grid cell.
    """
    # Derive grid dimensions from the generator's output resolution.
    gw, gh = 1, 1
    if size == '1080p':
        gw = np.clip(1920 // G.output_shape[3], 3, 32)
        gh = np.clip(1080 // G.output_shape[2], 2, 32)
    if size == '4k':
        gw = np.clip(3840 // G.output_shape[3], 7, 32)
        gh = np.clip(2160 // G.output_shape[2], 4, 32)

    # Draw one real example (and label) per grid cell.
    cells = gw * gh
    reals = np.zeros([cells] + training_set.shape, dtype=training_set.dtype)
    labels = np.zeros([cells, training_set.label_size], dtype=training_set.label_dtype)
    for idx in range(cells):
        x = idx % gw; y = idx // gw
        while True:
            real, label = training_set.get_minibatch_np(1)
            # 'row_per_class': keep resampling until the example's one-hot
            # label matches the class assigned to this grid row.
            if layout == 'row_per_class' and training_set.label_size > 0:
                if label[0, y % training_set.label_size] == 0.0:
                    continue
            reals[idx] = real[0]
            labels[idx] = label[0]
            break

    # One random latent vector per grid cell.
    latents = misc.random_latents(cells, G)
    return (gw, gh), reals, labels, latents

#----------------------------------------------------------------------------
# Just-in-time processing of training images before feeding them to the networks.
Example #3
Source File: predict.py From Keras-progressive_growing_of_gans with MIT License | 5 votes |
def load_G_weights(G, path, by_name = True):
    """Load pre-trained generator weights into ``G`` and return it.

    Expects a file named ``Generator.h5`` inside directory *path*.
    *by_name* is forwarded to ``G.load_weights``.
    """
    G.load_weights(os.path.join(path, 'Generator.h5'), by_name=by_name)
    return G
Example #4
Source File: predict.py From Keras-progressive_growing_of_gans with MIT License | 5 votes |
def predict_gan():
    # Generate image grids from a pre-trained generator and save them as PNGs.
    # NOTE(review): `separate_funcs` and `drange_net` are assigned but never
    # used below; `image_grid_type` only ever takes its 'default' branch.
    separate_funcs = False
    drange_net = [-1,1]
    drange_viz = [-1,1]                     # dynamic range used when saving image grids.
    image_grid_size = None                  # None = derive the grid size from the generator's output resolution.
    image_grid_type = 'default'
    resume_network = 'pre-trained_weight'   # directory containing Generator.h5.
    np.random.seed(config.random_seed)      # deterministic latents across runs.
    if resume_network:
        print("Resuming weight from:"+resume_network)
        # Build the generator architecture, then load the saved weights into it.
        G = Generator(num_channels=3, resolution=128, label_size=0, **config.G)
        G = load_G_weights(G,resume_network,True)
    print(G.summary())
    # Misc init.
    if image_grid_type == 'default':
        if image_grid_size is None:
            # Fit the grid to a 1920x1080 canvas, clipped to 3..16 x 2..16 tiles.
            # NOTE(review): assumes G.output_shape is (batch, w, h, ...) per
            # Keras convention — confirm against the Generator definition.
            w, h = G.output_shape[1], G.output_shape[2]
            print("w:%d,h:%d"%(w,h))
            # NOTE(review): `1080 / h` is true division while `1920 // w` is
            # floor division; astype('int') truncates so the results agree,
            # but `//` was presumably intended for both — confirm.
            image_grid_size = np.clip(int(1920 // w), 3, 16).astype('int'), np.clip(1080 / h, 2, 16).astype('int')
        print("image_grid_size:",image_grid_size)
    else:
        raise ValueError('Invalid image_grid_type', image_grid_type)
    result_subdir = misc.create_result_subdir('pre-trained_result', config.run_desc)
    # Produce 5 independent grids of fake images: pre-trained_001..005.png.
    for i in range(1,6):
        snapshot_fake_latents = random_latents(np.prod(image_grid_size), G.input_shape)
        snapshot_fake_images = G.predict_on_batch(snapshot_fake_latents)
        misc.save_image_grid(snapshot_fake_images, os.path.join(result_subdir, 'pre-trained_%03d.png'%i), drange=drange_viz, grid_size=image_grid_size)
Example #5
Source File: train.py From Keras-progressive_growing_of_gans with MIT License | 5 votes |
def load_GD(path, compile = False):
    """Load the generator and discriminator models saved under *path*.

    Reads ``Generator.h5`` and ``Discriminator.h5`` via ``load_model``
    (*compile* is forwarded) and returns the pair (G, D).
    """
    G = load_model(os.path.join(path, 'Generator.h5'), compile=compile)
    D = load_model(os.path.join(path, 'Discriminator.h5'), compile=compile)
    return G, D
Example #6
Source File: train.py From Keras-progressive_growing_of_gans with MIT License | 5 votes |
def save_GD(G, D, path, overwrite = False):
    """Save generator *G* and discriminator *D* under directory *path*.

    Writes ``Generator.h5`` and ``Discriminator.h5``; *overwrite* is
    forwarded to ``save_model`` and also allows *path* to already exist.
    """
    # Fix: plain os.makedirs(path) raised FileExistsError on an existing
    # directory even when overwrite=True, making the flag useless. With
    # exist_ok=overwrite the old behavior (raise) is kept when overwrite
    # is False, and an existing directory is tolerated when it is True.
    os.makedirs(path, exist_ok=overwrite)
    G_path = os.path.join(path, 'Generator.h5')
    D_path = os.path.join(path, 'Discriminator.h5')
    save_model(G, G_path, overwrite=overwrite)
    save_model(D, D_path, overwrite=overwrite)
    print("Save model to %s" % path)
Example #7
Source File: train.py From Keras-progressive_growing_of_gans with MIT License | 5 votes |
def load_GD_weights(G, D, path, by_name = True):
    """Load saved weights into generator *G* and discriminator *D*.

    Expects ``Generator.h5`` and ``Discriminator.h5`` inside directory
    *path*; *by_name* is forwarded to each model's ``load_weights``.
    Returns the pair (G, D).
    """
    for net, fname in ((G, 'Generator.h5'), (D, 'Discriminator.h5')):
        net.load_weights(os.path.join(path, fname), by_name=by_name)
    return G, D
Example #8
Source File: train.py From higan with MIT License | 5 votes |
def setup_snapshot_image_grid(G, training_set,
    size    = '1080p',      # '1080p' = to be viewed on 1080p display, '4k' = to be viewed on 4k display.
    layout  = 'random'):    # 'random' = grid contents are selected randomly, 'row_per_class' = each row corresponds to one class label.
    """Select a snapshot grid layout and fill it with real images and labels.

    Returns ((gw, gh), reals, labels, latents): the grid shape, real
    examples drawn from `training_set`, their labels, and one random latent
    vector per grid cell.
    """
    # Grid dimensions sized so generator-resolution tiles fill the display.
    gw, gh = 1, 1
    if size == '1080p':
        gw = np.clip(1920 // G.output_shape[3], 3, 32)
        gh = np.clip(1080 // G.output_shape[2], 2, 32)
    if size == '4k':
        gw = np.clip(3840 // G.output_shape[3], 7, 32)
        gh = np.clip(2160 // G.output_shape[2], 4, 32)

    total = gw * gh
    reals = np.zeros([total] + training_set.shape, dtype=training_set.dtype)
    labels = np.zeros([total, training_set.label_size], dtype=training_set.label_dtype)
    for idx in range(total):
        x = idx % gw; y = idx // gw
        while True:
            real, label = training_set.get_minibatch_np(1)
            # For 'row_per_class', reject samples whose one-hot label does
            # not match the class assigned to this row of the grid.
            if layout == 'row_per_class' and training_set.label_size > 0:
                if label[0, y % training_set.label_size] == 0.0:
                    continue
            reals[idx] = real[0]
            labels[idx] = label[0]
            break

    latents = misc.random_latents(total, G)
    return (gw, gh), reals, labels, latents

#----------------------------------------------------------------------------
# Just-in-time processing of training images before feeding them to the networks.
Example #9
Source File: train.py From interfacegan with MIT License | 5 votes |
def setup_snapshot_image_grid(G, training_set,
    size    = '1080p',      # '1080p' = to be viewed on 1080p display, '4k' = to be viewed on 4k display.
    layout  = 'random'):    # 'random' = grid contents are selected randomly, 'row_per_class' = each row corresponds to one class label.
    """Build a snapshot image grid: pick its shape, then populate it.

    Returns ((gw, gh), reals, labels, latents), where reals/labels are
    drawn from `training_set` and latents holds one random generator input
    per grid cell.
    """
    # Size the grid so generator-resolution tiles roughly fill the display.
    gw, gh = 1, 1
    if size == '1080p':
        gw = np.clip(1920 // G.output_shape[3], 3, 32)
        gh = np.clip(1080 // G.output_shape[2], 2, 32)
    if size == '4k':
        gw = np.clip(3840 // G.output_shape[3], 7, 32)
        gh = np.clip(2160 // G.output_shape[2], 4, 32)

    n_cells = gw * gh
    reals = np.zeros([n_cells] + training_set.shape, dtype=training_set.dtype)
    labels = np.zeros([n_cells, training_set.label_size], dtype=training_set.label_dtype)
    for idx in range(n_cells):
        x = idx % gw; y = idx // gw
        while True:
            real, label = training_set.get_minibatch_np(1)
            # 'row_per_class' layout: resample until this example's one-hot
            # label corresponds to the class for this grid row.
            if layout == 'row_per_class' and training_set.label_size > 0:
                if label[0, y % training_set.label_size] == 0.0:
                    continue
            reals[idx] = real[0]
            labels[idx] = label[0]
            break

    latents = misc.random_latents(n_cells, G)
    return (gw, gh), reals, labels, latents

#----------------------------------------------------------------------------
# Just-in-time processing of training images before feeding them to the networks.