Python constants.BATCH_SIZE Examples

The following are 26 code examples of constants.BATCH_SIZE. You can go to the original project or source file by following the link above each example. You may also want to check out all available functions and classes of the module constants, or try the search function.
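Note that BATCH_SIZE is a plain module-level constant, not a callable, so each example simply reads an attribute of the constants module, which is often imported under the alias c. The following is a minimal, hypothetical sketch of such a module and its typical usage; the names and values are illustrative only and are not taken from any project listed here.

# constants.py -- hypothetical sketch; real projects define many more settings
BATCH_SIZE = 32   # number of samples consumed per training step
SEQ_LEN = 50      # companion constant used by several runner examples below

# Typical usage in the examples below, with the module imported under an alias:
# import constants as c
# inputs, targets = data_reader.get_train_batch(c.BATCH_SIZE, c.SEQ_LEN)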
Example #1
Source File: utils.py    From Video-frame-prediction-by-multi-scale-GAN with MIT License
def get_train_batch():
    """
    Loads c.BATCH_SIZE clips from the database of preprocessed training clips.

    @return: An array of shape
             [c.BATCH_SIZE, (3 * (c.HIST_LEN + 1)), c.TRAIN_HEIGHT, c.TRAIN_WIDTH].
    """
    clips = np.empty([c.BATCH_SIZE, (3 * (c.HIST_LEN + 1)), c.TRAIN_HEIGHT, c.TRAIN_WIDTH],
                     dtype=np.float32)
    for i in range(c.BATCH_SIZE):
        # sample a random preprocessed clip and copy it into the batch
        path = c.TRAIN_DIR_CLIPS + str(np.random.choice(c.NUM_CLIPS - 1)) + '.npz'
        clip = np.load(path)['arr_0']

        clips[i] = clip

    return clips
Example #2
Source File: runner.py    From encore.ai with MIT License
def train(self):
        """
        Runs a training loop on the model.
        """
        while True:
            inputs, targets = self.data_reader.get_train_batch(c.BATCH_SIZE, c.SEQ_LEN)
            print('Training model...')

            feed_dict = {self.model.inputs: inputs, self.model.targets: targets}
            global_step, loss, _ = self.sess.run([self.model.global_step,
                                                  self.model.loss,
                                                  self.model.train_op],
                                                 feed_dict=feed_dict)

            print('Step: %d | loss: %f' % (global_step, loss))
            if global_step % c.MODEL_SAVE_FREQ == 0:
                print('Saving model...')
                self.saver.save(self.sess, join(c.MODEL_SAVE_DIR, self.artist_name + '.ckpt'),
                                global_step=global_step) 
Example #3
Source File: batcher_test.py    From bootcamp with Apache License 2.0
def main2():
    num_utterances_per_speaker = 50
    num_speakers = 100
    num_samples = num_speakers * num_utterances_per_speaker
    kx_train = np.zeros(shape=(num_samples, 32, 64, 1))
    ky_train = np.zeros(shape=(num_samples, num_speakers))
    for i in range(num_samples):
        speaker_id = i % num_speakers
        ky_train[i, speaker_id] = 1
        kx_train[i] = speaker_id
    kx_test = np.array(kx_train)
    ky_test = np.array(ky_train)

    tpshn = TripletBatcherSelectHardNegatives(kx_train, ky_train, kx_test, ky_test, None)
    tp = TripletBatcher(kx_train, ky_train, kx_test, ky_test)
    avg = []
    avg2 = []
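    # track mean triplet loss for random batches (avg) vs. hard-negative batches (avg2)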
    while True:
        bx, by = tp.get_batch(BATCH_SIZE, is_test=False)
        avg.append(float(triplet_loss.deep_speaker_loss(predict(bx), predict(bx))))

        bx, by = tpshn.get_batch(BATCH_SIZE, is_test=False, predict=predict)
        avg2.append(float(triplet_loss.deep_speaker_loss(predict(bx), predict(bx))))

        print(np.mean(avg), np.mean(avg2)) 
Example #4
Source File: test_utils.py    From airdrop with Apache License 2.0
def test_recover_sent_airdrops(web3, prepared_contracts, transactions, signed, airdrops,
                               creator):
    """
    Assuming partially sent airdrops, tests signing the transactions again,
    e.g. when it turned out that too little gas was allowed (unlikely)
    """
    airdropper, omg_token = prepared_contracts

    Sender(web3).send_transactions(signed[:1], transactions[:1])

    # airdrop partially done by now
    check_entirely_airdropped(airdrops[0:BATCH_SIZE], omg_token)

    not_airdropped = Sender(web3).recover_unsent_airdrops(airdrops, signed, airdropper, omg_token)

    assert not_airdropped == airdrops[BATCH_SIZE:]

    unsigned = creator.create_txs(not_airdropped, BATCH_SIZE)
    new_signed = Signer(web3).sign_transactions(unsigned)
    Sender(web3).send_transactions(new_signed, unsigned)

    check_entirely_airdropped(airdrops, omg_token) 
Example #5
Source File: runner.py    From models with Apache License 2.0
def train(self):
        """
        Runs a training loop on the model.
        """
        while True:
            inputs, targets = self.data_reader.get_train_batch(c.BATCH_SIZE, c.SEQ_LEN)
            print('Training model...')

            feed_dict = {self.model.inputs: inputs, self.model.targets: targets}
            global_step, loss, _ = self.sess.run([self.model.global_step,
                                                  self.model.loss,
                                                  self.model.train_op],
                                                 feed_dict=feed_dict)

            print('Step: %d | loss: %f' % (global_step, loss))
            if global_step % c.MODEL_SAVE_FREQ == 0:
                print('Saving model...')
                self.saver.save(self.sess, join(c.MODEL_SAVE_DIR, self.artist_name + '.ckpt'),
                                global_step=global_step) 
Example #6
Source File: create_txs.py    From airdrop with Apache License 2.0
def create_txs(ipc_path, rpc_host, rpc_port, signer_addr, airdropper_addr, omgtoken_addr, verify_eth,
               processed_file, unsigned_file):

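    # exactly one node transport may be configured: IPC socket or HTTP RPC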
    if ipc_path and (rpc_host or rpc_port):
        raise Exception("both ipc and rpc cannot be specified")
    if ipc_path:
        web3 = Web3(IPCProvider(ipc_path))
    else:
        web3 = Web3(RPCProvider(host=rpc_host,
                                port=rpc_port))

    airdropper, omgToken = get_contracts(web3,
                                         airdropper_addr=airdropper_addr,
                                         omgtoken_addr=omgtoken_addr)

    creator = Creator(signer_addr, airdropper, omgToken, GAS_LIMIT, GAS_PRICE, GAS_RESERVE,
                      verify_eth=verify_eth)

    airdrops = json.loads(processed_file.read())

    unsigned = creator.create_txs(airdrops, BATCH_SIZE)

    unsigned_file.write(json.dumps(unsigned, sort_keys=True)) 
Example #7
Source File: utils.py    From Adversarial_Video_Generation with MIT License
def get_train_batch():
    """
    Loads c.BATCH_SIZE clips from the database of preprocessed training clips.

    @return: An array of shape
            [c.BATCH_SIZE, c.TRAIN_HEIGHT, c.TRAIN_WIDTH, (3 * (c.HIST_LEN + 1))].
    """
    clips = np.empty([c.BATCH_SIZE, c.TRAIN_HEIGHT, c.TRAIN_WIDTH, (3 * (c.HIST_LEN + 1))],
                     dtype=np.float32)
    for i in range(c.BATCH_SIZE):
        path = c.TRAIN_DIR_CLIPS + str(np.random.choice(c.NUM_CLIPS)) + '.npz'
        clip = np.load(path)['arr_0']

        clips[i] = clip

    return clips 
Example #8
Source File: test_contract.py    From airdrop with Apache License 2.0
def test_list_processing_and_cost(token, airdropper, chain, minted_and_credited):
    beneficiaries = [urandom(20) for _ in range(BATCH_SIZE)]
    txn_hash = airdropper.transact().multisend(token.address,
                                               beneficiaries,
                                               [LARGEST_AMOUNT] * len(beneficiaries))

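    # average gas consumed per beneficiary in the batched multisend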
    peracc = chain.web3.eth.getTransactionReceipt(txn_hash)['gasUsed'] // len(beneficiaries)
    for account in beneficiaries:
        assert token.call().balanceOf(account) == LARGEST_AMOUNT

    assert peracc <= 33000  # golden number 
Example #9
Source File: test_utils.py    From airdrop with Apache License 2.0
def test_check_amount_before_send(web3, creator, airdrops, signed):
    """
    Tests whether the final check throws, in case a locally stored amount differs from the signed transactions
    """
    airdrops[0][1] += 1
    different_transactions = creator.create_txs(airdrops, BATCH_SIZE)

    with pytest.raises(AirdropException):
        Sender(web3).send_transactions(signed, different_transactions) 
Example #10
Source File: test_utils.py    From airdrop with Apache License 2.0
def test_check_address_before_send(web3, creator, airdrops, signed):
    """
    Tests whether the final check throws, in case local data differs from signed transactions
    """
    airdrops[0][0] = web3.eth.accounts[0]
    different_transactions = creator.create_txs(airdrops, BATCH_SIZE)

    with pytest.raises(AirdropException):
        Sender(web3).send_transactions(signed, different_transactions) 
Example #11
Source File: test_utils.py    From airdrop with Apache License 2.0
def test_disaster_recovery(web3, prepared_contracts, transactions, signed, airdrops):
    """
    Assuming transactions got sent partially, are we able to resume with confidence?
    """
    _, omg_token = prepared_contracts

    unsent, unsent_unsigned = Sender(web3).recover_unsent(signed, transactions)

    assert unsent == signed
    assert unsent_unsigned == transactions

    Sender(web3).send_transactions(signed[:1], transactions[:1])

    # airdrop partially done by now
    check_entirely_airdropped(airdrops[0:BATCH_SIZE], omg_token)

    # recovery
    unsent, unsent_unsigned = Sender(web3).recover_unsent(signed, transactions)

    assert len(unsent) == 1
    assert len(unsent_unsigned) == 1
    assert unsent[0] == signed[1]
    assert unsent_unsigned[0] == transactions[1]

    Sender(web3).send_transactions(unsent, unsent_unsigned)

    check_entirely_airdropped(airdrops, omg_token) 
Example #12
Source File: test_utils.py    From airdrop with Apache License 2.0
def test_verifiable_eth_account(web3, prepared_contracts, airdrops, mocker):
    """
    Checks that when the eth balance at 3988888 mandates an airdrop, the creation succeeds
    """
    airdropper, omg_token = prepared_contracts

    mocker.patch('web3.eth.Eth.getBalance')

    web3module.eth.Eth.getBalance.side_effect = [4274999801259164787792424]
    creator = Creator(web3.eth.accounts[0], airdropper, omg_token, GAS_LIMIT, GAS_PRICE, GAS_RESERVE,
                      verify_eth=True)

    creator.create_txs(airdrops[:1], BATCH_SIZE) 
Example #13
Source File: test_utils.py    From airdrop with Apache License 2.0
def test_gas_limit_makes_sense():
    assert theoretical_gas(BATCH_SIZE) < GAS_LIMIT
    assert theoretical_gas(BATCH_SIZE) >= GAS_LIMIT * 0.9 
Example #14
Source File: test_utils.py    From airdrop with Apache License 2.0
def test_gas_expenses(creator, airdrops):
    """
    Tests whether too expensive/too cheap batches are picked up during creation
    """

    with pytest.raises(AirdropException):
        creator.create_txs(airdrops, BATCH_SIZE * 2)
    with pytest.raises(AirdropException):
        creator.create_txs(airdrops, BATCH_SIZE // 2)
Example #15
Source File: test_utils.py    From airdrop with Apache License 2.0
def test_batch_endings(creator, airdrops):
    """
    Makes sure that the last batch isn't missed
    """
    transactions = creator.create_txs(airdrops, BATCH_SIZE)

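    # the airdrops fixture holds BATCH_SIZE + 10 entries, so expect one full batch plus a remainder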
    assert len(transactions[0]['rawBatch']) == BATCH_SIZE
    assert len(transactions[1]['rawBatch']) == len(airdrops) - BATCH_SIZE
    assert len(transactions) == 2 
Example #16
Source File: test_utils.py    From airdrop with Apache License 2.0
def test_small_flow(web3, prepared_contracts, creator, airdrops):
    _, omg_token = prepared_contracts

    transactions = creator.create_txs(airdrops, BATCH_SIZE)
    signed = Signer(web3).sign_transactions(transactions)
    Sender(web3).send_transactions(signed, transactions)

    check_entirely_airdropped(airdrops, omg_token) 
Example #17
Source File: test_utils.py    From airdrop with Apache License 2.0
def test_entire_flow(web3, prepared_contracts, creator, input_file):

    airdropper, omg_token = prepared_contracts
    airdrops = process(input_file.read())
    transactions = creator.create_txs(airdrops, BATCH_SIZE)

    # this being a long-running test, the unlocking from web3 fixture might have expired
    web3.personal.unlockAccount(web3.eth.accounts[0], "")

    signed = Signer(web3).sign_transactions(transactions)
    Sender(web3).send_transactions(signed, transactions)

    check_entirely_airdropped(airdrops, omg_token) 
Example #18
Source File: test_utils.py    From airdrop with Apache License 2.0
def airdrops():
    """
    Uses a pre-prepared JSON file with processed airdrops (see README.md).

    The list is truncated to just enough airdrops for two uneven transactions.
    """

    with open("data/processed.json") as f:
        airdrops = json.loads(f.read())

    return airdrops[0:BATCH_SIZE + 10] 
Example #19
Source File: test_contract.py    From airdrop with Apache License 2.0
def test_flow(token, airdropper, chain, accounts, minted_and_credited):

    txn_hash = airdropper.transact().multisend(token.address, accounts[1:2], [10])
    chain.wait.for_receipt(txn_hash)

    # return to owner
    remainder = token.call().balanceOf(airdropper.address)
    txn_hash = airdropper.transact().multisend(token.address, [accounts[0]], [remainder])
    chain.wait.for_receipt(txn_hash)

    assert token.call().balanceOf(accounts[0]) == BATCH_SIZE * LARGEST_AMOUNT - 10
    assert token.call().balanceOf(accounts[1]) == 10
    assert token.call().balanceOf(airdropper.address) == 0 
Example #20
Source File: test_contract.py    From airdrop with Apache License 2.0
def minted_and_credited(token, airdropper, chain, accounts):
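    # mint the full airdrop supply to the owner, then move it to the airdropper contract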
    txn_hash = token.transact().mint(accounts[0], BATCH_SIZE * LARGEST_AMOUNT)
    chain.wait.for_receipt(txn_hash)

    txn_hash = token.transact().transfer(airdropper.address, BATCH_SIZE * LARGEST_AMOUNT)
    chain.wait.for_receipt(txn_hash) 
Example #21
Source File: save_samples.py    From models with Apache License 2.0
def save(artist, model_path, num_save):
    sample_save_dir = c.get_dir('../save/samples/')
    sess = tf.Session()

    print(artist)

    data_reader = DataReader(artist)
    vocab = data_reader.get_vocab()

    print('Init model...')
    model = LSTMModel(sess,
                      vocab,
                      c.BATCH_SIZE,
                      c.SEQ_LEN,
                      c.CELL_SIZE,
                      c.NUM_LAYERS,
                      test=True)

    saver = tf.train.Saver()
    sess.run(tf.initialize_all_variables())

    saver.restore(sess, model_path)
    print('Model restored from ' + model_path)

    artist_save_dir = c.get_dir(join(sample_save_dir, artist))
    for i in range(num_save):
        print(i)

        path = join(artist_save_dir, str(i) + '.txt')
        sample = model.generate()
        processed_sample = process_sample(sample)

        with open(path, 'w') as f:
            f.write(processed_sample) 
Example #22
Source File: batcher_test.py    From bootcamp with Apache License 2.0
def main():
    select = True
    try:
        sys.argv[1]
    except IndexError:
        select = False
    print('select', select)

    working_dir = '/media/philippe/8TB/deep-speaker'
    # by construction, these losses should be much higher than the normal losses;
    # we select batches this way.
    batch_input_shape = [None, NUM_FRAMES, NUM_FBANKS, 1]
    print('Testing with the triplet losses.')
    dsm = DeepSpeakerModel(batch_input_shape, include_softmax=False)
    triplet_checkpoint = load_best_checkpoint(CHECKPOINTS_TRIPLET_DIR)
    pre_training_checkpoint = load_best_checkpoint(CHECKPOINTS_SOFTMAX_DIR)
    if triplet_checkpoint is not None:
        print(f'Loading triplet checkpoint: {triplet_checkpoint}.')
        dsm.m.load_weights(triplet_checkpoint)
    elif pre_training_checkpoint is not None:
        print(f'Loading pre-training checkpoint: {pre_training_checkpoint}.')
        # If `by_name` is True, weights are loaded into layers only if they share the
        # same name. This is useful for fine-tuning or transfer-learning models where
        # some of the layers have changed.
        dsm.m.load_weights(pre_training_checkpoint, by_name=True)
    dsm.m.compile(optimizer='adam', loss=deep_speaker_loss)
    kc = KerasFormatConverter(working_dir)
    if select:
        print('TripletBatcherSelectHardNegatives()')
        batcher = TripletBatcherSelectHardNegatives(kc.kx_train, kc.ky_train, kc.kx_test, kc.ky_test, dsm)
    else:
        print('TripletBatcher()')
        batcher = TripletBatcher(kc.kx_train, kc.ky_train, kc.kx_test, kc.ky_test)
    batch_size = BATCH_SIZE
    losses = []
    while True:
        _bx, _by = batcher.get_batch(batch_size, is_test=False)
        losses.append(dsm.m.evaluate(_bx, _by, verbose=0, batch_size=BATCH_SIZE))
        print(np.mean(losses)) 
Example #23
Source File: save_samples.py    From encore.ai with MIT License
def save(artist, model_path, num_save):
    sample_save_dir = c.get_dir('../save/samples/')
    sess = tf.Session()

    print(artist)

    data_reader = DataReader(artist)
    vocab = data_reader.get_vocab()

    print('Init model...')
    model = LSTMModel(sess,
                      vocab,
                      c.BATCH_SIZE,
                      c.SEQ_LEN,
                      c.CELL_SIZE,
                      c.NUM_LAYERS,
                      test=True)

    saver = tf.train.Saver()
    sess.run(tf.initialize_all_variables())

    saver.restore(sess, model_path)
    print('Model restored from ' + model_path)

    artist_save_dir = c.get_dir(join(sample_save_dir, artist))
    for i in range(num_save):
        print(i)

        path = join(artist_save_dir, str(i) + '.txt')
        sample = model.generate()
        processed_sample = process_sample(sample)

        with open(path, 'w') as f:
            f.write(processed_sample) 
Example #24
Source File: avg_runner.py    From Adversarial_Video_Generation with MIT License
def test(self):
        """
        Runs one test step on the generator network.
        """
        batch = get_test_batch(c.BATCH_SIZE, num_rec_out=self.num_test_rec)
        self.g_model.test_batch(
            batch, self.global_step, num_rec_out=self.num_test_rec) 
Example #25
Source File: runner.py    From models with Apache License 2.0
def __init__(self, model_load_path, artist_name, test, prime_text):
        """
        Initializes the Lyric Generation Runner.

        @param model_load_path: The path from which to load a previously-saved model.
                                Default = None.
        @param artist_name: The name of the artist on which to train. (Used to grab data).
                            Default = 'kanye_west'
        @param test: Whether to test or train the model. Testing generates a sequence from the
                     provided model and artist. Default = False.
        @param prime_text: The text with which to start the test sequence.
        """

        self.sess = tf.Session()
        self.artist_name = artist_name

        print('Process data...')
        self.data_reader = DataReader(self.artist_name)
        self.vocab = self.data_reader.get_vocab()

        print('Init model...')
        self.model = LSTMModel(self.sess,
                               self.vocab,
                               c.BATCH_SIZE,
                               c.SEQ_LEN,
                               c.CELL_SIZE,
                               c.NUM_LAYERS,
                               test=test)

        print('Init variables...')
        self.saver = tf.train.Saver(max_to_keep=None)
        self.sess.run(tf.global_variables_initializer())

        # if load path specified, load a saved model
        if model_load_path is not None:
            self.saver.restore(self.sess, model_load_path)
            print('Model restored from ' + model_load_path)


        if test:
            self.test(prime_text)
        else:
            self.train() 
Example #26
Source File: runner.py    From encore.ai with MIT License
def __init__(self, model_load_path, artist_name, test, prime_text):
        """
        Initializes the Lyric Generation Runner.

        @param model_load_path: The path from which to load a previously-saved model.
                                Default = None.
        @param artist_name: The name of the artist on which to train. (Used to grab data).
                            Default = 'kanye_west'
        @param test: Whether to test or train the model. Testing generates a sequence from the
                     provided model and artist. Default = False.
        @param prime_text: The text with which to start the test sequence.
        """

        self.sess = tf.Session()
        self.artist_name = artist_name

        print('Process data...')
        self.data_reader = DataReader(self.artist_name)
        self.vocab = self.data_reader.get_vocab()

        print('Init model...')
        self.model = LSTMModel(self.sess,
                               self.vocab,
                               c.BATCH_SIZE,
                               c.SEQ_LEN,
                               c.CELL_SIZE,
                               c.NUM_LAYERS,
                               test=test)

        print('Init variables...')
        self.saver = tf.train.Saver(max_to_keep=None)
        self.sess.run(tf.global_variables_initializer())

        # if load path specified, load a saved model
        if model_load_path is not None:
            self.saver.restore(self.sess, model_load_path)
            print('Model restored from ' + model_load_path)


        if test:
            self.test(prime_text)
        else:
            self.train()