Python random.random() Examples

The following are 30 code examples of random.random(), collected from open-source projects; the project and source file for each snippet are noted above it. You may also want to check out the other available functions and classes of the random module.
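As a quick refresher before the examples: random.random() returns the next pseudo-random float, uniformly distributed in the half-open interval [0.0, 1.0), and seeding the generator makes the sequence reproducible. A minimal sketch:

import random

random.seed(42)                  # fix the seed so the sequence is reproducible
x = random.random()              # uniform float in [0.0, 1.0)
print(x)                         # 0.6394267984578837 with CPython's Mersenne Twister
happens = random.random() < 0.3  # common idiom: an event that fires with probability 0.3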
Example #1
Source File: hiv.py    From indras_net with GNU General Public License v3.0
def __init__(self, name, infected, infection_length, initiative,
                 coupling_tendency, condom_use, test_frequency, commitment,
                 coupled=False, coupled_length=0, known=False, partner=None):
        init_state = random.randint(0, 3)
        super().__init__(name, "wandering around", NSTATES, init_state)
        self.coupled = coupled
        self.couple_length = coupled_length
        self.partner = partner
        self.initiative = initiative
        self.infected = infected
        self.known = known
        self.infection_length = infection_length
        self.coupling_tendency = coupling_tendency
        self.condom_use = condom_use
        self.test_frequency = test_frequency
        self.commitment = commitment
        self.state = init_state
        self.update_ntype() 
Example #2
Source File: Constraint.py    From fullrmc with GNU Affero General Public License v3.0
def should_step_get_rejected(self, standardError):
        """
        Given a standard error, return whether to keep or reject new
        standard error according to the constraint reject probability.

        :Parameters:
            #. standardError (number): The standard error to compare with
            the Constraint standard error

        :Return:
            #. result (boolean): True to reject step, False to accept
        """
        if self.standardError is None:
            raise Exception(LOGGER.error("must compute data first"))
        if standardError <= self.standardError:
            return False
        return randfloat() < self.__rejectProbability 
Example #3
Source File: anneal.py    From simulated-annealing-tsp with MIT License
def anneal(self):
        """
        Execute simulated annealing algorithm.
        """
        # Initialize with the greedy solution.
        self.cur_solution, self.cur_fitness = self.initial_solution()

        print("Starting annealing.")
        while self.T >= self.stopping_temperature and self.iteration < self.stopping_iter:
            candidate = list(self.cur_solution)
            l = random.randint(2, self.N - 1)
            i = random.randint(0, self.N - l)
            candidate[i : (i + l)] = reversed(candidate[i : (i + l)])
            self.accept(candidate)
            self.T *= self.alpha
            self.iteration += 1

            self.fitness_list.append(self.cur_fitness)

        print("Best fitness obtained: ", self.best_fitness)
        improvement = 100 * (self.fitness_list[0] - self.best_fitness) / (self.fitness_list[0])
        print(f"Improvement over greedy heuristic: {improvement : .2f}%") 
Example #4
Source File: util.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def estimate_density(DATA_PATH, feature_size):
    """sample 10 times of a size of 1000 for estimating the density of the sparse dataset"""
    if not os.path.exists(DATA_PATH):
        raise Exception("Data is not there!")
    density = []
    P = 0.01
    for _ in range(10):
        num_non_zero = 0
        num_sample = 0
        with open(DATA_PATH) as f:
            for line in f:
                if (random.random() < P):
                    num_non_zero += len(line.split(" ")) - 1
                    num_sample += 1
        density.append(num_non_zero * 1.0 / (feature_size * num_sample))
    return sum(density) / len(density) 
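A usage sketch, assuming a LibSVM-style file in which each line is `label feat:val feat:val ...` (the `- 1` above discounts the label field); the path and feature count here are illustrative only:

# Hypothetical invocation; the path and feature_size are illustrative.
avg_density = estimate_density("train.libsvm", feature_size=1000)
print("estimated density: {:.4%}".format(avg_density))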
Example #5
Source File: anneal.py    From simulated-annealing-tsp with MIT License
def initial_solution(self):
        """
        Greedy algorithm to get an initial solution (closest-neighbour).
        """
        cur_node = random.choice(self.nodes)  # start from a random node
        solution = [cur_node]

        free_nodes = set(self.nodes)
        free_nodes.remove(cur_node)
        while free_nodes:
            next_node = min(free_nodes, key=lambda x: self.dist(cur_node, x))  # nearest neighbour
            free_nodes.remove(next_node)
            solution.append(next_node)
            cur_node = next_node

        cur_fit = self.fitness(solution)
        if cur_fit < self.best_fitness:  # If best found so far, update best fitness
            self.best_fitness = cur_fit
            self.best_solution = solution
        self.fitness_list.append(cur_fit)
        return solution, cur_fit 
Example #6
Source File: input_helpers.py    From deep-siamese-text-similarity with MIT License
def batch_iter(self, data, batch_size, num_epochs, shuffle=True):
        """
        Generates a batch iterator for a dataset.
        """
        data = np.asarray(data)
        print(data)
        print(data.shape)
        data_size = len(data)
        num_batches_per_epoch = int((data_size - 1) / batch_size) + 1  # ceil(data_size / batch_size); avoids an empty final batch
        for epoch in range(num_epochs):
            # Shuffle the data at each epoch
            if shuffle:
                shuffle_indices = np.random.permutation(np.arange(data_size))
                shuffled_data = data[shuffle_indices]
            else:
                shuffled_data = data
            for batch_num in range(num_batches_per_epoch):
                start_index = batch_num * batch_size
                end_index = min((batch_num + 1) * batch_size, data_size)
                yield shuffled_data[start_index:end_index] 
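A usage sketch for the generator above (hypothetical names; `helper` stands for an instance of the class that defines batch_iter):

# Hypothetical usage: iterate shuffled mini-batches for 10 epochs.
data = list(zip(x_left, x_right, labels))
batches = helper.batch_iter(data, batch_size=64, num_epochs=10, shuffle=True)
for batch in batches:
    x1_batch, x2_batch, y_batch = zip(*batch)
    # ... feed the batch to the model ...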
Example #7
Source File: input_helpers.py    From deep-siamese-text-similarity with MIT License
def getTsvData(self, filepath):
        print("Loading training data from "+filepath)
        x1=[]
        x2=[]
        y=[]
        # positive samples from file
        for line in open(filepath):
            l = line.strip().split("\t")
            if len(l) < 3:  # need both sentences and the label, since l[2] is read below
                continue
            if random() > 0.5:
                x1.append(l[0].lower())
                x2.append(l[1].lower())
            else:
                x1.append(l[1].lower())
                x2.append(l[0].lower())
            y.append(int(l[2]))
        return np.asarray(x1),np.asarray(x2),np.asarray(y) 
Example #8
Source File: el_farol.py    From indras_net with GNU General Public License v3.0
def discourage(unwanted):
    """
    Discourages extra drinkers from going to the bar by decreasing motivation.
    Chooses drinkers randomly from the drinkers that went to the bar.
    """
    discouraged = 0
    drinkers = get_group(DRINKERS)
    while unwanted:
        if DEBUG:
            user_tell("The members are: " + drinkers.members)
        rand_name = random.choice(list(drinkers.members))
        rand_agent = drinkers[rand_name]

        if DEBUG:
            user_tell("drinker ", rand_agent, " = "
                      + repr(drinkers[rand_agent]))

        rand_agent[MOTIV] = max(rand_agent[MOTIV] - DISC_AMT,
                                MIN_MOTIV)
        discouraged += 1
        unwanted -= 1
    return discouraged 
Example #9
Source File: train.py    From deep-siamese-text-similarity with MIT License
def train_step(x1_batch, x2_batch, y_batch):
        """
        A single training step
        """
        if random() > 0.5:
            feed_dict = {
                siameseModel.input_x1: x1_batch,
                siameseModel.input_x2: x2_batch,
                siameseModel.input_y: y_batch,
                siameseModel.dropout_keep_prob: FLAGS.dropout_keep_prob,
            }
        else:
            feed_dict = {
                siameseModel.input_x1: x2_batch,
                siameseModel.input_x2: x1_batch,
                siameseModel.input_y: y_batch,
                siameseModel.dropout_keep_prob: FLAGS.dropout_keep_prob,
            }
        _, step, loss, accuracy, dist, sim, summaries = sess.run(
            [tr_op_set, global_step, siameseModel.loss, siameseModel.accuracy,
             siameseModel.distance, siameseModel.temp_sim, train_summary_op],
            feed_dict)
        time_str = datetime.datetime.now().isoformat()
        print("TRAIN {}: step {}, loss {:g}, acc {:g}".format(time_str, step, loss, accuracy))
        train_summary_writer.add_summary(summaries, step)
        print(y_batch, dist, sim) 
Example #10
Source File: video_transforms.py    From DDPAE-video-prediction with MIT License
def __call__(self, video):
    """
    Args:
        video (np.ndarray): Video to be cropped.
    Returns:
        np.ndarray: Cropped video.
    """
    if self.padding > 0:
      pad = Pad(self.padding, 0)
      video = pad(video)

    w, h = video.shape[-2], video.shape[-3]
    th, tw = self.size
    if w == tw and h == th:
      return video

    x1 = random.randint(0, w-tw)
    y1 = random.randint(0, h-th)
    return video[..., y1:y1+th, x1:x1+tw, :] 
Example #11
Source File: video_transforms.py    From DDPAE-video-prediction with MIT License
def __call__(self, video):
    for attempt in range(10):
      area = video.shape[-3]*video.shape[-2]
      target_area = random.uniform(0.08, 1.0)*area
      aspect_ratio = random.uniform(3./4, 4./3)

      w = int(round(math.sqrt(target_area*aspect_ratio)))
      h = int(round(math.sqrt(target_area/aspect_ratio)))

      if random.random() < 0.5:
        w, h = h, w

      if w <= video.shape[-2] and h <= video.shape[-3]:
        x1 = random.randint(0, video.shape[-2]-w)
        y1 = random.randint(0, video.shape[-3]-h)

        video = video[..., y1:y1+h, x1:x1+w, :]

        return resize(video, (self.size, self.size), self.interpolation)

    # Fallback
    scale = Scale(self.size, interpolation=self.interpolation)
    crop = CenterCrop(self.size)
    return crop(scale(video)) 
Example #12
Source File: estimator_utils.py    From EDeN with MIT License
def make_train_test_sets(pos_graphs, neg_graphs,
                         test_proportion=.3, random_state=2):
    """make_train_test_sets."""
    random.seed(random_state)
    random.shuffle(pos_graphs)
    random.shuffle(neg_graphs)
    pos_dim = len(pos_graphs)
    neg_dim = len(neg_graphs)
    tr_pos_graphs = pos_graphs[:-int(pos_dim * test_proportion)]
    te_pos_graphs = pos_graphs[-int(pos_dim * test_proportion):]
    tr_neg_graphs = neg_graphs[:-int(neg_dim * test_proportion)]
    te_neg_graphs = neg_graphs[-int(neg_dim * test_proportion):]
    tr_graphs = tr_pos_graphs + tr_neg_graphs
    te_graphs = te_pos_graphs + te_neg_graphs
    tr_targets = [1] * len(tr_pos_graphs) + [0] * len(tr_neg_graphs)
    te_targets = [1] * len(te_pos_graphs) + [0] * len(te_neg_graphs)
    tr_graphs, tr_targets = paired_shuffle(tr_graphs, tr_targets)
    te_graphs, te_targets = paired_shuffle(te_graphs, te_targets)
    return (tr_graphs, np.array(tr_targets)), (te_graphs, np.array(te_targets)) 
Example #13
Source File: moving_mnist.py    From DDPAE-video-prediction with MIT License
def __getitem__(self, idx):
    length = self.n_frames_input + self.n_frames_output
    if self.is_train or self.num_objects[0] != 2:
      # Sample number of objects
      num_digits = random.choice(self.num_objects)
      # Generate data on the fly
      images = self.generate_moving_mnist(num_digits)
    else:
      images = self.dataset[:, idx, ...]

    if self.transform is not None:
      images = self.transform(images)
    input = images[:self.n_frames_input]
    if self.n_frames_output > 0:
      output = images[self.n_frames_input:length]
    else:
      output = []

    return input, output 
Example #14
Source File: Utility.py    From fuku-ml with MIT License
def random_projection(X):

        data_demension = X.shape[1]

        new_data_demension = random.randint(2, data_demension)

        new_X = np.empty((data_demension, new_data_demension))

        minus_one = 0.1     # rand below this threshold -> entry -1.0
        positive_one = 0.9  # rand at or above this threshold -> entry +1.0

        for i in range(len(new_X)):
            for j in range(len(new_X[i])):
                rand = random.random()
                if rand < minus_one:
                    new_X[i][j] = -1.0
                elif rand >= positive_one:
                    new_X[i][j] = 1.0
                else:
                    new_X[i][j] = 0.0

        new_X = np.inner(X, new_X.T)

        return new_X 
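A usage sketch (assuming random_projection is exposed as a static helper, as the missing self suggests): the projection matrix is sparse, with each entry -1.0 with probability 0.1, +1.0 with probability 0.1, and 0.0 otherwise.

# Hypothetical usage with illustrative shapes.
X = np.random.rand(100, 50)   # 100 samples, 50 features
X_new = random_projection(X)
print(X_new.shape)            # (100, k) for a random k between 2 and 50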
Example #15
Source File: zmirror.py    From zmirror with MIT License
def generate_ip_verify_hash(input_dict):
    """
    生成一个标示用户身份的hash
    在 human_ip_verification 功能中使用
    hash一共14位
    hash(前7位+salt) = 后7位 以此来进行验证
    :rtype str
    """
    strbuff = human_ip_verification_answers_hash_str
    for key in input_dict:
        strbuff += key + input_dict[key] + str(random.randint(0, 9000000))
    input_key_hash = hex(zlib.adler32(strbuff.encode(encoding='utf-8')))[2:]
    while len(input_key_hash) < 7:
        input_key_hash += '0'
    output_hash = hex(zlib.adler32((input_key_hash + human_ip_verification_answers_hash_str).encode(encoding='utf-8')))[2:]
    while len(output_hash) < 7:
        output_hash += '0'
    return input_key_hash + output_hash 
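A matching verifier, sketched from the scheme above (not code from zmirror, and assuming both halves are exactly seven characters as the docstring states): recompute the adler32 of the first seven characters plus the salt and compare against the last seven.

def verify_ip_hash(hash14):
    # Sketch only; assumes the same module-level salt string used above.
    if len(hash14) != 14:
        return False
    expected = hex(zlib.adler32(
        (hash14[:7] + human_ip_verification_answers_hash_str).encode('utf-8')))[2:]
    while len(expected) < 7:
        expected += '0'
    return expected == hash14[7:]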
Example #16
Source File: image.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def hard_reset(self):
        """Resets the iterator and ignore roll over data"""
        if self.seq is not None and self.shuffle:
            random.shuffle(self.seq)
        if self.imgrec is not None:
            self.imgrec.reset()
        self.cur = 0
        self._allow_read = True
        self._cache_data = None
        self._cache_label = None
        self._cache_idx = None 
Example #17
Source File: transform.py    From DeepLab_v3_plus with MIT License
def __call__(self, sample):
        img = sample['image']
        mask = sample['label']
        assert img.size == mask.size

        scale = random.uniform(0.8, 2.5)
        w = int(scale * img.size[0])
        h = int(scale * img.size[1])

        img, mask = img.resize((w, h), Image.BILINEAR), mask.resize((w, h), Image.NEAREST)
        sample = {'image': img, 'label': mask}

        return self.crop(self.scale(sample)) 
Example #18
Source File: image.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def __call__(self, src):
        """Augmenter body"""
        if random.random() < self.p:
            src = nd.dot(src, self.mat)
        return src 
Example #19
Source File: image.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def reset(self):
        """Resets the iterator to the beginning of the data."""
        if self.seq is not None and self.shuffle:
            random.shuffle(self.seq)
        if self.last_batch_handle != 'roll_over' or \
            self._cache_data is None:
            if self.imgrec is not None:
                self.imgrec.reset()
            self.cur = 0
            if self._allow_read is False:
                self._allow_read = True 
Example #20
Source File: image.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def __call__(self, src):
        """Augmenter body"""
        if random.random() < self.p:
            src = nd.flip(src, axis=1)
        return src 
Example #21
Source File: vis.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def vis_detection(im_orig, detections, class_names, thresh=0.7):
    """visualize [cls, conf, x1, y1, x2, y2]"""
    import matplotlib.pyplot as plt
    import random
    plt.imshow(im_orig)
    colors = [(random.random(), random.random(), random.random()) for _ in class_names]
    for [cls, conf, x1, y1, x2, y2] in detections:
        cls = int(cls)
        if cls > 0 and conf > thresh:
            rect = plt.Rectangle((x1, y1), x2 - x1, y2 - y1,
                                 fill=False, edgecolor=colors[cls], linewidth=3.5)
            plt.gca().add_patch(rect)
            plt.gca().text(x1, y1 - 2, '{:s} {:.3f}'.format(class_names[cls], conf),
                           bbox=dict(facecolor=colors[cls], alpha=0.5), fontsize=12, color='white')
    plt.show() 
Example #22
Source File: transform.py    From DeepLab_v3_plus with MIT License
def __call__(self, sample):
        img = sample['image']
        mask = sample['label']
        rotate_degree = random.random() * 2 * self.degree - self.degree  # uniform in [-self.degree, self.degree)
        img = img.rotate(rotate_degree, Image.BILINEAR)
        mask = mask.rotate(rotate_degree, Image.NEAREST)

        return {'image': img,
                'label': mask} 
Example #23
Source File: transform.py    From DeepLab_v3_plus with MIT License
def __call__(self, sample):
        img = sample['image']
        mask = sample['label']
        assert img.size == mask.size
        for attempt in range(10):
            area = img.size[0] * img.size[1]
            target_area = random.uniform(0.45, 1.0) * area
            aspect_ratio = random.uniform(0.5, 2)

            w = int(round(math.sqrt(target_area * aspect_ratio)))
            h = int(round(math.sqrt(target_area / aspect_ratio)))

            if random.random() < 0.5:
                w, h = h, w

            if w <= img.size[0] and h <= img.size[1]:
                x1 = random.randint(0, img.size[0] - w)
                y1 = random.randint(0, img.size[1] - h)

                img = img.crop((x1, y1, x1 + w, y1 + h))
                mask = mask.crop((x1, y1, x1 + w, y1 + h))
                assert (img.size == (w, h))

                img = img.resize((self.size, self.size), Image.BILINEAR)
                mask = mask.resize((self.size, self.size), Image.NEAREST)

                return {'image': img,
                        'label': mask}

        # Fallback
        scale = Scale(self.size)
        crop = CenterCrop(self.size)
        sample = crop(scale(sample))
        return sample 
Example #24
Source File: transform.py    From DeepLab_v3_plus with MIT License
def __call__(self, sample):
        img = sample['image']
        mask = sample['label']
        if random.random() < 0.5:
            img = img.transpose(Image.FLIP_LEFT_RIGHT)
            mask = mask.transpose(Image.FLIP_LEFT_RIGHT)

        return {'image': img,
                'label': mask} 
Example #25
Source File: transform.py    From DeepLab_v3_plus with MIT License
def __call__(self, sample):
        img, mask = sample['image'], sample['label']

        if self.padding > 0:
            img = ImageOps.expand(img, border=self.padding, fill=0)
            mask = ImageOps.expand(mask, border=self.padding, fill=0)

        assert img.size == mask.size
        w, h = img.size
        th, tw = self.size # target size
        if w == tw and h == th:
            return {'image': img,
                    'label': mask}

        if w < tw or h < th:
            img = img.resize((tw, th), Image.BILINEAR)
            mask = mask.resize((tw, th), Image.NEAREST)
            return {'image': img,
                    'label': mask}

        x1 = random.randint(0, w - tw)
        y1 = random.randint(0, h - th)
        img = img.crop((x1, y1, x1 + tw, y1 + th))
        mask = mask.crop((x1, y1, x1 + tw, y1 + th))

        return {'image': img,
                'label': mask} 
Example #26
Source File: Utility.py    From fuku-ml with MIT License
def excute(self):  # runs 10-fold cross-validation over self.models

        for model in self.models:

            avg_error = 0

            validate_num = int(math.ceil(len(model.train_Y) / 10))

            model.train_Y = np.reshape(model.train_Y, (-1, 1))
            dataset = np.concatenate((model.train_X, model.train_Y), axis=1)
            np.random.shuffle(dataset)

            error = 0

            for i in range(10):

                model.train_X = np.concatenate((dataset[(i + 1) * validate_num:, :-1], dataset[:i * validate_num, :-1]), axis=0)
                model.train_Y = np.concatenate((dataset[(i + 1) * validate_num:, -1], dataset[:i * validate_num, -1]), axis=0)
                model.init_W()
                model.train()
                validate_X = dataset[i * validate_num:(i + 1) * validate_num, :-1]
                validate_Y = dataset[i * validate_num:(i + 1) * validate_num, -1]

                if hasattr(model, 'class_list'):
                    error = error + model.calculate_avg_error_all_class(validate_X, validate_Y, model.W)
                else:
                    error = error + model.calculate_avg_error(validate_X, validate_Y, model.W)

            model.train_X = dataset[:, :-1]
            model.train_Y = dataset[:, -1]

            dataset = None
            avg_error = error / 10
            self.avg_errors.append(avg_error)

        return self.avg_errors 
Example #27
Source File: Utility.py    From fuku-ml with MIT License
def gen_lin_separable_overlap_data():
        # generate training data in the 2-d case
        mean1 = np.array([0, 2])
        mean2 = np.array([2, 0])
        cov = np.array([[1.5, 1.0], [1.0, 1.5]])
        X1 = np.random.multivariate_normal(mean1, cov, 100)
        y1 = np.ones(len(X1))
        X2 = np.random.multivariate_normal(mean2, cov, 100)
        y2 = np.ones(len(X2)) * -1
        return X1, y1, X2, y2 
Example #28
Source File: Utility.py    From fuku-ml with MIT License
def gen_non_lin_separable_data():
        mean1 = [-1, 2]
        mean2 = [1, -1]
        mean3 = [4, -4]
        mean4 = [-4, 4]
        cov = [[1.0, 0.8], [0.8, 1.0]]
        X1 = np.random.multivariate_normal(mean1, cov, 50)
        X1 = np.vstack((X1, np.random.multivariate_normal(mean3, cov, 50)))
        y1 = np.ones(len(X1))
        X2 = np.random.multivariate_normal(mean2, cov, 50)
        X2 = np.vstack((X2, np.random.multivariate_normal(mean4, cov, 50)))
        y2 = np.ones(len(X2)) * -1

        return X1, y1, X2, y2 
Example #29
Source File: Utility.py    From fuku-ml with MIT License
def gen_lin_separable_data():
        # generate training data in the 2-d case
        mean1 = np.array([0, 2])
        mean2 = np.array([2, 0])
        cov = np.array([[0.8, 0.6], [0.6, 0.8]])
        X1 = np.random.multivariate_normal(mean1, cov, 100)
        y1 = np.ones(len(X1))
        X2 = np.random.multivariate_normal(mean2, cov, 100)
        y2 = np.ones(len(X2)) * -1
        return X1, y1, X2, y2 
Example #30
Source File: housekeeping.py    From open-sesame with Apache License 2.0
def unk_replace_tokens(tokens, replaced, vocdict, unkprob, unktoken):
    """
    Replaces singleton tokens in the train set with UNK, each with probability unkprob.
    :param tokens: original token IDs
    :param replaced: list that the (possibly replaced) token IDs are appended to
    :return: None; `replaced` is filled in place
    """
    for t in tokens:
        if vocdict.is_singleton(t) and random.random() < unkprob:
            replaced.append(unktoken)
        else:
            replaced.append(t)
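A usage sketch (names are illustrative stand-ins; vocdict is the project's vocabulary object with an is_singleton method):

# Hypothetical usage; sentence_token_ids, vocdict and UNK_ID are stand-ins.
replaced = []
unk_replace_tokens(sentence_token_ids, replaced, vocdict, unkprob=0.1, unktoken=UNK_ID)
# `replaced` now mirrors sentence_token_ids, with some singleton tokens mapped to UNK_ID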