Python time.perf_counter() Examples

The following are 30 code examples showing how to use time.perf_counter(). They are extracted from open-source projects; you can go to the original project or source file by following the links above each example.

You may also want to check out all available functions and classes of the module time.
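
Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: time.perf_counter() returns a high-resolution, monotonic clock reading whose absolute value has no defined meaning, so you take one reading before the work and one after, and the difference is the elapsed wall-clock time in seconds. The slow_work function below is a hypothetical placeholder workload, not part of any of the projects listed.

import time

def slow_work(n=1_000_000):
    # Hypothetical placeholder workload: sum the first n integers.
    return sum(range(n))

start = time.perf_counter()            # reading taken before the work
result = slow_work()
elapsed = time.perf_counter() - start  # difference between readings = elapsed seconds
print('slow_work() took {:.6f} seconds (result={})'.format(elapsed, result))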

Example 1
Project: query-exporter   Author: albertodonato   File: db.py   License: GNU General Public License v3.0
def _setup_query_latency_tracking(self):
        engine = self._engine.sync_engine

        @event.listens_for(engine, "before_cursor_execute")
        def before_cursor_execute(
            conn, cursor, statement, parameters, context, executemany
        ):
            conn.info["query_start_time"] = perf_counter()

        @event.listens_for(engine, "after_cursor_execute")
        def after_cursor_execute(
            conn, cursor, statement, parameters, context, executemany
        ):
            conn.info["query_latency"] = perf_counter() - conn.info.pop(
                "query_start_time"
            ) 
Example 2
Project: AerialDetection   Author: dingjiansw101   File: geometry_test.py   License: Apache License 2.0
def setUp(self):
        self.num_bboxes1 = 200
        self.num_bboxes2 = 20000
        self.image_width = 1024
        self.image_height = 1024

        self.bboxes1 = np.zeros([self.num_bboxes1, 4])
        self.bboxes1[:, [0, 2]] = np.sort(np.random.rand(self.num_bboxes1, 2) * (self.image_width - 1))
        self.bboxes1[:, [1, 3]] = np.sort(np.random.rand(self.num_bboxes1, 2) * (self.image_height - 1))

        self.bboxes2 = np.zeros([self.num_bboxes2, 4])
        self.bboxes2[:, [0, 2]] = np.sort(np.random.rand(self.num_bboxes2, 2) * (self.image_width - 1))
        self.bboxes2[:, [1, 3]] = np.sort(np.random.rand(self.num_bboxes2, 2) * (self.image_height - 1))

        self.bboxes1_tensor = torch.from_numpy(self.bboxes1)
        self.bboxes2_tensor = torch.from_numpy(self.bboxes2)

        start = time.perf_counter()
        self.ious = bbox_overlaps(self.bboxes1_tensor, self.bboxes2_tensor).numpy()
        elapsed = (time.perf_counter() - start)
        print('bbox_overlaps time: ', elapsed) 
Example 3
Project: entmax   Author: deep-spin   File: bench_grad_alpha.py   License: MIT License
def bench(f_):
    timings_fwd = []
    timings_bck = []
    for _ in range(100):

        with f_ as f:
            tic = time.perf_counter()
            f.forward()
            torch.cuda.synchronize()
            toc = time.perf_counter()
            timings_fwd.append(toc - tic)

            tic = time.perf_counter()
            f.backward()
            torch.cuda.synchronize()
            toc = time.perf_counter()
            timings_bck.append(toc - tic)

    return (np.percentile(timings_fwd, [25, 50, 75]),
            np.percentile(timings_bck, [25, 50, 75])) 
Example 4
Project: discord.py   Author: Rapptz   File: gateway.py   License: MIT License
def __init__(self, *args, **kwargs):
        ws = kwargs.pop('ws', None)
        interval = kwargs.pop('interval', None)
        shard_id = kwargs.pop('shard_id', None)
        threading.Thread.__init__(self, *args, **kwargs)
        self.ws = ws
        self._main_thread_id = ws.thread_id
        self.interval = interval
        self.daemon = True
        self.shard_id = shard_id
        self.msg = 'Keeping websocket alive with sequence %s.'
        self.block_msg = 'Heartbeat blocked for more than %s seconds.'
        self.behind_msg = 'Can\'t keep up, websocket is %.1fs behind.'
        self._stop_ev = threading.Event()
        self._last_ack = time.perf_counter()
        self._last_send = time.perf_counter()
        self.latency = float('inf')
        self.heartbeat_timeout = ws._max_heartbeat_timeout 
Example 5
Project: news-popularity-prediction   Author: MKLab-ITI   File: utility.py   License: Apache License 2.0
def youtube_daemon_worker(id, youtube_queue, social_context_queue, youtube_module_communication, youtube_oauth_credentials_folder):
    while True:
        if social_context_queue.qsize() > 50:
            time.sleep(10.0)
        else:
            url_counter, url, upper_timestamp = youtube_queue.get()

            try:
                # start_time = time.perf_counter()
                social_context = youtube_social_context.collect(url, youtube_module_communication + "_" + str(id), youtube_oauth_credentials_folder)
                # elapsed_time = time.perf_counter() - start_time
                # if social_context is not None:
                #     social_context["elapsed_time"] = elapsed_time
            except KeyError:
                social_context = None

            social_context_queue.put((url_counter, social_context))
            youtube_queue.task_done() 
Example 6
Project: news-popularity-prediction   Author: MKLab-ITI   File: utility.py   License: Apache License 2.0
def reddit_daemon_worker(id, reddit_queue, social_context_queue, reddit_oauth_credentials_path):
    while True:
        if social_context_queue.qsize() > 50:
            time.sleep(10.0)
        else:
            url_counter, url, upper_timestamp = reddit_queue.get()

            try:
                # start_time = time.perf_counter()
                social_context = reddit_social_context.collect(url, reddit_oauth_credentials_path)
                # elapsed_time = time.perf_counter() - start_time
                # if social_context is not None:
                #     social_context["elapsed_time"] = elapsed_time
            except KeyError:
                social_context = None

            social_context_queue.put((url_counter, social_context))
            reddit_queue.task_done() 
Example 7
Project: news-popularity-prediction   Author: MKLab-ITI   File: utility.py   License: Apache License 2.0
def form_graphs(social_context_generator, assessment_timestamp):
    # fp = open("/home/georgerizos/Documents/fetch_times/build_graph_time" + ".txt", "a")
    for social_context_dict in social_context_generator:
        # start_time = time.perf_counter()
        snapshots,\
        targets,\
        title = get_snapshot_graphs(social_context_dict["social_context"],
                                    # social_context_dict["tweet_timestamp"],
                                    assessment_timestamp,
                                    social_context_dict["platform_name"])
        # elapsed_time = time.perf_counter() - start_time
        # fp.write(repr(elapsed_time) + "\n")
        if snapshots is None:
            continue

        if len(snapshots) > 1:
            graph_dict = social_context_dict
            graph_dict["snapshots"] = snapshots
            graph_dict["targets"] = targets
            graph_dict["title"] = title
            yield graph_dict 
Example 8
Project: pytorch_geometric   Author: rusty1s   File: train.py   License: MIT License
def train_runtime(model, data, epochs, device):
    optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
    model = model.to(device)
    data = data.to(device)
    model.train()
    mask = data.train_mask if 'train_mask' in data else data.train_idx
    y = data.y[mask] if 'train_mask' in data else data.train_y

    if torch.cuda.is_available():
        torch.cuda.synchronize()
    t_start = time.perf_counter()

    for epoch in range(epochs):
        optimizer.zero_grad()
        out = model(data)
        loss = F.nll_loss(out[mask], y)
        loss.backward()
        optimizer.step()

    if torch.cuda.is_available():
        torch.cuda.synchronize()
    t_end = time.perf_counter()

    return t_end - t_start 
Example 9
Project: stdpopsim   Author: popsim-consortium   File: validation.py   License: GNU General Public License v3.0
def _onepop_expgrowth(
        engine_id, out_dir, seed, N0=5000, N1=500, T=1000, **sim_kwargs):
    growth_rate = - np.log(N1 / N0) / T
    species = stdpopsim.get_species("DroMel")
    contig = species.get_contig("chr2R", length_multiplier=0.01)  # ~250 kb
    contig = irradiate(contig)
    model = _PiecewiseSize(N0, growth_rate, (T, N1, 0))
    model.generation_time = species.generation_time
    samples = model.get_samples(100)
    engine = stdpopsim.get_engine(engine_id)
    t0 = time.perf_counter()
    ts = engine.simulate(model, contig, samples, seed=seed, **sim_kwargs)
    t1 = time.perf_counter()
    out_file = out_dir / f"{seed}.trees"
    ts.dump(out_file)
    return out_file, t1 - t0 
Example 10
Project: stdpopsim   Author: popsim-consortium   File: validation.py   License: GNU General Public License v3.0
def _twopop_IM(
        engine_id, out_dir, seed,
        NA=1000, N1=500, N2=5000, T=1000, M12=0, M21=0, pulse=None, samples=None,
        **sim_kwargs):
    species = stdpopsim.get_species("AraTha")
    contig = species.get_contig("chr5", length_multiplier=0.01)  # ~270 kb
    contig = irradiate(contig)
    model = stdpopsim.IsolationWithMigration(
            NA=NA, N1=N1, N2=N2, T=T, M12=M12, M21=M21)
    if pulse is not None:
        model.demographic_events.append(pulse)
        model.demographic_events.sort(key=lambda x: x.time)
    # XXX: AraTha has species.generation_time == 1, but there is the potential
    # for this to mask bugs related to generation_time scaling, so we use 3 here.
    model.generation_time = 3
    if samples is None:
        samples = model.get_samples(50, 50, 0)
    engine = stdpopsim.get_engine(engine_id)
    t0 = time.perf_counter()
    ts = engine.simulate(model, contig, samples, seed=seed, **sim_kwargs)
    t1 = time.perf_counter()
    out_file = out_dir / f"{seed}.trees"
    ts.dump(out_file)
    return out_file, t1 - t0 
Example 11
Project: tsinfer   Author: tskit-dev   File: inference.py   License: GNU General Public License v3.0
def _run_synchronous(self, progress):
        a = np.zeros(self.num_sites, dtype=np.int8)
        for t, focal_sites in self.descriptors:
            before = time.perf_counter()
            s, e = self.ancestor_builder.make_ancestor(focal_sites, a)
            duration = time.perf_counter() - before
            logger.debug(
                "Made ancestor in {:.2f}s at timepoint {} (epoch {}) "
                "from {} to {} (len={}) with {} focal sites ({})".format(
                    duration,
                    t,
                    self.timepoint_to_epoch[t],
                    s,
                    e,
                    e - s,
                    focal_sites.shape[0],
                    focal_sites,
                )
            )
            self.ancestor_data.add_ancestor(
                start=s, end=e, time=t, focal_sites=focal_sites, haplotype=a[s:e]
            )
            progress.update() 
Example 12
Project: pyDcop   Author: Orange-OpenSource   File: agents.py   License: BSD 3-Clause "New" or "Revised" License
def metrics(self):
        if self._run_t is None:
            activity_ratio = 0
        else:
            total_t = perf_counter() - self._run_t
            activity_ratio = self.t_active / (total_t)
        own_computations = { c.name for c in self.computations(include_technical=True)}
        m = {
            'count_ext_msg': {k: v
                              for k, v in self._messaging.count_ext_msg.items()
                              if k in own_computations},
            'size_ext_msg': {k: v
                             for k, v in self._messaging.size_ext_msg.items()
                             if k in own_computations},
            # 'last_msg_time': self._messaging.last_msg_time,
            'activity_ratio': activity_ratio,
            'cycles': {c.name: c.cycle_count for c in self.computations()}
        }
        return m 
Example 13
Project: smatch   Author: snowblink14   File: smatch-table.py   License: MIT License
def main(arguments):
    global verbose
    (ids, names, result) = check_args(arguments)
    if arguments.v:
        verbose = True
    if not result:
        return 0
    acc_time = 0
    len_name = len(names)
    table = []
    for i in range(0, len_name + 1):
        table.append([])
    table[0].append("")
    for i in range(0, len_name):
        table[0].append(names[i])
    for i in range(0, len_name):
        table[i+1].append(names[i])
        for j in range(0, len_name):
            if i != j:
                start = time.perf_counter()
                table[i+1].append(compute_files(names[i], names[j], ids, args.fd, args.r))
                end = time.perf_counter()
                if table[i+1][-1] != -1.0:
                    acc_time += end-start
            else:
                table[i+1].append("")
    # check table
    for i in range(0, len_name + 1):
        for j in range(0, len_name + 1):
            if i != j:
                if table[i][j] != table[j][i]:
                    if table[i][j] > table[j][i]:
                        table[j][i] = table[i][j]
                    else:
                        table[i][j] = table[j][i]
    pprint_table(table)
    return acc_time 
Example 14
Project: Dumb-Cogs   Author: irdumbs   File: ping.py   License: MIT License
def pingt(self,ctx):
        """pseudo-ping time"""
        channel = ctx.message.channel
        t1 = time.perf_counter()
        await self.bot.send_typing(channel)
        t2 = time.perf_counter()
        await self.bot.say("pseudo-ping: {}ms".format(round((t2-t1)*1000))) 
Example 15
Project: MicroWebSrv2   Author: jczic   File: XAsyncSockets.py   License: MIT License
def perf_counter() :
        return ticks_ms() / 1000

# ============================================================================
# ===( XAsyncSocketsPool )====================================================
# ============================================================================ 
Example 16
Project: MicroWebSrv2   Author: jczic   File: XAsyncSockets.py   License: MIT License
def _processWaitEvents(self) :
        self._incThreadsCount()
        timeSec = perf_counter()
        while self._processing :
            try :
                try :
                    rd, wr, ex = select( self._readList,
                                         self._writeList,
                                         self._readList,
                                         self._CHECK_SEC_INTERVAL )
                except KeyboardInterrupt as ex :
                    raise ex
                except :
                    continue
                if not self._processing :
                    break
                for socketsList in ex, wr, rd :
                    for socket in socketsList :
                        asyncSocket = self._asyncSockets.get(id(socket), None)
                        if asyncSocket and self._socketListAdd(socket, self._handlingList) :
                            if socketsList is ex :
                                asyncSocket.OnExceptionalCondition()
                            elif socketsList is wr :
                                asyncSocket.OnReadyForWriting()
                            else :
                                asyncSocket.OnReadyForReading()
                            self._socketListRemove(socket, self._handlingList)
                sec = perf_counter()
                if sec > timeSec + self._CHECK_SEC_INTERVAL :
                    timeSec = sec
                    for asyncSocket in list(self._asyncSockets.values()) :
                        if asyncSocket.ExpireTimeSec and \
                           timeSec > asyncSocket.ExpireTimeSec :
                            asyncSocket._close(XClosedReason.Timeout)
            except KeyboardInterrupt :
                self._processing = False
        self._decThreadsCount()

    # ------------------------------------------------------------------------ 
Example 17
Project: MicroWebSrv2   Author: jczic   File: XAsyncSockets.py   License: MIT License
def _setExpireTimeout(self, timeoutSec) :
        try :
            if timeoutSec and timeoutSec > 0 :
                self._expireTimeSec = perf_counter() + timeoutSec
        except :
            raise XAsyncSocketException('"timeoutSec" is incorrect to set expire timeout.')

    # ------------------------------------------------------------------------ 
Example 18
Project: IGMC   Author: muhanzhang   File: train_eval.py   License: MIT License
def test_once(test_dataset,
              model,
              batch_size,
              logger=None, 
              ensemble=False, 
              checkpoints=None):

    test_loader = DataLoader(test_dataset, batch_size, shuffle=False)
    model.to(device)
    t_start = time.perf_counter()
    if ensemble and checkpoints:
        rmse = eval_rmse_ensemble(model, checkpoints, test_loader, device, show_progress=True)
    else:
        rmse = eval_rmse(model, test_loader, device, show_progress=True)
    t_end = time.perf_counter()
    duration = t_end - t_start
    print('Test Once RMSE: {:.6f}, Duration: {:.6f}'.format(rmse, duration))
    epoch_info = 'test_once' if not ensemble else 'ensemble'
    eval_info = {
        'epoch': epoch_info,
        'train_loss': 0,
        'test_rmse': rmse,
        }
    if logger is not None:
        logger(eval_info, None, None)
    return rmse 
Example 19
Project: hiku   Author: vmagamedov   File: prometheus.py   License: BSD 3-Clause "New" or "Revised" License
def _observe_fields(self, node_name):
        by_field = {}

        def observe(start_time, field_names):
            duration = time.perf_counter() - start_time
            for name in field_names:
                try:
                    field_metric = by_field[name]
                except KeyError:
                    field_metric = by_field[name] = \
                        self._metric.labels(self._name, node_name, name)
                field_metric.observe(duration)
        return observe 
Example 20
Project: hiku   Author: vmagamedov   File: prometheus.py   License: BSD 3-Clause "New" or "Revised" License
def subquery_wrapper(self, observe, subquery):
        def wrapper(field_names, *args):
            start_time = time.perf_counter()
            result_proc = subquery(*args)

            def proc_wrapper():
                result = result_proc()
                observe(start_time, field_names)
                return result
            return proc_wrapper
        return wrapper 
Example 21
Project: hiku   Author: vmagamedov   File: prometheus.py   License: BSD 3-Clause "New" or "Revised" License
def field_wrapper(self, observe, func):
        def wrapper(field_names, *args):
            start_time = time.perf_counter()
            result = func(*args)
            observe(start_time, field_names)
            return result
        return wrapper 
Example 22
Project: hiku   Author: vmagamedov   File: prometheus.py   License: BSD 3-Clause "New" or "Revised" License
def link_wrapper(self, observe, func):
        def wrapper(link_name, *args):
            start_time = time.perf_counter()
            result = func(*args)
            observe(start_time, [link_name])
            return result
        return wrapper 
Example 23
Project: hiku   Author: vmagamedov   File: prometheus.py   License: BSD 3-Clause "New" or "Revised" License
def field_wrapper(self, observe, func):
        async def wrapper(field_names, *args):
            start_time = time.perf_counter()
            result = await func(*args)
            observe(start_time, field_names)
            return result
        return wrapper 
Example 24
Project: stoq   Author: PUNCH-Cyber   File: utils.py   License: Apache License 2.0
def ratelimited():
    """
    Thread safe decorator to rate limit a function

    """

    lock = threading.Lock()

    def decorator(func):
        last_call = time.perf_counter()

        @wraps(func)
        async def ratelimit(*args, **kwargs):
            limit = kwargs.get("ratelimit", None)
            if limit:
                count, seconds = limit.split("/")
                interval = int(seconds) / int(count)
                lock.acquire()
                nonlocal last_call
                elapsed = time.perf_counter() - last_call
                left_to_wait = interval - elapsed

                if left_to_wait > 0:
                    time.sleep(left_to_wait)

                last_call = time.perf_counter()

                lock.release()

            try:
                kwargs.pop("ratelimit")
            except KeyError:
                pass

            return await func(*args, **kwargs)

        return ratelimit

    return decorator 
Example 25
Project: whynot   Author: zykls   File: ols.py   License: MIT License
def estimate_treatment_effect(covariates, treatment, outcome):
    """Run ordinary least squares to estimate causal effect of treatment variable on the outcome Y.

    Parameters
    ----------
        covariates: `np.ndarray`
            Array of shape [num_samples, num_features] of features
        treatment:  `np.ndarray`
            Binary array of shape [num_samples]  indicating treatment status for each
            sample.
        outcome:  `np.ndarray`
            Array of shape [num_samples] containing the observed outcome for each sample.

    Returns
    -------
        result: `whynot.framework.InferenceResult`
            InferenceResult object for this procedure

    """
    features = np.copy(covariates)
    treatment = treatment.reshape(-1, 1)
    features = np.concatenate([treatment, features], axis=1)
    features = sm.add_constant(features, prepend=True, has_constant="add")

    # Only time model fitting, not preprocessing
    start_time = perf_counter()
    model = sm.OLS(outcome, features)
    results = model.fit()
    stop_time = perf_counter()

    # Treatment is the second variable (first is the constant offset)
    ate = results.params[1]
    stderr = results.bse[1]
    conf_int = (ate - 1.96 * stderr, ate + 1.96 * stderr)
    return InferenceResult(
        ate=ate,
        stderr=stderr,
        ci=conf_int,
        individual_effects=None,
        elapsed_time=stop_time - start_time,
    ) 
Example 26
Project: whynot   Author: zykls   File: benchmark.py   License: MIT License
def benchmark_dynamic_simulator(simulator, name, num_trials=10):
    """Run dynamical system simulator repeatedly and report execution time statistics."""
    timings = []
    for _ in range(num_trials):
        initial_state = simulator.State()
        config = simulator.Config()
        start_time = time.perf_counter()
        simulator.simulate(initial_state, config)
        timings.append(time.perf_counter() - start_time)
    print(
        f"{name.upper()}:\t average={np.mean(timings):.3f} s, std={np.std(timings):.3f} s, "
        f"max={np.max(timings):.3f} s, (trials={num_trials})"
    ) 
Example 27
Project: whynot   Author: zykls   File: benchmark.py   License: MIT License
def benchmark_agent_based_model(simulator, name, num_agents=100, num_trials=10):
    """Run the agent-based model repeatedly and report execution time statistics."""
    timings = []
    for _ in range(num_trials):
        agents = [simulator.Agent() for _ in range(num_agents)]
        config = simulator.Config()
        start_time = time.perf_counter()
        simulator.simulate(agents, config)
        timings.append(time.perf_counter() - start_time)
    print(
        f"{name.upper()}:\t average={np.mean(timings):.3f} s, std={np.std(timings):.3f} s, "
        f"max={np.max(timings):.3f} s, (trials={num_trials})"
    ) 
Example 28
Project: AerialDetection   Author: dingjiansw101   File: geometry_test.py   License: Apache License 2.0
def test_bbox_overlaps_cy(self):

        start = time.perf_counter()
        # ious_cy = bbox_overlaps_cy(self.bboxes1, self.bboxes2)
        ious_cy = bbox_overlaps_cy(self.bboxes1_tensor, self.bboxes2_tensor)
        self.assertTrue(type(ious_cy) == torch.Tensor)
        ious_cy = ious_cy.numpy()
        elapsed = (time.perf_counter() - start)
        print('cython time: ', elapsed)
        np.testing.assert_array_almost_equal(self.ious, ious_cy) 
Example 29
Project: AerialDetection   Author: dingjiansw101   File: geometry_test.py   License: Apache License 2.0
def test_bbox_overlaps_cy2(self):

        start = time.perf_counter()
        ious_cy2 = bbox_overlaps_cy2(self.bboxes1_tensor, self.bboxes2_tensor).numpy()
        elapsed = (time.perf_counter() - start)
        print('cython 2 time: ', elapsed)
        np.testing.assert_array_almost_equal(self.ious, ious_cy2) 
Example 30
Project: AerialDetection   Author: dingjiansw101   File: geometry_test.py   License: Apache License 2.0
def test_bbox_overlaps_np(self):

        start = time.perf_counter()
        ious_np = bbox_overlaps_np(self.bboxes1_tensor, self.bboxes2_tensor).numpy()
        elapsed = (time.perf_counter() - start)
        print('numpy time: ', elapsed)
        np.testing.assert_array_almost_equal(self.ious, ious_np)

    # def test_bbox_overlaps_np_v2(self):
    #
    #     start = time.perf_counter()
    #     ious_np_v2 = bbox_overlaps_np_v2(self.bboxes1, self.bboxes2)
    #     elapsed = (time.perf_counter() - start)
    #     print('numpy v2 time: ', elapsed)
    #     np.testing.assert_array_almost_equal(self.ious, ious_np_v2)