Python sqlalchemy.sql.functions.count() Examples

The following are 30 code examples of sqlalchemy.sql.functions.count(), drawn from open source projects. You can go to the original project or source file by following the link above each example. You may also want to check out all available functions/classes of the module sqlalchemy.sql.functions, or try the search function.
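Before the project examples, here is a minimal, self-contained sketch of the usual pattern: count a column and read the result with Query.scalar(). It assumes SQLAlchemy 1.4+ and uses a hypothetical User model defined only for this sketch.

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker
from sqlalchemy.sql.functions import count

Base = declarative_base()

class User(Base):  # hypothetical model, defined only for this sketch
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add_all([User(name='alice'), User(name='bob')])
session.commit()

# count(User.id) compiles to COUNT(users.id); scalar() returns the single value
print(session.query(count(User.id)).scalar())  # 2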
Example #1
Source File: test_demo_mode.py    From dokomoforms with GNU General Public License v3.0
def test_logging_in_creates_user_no_https(self):
        dokomoforms.handlers.demo.options.https = False
        no_user = (
            self.session
            .query(count(Administrator.id))
            .filter_by(name='demo_user')
            .scalar()
        )
        self.assertEqual(no_user, 0)
        self.fetch('/demo/login', _logged_in_user=None)
        user = (
            self.session
            .query(count(Administrator.id))
            .filter_by(name='demo_user')
            .scalar()
        )
        self.assertEqual(user, 1) 
Example #2
Source File: test_recursion.py    From choochoo with GNU General Public License v2.0
def test_bug(self):
        '''
        so why does this work, without 'recursive'?

        EDIT: the 'recursive' is optional in sqlite!  see the very last line at
        https://www.sqlite.org/lang_with.html

        damn.  and all that trouble to make a nice bug report.
        '''
        q_counts = self.session.query(Node.id.label('id'), count(Connect.input_id).label('count')). \
            outerjoin(Connect, Node.id == Connect.output_id). \
            group_by(Node.id).order_by(Node.id).subquery()
        q_missing = self.session.query(Node.id.label('id')). \
            join(q_counts, q_counts.c.id == Node.id). \
            filter(Node.n_input != q_counts.c.count).cte()
        q_missing = q_missing.union_all(self.session.query(Node.id).
                                        join(Connect, Node.id == Connect.output_id).
                                        join(q_missing, Connect.input_id == q_missing.c.id))
        print('\nbug\n%s\n' % q_missing.select())
        self.assertEqual([(5,), (7,), (8,)],
                         self.session.query(Node.id).filter(Node.id.in_(q_missing.select())).order_by(Node.id).all()) 
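The same result can be written with an explicitly recursive CTE, which is the portable spelling for databases that require WITH RECURSIVE (Example #4 below uses it). A minimal sketch against the same Node/Connect test models; the simplified anchor condition (Node.n_input == 0) is an assumption for illustration, not the test's actual logic.

# anchor term: simplified for illustration (assumption, not the test's condition)
q_bad = session.query(Node.id.label('id')).filter(Node.n_input == 0).cte(recursive=True)
# recursive term: follow connections downstream of nodes already selected
q_bad = q_bad.union_all(
    session.query(Node.id).
    join(Connect, Node.id == Connect.output_id).
    join(q_bad, Connect.input_id == q_bad.c.id))
affected = session.query(Node.id).filter(Node.id.in_(q_bad.select())).order_by(Node.id).all()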
Example #3
Source File: test_recursion.py    From choochoo with GNU General Public License v2.0
def test_chained_node(self):
        '''
        we can move one step along the chain by looking for nodes whose inputs will be deleted.
        '''
        q_counts = self.session.query(Node.id.label('id'), count(Connect.input_id).label('count')). \
            outerjoin(Connect, Node.id == Connect.output_id). \
            group_by(Node.id).order_by(Node.id).subquery()
        q_missing = self.session.query(Node.id.label('id')). \
            join(q_counts, q_counts.c.id == Node.id). \
            filter(Node.n_input != q_counts.c.count)
        q_chained = self.session.query(Node.id). \
            join(Connect, Node.id == Connect.output_id). \
            filter(Connect.input_id.in_(q_missing))
        q_all = union(q_missing, q_chained)
        print('\nchained node\n%s\n' % q_all.select())
        self.assertEqual([(5,), (7,)],
                         self.session.query(Node.id).filter(Node.id.in_(q_all.select())).order_by(Node.id).all()) 
Example #4
Source File: source.py    From choochoo with GNU General Public License v2.0
def clean(cls, s):
        log.debug('Searching for invalid composites')
        # see test_recursive
        q_input_counts = s.query(Composite.id,
                                 count(CompositeComponent.input_source_id).label('count')). \
            outerjoin(CompositeComponent, CompositeComponent.output_source_id == Composite.id). \
            group_by(Composite.id).cte()
        q_bad_nodes = s.query(Composite.id). \
            join(q_input_counts, q_input_counts.c.id == Composite.id). \
            filter(Composite.n_components != q_input_counts.c.count)
        q_count = s.query(count(Composite.id)).filter(Composite.id.in_(q_bad_nodes))
        log.debug(q_count)
        if q_count.scalar():
            log.warning('Need to clean expired composite sources (may take some time)')
            q_bad_nodes = q_bad_nodes.cte(recursive=True)
            q_all_nodes = q_bad_nodes. \
                union_all(s.query(Composite.id).
                          join(CompositeComponent,
                               CompositeComponent.output_source_id == Composite.id).
                          join(q_bad_nodes,
                               CompositeComponent.input_source_id == q_bad_nodes.c.id)).select()
            log.debug(f'Executing {q_all_nodes}')
            s.flush()
            with timing('GC of composite sources'):
                s.query(Source).filter(Source.id.in_(q_all_nodes)).delete(synchronize_session=False) 
Example #5
Source File: source.py    From choochoo with GNU General Public License v2.0
def missing_dates(cls, s, expected, schedule, interval_owner, statistic_owner=None, start=None, finish=None):
        '''
        Previous approach was way too complicated and not thread-safe.  Instead, just enumerate intervals and test.
        '''
        stats_start_time, stats_finish_time = cls._raw_statistics_time_range(s, statistic_owner)
        stats_start = time_to_local_date(stats_start_time)
        stats_finish = time_to_local_date(stats_finish_time)
        log.debug('Statistics (in general) exist %s - %s' % (stats_start, stats_finish))
        start = schedule.start_of_frame(start if start else stats_start)
        finish = finish if finish else schedule.next_frame(stats_finish)
        while start < finish:
            next = schedule.next_frame(start)
            existing = s.query(Interval). \
                filter(Interval.start == start,
                       Interval.schedule == schedule,
                       Interval.owner == interval_owner).count()
            if existing != expected:
                yield start, next
            start = next 
Example #6
Source File: monitor.py    From choochoo with GNU General Public License v2.0
def _write_diff(self, s, df):
        steps = StatisticName.add_if_missing(s, T.STEPS, StatisticJournalType.INTEGER, Units.STEPS_UNITS,
                                             None, self.owner_out, description=STEPS_DESCRIPTION)
        times = df.loc[(df[NEW_STEPS] != df[N.STEPS]) & ~df[N.STEPS].isna()].index.astype(np.int64) / 1e9
        if len(times):
            n = s.query(func.count(StatisticJournal.id)). \
                filter(StatisticJournal.time.in_(times),
                       StatisticJournal.statistic_name == steps).scalar()
            log.warning(f'Deleting {n} {N.STEPS} entries')
            s.query(StatisticJournal.id). \
                filter(StatisticJournal.time.in_(times),
                       StatisticJournal.statistic_name == steps).delete(synchronize_session=False)
        loader = self._get_loader(s, owner=self.owner_out, add_serial=False)
        for time, row in df.loc[(df[NEW_STEPS] != df[N.STEPS]) & ~df[NEW_STEPS].isna()].iterrows():
            loader.add(T.STEPS, Units.STEPS_UNITS, None, row[N.SOURCE], int(row[NEW_STEPS]),
                       time, StatisticJournalInteger, description=STEPS_DESCRIPTION)
        loader.load() 
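Example #6 reaches the function through the func namespace instead of importing count directly; func.count(...) builds the same sqlalchemy.sql.functions.count construct, so the two spellings are interchangeable. A quick standalone check (no project code assumed):

from sqlalchemy import func
from sqlalchemy.sql import column
from sqlalchemy.sql.functions import count

c = column('id')
print(isinstance(func.count(c), count))    # True: func.count builds a count() instance
print(str(count(c)), str(func.count(c)))   # count(id) count(id)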
Example #7
Source File: monitor.py    From choochoo with GNU General Public License v2.0
def _fix_pair(self, s, a, b):
        # a starts before b (from query)
        if b.finish <= a.finish:
            # b completely enclosed in a
            log.warning(f'Deleting monitor journal entry that completely overlaps another')
            log.debug(f'{a.start} - {a.finish} ({a.id}) encloses {b.start} - {b.finish} ({b.id})')
            # be careful to delete superclass...
            s.query(Source).filter(Source.id == b.id).delete()
        else:
            # otherwise, shorten a so it finishes where b starts
            q = s.query(StatisticJournal). \
                filter(StatisticJournal.source == a,
                       StatisticJournal.time >= b.start)
            count = q.count()
            if count:
                # not really a warning because we expect this
                log.debug(f'Shifting edge of overlapping monitor journals ({count} statistic values)')
                log.debug(f'{a.start} - {a.finish} ({a.id}) overlaps {b.start} - {b.finish} ({b.id})')
                q.delete()
            # update monitor whether statistics were changed or not
            log.debug(f'Shift monitor finish back from {a.finish} to {b.start}')
            a.finish = b.start
            s.flush()  # not sure this is needed 
Example #8
Source File: monitor.py    From choochoo with GNU General Public License v2.0
def _read_data(self, s, file_scan):
        records = self.parse_records(read_fit(file_scan.path))
        first_timestamp = self.read_first_timestamp(file_scan.path, records)
        last_timestamp = self.read_last_timestamp(file_scan.path, records)
        if first_timestamp == last_timestamp:
            log.warning('File %s is empty (no timespan)' % file_scan)
            raise AbortImportButMarkScanned()
        if not first_timestamp:
            raise Exception('Missing timestamp in %s' % file_scan)

        log.info(f'Importing monitor data from {file_scan} '
                 f'for {format_time(first_timestamp)} - {format_time(last_timestamp)}')
        if self.force:
            log.debug(f'Deleting previous entry')
            s.query(MonitorJournal).filter(MonitorJournal.file_hash == file_scan.file_hash).delete()
        else:
            if s.query(MonitorJournal).filter(MonitorJournal.file_hash == file_scan.file_hash).count():
                raise Exception(f'Duplicate for {file_scan.path}')  # should never happen
        # adding 0.1s to the end time makes the intervals semi-open which simplifies cleanup later
        mjournal = add(s, MonitorJournal(start=first_timestamp,
                                         finish=last_timestamp + dt.timedelta(seconds=0.1),
                                         file_hash_id=file_scan.file_hash.id))
        return mjournal, (first_timestamp, last_timestamp, mjournal, records) 
Example #9
Source File: pipeline.py    From choochoo with GNU General Public License v2.0
def run_pipeline(data, type, like=tuple(), unlike=tuple(), id=None, progress=None, **extra_kargs):
    with data.db.session_context() as s:
        if id is None:  # don't run for each worker
            if type in (PipelineType.CALCULATE, PipelineType.READ_ACTIVITY, PipelineType.READ_MONITOR):
                Interval.clean(s)
        local_progress = ProgressTree(Pipeline.count(s, type, like=like, unlike=unlike, id=id), parent=progress)
        for pipeline in Pipeline.all(s, type, like=like, unlike=unlike, id=id):
            kargs = dict(pipeline.kargs)
            kargs.update(extra_kargs)
            msg = f'Ran {short_cls(pipeline.cls)}'
            if 'activity_group' in kargs: msg += f' ({kargs["activity_group"]})'
            log.debug(f'Running {pipeline.cls}({pipeline.args}, {kargs})')
            with timing(msg):
                before = None if id else count_statistics(s)
                pipeline.cls(data, *pipeline.args, id=pipeline.id, progress=local_progress, **kargs).run()
                after = None if id else count_statistics(s)
            if before or after:
                log.info(f'{msg}: statistic count {before} -> {after} (change of {after - before})') 
Example #10
Source File: response.py    From choochoo with GNU General Public License v2.0
def __missing_sources(self, s):
        log.debug('Searching for missing sources')
        available = s.query(count(distinct(Source.id))). \
            join(StatisticJournal). \
            join(StatisticName). \
            filter(StatisticName.name == self.prefix + SPACE + N.HR_IMPULSE_10)
        used = s.query(count(distinct(Source.id))). \
            join(CompositeComponent, CompositeComponent.input_source_id == Source.id). \
            join(Composite, Composite.id == CompositeComponent.output_source_id). \
            join(StatisticJournal, StatisticJournal.source_id == Composite.id). \
            join(StatisticName). \
            filter(StatisticName.owner == self.owner_out,
                   Source.type == SourceType.ACTIVITY)
        n_available = available.scalar()
        n_used = used.scalar()
        log.debug(f'Using {n_used} of {n_available} sources')
        return n_used != n_available 
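Example #10 wraps distinct() inside count() to count unique source ids; the expression renders as COUNT(DISTINCT ...). A minimal standalone check (the column name here is a placeholder):

from sqlalchemy import distinct
from sqlalchemy.sql import column
from sqlalchemy.sql.functions import count

print(str(count(distinct(column('source_id')))))  # count(DISTINCT source_id)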
Example #11
Source File: response.py    From choochoo with GNU General Public License v2.0
def _delete_from(self, s, start=None):
        composite_ids = s.query(Composite.id). \
            join(StatisticJournal, Composite.id == StatisticJournal.source_id). \
            join(StatisticName, StatisticJournal.statistic_name_id == StatisticName.id). \
            filter(StatisticName.owner == self.owner_out)
        if start:
            composite_ids = composite_ids.filter(StatisticJournal.time >= start)
        log.debug(f'Delete query: {composite_ids}')
        n = s.query(count(Source.id)). \
            filter(Source.id.in_(composite_ids)). \
            scalar()
        if n:
            log.warning(f'Deleting {n} Composite sources ({start} onwards)')
            s.query(Source). \
                filter(Source.id.in_(composite_ids)). \
                delete(synchronize_session=False)
            s.commit()
            Composite.clean(s) 
Example #12
Source File: utils.py    From choochoo with GNU General Public License v2.0
def _delete(self, s):
        start, finish = self._start_finish(type=local_time_to_time)
        s.commit()   # so that we don't have any risk of having something in the session that can be deleted
        statistic_names = s.query(StatisticName.id).filter(StatisticName.owner == self.owner_out)
        activity_journals = self._delimit_query(s.query(self._journal_type.id))
        statistic_journals = s.query(StatisticJournal.id). \
            filter(StatisticJournal.statistic_name_id.in_(statistic_names.cte()),
                   StatisticJournal.source_id.in_(activity_journals))
        for repeat in range(2):
            if repeat:
                s.query(StatisticJournal).filter(StatisticJournal.id.in_(statistic_journals.cte())). \
                    delete(synchronize_session=False)
                Timestamp.clear_keys(s, activity_journals.cte(), self.owner_out, constraint=None)
            else:
                n = s.query(count(StatisticJournal.id)). \
                    filter(StatisticJournal.id.in_(statistic_journals.cte())).scalar()
                if n:
                    log.warning(f'Deleting {n} statistics for {long_cls(self.owner_out)} from {start} to {finish}')
                else:
                    log.warning(f'No statistics to delete for {long_cls(self.owner_out)} from {start} to {finish}')
        s.commit() 
Example #13
Source File: monitor.py    From choochoo with GNU General Public License v2.0
def _delete_time_range(self, s, start, finish):
        composite_ids = s.query(Composite.id). \
            join(StatisticJournal, Composite.id == StatisticJournal.source_id). \
            join(StatisticName, StatisticJournal.statistic_name_id == StatisticName.id). \
            filter(StatisticName.owner == self.owner_out)
        if start:
            composite_ids = composite_ids.filter(StatisticJournal.time >= start)
        if finish:
            composite_ids = composite_ids.filter(StatisticJournal.time <= finish)
        log.debug(f'Delete query: {composite_ids}')
        n = s.query(count(Source.id)). \
            filter(Source.id.in_(composite_ids)). \
            scalar()
        if n:
            log.warning(f'Deleting {n} Composite sources ({start} - {finish})')
            s.query(Source). \
                filter(Source.id.in_(composite_ids)). \
                delete(synchronize_session=False)
            s.commit() 
Example #14
Source File: test_handlers.py    From dokomoforms with GNU General Public License v3.0
def test_get_logged_in_admin(self):
        num_surveys = (
            self.session
            .query(count(models.Survey.id))
            .filter_by(creator_id='b7becd02-1a3f-4c1d-a0e1-286ba121aef4')
            .scalar()
        )
        response = self.fetch('/', method='GET')
        response_soup = BeautifulSoup(response.body, 'html.parser')
        links = response_soup.select('a.btn-login.btn-large')
        self.assertEqual(len(links), 0, msg=response.body)
        self.assertIn(
            'Account Overview', response.body.decode(), msg=response.body
        )
        survey_dropdown = (
            response_soup.find('ul', {'aria-labelledby': 'SurveysDropdown'})
        )
        self.assertEqual(
            len(survey_dropdown.findAll('li')),
            min(num_surveys, BaseHandler.num_surveys_for_menu),
            msg=survey_dropdown
        ) 
Example #15
Source File: test_handlers.py    From dokomoforms with GNU General Public License v3.0
def test_login_success(self):
        dokomoforms.handlers.auth.options.https = False
        with patch.object(handlers.Login, '_async_post') as p:
            dummy = lambda: None
            dummy.body = json_encode(
                {'status': 'okay', 'email': 'test_creator@fixtures.com'}
            )
            p.return_value = tornado.gen.Task(
                lambda callback=None: callback(dummy)
            )
            response = self.fetch(
                '/user/login?assertion=woah', method='POST', body='',
                _logged_in_user=None
            )
        self.assertEqual(response.code, 200, msg=response.body)
        self.assertEqual(
            response.headers['Set-Cookie'].lower().count('secure'),
            1
        ) 
Example #16
Source File: test_demo_mode.py    From dokomoforms with GNU General Public License v3.0
def test_logging_in_creates_user_https(self):
        dokomoforms.handlers.demo.options.https = True
        no_user = (
            self.session
            .query(count(Administrator.id))
            .filter_by(name='demo_user')
            .scalar()
        )
        self.assertEqual(no_user, 0)
        self.fetch('/demo/login', _logged_in_user=None)
        user = (
            self.session
            .query(count(Administrator.id))
            .filter_by(name='demo_user')
            .scalar()
        )
        self.assertEqual(user, 1) 
Example #17
Source File: models.py    From pygameweb with BSD 2-Clause "Simplified" License
def tag_counts(self):
        """ Return a list of counts for the tags this project has.

        [('arcade', 2), ('opengl', 1)]
        """

        tags = [t.value for t in self.tags]
        cnt = count(Tags.value)

        tag_counts = (inspect(self).session
                      .query(Tags.value, cnt)
                      .group_by(Tags.value)
                      .filter(Tags.value.in_(tags))
                      .order_by(cnt.desc())).all()
        return [(tag, cnt, (int(10 + min(24, sqrt(cnt) * 24 / 5))))
                for tag, cnt in tag_counts] 
Example #18
Source File: test_recursion.py    From choochoo with GNU General Public License v2.0
def test_non_zero_inputs(self):
        '''
        the simplest query I can think of, which returns the number of inputs for nodes
        that have at least one input
        '''
        q_n_inputs = self.session.query(Connect.output_id.label('id'), count(Connect.input_id).label('count')). \
            group_by(Connect.output_id).order_by(Connect.output_id)
        self.assertEqual([(3, 2), (5, 1), (7, 2), (8, 1)], q_n_inputs.all()) 
Example #19
Source File: test_activities.py    From choochoo with GNU General Public License v2.0
def test_activities(self):

        with TemporaryDirectory() as base:

            bootstrap_dir(base, m(V), '5')

            bootstrap_dir(base, m(V), '5', mm(DEV), configurator=default)

            args, data = bootstrap_dir(base, m(V), '5', 'constants', 'set', 'SRTM1.dir',
                                          '/home/andrew/archive/srtm1', mm(FORCE))
            constants(args, data)

            args, data = bootstrap_dir(base, m(V), '5', mm(DEV), 'read',
                                          'data/test/source/personal/2018-08-27-rec.fit')
            read(args, data)

            # run('sqlite3 %s ".dump"' % f.name, shell=True)

            run_pipeline(data, PipelineType.CALCULATE, force=True, start='2018-01-01', n_cpu=1)

            # run('sqlite3 %s ".dump"' % f.name, shell=True)

            with data.db.session_context() as s:
                n_raw = s.query(count(StatisticJournalFloat.id)). \
                    join(StatisticName). \
                    filter(StatisticName.name == N.RAW_ELEVATION).scalar()
                self.assertEqual(2099, n_raw)
                n_fix = s.query(count(StatisticJournalFloat.id)). \
                    join(StatisticName). \
                    filter(StatisticName.name == N.ELEVATION).scalar()
                self.assertEqual(2099, n_fix)
                # WHY does this jump around?
                n = s.query(count(StatisticJournal.id)).scalar()
                # self.assertEqual(50403, n)
                self.assertTrue(n > 30000)
                self.assertTrue(n < 100000)
                journal = s.query(ActivityJournal).one()
                self.assertNotEqual(journal.start, journal.finish) 
Example #20
Source File: test_recursion.py    From choochoo with GNU General Public License v2.0
def test_all_inputs(self):
        '''
        we can improve the above to include nodes with zero inputs.
        '''
        q_n_inputs = self.session.query(Node.id.label('id'), count(Connect.input_id).label('count')). \
            outerjoin(Connect, Node.id == Connect.output_id). \
            group_by(Node.id).order_by(Node.id)
        print(q_n_inputs)
        self.assertEqual([(1, 0), (2, 0), (3, 2), (4, 0), (5, 1), (6, 0), (7, 2), (8, 1)], q_n_inputs.all()) 
Example #21
Source File: test_recursion.py    From choochoo with GNU General Public License v2.0
def test_missing_input(self):
        '''
        using the query above as a sub-query, we can compare the actual number of inputs with
        what was expected and find nodes with missing inputs.
        '''
        q_n_inputs = self.session.query(Node.id.label('id'), count(Connect.input_id).label('count')). \
            outerjoin(Connect, Node.id == Connect.output_id). \
            group_by(Node.id).order_by(Node.id).subquery()
        q_missing = self.session.query(Node.id.label('id')). \
            join(q_n_inputs, q_n_inputs.c.id == Node.id). \
            filter(Node.n_input != q_n_inputs.c.count).order_by(Node.id)
        print(q_missing)
        self.assertEqual([(5,)], q_missing.all()) 
Example #22
Source File: massping.py    From pajbot with MIT License
def count_known_users(usernames):
        if len(usernames) < 1:
            return 0
        with DBManager.create_session_scope() as db_session:

            # quick EXPLAIN ANALYZE for this query:
            #
            # pajbot=# EXPLAIN ANALYZE SELECT count(*) AS count_1
            # FROM "user"
            # WHERE ("user".login IN ('randers', 'lul', 'xd', 'penis', 'asd', 'hello', 'world') OR lower("user".name) IN ('randers', 'lul', 'xd', 'penis', 'asd', 'hello', 'world')) AND "user".last_seen IS NOT NULL AND now() - "user".last_seen <= make_interval(weeks := 2);
            #                                                                              QUERY PLAN
            # --------------------------------------------------------------------------------------------------------------------------------------------------------------------
            #  Aggregate  (cost=37.45..37.46 rows=1 width=8) (actual time=0.113..0.113 rows=1 loops=1)
            #    ->  Bitmap Heap Scan on "user"  (cost=21.53..37.43 rows=5 width=0) (actual time=0.110..0.110 rows=1 loops=1)
            #          Recheck Cond: ((login = ANY ('{randers,lul,xd,penis,asd,hello,world}'::text[])) OR (lower(name) = ANY ('{randers,lul,xd,penis,asd,hello,world}'::text[])))
            #          Filter: ((last_seen IS NOT NULL) AND ((now() - last_seen) <= '14 days'::interval))
            #          Heap Blocks: exact=6
            #          ->  BitmapOr  (cost=21.53..21.53 rows=14 width=0) (actual time=0.101..0.101 rows=0 loops=1)
            #                ->  Bitmap Index Scan on user_login_idx  (cost=0.00..10.76 rows=7 width=0) (actual time=0.054..0.054 rows=1 loops=1)
            #                      Index Cond: (login = ANY ('{randers,lul,xd,penis,asd,hello,world}'::text[]))
            #                ->  Bitmap Index Scan on user_lower_idx  (cost=0.00..10.76 rows=7 width=0) (actual time=0.046..0.047 rows=6 loops=1)
            #                      Index Cond: (lower(name) = ANY ('{randers,lul,xd,penis,asd,hello,world}'::text[]))
            #  Planning Time: 0.092 ms
            #  Execution Time: 0.140 ms
            # (12 rows)

            return (
                db_session.query(User)
                .with_entities(count())
                .filter(or_(User.login.in_(usernames), func.lower(User.name).in_(usernames)))
                .filter(and_(User.last_seen.isnot(None), (func.now() - User.last_seen) <= timedelta(weeks=2)))
                .scalar()
            ) 
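As the EXPLAIN ANALYZE comment shows, count() called with no arguments (here via with_entities(count())) is emitted as COUNT(*). A one-line check:

from sqlalchemy.sql.functions import count

print(str(count()))  # count(*)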
Example #23
Source File: test_constant.py    From choochoo with GNU General Public License v2.0
def test_constant(self):
        with TemporaryDirectory() as f:
            args, data = bootstrap_dir(f, m(V), '5')
            bootstrap_dir(f, m(V), '5', mm(DEV), configurator=default)
            with data.db.session_context() as s:
                n = s.query(count(Constant.id)).scalar()
                self.assertEqual(n, 14)
            args, data = bootstrap_dir(f, m(V), '5', 'constants', 'set', 'fthr:%', '154', mm(FORCE))
            constants(args, data)
            with data.db.session_context() as s:
                n = s.query(count(Constant.id)).scalar()
                self.assertEqual(n, 14)
                # todo - maybe test for value?
                # todo - now that this is defined anyway, change the test? 
Example #24
Source File: test_timestamp.py    From choochoo with GNU General Public License v2.0
def test_set(self):
        with TemporaryDirectory() as f:
            args, data = bootstrap_dir(f, m(V), '5')
            args, data = bootstrap_dir(f, m(V), '5', mm(DEV), configurator=default)
            with data.db.session_context() as s:
                source = add(s, Source())
                n = s.query(count(Timestamp.id)).scalar()
                self.assertEqual(n, 0)
                Timestamp.set(s, TestTimestamp, source=source)
                n = s.query(count(Timestamp.id)).scalar()
                self.assertEqual(n, 1)
                t = s.query(Timestamp).filter(Timestamp.owner == TestTimestamp).one()
                self.assertAlmostEqual(t.time.timestamp(), dt.datetime.now().timestamp(), 1) 
Example #25
Source File: test_timestamp.py    From choochoo with GNU General Public License v2.0
def test_context(self):
        with TemporaryDirectory() as f:
            args, data = bootstrap_dir(f, m(V), '5')
            args, data = bootstrap_dir(f, m(V), '5', mm(DEV), configurator=default)
            with data.db.session_context() as s:
                with Timestamp(owner=TestTimestamp).on_success(s):
                    n = s.query(count(Timestamp.id)).scalar()
                    self.assertEqual(n, 0)
                n = s.query(count(Timestamp.id)).scalar()
                self.assertEqual(n, 1) 
Example #26
Source File: test_timestamp.py    From choochoo with GNU General Public License v2.0
def test_context_error(self):
        with TemporaryDirectory() as f:
            args, data = bootstrap_dir(f, m(V), '5')
            args, data = bootstrap_dir(f, m(V), '5', mm(DEV), configurator=default)
            with data.db.session_context() as s:
                try:
                    with Timestamp(owner=TestTimestamp).on_success(s):
                        n = s.query(count(Timestamp.id)).scalar()
                        self.assertEqual(n, 0)
                        raise Exception('foo')
                except Exception as e:
                    self.assertEqual(str(e), 'foo')  # for some weird reason assertRaisesRegex was not working
                n = s.query(count(Timestamp.id)).scalar()
                self.assertEqual(n, 0) 
Example #27
Source File: test_inheritance.py    From choochoo with GNU General Public License v2.0
def test_delete_child_sql(self):
        s = self.database().session()
        s.query(Child).delete()
        # !!! this is because the ondelete cascade goes "the other way"
        self.assertEqual(s.query(count(Parent.id)).scalar(), 1) 
Example #28
Source File: test_inheritance.py    From choochoo with GNU General Public License v2.0
def test_delete_parent_instance(self):
        s = self.database().session()
        p = s.query(Parent).one()
        s.delete(p)
        self.assertEqual(s.query(count(Child.id)).scalar(), 0) 
Example #29
Source File: test_inheritance.py    From choochoo with GNU General Public License v2.0
def test_delete_parent_sql(self):
        s = self.database().session()
        s.query(Parent).delete()
        self.assertEqual(s.query(count(Child.id)).scalar(), 0) 
Example #30
Source File: test_inheritance.py    From choochoo with GNU General Public License v2.0
def test_delete_child_ref_instance(self):
        db = self.database()
        s = db.session()
        o = s.query(Child).one().child_ref
        s.delete(o)
        s.commit()
        s = db.session()
        # !!! this is because SQL deletes the child via cascade, but not the parent (see test_delete_child_sql)
        # (we don't have a backref on Other, so SQLAlchemy isn't going to do anything - have no idea if it would)
        self.assertEqual(s.query(count(Parent.id)).scalar(), 1)
        self.assertEqual(s.query(count(Child.id)).scalar(), 0)