Python django.db.connection.cursor() Examples
The following are 30 code examples of django.db.connection.cursor().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module django.db.connection, or try the search function.
Example #1
Source File: stats.py From Servo with BSD 2-Clause "Simplified" License | 8 votes |
def devices(request):
    """Render device statistics (order/repair counts per device description).

    Dates default to the view's initial range and are overridden by a valid
    POSTed DeviceStatsForm.
    """
    data = prep_view(request)
    data['form'] = DeviceStatsForm()
    start_date = data['initial']['start_date']
    end_date = data['initial']['end_date']

    if request.method == 'POST':
        posted_form = DeviceStatsForm(request.POST)
        if posted_form.is_valid():
            start_date = posted_form.cleaned_data['start_date']
            end_date = posted_form.cleaned_data['end_date']

    cursor = connection.cursor()
    # NOTE(review): OVERLAPS is given the same column twice, i.e. a zero-length
    # interval -- presumably intended as "created_at within range"; confirm.
    query = '''SELECT d.description device, count(o) AS orders, count(r) AS repairs FROM servo_device d, servo_order o, servo_repair r, servo_orderdevice od WHERE d.id = od.device_id AND o.id = od.order_id AND r.order_id = o.id AND (o.created_at, o.created_at) OVERLAPS (%s, %s) GROUP BY d.description'''
    cursor.execute(query, [start_date, end_date])
    data['results'] = cursor.fetchall()
    data['title'] = _('Device statistics')
    return render(request, "stats/devices.html", data)
Example #2
Source File: report_db_accessor_base.py From koku with GNU Affero General Public License v3.0 | 6 votes |
def create_new_temp_table(self, table_name, columns):
    """Create a temporary table and return the table name.

    ``columns`` is an iterable of {name: sql_type} mappings; a random UUID
    suffix keeps the temp-table name unique per call.
    """
    temp_table_name = "{}_{}".format(table_name, str(uuid.uuid4()).replace("-", "_"))
    # Build "(name type, name type, ...)" from the column mappings.
    column_defs = ", ".join(
        "{} {}".format(name, column_type)
        for column in columns
        for name, column_type in column.items()
    )
    table_sql = "CREATE TEMPORARY TABLE {} ({})".format(temp_table_name, column_defs)
    with connection.cursor() as cursor:
        cursor.db.set_schema(self.schema)
        cursor.execute(table_sql)
    return temp_table_name
Example #3
Source File: test_basic.py From sentry-python with BSD 2-Clause "Simplified" License | 6 votes |
def test_sql_psycopg2_string_composition(sentry_init, capture_events, query):
    """psycopg2.sql-composed queries should be recorded as breadcrumbs with params."""
    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
        _experiments={"record_sql_params": True},
    )
    from django.db import connections

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    import psycopg2.sql

    cursor = connections["postgres"].cursor()
    captured = capture_events()
    with pytest.raises(ProgrammingError):
        cursor.execute(query(psycopg2.sql), {"my_param": 10})
    capture_message("HI")

    (event,) = captured
    breadcrumb = event["breadcrumbs"][-1]
    assert breadcrumb["message"] == ('SELECT %(my_param)s FROM "foobar"')
    assert breadcrumb["data"]["db.params"] == {"my_param": 10}
Example #4
Source File: models.py From GSoC-Contribution-Leaderboard with MIT License | 6 votes |
def get_dict(self):
    """Return {login: {repo: {lastMergedPR, lastOpenPR, lastIssue}}} for all relations."""
    from django.db import connection
    with connection.cursor() as cursor:
        cursor.execute("""
            SELECT r.id, u.login, re.repo, r.lastMergedPR, r.lastOpenPR, r.lastIssue
            FROM core_relation r, core_user u, core_repository re
            WHERE r.user_id = u.id AND r.repo_id = re.id
        """)
        all_user = {}
        for row in cursor.fetchall():
            last = {
                'lastMergedPR': row[3],
                'lastOpenPR': row[4],
                'lastIssue': row[5],
            }
            user = row[1]
            repo = row[2]
            # setdefault replaces the non-idiomatic "if not user in all_user" guard.
            all_user.setdefault(user, {})[repo] = last
        return all_user
Example #5
Source File: base.py From django-anonymizer with MIT License | 6 votes |
def _run(anonymizer, objs):
    """Anonymize ``objs`` in bulk with ``anonymizer`` and persist via executemany."""
    attnames = tuple(replacer[0] for replacer in anonymizer.replacers)
    new_values = {}
    for obj in objs.iterator():
        # alter_object may veto a row by returning False.
        if anonymizer.alter_object(obj) is False:
            continue
        new_values[obj.pk] = {attname: getattr(obj, attname) for attname in attnames}
    query = anonymizer.create_query(attnames)
    query_args = anonymizer.create_query_args(new_values, attnames)
    with transaction.atomic():
        with connection.cursor() as cursor:
            if connection.vendor == 'postgresql':
                # Defer FK constraint checks so rows can be rewritten in any order.
                cursor.execute('SET CONSTRAINTS ALL DEFERRED')
            cursor.executemany(query, query_args)
Example #6
Source File: models_raw.py From safe-relay-service with MIT License | 6 votes |
def get_total_volume(self, from_date: datetime.datetime, to_date: datetime.datetime) -> Optional[int]:
    """Sum internal-tx value flowing in/out of safe contracts in [from_date, to_date].

    Returns None when no matching rows exist.
    """
    from .models import EthereumTxCallType
    # Fix: pass the call-type as a bind parameter instead of str.format()-ing
    # it into the SQL text (same result for the enum's int value, safer style).
    query = """
    SELECT SUM(IT.value) AS value
    FROM relay_safecontract SC
    JOIN relay_internaltx IT ON SC.address=IT."_from" OR SC.address=IT."to"
    JOIN relay_ethereumtx ET ON IT.ethereum_tx_id=ET.tx_hash
    JOIN relay_ethereumblock EB ON ET.block_id=EB.number
    WHERE IT.call_type != %s AND error IS NULL
      AND EB.timestamp BETWEEN %s AND %s
    """
    with connection.cursor() as cursor:
        cursor.execute(query, [EthereumTxCallType.DELEGATE_CALL.value, from_date, to_date])
        value = cursor.fetchone()[0]
        return int(value) if value is not None else None
Example #7
Source File: fixturize.py From django-template with MIT License | 6 votes |
def reset_db():
    """
    Reset database to a blank state by removing all the tables and recreating them.
    """
    with connection.cursor() as cursor:
        cursor.execute("select tablename from pg_tables where schemaname = 'public'")
        tables = [row[0] for row in cursor.fetchall()]
        # Identifiers cannot be passed as query parameters; use the backend's
        # quote_name() instead of hand-concatenating double quotes.
        for table in tables:
            cursor.execute('drop table {} cascade'.format(connection.ops.quote_name(table)))
    # Call migrate so that post-migrate hooks such as generating a default Site
    # object are run.
    management.call_command("migrate", "--noinput", stdout=StringIO())
Example #8
Source File: models_raw.py From safe-relay-service with MIT License | 6 votes |
def get_average_deploy_time(self, from_date: datetime.datetime, to_date: datetime.datetime) -> datetime.timedelta:
    """Average time between a safe's first incoming transfer and its deployment.

    Computed over safes whose creation timestamp falls in [from_date, to_date].
    """
    # NOTE(review): "I.to" is referenced unquoted here but quoted as IT."to"
    # elsewhere in this module -- confirm the column name resolves.
    query = """
    SELECT AVG(A.created - B.first_transfer) FROM
        (SELECT address, timestamp as created
         FROM relay_safecontract S
         JOIN relay_internaltx I ON S.address = I.contract_address
         JOIN relay_ethereumtx E ON I.ethereum_tx_id = E.tx_hash
         JOIN relay_ethereumblock B ON E.block_id = B.number) A
    JOIN
        (SELECT address, MIN(timestamp) as first_transfer
         FROM relay_safecontract S
         JOIN relay_internaltx I ON S.address = I.to
         JOIN relay_ethereumtx E ON I.ethereum_tx_id = E.tx_hash
         JOIN relay_ethereumblock B ON E.block_id = B.number
         GROUP BY address) B
    ON A.address = B.address
    WHERE A.created BETWEEN %s AND %s
    """
    with connection.cursor() as cursor:
        cursor.execute(query, [from_date, to_date])
        return cursor.fetchone()[0]
Example #9
Source File: models.py From crowdata with MIT License | 6 votes |
def amount_on_field(self):
    """ Sums the total on verified field on the amount """
    # Fix: pass ids as bind parameters instead of %-interpolating them into
    # the SQL text.
    query = """
    SELECT SUM(field_entry.value::DOUBLE PRECISION)
    FROM crowdataapp_documentsetfieldentry field_entry
    INNER JOIN crowdataapp_documentsetformentry form_entry ON form_entry.id = field_entry.entry_id
    INNER JOIN crowdataapp_document document ON document.id = form_entry.document_id
    WHERE document.document_set_id = %s
      AND field_entry.verified = TRUE
      AND field_entry.field_id = %s"""
    cursor = connection.cursor()
    cursor.execute(query, [self.id, self.tosum_field.id])
    amount = cursor.fetchall()[0][0]
    return amount
Example #10
Source File: test_aws_report_db_accessor.py From koku with GNU Affero General Public License v3.0 | 6 votes |
def test_populate_awstags_summary_table(self):
    """Test that the AWS tags summary table is populated."""
    tags_summary_name = AWS_CUR_TABLE_MAP["tags_summary"]
    summary_query = self.accessor._get_db_obj_query(tags_summary_name)
    with schema_context(self.schema):
        tag_keys = list({tag.key for tag in summary_query.all()})

    with schema_context(self.schema):
        with connection.cursor() as cursor:
            cursor.execute(
                """SELECT DISTINCT jsonb_object_keys(tags) FROM reporting_awscostentrylineitem_daily"""
            )
            expected_tag_keys = [row[0] for row in cursor.fetchall()]

    self.assertEqual(sorted(tag_keys), sorted(expected_tag_keys))
Example #11
Source File: views.py From PrivacyScore with GNU General Public License v3.0 | 6 votes |
def faq(request: HttpRequest):
    """Render the FAQ page with basic scan statistics."""
    # NOTE(review): num_scans counts Sites that have scans, while
    # num_scanning_sites counts unfinished Scans -- the names look swapped;
    # confirm against the template.
    num_scans = Site.objects.filter(scans__isnull=False).count()
    num_scanning_sites = Scan.objects.filter(end__isnull=True).count()

    # Disabled: counting sites with server leaks was too slow / unused.
    # query = '''SELECT
    #     COUNT(jsonb_array_length("result"->'leaks'))
    #     FROM backend_scanresult
    #     WHERE backend_scanresult.scan_id IN (
    #         SELECT backend_site.last_scan_id
    #         FROM backend_site
    #         WHERE backend_site.last_scan_id IS NOT NULL)
    #     AND jsonb_array_length("result"->'leaks') > 0'''
    #
    # with connection.cursor() as cursor:
    #     cursor.execute(query)
    #     num_sites_failing_serverleak = cursor.fetchone()[0]

    context = {
        'num_scanning_sites': num_scanning_sites,
        'num_scans': num_scans,
        'num_sites': Site.objects.count(),
        # 'num_sites_failing_serverleak': num_sites_failing_serverleak
    }
    return render(request, 'frontend/faq.html', context)
Example #12
Source File: test_tasks.py From koku with GNU Affero General Public License v3.0 | 6 votes |
def test_vacuum_schemas(self, mock_vacuum):
    """Test that the vacuum_schemas scheduled task runs for all schemas."""
    schema_one = "acct123"
    schema_two = "acct456"
    with connection.cursor() as cursor:
        cursor.execute(
            """
            INSERT INTO api_tenant (schema_name)
            VALUES (%s), (%s)
            """,
            [schema_one, schema_two],
        )

    tasks.vacuum_schemas()

    # Each tenant schema must have been queued for vacuuming.
    for schema_name in (schema_one, schema_two):
        mock_vacuum.delay.assert_any_call(schema_name)
Example #13
Source File: models.py From crowdata with MIT License | 6 votes |
def get_verified_documents_rankings(self, document_set, ranking_id):
    """ Get all documents that have an entry with canon """
    ranking_definition = DocumentSetRankingDefinition.objects.get(id=ranking_id)
    # Fix: pass ids as bind parameters instead of str.format()-ing them into
    # the SQL text. Placeholders appear in statement order:
    # magnitude_field_id, document_set.id, self.id.
    q = """
    SELECT distinct(document.id) AS document_id,
           document.name as document_name,
           (SELECT value FROM crowdataapp_documentsetfieldentry
            WHERE entry_id = field_entry.entry_id AND field_id = %s)
    FROM crowdataapp_documentsetfieldentry field_entry
    LEFT OUTER JOIN crowdataapp_canonicalfieldentrylabel canonical_label
        ON canonical_label.id = field_entry.canonical_label_id
    INNER JOIN crowdataapp_documentsetformentry form_entry ON form_entry.id = field_entry.entry_id
    INNER JOIN crowdataapp_document document ON document.id = form_entry.document_id
    WHERE document.document_set_id = %s
      AND field_entry.verified = TRUE
      AND canonical_label.id = %s
    """
    cursor = connection.cursor()
    cursor.execute(q, [ranking_definition.magnitude_field_id, document_set.id, self.id])
    return cursor.fetchall()
Example #14
Source File: remover.py From arches with GNU Affero General Public License v3.0 | 6 votes |
def clear_resources():
    """Removes all resource instances from your db and elasticsearch resource index"""
    se = SearchEngineFactory().create()
    match_all_query = Query(se)
    for index_name in ("terms", "resources", "resource_relations"):
        match_all_query.delete(index=index_name)

    keep_id = settings.RESOURCE_INSTANCE_ID
    print("deleting", Resource.objects.exclude(resourceinstanceid=keep_id).count(), "resources")
    Resource.objects.exclude(resourceinstanceid=keep_id).delete()
    print(Resource.objects.exclude(resourceinstanceid=keep_id).count(), "resources remaining")

    print("deleting", models.ResourceXResource.objects.count(), "resource relationships")
    # TRUNCATE is used directly; the ORM would delete row-by-row.
    cursor = connection.cursor()
    cursor.execute("TRUNCATE public.resource_x_resource CASCADE;")
    print(models.ResourceXResource.objects.count(), "resource relationships remaining")
Example #15
Source File: concept.py From arches with GNU Affero General Public License v3.0 | 6 votes |
def check_if_concept_in_use(self):
    """Checks if a concept or any of its subconcepts is in use by a resource instance"""
    cursor = connection.cursor()
    # Fix: bind the value id as a query parameter instead of %-interpolating
    # it into the SQL text.
    sql = """
        SELECT count(*)
        FROM tiles t, jsonb_each_text(t.tiledata) as json_data
        WHERE json_data.value = %s
    """
    for value in self.values:
        cursor.execute(sql, [str(value.id)])
        rows = cursor.fetchall()
        if rows[0][0] > 0:
            # A tile references this value directly; no need to recurse.
            return True
    for subconcept in self.subconcepts:
        if subconcept.check_if_concept_in_use():
            return True
    return False
Example #16
Source File: auth_tests.py From arches with GNU Affero General Public License v3.0 | 6 votes |
def setUpClass(cls):
    """Create an RDM-admin test user, an anonymous user and an OAuth token fixture."""
    cls.factory = RequestFactory()
    cls.client = Client()
    cls.user = User.objects.create_user("test", "test@archesproject.org", "password")
    cls.user.groups.add(Group.objects.get(name="RDM Administrator"))
    cls.anonymous_user = User.objects.get(username="anonymous")
    cls.token = "abc"
    cls.oauth_client_id = OAUTH_CLIENT_ID
    cls.oauth_client_secret = OAUTH_CLIENT_SECRET
    # The token fixture is inserted with raw SQL rendered from a template.
    cursor = connection.cursor()
    cursor.execute(CREATE_TOKEN_SQL.format(token=cls.token, user_id=cls.user.pk))
Example #17
Source File: tile_model_tests.py From arches with GNU Affero General Public License v3.0 | 6 votes |
def tearDownClass(cls):
    """Delete the fixture nodegroups/resource instance and drop the search indices."""
    cleanup_sql = """
        DELETE FROM public.node_groups
        WHERE nodegroupid = '99999999-0000-0000-0000-000000000001'
           OR nodegroupid = '32999999-0000-0000-0000-000000000000'
           OR nodegroupid = '19999999-0000-0000-0000-000000000000'
           OR nodegroupid = '21111111-0000-0000-0000-000000000000';
        DELETE FROM public.resource_instances
        WHERE resourceinstanceid = '40000000-0000-0000-0000-000000000000';
    """
    cursor = connection.cursor()
    cursor.execute(cleanup_sql)
    delete_terms_index()
    delete_concepts_index()
    delete_search_index()
Example #18
Source File: 0028_add_data_location.py From resolwe with Apache License 2.0 | 6 votes |
def set_data_location(apps, schema_editor):
    """Create DataLocation for each Data."""
    Data = apps.get_model("flow", "Data")
    DataLocation = apps.get_model("flow", "DataLocation")
    for data in Data.objects.all():
        data_dir = os.path.join(settings.FLOW_EXECUTOR["DATA_DIR"], str(data.id))
        if os.path.isdir(data_dir):
            with transaction.atomic():
                # Manually set DataLocation id to preserve data directory.
                location = DataLocation.objects.create(id=data.id, subpath=str(data.id))
                location.data.add(data)

    # Increment DataLocation id's sequence past the manually assigned ids.
    if DataLocation.objects.exists():
        max_id = DataLocation.objects.order_by("id").last().id
        with connection.cursor() as cursor:
            # DDL takes no bind parameters; max_id is an int from the DB.
            cursor.execute(
                "ALTER SEQUENCE flow_datalocation_id_seq RESTART WITH {};".format(max_id + 1)
            )
Example #19
Source File: models.py From crowdata with MIT License | 6 votes |
def get_verified_documents(self, document_set):
    """ Get all documents that have an entry with canon """
    # Fix: pass ids as bind parameters instead of %-interpolating them into
    # the SQL text.
    q = """
    SELECT distinct(document.id) AS document_id,
           document.name as document_name
    FROM crowdataapp_documentsetfieldentry field_entry
    LEFT OUTER JOIN crowdataapp_canonicalfieldentrylabel canonical_label
        ON canonical_label.id = field_entry.canonical_label_id
    INNER JOIN crowdataapp_documentsetformentry form_entry ON form_entry.id = field_entry.entry_id
    INNER JOIN crowdataapp_document document ON document.id = form_entry.document_id
    WHERE document.document_set_id = %s
      AND field_entry.verified = TRUE
      AND canonical_label.id = %s
    """
    cursor = connection.cursor()
    cursor.execute(q, [document_set.id, self.id])
    return cursor.fetchall()
Example #20
Source File: status.py From koku with GNU Affero General Public License v3.0 | 5 votes |
def database_status(self):
    """Collect database connection information.

    :returns: A dict of db connection info.
    """
    stats_sql = """
        SELECT datname AS database,
               numbackends AS database_connections
        FROM pg_stat_database
    """
    try:
        with connection.cursor() as cursor:
            cursor.execute(stats_sql)
            raw = cursor.fetchall()
            # pg_stat_database column names, taken from the cursor description.
            names = [desc[0] for desc in cursor.description]
    except (InterfaceError, NotSupportedError, OperationalError, ProgrammingError) as exc:
        LOG.warning("Unable to connect to DB: %s", str(exc))
        return {"ERROR": str(exc)}
    # Transform list-of-lists into list-of-dicts including column names.
    return [dict(zip(names, row)) for row in raw]
Example #21
Source File: helpers.py From koku with GNU Affero General Public License v3.0 | 5 votes |
def _populate_volume_label_summary_table(self):
    """Populate volume label key and values."""
    template = pkgutil.get_data("masu.database", "sql/reporting_ocpstoragevolumelabel_summary.sql")
    template = template.decode("utf-8")
    rendered_sql, rendered_params = JinjaSql().prepare_query(
        template, {"schema": connection.schema_name}
    )
    with connection.cursor() as cursor:
        # NOTE(review): the prepared bind params are not passed to execute();
        # presumably the template renders no placeholders -- confirm.
        cursor.execute(rendered_sql)
Example #22
Source File: persistence_hit_log.py From aswan with GNU Lesser General Public License v2.1 | 5 votes |
def table_exists(table_name):
    """Return True when ``table_name`` is present in the default database."""
    with connection.cursor() as cursor:
        # Introspect the full table list and test membership against it.
        existing_tables = set(connection.introspection.table_names(cursor=cursor))
        return table_name in existing_tables
Example #23
Source File: helpers.py From koku with GNU Affero General Public License v3.0 | 5 votes |
def _populate_azure_tag_summary(self):
    """Populate the Azure tag summary table."""
    raw = pkgutil.get_data("masu.database", "sql/reporting_ocpazuretags_summary.sql").decode("utf-8")
    sql, sql_params = JinjaSql().prepare_query(raw, {"schema": connection.schema_name})
    with connection.cursor() as cursor:
        # NOTE(review): sql_params is unused here -- presumably the template
        # renders no placeholders; confirm.
        cursor.execute(sql)
Example #24
Source File: helpers.py From koku with GNU Affero General Public License v3.0 | 5 votes |
def _tag_summary(self):
    """Populate AzureTagsSummary."""
    sql_bytes = pkgutil.get_data("masu.database", "sql/reporting_azuretags_summary.sql")
    summary_sql, summary_params = JinjaSql().prepare_query(
        sql_bytes.decode("utf-8"), {"schema": connection.schema_name}
    )
    with connection.cursor() as cursor:
        # NOTE(review): summary_params is unused -- presumably no placeholders
        # remain after template rendering; confirm.
        cursor.execute(summary_sql)
Example #25
Source File: helpers.py From koku with GNU Affero General Public License v3.0 | 5 votes |
def _populate_pod_label_summary_table(self):
    """Populate pod label key and values."""
    source = pkgutil.get_data("masu.database", "sql/reporting_ocpusagepodlabel_summary.sql")
    prepared_sql, prepared_params = JinjaSql().prepare_query(
        source.decode("utf-8"), {"schema": connection.schema_name}
    )
    with connection.cursor() as cursor:
        # NOTE(review): prepared_params is not passed through -- presumably the
        # rendered SQL has no placeholders; confirm.
        cursor.execute(prepared_sql)
Example #26
Source File: status.py From koku with GNU Affero General Public License v3.0 | 5 votes |
def database_status(self):
    """Collect database connection information.

    :returns: A dict of db connection info.
    """
    try:
        with connection.cursor() as cursor:
            cursor.execute(
                """
                SELECT datname AS database,
                    numbackends as database_connections
                FROM pg_stat_database
                """
            )
            rows = cursor.fetchall()
            # Column names come straight from the cursor description.
            columns = [desc[0] for desc in cursor.description]
    except (InterfaceError, NotSupportedError, OperationalError, ProgrammingError) as exc:
        LOG.warning("Unable to connect to DB: %s", str(exc))
        return {"ERROR": str(exc)}
    # Pair each row with the column names to build list-of-dicts output.
    result = [dict(zip(columns, row)) for row in rows]
    return result
Example #27
Source File: helpers.py From koku with GNU Affero General Public License v3.0 | 5 votes |
def _populate_tag_summary_table(self):
    """Populate pod label key and values."""
    data = pkgutil.get_data("masu.database", "sql/reporting_awstags_summary.sql")
    query, query_params = JinjaSql().prepare_query(
        data.decode("utf-8"), {"schema": connection.schema_name}
    )
    with connection.cursor() as cursor:
        # NOTE(review): query_params is unused -- presumably the rendered SQL
        # contains no placeholders; confirm.
        cursor.execute(query)
Example #28
Source File: report_db_accessor_base.py From koku with GNU Affero General Public License v3.0 | 5 votes |
def create_temp_table(self, table_name, drop_column=None):
    """Create a temporary table and return the table name.

    The temp table is a structural clone (LIKE) of ``table_name``;
    ``drop_column``, when given, is removed from the clone.
    """
    unique_suffix = str(uuid.uuid4()).replace("-", "_")
    temp_table_name = "{}_{}".format(table_name, unique_suffix)
    with connection.cursor() as cursor:
        cursor.db.set_schema(self.schema)
        cursor.execute(f"CREATE TEMPORARY TABLE {temp_table_name} (LIKE {table_name})")
        if drop_column:
            cursor.execute(f"ALTER TABLE {temp_table_name} DROP COLUMN {drop_column}")
    return temp_table_name
Example #29
Source File: helpers.py From koku with GNU Affero General Public License v3.0 | 5 votes |
def _populate_ocpaws_tag_summary(self):
    """Populate the AWS tag summary table."""
    payload = pkgutil.get_data("masu.database", "sql/reporting_ocpawstags_summary.sql")
    final_sql, final_params = JinjaSql().prepare_query(
        payload.decode("utf-8"), {"schema": connection.schema_name}
    )
    with connection.cursor() as cursor:
        # NOTE(review): final_params is not forwarded -- presumably the
        # rendered SQL has no remaining placeholders; confirm.
        cursor.execute(final_sql)
Example #30
Source File: test_basic.py From sentry-python with BSD 2-Clause "Simplified" License | 5 votes |
def test_sql_queries(sentry_init, capture_events, with_integration):
    """SQL run through a Django cursor is captured as a breadcrumb only when
    the Django integration is enabled."""
    # Fix: initialize the SDK exactly once, honoring with_integration. The
    # original called sentry_init a second time, unconditionally enabling the
    # integration and defeating the with_integration=False case.
    sentry_init(
        integrations=[DjangoIntegration()] if with_integration else [],
        send_default_pii=True,
        _experiments={"record_sql_params": True},
    )
    from django.db import connection

    events = capture_events()

    sql = connection.cursor()
    with pytest.raises(OperationalError):
        # table doesn't even exist
        sql.execute("""SELECT count(*) FROM people_person WHERE foo = %s""", [123])

    capture_message("HI")

    (event,) = events
    if with_integration:
        crumb = event["breadcrumbs"][-1]
        assert crumb["message"] == "SELECT count(*) FROM people_person WHERE foo = %s"
        assert crumb["data"]["db.params"] == [123]