Python psycopg2.extras.Json() Examples
The following are 21
code examples of psycopg2.extras.Json().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module psycopg2.extras, or try the search function.
Example #1
Source File: test_extended_types.py From aiopg with BSD 2-Clause "Simplified" License | 6 votes |
async def test_json(make_connection):
    """Round-trip a dict through a JSON column via the Json adapter.

    Bug fix: the function body uses ``await`` throughout, so it must be
    declared ``async def`` — as a plain ``def`` it is a SyntaxError.

    :param make_connection: fixture returning an awaitable that yields
        an open aiopg connection (assumed; confirm against conftest).
    """
    conn = await make_connection()
    data = {'a': 1, 'b': 'str'}
    cur = await conn.cursor()
    try:
        await cur.execute("DROP TABLE IF EXISTS tbl")
        await cur.execute("""CREATE TABLE tbl ( id SERIAL, val JSON)""")
        # Json(data) serializes the dict on the way in.
        await cur.execute(
            "INSERT INTO tbl (val) VALUES (%s)",
            [Json(data)])
        await cur.execute("SELECT * FROM tbl")
        item = await cur.fetchone()
        # Dict equality ignores key order, so the reversed literal is fine.
        assert (1, {'b': 'str', 'a': 1}) == item
    finally:
        # aiopg cursor close is synchronous.
        cur.close()
Example #2
Source File: test_types_extras.py From syntheticmass with Apache License 2.0 | 6 votes |
def test_adapt_subclass(self):
    """A Json subclass may override dumps() to serialize custom types."""
    from psycopg2.extras import json, Json

    class DecimalEncoder(json.JSONEncoder):
        # Render Decimals as plain JSON numbers; defer everything else
        # to the stock encoder.
        def default(self, value):
            if isinstance(value, Decimal):
                return float(value)
            return json.JSONEncoder.default(self, value)

    class MyJson(Json):
        def dumps(self, value):
            return json.dumps(value, cls=DecimalEncoder)

    cursor = self.conn.cursor()
    amount = Decimal('123.45')
    # The Decimal must come out as a quoted bare number.
    self.assertEqual(cursor.mogrify("%s", (MyJson(amount),)),
                     b("'123.45'"))
Example #3
Source File: postgres.py From zentral with Apache License 2.0 | 6 votes |
def _serialize_event(self, event):
    """Flatten an event into a column dict for the postgres backend.

    JSON-valued fields (user, payload) are wrapped in psycopg2's Json
    adapter so the driver serializes them on INSERT.
    """
    metadata = event.metadata
    doc = {
        'machine_serial_number': metadata.machine_serial_number,
        'event_type': event.event_type,
        'uuid': metadata.uuid,
        'index': metadata.index,
        'created_at': metadata.created_at,
    }
    request = metadata.request
    if request is None:
        # No originating request: keep the columns present but empty.
        doc['user_agent'] = None
        doc['ip'] = None
        doc['user'] = None
    else:
        doc['user_agent'] = request.user_agent
        doc['ip'] = request.ip
        user = request.user
        # NOTE: when a request exists but has no user, the 'user' key is
        # deliberately left out (matches original behavior).
        if user:
            doc['user'] = Json(user.serialize())
    doc['payload'] = Json(event.payload)
    return doc
Example #4
Source File: jsonb.py From openhgsenti with Apache License 2.0 | 5 votes |
def get_prep_lookup(self, lookup_type, value):
    """Prepare a lookup value: key lookups pass through untouched,
    dict/list values are wrapped in Json, everything else defers to
    the parent field."""
    key_lookups = ('has_key', 'has_keys', 'has_any_keys')
    if lookup_type in key_lookups:
        return value
    if not isinstance(value, (dict, list)):
        return super(JSONField, self).get_prep_lookup(lookup_type, value)
    return Json(value)
Example #5
Source File: test_types_extras.py From syntheticmass with Apache License 2.0 | 5 votes |
def test_str(self):
    """str() of an adapted Json must be a native, SQL-quoted str."""
    payload = {'a': [1, 2, u"\u2603"]}  # include a non-ASCII snowman
    adapted = psycopg2.extensions.adapt(psycopg2.extras.Json(payload))
    rendered = str(adapted)
    # no pesky b's: a real str, not bytes
    self.assert_(isinstance(rendered, str))
    # and properly quoted end to end
    self.assert_(rendered.startswith("'"))
    self.assert_(rendered.endswith("'"))
Example #6
Source File: test_types_extras.py From syntheticmass with Apache License 2.0 | 5 votes |
def test_adapt_dumps(self):
    """Json(dumps=...) lets the caller plug in a custom serializer."""
    from psycopg2.extras import json, Json

    class DecimalEncoder(json.JSONEncoder):
        def default(self, value):
            # Decimals become plain floats; anything else defers.
            if isinstance(value, Decimal):
                return float(value)
            return json.JSONEncoder.default(self, value)

    def encode(value):
        return json.dumps(value, cls=DecimalEncoder)

    cursor = self.conn.cursor()
    amount = Decimal('123.45')
    self.assertEqual(
        cursor.mogrify("%s", (Json(amount, dumps=encode),)),
        b("'123.45'"))
Example #7
Source File: test_types_extras.py From syntheticmass with Apache License 2.0 | 5 votes |
def test_adapt(self):
    """Adapting Json(obj) must quote exactly what json.dumps(obj) produces.

    Bug fix: the original looped ``for obj in enumerate(objs)``, which
    adapted ``(index, value)`` tuples rather than the sample values
    themselves — cases like bare ``None`` or the quoted string were never
    exercised on their own. Iterate the objects directly.
    """
    from psycopg2.extras import json, Json
    objs = [None, "te'xt", 123, 123.45,
            u'\xe0\u20ac', ['a', 100], {'a': 100}]
    curs = self.conn.cursor()
    for obj in objs:
        self.assertEqual(
            curs.mogrify("%s", (Json(obj),)),
            psycopg2.extensions.QuotedString(json.dumps(obj)).getquoted())
Example #8
Source File: test_types_extras.py From syntheticmass with Apache License 2.0 | 5 votes |
def test_customizable_with_module_not_available(self):
    """A Json subclass overriding dumps() works even with no json module."""
    from psycopg2.extras import Json

    class MyJson(Json):
        def dumps(self, value):
            # This test only ever wraps None.
            assert value is None
            return "hi"

    quoted = MyJson(None).getquoted()
    self.assertEqual(quoted, "'hi'")
Example #9
Source File: test_types_extras.py From syntheticmass with Apache License 2.0 | 5 votes |
def test_module_not_available(self):
    """getquoted() surfaces ImportError when no json implementation exists."""
    from psycopg2.extras import Json
    adapter = Json(None)
    self.assertRaises(ImportError, adapter.getquoted)
Example #10
Source File: unit_tests.py From cccatalog-api with MIT License | 5 votes |
def test_accuracy_filter():
    """Tags below the confidence threshold are dropped from the output."""
    tags = [
        {'name': 'inaccurate', 'accuracy': 0.5},
        {'name': 'accurate', 'accuracy': 0.999},
    ]
    expected = str(Json([{'name': 'accurate', 'accuracy': 0.999}]))
    actual = str(CleanupFunctions.cleanup_tags(tags))
    assert actual == expected
Example #11
Source File: unit_tests.py From cccatalog-api with MIT License | 5 votes |
def test_tag_blacklist():
    """Blacklisted and machine-generated tags are filtered out."""
    tags = [
        {'name': 'cc0'},
        {'name': ' cc0'},
        {'name': 'valid', 'accuracy': 0.99},
        {'name': 'valid_no_accuracy'},
        {'name': 'garbage:=metacrap'},
    ]
    expected = str(Json([
        {'name': 'valid', 'accuracy': 0.99},
        {'name': 'valid_no_accuracy'},
    ]))
    actual = str(CleanupFunctions.cleanup_tags(tags))
    assert actual == expected
Example #12
Source File: cleanup.py From cccatalog-api with MIT License | 5 votes |
def cleanup_tags(tags):
    """
    Delete tags because they have low accuracy or because they are in
    the blacklist. If no change is made, return None.

    :return: A SQL fragment if an update is required or None
    """
    if not tags:
        return None
    kept = []
    modified = False
    for tag in tags:
        low_confidence = (
            'accuracy' in tag and tag['accuracy'] < TAG_MIN_CONFIDENCE
        )
        if 'name' not in tag:
            # Nameless tags are malformed; drop them outright.
            log.warning(f'Filtering malformed tag "{tag}" in "{tags}"')
            modified = True
            continue
        if _tag_blacklisted(tag['name'].lower()) or low_confidence:
            modified = True
        else:
            kept.append(tag)
    # Only emit a fragment when something was actually removed.
    return Json(kept) if modified else None


# Define which tables, sources, and fields require cleanup. Map the field
# to a cleanup function that returns either a cleaned version of the field
# or 'None' to signal that no update is required.
Example #13
Source File: postgres_ext.py From Quiver-alfred with MIT License | 5 votes |
def contains(self, other):
    """Build a JSONB containment expression for dict/list operands,
    or a key-existence expression for anything else."""
    lhs = self.as_json(True)
    if not isinstance(other, (list, dict)):
        # Scalar operand: test key existence rather than containment.
        return Expression(lhs, OP.JSONB_EXISTS, other)
    return Expression(lhs, OP.JSONB_CONTAINS, Json(other))
Example #14
Source File: jsonb.py From openhgsenti with Apache License 2.0 | 5 votes |
def get_prep_value(self, value):
    """Wrap non-None values in Json so psycopg2 serializes them;
    None passes through untouched."""
    if value is None:
        return value
    return Json(value)
Example #15
Source File: committee_metadata.py From policy_diffusion with MIT License | 5 votes |
def parse_committee_metadata(committee_metadata):
    """Flatten a committee metadata dict into a row-ready tuple.

    Returns (id, state, chamber, committee, subcommittee, members,
    sources, parent_id, created_at, updated_at, all_ids, level).
    Only the first member record and first source URL are kept.
    """
    meta = committee_metadata

    # First member record only, wrapped for a JSON column; None when empty.
    members = Json(meta['members'][0]) if len(meta['members']) > 0 else None
    all_ids = meta['all_ids'][0] if len(meta['all_ids']) > 0 else None
    # 'level' is optional in the source data.
    level = meta['level'] if 'level' in meta else None

    return (meta['id'],
            meta['state'],
            meta['chamber'],
            meta['committee'],
            meta['subcommittee'],
            members,
            meta['sources'][0]['url'],
            meta['parent_id'],
            meta['created_at'],
            meta['updated_at'],
            all_ids,
            level)


# GRAB COMMITTEE METADATA FROM FILES AND PUSH TO DATABASE
Example #16
Source File: database.py From lopocs with GNU Lesser General Public License v2.1 | 5 votes |
def update_metadata(cls, table, column, srid, scale_x, scale_y, scale_z, offset_x, offset_y, offset_z):
    '''
    Add an entry to the lopocs metadata tables to use.
    To be used after a fresh pc table creation.

    :param table: qualified "schema.table" name holding the point cloud
    :param column: patch column name
    :param srid: spatial reference id recorded for the entry
    :param scale_*/offset_*: transform applied to the bounding box below
    '''
    # Resolve the pointcloud schema id (pcid) registered for this
    # schema/table/column triple; table is split on the first '.' into
    # schema and table parts.
    pcid = cls.query(""" select pcid from pointcloud_columns where "schema" = %s and "table" = %s and "column" = %s """, (table.split('.')[0], table.split('.')[1], column) )[0][0]
    bbox = cls.compute_boundingbox(table, column)
    # compute bbox with offset and scale applied
    bbox_scaled = [0] * 6
    bbox_scaled[0] = (bbox['xmin'] - offset_x) / scale_x
    bbox_scaled[1] = (bbox['ymin'] - offset_y) / scale_y
    bbox_scaled[2] = (bbox['zmin'] - offset_z) / scale_z
    bbox_scaled[3] = (bbox['xmax'] - offset_x) / scale_x
    bbox_scaled[4] = (bbox['ymax'] - offset_y) / scale_y
    bbox_scaled[5] = (bbox['zmax'] - offset_z) / scale_z
    # Replace any previous lopocs entry for this table/column, then insert
    # a fresh one; "returning id" yields the new row id (plid).
    res = cls.query(""" delete from pointcloud_lopocs where schematable = %s and "column" = %s; insert into pointcloud_lopocs (schematable, "column", srid, bbox) values (%s, %s, %s, %s) returning id """, (table, column, table, column, srid, bbox))
    plid = res[0][0]
    scales = scale_x, scale_y, scale_z
    offsets = offset_x, offset_y, offset_z
    # Point schema derived from the stored patches (presumably
    # Greyhound-style, per the helper name — confirm in patch2greyhoundschema).
    json_schema = cls.patch2greyhoundschema(table, column)
    # stored=True marks this output as backed by the table itself.
    cls.execute(""" insert into pointcloud_lopocs_outputs (id, pcid, scales, offsets, stored, bbox, point_schema) values (%s, %s, %s, %s, True, %s, %s) """, ( plid, pcid, iterable2pgarray(scales), iterable2pgarray(offsets), iterable2pgarray(bbox_scaled), Json(json_schema)))
Example #17
Source File: postgres_ext.py From Quiver-alfred with MIT License | 5 votes |
def contained_by(self, other):
    """Build a JSONB contained-by expression against *other*."""
    rhs = Json(other)
    return Expression(self, OP.JSONB_CONTAINED_BY, rhs)
Example #18
Source File: postgres_ext.py From Quiver-alfred with MIT License | 5 votes |
def contains(self, other):
    """JSONB containment for dict/list operands; key existence otherwise."""
    is_container = isinstance(other, (list, dict))
    if not is_container:
        # Scalar operand: test for a top-level key via Passthrough.
        return Expression(self, OP.JSONB_EXISTS, Passthrough(other))
    return Expression(self, OP.JSONB_CONTAINS, Json(other))
Example #19
Source File: postgres_ext.py From Quiver-alfred with MIT License | 5 votes |
def db_value(self, value):
    """Adapt a python value for storage: wrap it in Json (using this
    field's dumps) unless it is None or already wrapped."""
    if value is None or isinstance(value, Json):
        return value
    return Json(value, dumps=self.dumps)
Example #20
Source File: postgres_ext.py From Quiver-alfred with MIT License | 5 votes |
def __init__(self, dumps=None, *args, **kwargs):
    """JSON field; *dumps* optionally overrides the serializer used
    when adapting values. Raises if psycopg2 lacks JSON support."""
    if Json is None:
        # psycopg2 predates the Json adapter — the field cannot work.
        raise Exception('Your version of psycopg2 does not support JSON.')
    self.dumps = dumps
    super(JSONField, self).__init__(*args, **kwargs)
Example #21
Source File: database.py From lopocs with GNU Lesser General Public License v2.1 | 4 votes |
def add_output_schema(cls, table, column, scale_x, scale_y, scale_z,
                      offset_x, offset_y, offset_z, srid, schema,
                      compression='none'):
    """
    Adds a new schema used to stream points.
    The new point format will be added to the database if it doesn't exists.

    Fix: ``pcid = res[0][0]`` is assigned AFTER the ``if not res`` branch,
    so it is bound both when the format already exists (pcid from the
    lookup) and when it was just inserted (pcid from ``returning pcid``);
    if that assignment lived inside the branch, a cache hit would leave
    pcid unbound and crash at the return.

    :param compression: accepted for interface compatibility; not used here.
    :return: (pcid, bbox_scaled)
    """
    bbox = cls.compute_boundingbox(table, column)
    # compute bbox with offset and scale applied
    bbox_scaled = [0] * 6
    bbox_scaled[0] = (bbox['xmin'] - offset_x) / scale_x
    bbox_scaled[1] = (bbox['ymin'] - offset_y) / scale_y
    bbox_scaled[2] = (bbox['zmin'] - offset_z) / scale_z
    bbox_scaled[3] = (bbox['xmax'] - offset_x) / scale_x
    bbox_scaled[4] = (bbox['ymax'] - offset_y) / scale_y
    bbox_scaled[5] = (bbox['zmax'] - offset_z) / scale_z
    scales = scale_x, scale_y, scale_z
    offsets = offset_x, offset_y, offset_z
    xmlschema = create_pointcloud_schema(schema, scales, offsets)
    # check if the schema already exists
    res = Session.query(
        """ select pcid from pointcloud_formats where srid = %s and schema = %s """,
        (srid, xmlschema)
    )
    if not res:
        # insert schema with the next free pcid
        res = cls.query(
            """ with tmp as ( select max(pcid) + 1 as pcid from pointcloud_formats ) insert into pointcloud_formats select pcid, %s, %s from tmp returning pcid """,
            (srid, xmlschema)
        )
    # pcid comes from either the pre-existing row or the fresh insert.
    pcid = res[0][0]
    # check if lopocs already contains this configuration
    plid = cls.query(""" select id from pointcloud_lopocs where schematable = %s and "column" = %s; """, (table, column))[0][0]
    # stored=False marks this output as a derived/streaming format.
    cls.execute(""" insert into pointcloud_lopocs_outputs (id, pcid, scales, offsets, stored, bbox, point_schema) values (%s, %s, %s, %s, False, %s, %s) """, (
        plid, pcid, iterable2pgarray(scales), iterable2pgarray(offsets),
        iterable2pgarray(bbox_scaled), Json(schema)))
    return pcid, bbox_scaled