Python sqlalchemy.types.Text() Examples
The following are 23 code examples of sqlalchemy.types.Text().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module sqlalchemy.types, or try the search function.
Example #1
Source File: whoosh_backend.py From flask-msearch with BSD 3-Clause "New" or "Revised" License | 6 votes |
def fields_map(self, field_type):
    """Map a SQLAlchemy column type (or a type-name string) to a whoosh field.

    The primary key always becomes a stored, unique ID field; any type
    not matched below falls through to TEXT with the backend's analyzer.
    """
    if field_type == "primary":
        return ID(stored=True, unique=True)

    # Accept either a type-name string or an actual SQLAlchemy type.
    name_to_type = {
        'date': types.Date,
        'datetime': types.DateTime,
        'boolean': types.Boolean,
        'integer': types.Integer,
        'float': types.Float,
    }
    if isinstance(field_type, str):
        field_type = name_to_type.get(field_type, types.Text)

    # issubclass() below requires a class, so normalize instances.
    if not isinstance(field_type, type):
        field_type = field_type.__class__

    if issubclass(field_type, (types.DateTime, types.Date)):
        return DATETIME(stored=True, sortable=True)
    if issubclass(field_type, types.Integer):
        return NUMERIC(stored=True, numtype=int)
    if issubclass(field_type, types.Float):
        return NUMERIC(stored=True, numtype=float)
    if issubclass(field_type, types.Boolean):
        return BOOLEAN(stored=True)
    return TEXT(stored=True, analyzer=self.analyzer, sortable=False)
Example #2
Source File: convert.py From dvhb-hybrid with MIT License | 6 votes |
def _convert_type(self, dj_field, sa_type):
    """Instantiate *sa_type* with keyword args derived from a Django field.

    :param dj_field: the source Django model field
    :param sa_type: the SQLAlchemy type class selected for it
    :raises ConversionError: if an array field's item type is unknown
    :return: a configured SQLAlchemy type instance
    """
    kwargs = {}
    if sa_type is SA_ARRAY:
        internal_type = dj_field.base_field.get_internal_type()
        kwargs['item_type'] = self._types.get(internal_type)
        if kwargs['item_type'] is None:
            raise ConversionError(
                'Unable convert array: '
                'item type "%s" not found' % internal_type
            )
    elif sa_type is Geometry:
        kwargs['geometry_type'] = 'POINT'
        kwargs['srid'] = dj_field.srid
    elif sa_type is sa_types.Numeric:
        # BUG FIX: a stray trailing comma previously made 'scale' a
        # 1-tuple, e.g. (2,) instead of 2, yielding an invalid Numeric.
        kwargs['scale'] = dj_field.decimal_places
        kwargs['precision'] = dj_field.max_digits
    elif sa_type in (sa_types.String, sa_types.Text):
        kwargs['length'] = dj_field.max_length
    elif sa_type is SA_UUID:
        kwargs['as_uuid'] = True
    return sa_type(**kwargs)
Example #3
Source File: test_reflection.py From sqlalchemy with MIT License | 6 votes |
def test_max_ident_in_varchar_not_present(self):
    """test [ticket:3504].

    Verify both that a "max" length token reflects back as
    ``length=None`` and that these types accept "max" as the value of
    ``length`` on construction — not directly documented, but a pattern
    likely in common use.
    """
    metadata = self.metadata
    Table(
        "t",
        metadata,
        Column("t1", types.String),
        Column("t2", types.Text("max")),
        Column("t3", types.Text("max")),
        Column("t4", types.LargeBinary("max")),
        Column("t5", types.VARBINARY("max")),
    )
    metadata.create_all()

    dialect = testing.db.dialect
    for reflected_col in inspect(testing.db).get_columns("t"):
        col_type = reflected_col["type"]
        is_(col_type.length, None)
        in_("max", str(col_type.compile(dialect=dialect)))
Example #4
Source File: test_converter.py From graphene-sqlalchemy with MIT License | 6 votes |
def test_should_composite_convert():
    """A registered composite converter should drive field conversion."""
    registry = Registry()

    class CompositeClass:
        def __init__(self, col1, col2):
            self.col1 = col1
            self.col2 = col2

    @convert_sqlalchemy_composite.register(CompositeClass, registry)
    def convert_composite_class(composite, registry):
        # Surface the composite's doc text as the field description.
        return graphene.String(description=composite.doc)

    field = convert_sqlalchemy_composite(
        composite(
            CompositeClass,
            (Column(types.Unicode(50)), Column(types.Unicode(50))),
            doc="Custom Help Text",
        ),
        registry,
        mock_resolver,
    )
    assert isinstance(field, graphene.String)
Example #5
Source File: dbtools.py From mittn with Apache License 2.0 | 5 votes |
def add_false_positive(context, issue):
    """Add a finding into the database as a new finding.

    :param context: The Behave context
    :param issue: An issue data structure (see steps.py)
    """
    dbconn = open_database(context)
    if dbconn is None:
        # Findings exist but there is nowhere to store them: fail hard.
        assert False, ("Issues were found in scan, but no false positive"
                       " database is in use.")

    # Persist the finding. The result from Burp Extender does not include
    # a timestamp, so the current UTC time is recorded instead.
    db_insert = context.headlessscanner_issues.insert().values(
        new_issue=True,  # Boolean
        timestamp=datetime.datetime.utcnow(),  # DateTime
        test_runner_host=socket.gethostbyname(socket.getfqdn()),  # Text
        scenario_id=issue['scenario_id'],  # Text
        url=issue['url'],  # Text
        severity=issue['severity'],  # Text
        issuetype=issue['issuetype'],  # Text
        issuename=issue['issuename'],  # Text
        issuedetail=issue['issuedetail'],  # Text
        confidence=issue['confidence'],  # Text
        host=issue['host'],  # Text
        port=issue['port'],  # Text
        protocol=issue['protocol'],  # Text
        messages=json.dumps(issue['messages']))  # Blob
    dbconn.execute(db_insert)
    dbconn.close()
Example #6
Source File: elasticsearch_backend.py From flask-msearch with BSD 3-Clause "New" or "Revised" License | 5 votes |
def fields_map(self, field_type):
    """Translate a SQLAlchemy column type (or type-name string) into an
    Elasticsearch mapping entry; unmatched types fall back to 'string'.
    """
    if field_type == "primary":
        return {'type': 'keyword'}

    name_to_type = {
        'date': types.Date,
        'datetime': types.DateTime,
        'boolean': types.Boolean,
        'integer': types.Integer,
        'float': types.Float,
        'binary': types.Binary,
    }
    if isinstance(field_type, str):
        field_type = name_to_type.get(field_type, types.Text)

    # issubclass() below requires a class, so normalize instances.
    if not isinstance(field_type, type):
        field_type = field_type.__class__

    if issubclass(field_type, (types.DateTime, types.Date)):
        return {'type': 'date'}
    if issubclass(field_type, types.Integer):
        return {'type': 'long'}
    if issubclass(field_type, types.Float):
        return {'type': 'float'}
    if issubclass(field_type, types.Boolean):
        return {'type': 'boolean'}
    if issubclass(field_type, types.Binary):
        return {'type': 'binary'}
    return {'type': 'string'}

# https://medium.com/@federicopanini/elasticsearch-6-0-removal-of-mapping-types-526a67ff772
Example #7
Source File: convert.py From dvhb-hybrid with MIT License | 5 votes |
def __init__(self):
    """Build the Django-internal-type -> SQLAlchemy-type lookup table."""
    self._types = {
        # Django internal type => SQLAlchemy type
        'ArrayField': SA_ARRAY,
        'AutoField': sa_types.Integer,
        'BigAutoField': sa_types.BigInteger,
        'BigIntegerField': sa_types.BigInteger,
        'BooleanField': sa_types.Boolean,
        'CharField': sa_types.String,
        'DateField': sa_types.Date,
        'DateTimeField': sa_types.DateTime,
        'DecimalField': sa_types.Numeric,
        'DurationField': sa_types.Interval,
        'FileField': sa_types.String,
        'FilePathField': sa_types.String,
        'FloatField': sa_types.Float,
        'GenericIPAddressField': sa_types.String,
        'IntegerField': sa_types.Integer,
        'JSONField': SA_JSONB,
        'NullBooleanField': sa_types.Boolean,
        'PointField': Geometry,
        'PositiveIntegerField': sa_types.Integer,
        'PositiveSmallIntegerField': sa_types.SmallInteger,
        'SlugField': sa_types.String,
        'SmallIntegerField': sa_types.SmallInteger,
        'TextField': sa_types.Text,
        'TimeField': sa_types.Time,
        'UUIDField': SA_UUID,
        # TODO: Add missing GIS fields
    }
Example #8
Source File: test_sqlalchemy_types.py From senlin with Apache License 2.0 | 5 votes |
def test_load_dialect_impl(self):
    """MySQL must get a dialect-specific impl; SQLite keeps plain Text."""
    mysql_dialect = mysql_base.MySQLDialect()
    impl = self.sqltype.load_dialect_impl(mysql_dialect)
    self.assertNotEqual(types.Text, type(impl))

    sqlite_dialect = sqlite_base.SQLiteDialect()
    impl = self.sqltype.load_dialect_impl(sqlite_dialect)
    self.assertEqual(types.Text, type(impl))
Example #9
Source File: test_sqlalchemy_types.py From senlin with Apache License 2.0 | 5 votes |
def test_load_dialect_impl(self):
    """MySQL must get a dialect-specific impl; SQLite keeps plain Text."""
    mysql_dialect = mysql_base.MySQLDialect()
    impl = self.sqltype.load_dialect_impl(mysql_dialect)
    self.assertNotEqual(types.Text, type(impl))

    sqlite_dialect = sqlite_base.SQLiteDialect()
    impl = self.sqltype.load_dialect_impl(sqlite_dialect)
    self.assertEqual(types.Text, type(impl))
Example #10
Source File: types.py From oslo.db with Apache License 2.0 | 5 votes |
def __init__(self, mysql_as_long=False, mysql_as_medium=False):
    """Initialize JSON-encoding type.

    On MySQL the underlying column can optionally be widened to
    LONGTEXT or MEDIUMTEXT; the two flags cannot be combined.
    """
    super(JsonEncodedType, self).__init__()
    if mysql_as_long and mysql_as_medium:
        raise TypeError("mysql_as_long and mysql_as_medium are mutually "
                        "exclusive")
    if mysql_as_long:
        self.impl = Text().with_variant(mysql.LONGTEXT(), 'mysql')
    elif mysql_as_medium:
        self.impl = Text().with_variant(mysql.MEDIUMTEXT(), 'mysql')
Example #11
Source File: test_converter.py From graphene-sqlalchemy with MIT License | 5 votes |
def test_should_text_convert_string():
    # Text columns should surface as graphene.String in the schema.
    converted = get_field(types.Text())
    assert converted.type == graphene.String
Example #12
Source File: __init__.py From quay with Apache License 2.0 | 5 votes |
def load_dialect_impl(self, dialect):
    """Use LONGTEXT with utf8mb4 on MySQL; plain Text everywhere else."""
    if dialect.name != "mysql":
        return dialect.type_descriptor(Text())
    return dialect.type_descriptor(
        LONGTEXT(charset="utf8mb4", collation="utf8mb4_unicode_ci")
    )
Example #13
Source File: test_converter.py From graphene-sqlalchemy with MIT License | 5 votes |
def get_field(sqlalchemy_type, **column_kwargs):
    """Build a throwaway model with a single column of the given type and
    run it through the column converter.
    """
    class Model(declarative_base()):
        __tablename__ = 'model'
        id_ = Column(types.Integer, primary_key=True)
        column = Column(sqlalchemy_type, doc="Custom Help Text",
                        **column_kwargs)

    column_prop = inspect(Model).column_attrs['column']
    return convert_sqlalchemy_column(
        column_prop, get_global_registry(), mock_resolver)
Example #14
Source File: test_dbtools.py From mittn with Apache License 2.0 | 4 votes |
def test_add_false_positive(self):
    """Add a false positive to the database and check that all fields get
    populated and can be compared back against the originals.
    """
    response = {'scenario_id': '1',
                'req_headers': 'headers',
                'req_body': 'body',
                'url': 'url',
                'timestamp': datetime.datetime.utcnow(),
                'req_method': 'method',
                'server_protocol_error': None,
                'server_timeout': False,
                'server_error_text_detected': False,
                'server_error_text_matched': 'matched_text',
                'resp_statuscode': 'statuscode',
                'resp_headers': 'resp_headers',
                'resp_body': 'resp_body',
                'resp_history': 'resp_history'}
    dbtools.add_false_positive(self.context, response)

    # Connect directly to the database and check the data is there
    db_engine = sqlalchemy.create_engine(self.context.dburl)
    dbconn = db_engine.connect()
    db_metadata = sqlalchemy.MetaData()
    httpfuzzer_issues = Table(
        'httpfuzzer_issues', db_metadata,
        Column('new_issue', types.Boolean),
        Column('issue_no', types.Integer, primary_key=True, nullable=False),
        Column('timestamp', types.DateTime(timezone=True)),
        Column('test_runner_host', types.Text),
        Column('scenario_id', types.Text),
        Column('url', types.Text),
        Column('server_protocol_error', types.Text),
        Column('server_timeout', types.Boolean),
        Column('server_error_text_detected', types.Boolean),
        Column('server_error_text_matched', types.Text),
        Column('req_method', types.Text),
        Column('req_headers', types.LargeBinary),
        Column('req_body', types.LargeBinary),
        Column('resp_statuscode', types.Text),
        Column('resp_headers', types.LargeBinary),
        Column('resp_body', types.LargeBinary),
        Column('resp_history', types.LargeBinary))
    db_select = sqlalchemy.sql.select([httpfuzzer_issues])
    db_result = dbconn.execute(db_select)
    result = db_result.fetchone()
    # BUG FIX: dict.iteritems() is Python 2 only; items() works on both
    # Python 2 (returns a list) and Python 3 (returns a view).
    for key, value in response.items():
        self.assertEqual(result[key], value,
                         '%s not found in database after add' % key)
    self.assertEqual(result['test_runner_host'],
                     socket.gethostbyname(socket.getfqdn()),
                     'Test runner host name not correct in database')
    self.assertLessEqual(result['timestamp'], datetime.datetime.utcnow(),
                         'Timestamp not correctly stored in database')
    dbconn.close()
Example #15
Source File: dbtools.py From mittn with Apache License 2.0 | 4 votes |
def open_database(context):
    """Open the database specified in the feature file, creating the
    tables on first use.

    :param context: The Behave context
    :return: A database handle, or None if no database in use
    """
    if not hasattr(context, 'dburl'):
        return None  # No false positives database is in use

    # Try to connect to the database.
    try:
        db_engine = create_engine(context.dburl)
        dbconn = db_engine.connect()
    except (IOError, exc.OperationalError):
        assert False, "Cannot connect to database '%s'" % context.dburl

    # Table for new findings and false positives. LargeBinary is used
    # for the message because it can potentially be big.
    db_metadata = MetaData()
    db_metadata.bind = db_engine
    context.headlessscanner_issues = Table(
        'headlessscanner_issues',
        db_metadata,
        Column('new_issue', types.Boolean),
        Column('issue_no', types.Integer, primary_key=True,
               nullable=False),  # Implicit autoincrement
        Column('timestamp', types.DateTime(timezone=True)),
        Column('test_runner_host', types.Text),
        Column('scenario_id', types.Text),
        Column('url', types.Text),
        Column('severity', types.Text),
        Column('issuetype', types.Text),
        Column('issuename', types.Text),
        Column('issuedetail', types.Text),
        Column('confidence', types.Text),
        Column('host', types.Text),
        Column('port', types.Text),
        Column('protocol', types.Text),
        Column('messages', types.LargeBinary)
    )
    # Create the table if it doesn't exist; otherwise no effect.
    db_metadata.create_all(db_engine)
    return dbconn
Example #16
Source File: dbtools.py From mittn with Apache License 2.0 | 4 votes |
def add_false_positive(context, response):
    """Add a finding into the database as a new finding.

    :param context: The Behave context
    :param response: The response data structure (see httptools.py)
    """
    # These keys may not be present because they aren't part of the
    # response dict Requests produces, but instead added by us. If this
    # function is called without them, default to False/empty.
    if 'server_error_text_detected' not in response:
        response['server_error_text_detected'] = False
    if 'server_error_text_matched' not in response:
        response['server_error_text_matched'] = ''

    dbconn = open_database(context)
    if dbconn is None:
        # No errors database in use, so the data cannot be stored and we
        # assert a failure. Long assert messages seem to fail, so uri and
        # submission are truncated to 200 bytes.
        truncated_submission = (
            response['req_body'][:200] + "... (truncated)") if len(
            response['req_body']) > 210 else response['req_body']
        truncated_uri = (response['url'][:200] + "... (truncated)") if len(
            response['url']) > 210 else response['url']
        # FIX: the original assert message was a string literal broken in
        # two by extraction; reassembled via implicit concatenation.
        assert False, (
            "Response from server failed a check, and no errors "
            "database is in use. Scenario id = %s, error = %s, "
            "timeout = %s, status = %s, URI = %s, req_method = %s, "
            "submission = %s" % (
                response['scenario_id'],
                response['server_protocol_error'],
                response['server_timeout'],
                response['resp_statuscode'],
                truncated_uri,
                response['req_method'],
                truncated_submission))

    # Add the finding into the database
    db_insert = context.httpfuzzer_issues.insert().values(
        new_issue=True,  # Boolean
        timestamp=response['timestamp'],  # DateTime
        test_runner_host=socket.gethostbyname(socket.getfqdn()),  # Text
        scenario_id=str(response['scenario_id']),  # Text
        req_headers=str(response['req_headers']),  # Blob
        req_body=str(response['req_body']),  # Blob
        url=str(response['url']),  # Text
        req_method=str(response['req_method']),  # Text
        server_protocol_error=response['server_protocol_error'],  # Text
        server_timeout=response['server_timeout'],  # Boolean
        server_error_text_detected=response['server_error_text_detected'],  # Boolean
        server_error_text_matched=response['server_error_text_matched'],  # Text
        resp_statuscode=str(response['resp_statuscode']),  # Text
        resp_headers=str(response['resp_headers']),  # Blob
        resp_body=str(response['resp_body']),  # Blob
        resp_history=str(response['resp_history']))  # Blob
    dbconn.execute(db_insert)
    dbconn.close()
Example #17
Source File: test_dbtools.py From mittn with Apache License 2.0 | 4 votes |
def test_add_false_positive(self):
    """Add a false positive to the database and check that all fields get
    populated and can be compared back against the originals.
    """
    issue = {'scenario_id': '1',
             'url': 'testurl',
             'severity': 'testseverity',
             'issuetype': 'testissuetype',
             'issuename': 'testissuename',
             'issuedetail': 'testissuedetail',
             'confidence': 'testconfidence',
             'host': 'testhost',
             'port': 'testport',
             'protocol': 'testprotocol',
             'messages': '{foo=bar}'}
    dbtools.add_false_positive(self.context, issue)

    # Connect directly to the database and check the data is there
    db_engine = sqlalchemy.create_engine(self.context.dburl)
    dbconn = db_engine.connect()
    db_metadata = sqlalchemy.MetaData()
    headlessscanner_issues = Table(
        'headlessscanner_issues',
        db_metadata,
        Column('new_issue', types.Boolean),
        Column('issue_no', types.Integer, primary_key=True,
               nullable=False),  # Implicit autoincrement
        Column('timestamp', types.DateTime(timezone=True)),
        Column('test_runner_host', types.Text),
        Column('scenario_id', types.Text),
        Column('url', types.Text),
        Column('severity', types.Text),
        Column('issuetype', types.Text),
        Column('issuename', types.Text),
        Column('issuedetail', types.Text),
        Column('confidence', types.Text),
        Column('host', types.Text),
        Column('port', types.Text),
        Column('protocol', types.Text),
        Column('messages', types.LargeBinary)
    )
    db_select = sqlalchemy.sql.select([headlessscanner_issues])
    db_result = dbconn.execute(db_select)
    result = db_result.fetchone()
    # BUG FIX: dict.iteritems() is Python 2 only; items() works on both
    # Python 2 (returns a list) and Python 3 (returns a view).
    for key, value in issue.items():
        if key == 'messages':
            # 'messages' is stored JSON-encoded by add_false_positive.
            self.assertEqual(result[key], json.dumps(value))
        else:
            self.assertEqual(result[key], value,
                             '%s not found in database after add' % key)
    self.assertEqual(result['test_runner_host'],
                     socket.gethostbyname(socket.getfqdn()),
                     'Test runner host name not correct in database')
    self.assertLessEqual(result['timestamp'], datetime.datetime.utcnow(),
                         'Timestamp not correctly stored in database')
    dbconn.close()
Example #18
Source File: dbtools.py From mittn with Apache License 2.0 | 4 votes |
def known_false_positive(context, response):
    """Check whether a finding already exists in the database (usually a
    "false positive" if it does exist).

    :param context: The Behave context
    :param response: The server response data structure (see httptools.py)
    :return: True or False, depending on whether this is a known issue
    """
    # These keys may not be present because they aren't part of the
    # response dict Requests produces, but instead added by us. If this
    # function is called without them, default to False/empty.
    if 'server_error_text_detected' not in response:
        response['server_error_text_detected'] = False
    if 'server_error_text_matched' not in response:
        response['server_error_text_matched'] = ''

    dbconn = open_database(context)
    if dbconn is None:
        # No false positive db is in use, all findings are treated as new
        return False

    # A finding is a duplicate if it has the same protocol level error
    # message (or None), scenario id, server status code, timeout flag
    # and server-error-text-detected flag. Because each fuzz case is
    # likely to be separate, we cannot store all of them; two different
    # fuzz cases eliciting a similar response are indistinguishable here,
    # so fuzz-test an offending field thoroughly and separately.
    db_select = sql.select([context.httpfuzzer_issues]).where(
        and_(
            context.httpfuzzer_issues.c.scenario_id ==
            response['scenario_id'],  # Text
            context.httpfuzzer_issues.c.server_protocol_error ==
            response['server_protocol_error'],  # Text
            context.httpfuzzer_issues.c.resp_statuscode ==
            str(response['resp_statuscode']),  # Text
            context.httpfuzzer_issues.c.server_timeout ==
            response['server_timeout'],  # Bool
            context.httpfuzzer_issues.c.server_error_text_detected ==
            response['server_error_text_detected']))  # Bool
    db_result = dbconn.execute(db_select)
    known = len(db_result.fetchall()) > 0
    # BUG FIX: previously the result and connection were leaked (left
    # open) when no matching rows were found; always close them.
    db_result.close()
    dbconn.close()
    return known
Example #19
Source File: dbtools.py From mittn with Apache License 2.0 | 4 votes |
def open_database(context):
    """Open the database specified in the feature file, creating the
    tables on first use.

    :param context: The Behave context
    :return: A database handle, or None if no database in use
    """
    if not hasattr(context, 'dburl'):
        return None  # No false positives database is in use

    # Try to connect to the database.
    try:
        db_engine = create_engine(context.dburl)
        dbconn = db_engine.connect()
    except (IOError, exc.OperationalError):
        assert False, "Cannot connect to database '%s'" % context.dburl

    # Table for new findings and false positives. LargeBinary is used
    # for fields that could contain somehow bad Unicode, in case some
    # downstream component tries to parse a string provided as Unicode.
    db_metadata = MetaData()
    db_metadata.bind = db_engine
    context.httpfuzzer_issues = Table(
        'httpfuzzer_issues', db_metadata,
        Column('new_issue', types.Boolean),
        Column('issue_no', types.Integer, primary_key=True, nullable=False),
        Column('timestamp', types.DateTime(timezone=True)),
        Column('test_runner_host', types.Text),
        Column('scenario_id', types.Text),
        Column('url', types.Text),
        Column('server_protocol_error', types.Text),
        Column('server_timeout', types.Boolean),
        Column('server_error_text_detected', types.Boolean),
        Column('server_error_text_matched', types.Text),
        Column('req_method', types.Text),
        Column('req_headers', types.LargeBinary),
        Column('req_body', types.LargeBinary),
        Column('resp_statuscode', types.Text),
        Column('resp_headers', types.LargeBinary),
        Column('resp_body', types.LargeBinary),
        Column('resp_history', types.LargeBinary))
    # Create the table if it doesn't exist; otherwise no effect.
    db_metadata.create_all(db_engine)
    return dbconn
Example #20
Source File: stock.py From data_integration_celery with GNU General Public License v3.0 | 4 votes |
def merge_stock_info():
    """Merge the wind and ifind source tables into the combined
    ``stock_info`` table, preferring ifind values where both exist.

    :return: the merged DataFrame that was written to the database
    """
    table_name = 'stock_info'
    logging.info("更新 %s 开始", table_name)
    has_table = engine_md.has_table(table_name)
    ifind_table_name = 'ifind_{table_name}'.format(table_name=table_name)
    wind_table_name = 'wind_{table_name}'.format(table_name=table_name)

    # Load both source tables and full-outer-join them on the stock code.
    ifind_sql_str = "select * from {table_name}".format(
        table_name=ifind_table_name)
    wind_sql_str = "select * from {table_name}".format(
        table_name=wind_table_name)
    ifind_df = pd.read_sql(ifind_sql_str, engine_md)  # , index_col='ths_code'
    wind_df = pd.read_sql(wind_sql_str, engine_md)  # , index_col='wind_code'
    joined_df = pd.merge(ifind_df, wind_df, how='outer',
                         left_on='ths_code', right_on='wind_code',
                         indicator='indicator_column')

    # Output column -> (SQL type, merge function, merge-function kwargs).
    col_merge_dic = {
        'unique_code': (String(20), prefer_left,
                        {'left_key': 'ths_code', 'right_key': 'wind_code'}),
        'sec_name': (String(20), prefer_left,
                     {'left_key': 'ths_stock_short_name_stock',
                      'right_key': 'sec_name'}),
        'cn_name': (String(100), get_value,
                    {'key': 'ths_corp_cn_name_stock'}),
        'en_name': (String(100), get_value,
                    {'key': 'ths_corp_name_en_stock'}),
        'delist_date': (Date, prefer_left,
                        {'left_key': 'ths_delist_date_stock',
                         'right_key': 'delist_date'}),
        'ipo_date': (Date, prefer_left,
                     {'left_key': 'ths_ipo_date_stock',
                      'right_key': 'ipo_date'}),
        'pre_name': (Text, prefer_left,
                     {'left_key': 'ths_corp_name_en_stock',
                      'right_key': 'prename'}),
        'established_date': (Date, get_value,
                             {'key': 'ths_established_date_stock'}),
        'exch_city': (String(20), get_value, {'key': 'exch_city'}),
        'exch_cn': (String(20), get_value,
                    {'key': 'ths_listing_exchange_stock'}),
        'exch_eng': (String(20), get_value, {'key': 'exch_eng'}),
        'stock_code': (String(20), prefer_left,
                       {'left_key': 'ths_stock_code_stock',
                        'right_key': 'trade_code'}),
        'mkt': (String(20), get_value, {'key': 'mkt'}),
    }
    col_merge_rule_dic = {
        key: (val[1], val[2]) for key, val in col_merge_dic.items()
    }
    dtype = {key: val[0] for key, val in col_merge_dic.items()}

    data_df = merge_data(joined_df, col_merge_rule_dic)
    data_count = bunch_insert_on_duplicate_update(
        data_df, table_name, engine_md, dtype)
    logger.info('%s 新增或更新记录 %d 条', table_name, data_count)

    # On first creation, convert the table engine and add a primary key.
    if not has_table and engine_md.has_table(table_name):
        alter_table_2_myisam(engine_md, [table_name])
        build_primary_key([table_name])
    return data_df
Example #21
Source File: test_sqlite.py From sqlalchemy with MIT License | 4 votes |
def _fixed_lookup_fixture(self):
    """Pairs of (declared SQLite column type, expected reflected type)."""
    return [
        (sqltypes.String(), sqltypes.VARCHAR()),
        (sqltypes.String(1), sqltypes.VARCHAR(1)),
        (sqltypes.String(3), sqltypes.VARCHAR(3)),
        (sqltypes.Text(), sqltypes.TEXT()),
        (sqltypes.Unicode(), sqltypes.VARCHAR()),
        (sqltypes.Unicode(1), sqltypes.VARCHAR(1)),
        (sqltypes.UnicodeText(), sqltypes.TEXT()),
        (sqltypes.CHAR(3), sqltypes.CHAR(3)),
        (sqltypes.NUMERIC, sqltypes.NUMERIC()),
        (sqltypes.NUMERIC(10, 2), sqltypes.NUMERIC(10, 2)),
        (sqltypes.Numeric, sqltypes.NUMERIC()),
        (sqltypes.Numeric(10, 2), sqltypes.NUMERIC(10, 2)),
        (sqltypes.DECIMAL, sqltypes.DECIMAL()),
        (sqltypes.DECIMAL(10, 2), sqltypes.DECIMAL(10, 2)),
        (sqltypes.INTEGER, sqltypes.INTEGER()),
        (sqltypes.BIGINT, sqltypes.BIGINT()),
        (sqltypes.Float, sqltypes.FLOAT()),
        (sqltypes.TIMESTAMP, sqltypes.TIMESTAMP()),
        (sqltypes.DATETIME, sqltypes.DATETIME()),
        (sqltypes.DateTime, sqltypes.DATETIME()),
        (sqltypes.DateTime(), sqltypes.DATETIME()),
        (sqltypes.DATE, sqltypes.DATE()),
        (sqltypes.Date, sqltypes.DATE()),
        (sqltypes.TIME, sqltypes.TIME()),
        (sqltypes.Time, sqltypes.TIME()),
        (sqltypes.BOOLEAN, sqltypes.BOOLEAN()),
        (sqltypes.Boolean, sqltypes.BOOLEAN()),
        # SQLite-specific storage formats should still reflect as the
        # generic date/time types.
        (
            sqlite.DATE(storage_format="%(year)04d%(month)02d%(day)02d"),
            sqltypes.DATE(),
        ),
        (
            sqlite.TIME(
                storage_format="%(hour)02d%(minute)02d%(second)02d"
            ),
            sqltypes.TIME(),
        ),
        (
            sqlite.DATETIME(
                storage_format="%(year)04d%(month)02d%(day)02d"
                "%(hour)02d%(minute)02d%(second)02d"
            ),
            sqltypes.DATETIME(),
        ),
    ]
Example #22
Source File: test_reflection.py From sqlalchemy with MIT License | 4 votes |
def test_basic_reflection(self):
    """Create two related tables, reflect them into fresh metadata, and
    verify the reflected definitions match the originals.
    """
    meta = self.metadata
    users = Table(
        "engine_users",
        meta,
        Column("user_id", types.INT, primary_key=True),
        Column("user_name", types.VARCHAR(20), nullable=False),
        Column("test1", types.CHAR(5), nullable=False),
        Column("test2", types.Float(5), nullable=False),
        Column("test2.5", types.Float(), nullable=False),
        Column("test3", types.Text()),
        Column("test4", types.Numeric, nullable=False),
        Column("test4.5", types.Numeric(10, 2), nullable=False),
        Column("test5", types.DateTime),
        # Self-referential FK exercises reflection of the relationship.
        Column(
            "parent_user_id",
            types.Integer,
            ForeignKey("engine_users.user_id"),
        ),
        Column("test6", types.DateTime, nullable=False),
        Column("test7", types.Text()),
        Column("test8", types.LargeBinary()),
        Column("test_passivedefault2", types.Integer, server_default="5"),
        Column("test9", types.BINARY(100)),
        Column("test_numeric", types.Numeric()),
    )
    addresses = Table(
        "engine_email_addresses",
        meta,
        Column("address_id", types.Integer, primary_key=True),
        Column(
            "remote_user_id", types.Integer, ForeignKey(users.c.user_id)
        ),
        Column("email_address", types.String(20)),
    )
    meta.create_all()

    # Reflect into a separate MetaData and compare.
    reflected_meta = MetaData()
    reflected_users = Table(
        "engine_users",
        reflected_meta,
        autoload=True,
        autoload_with=testing.db,
    )
    reflected_addresses = Table(
        "engine_email_addresses",
        reflected_meta,
        autoload=True,
        autoload_with=testing.db,
    )
    self.assert_tables_equal(users, reflected_users)
    self.assert_tables_equal(addresses, reflected_addresses)
Example #23
Source File: column.py From AnyBlok with Mozilla Public License 2.0 | 4 votes |
def native_type(self, registry):
    """Return Text for encrypted columns (ciphertext is stored as text);
    otherwise return the configured SQLAlchemy type.
    """
    if not self.encrypt_key:
        return self.sqlalchemy_type
    return types.Text