Python sqlalchemy.engine.reflection.Inspector.from_engine() Examples
The following are 27
code examples of sqlalchemy.engine.reflection.Inspector.from_engine().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the
module sqlalchemy.engine.reflection.Inspector, or try the search function.
Example #1
Source File: reflection.py From android_universal with MIT License | 6 votes |
def from_engine(cls, bind):
    """Construct a new dialect-specific Inspector object from the given
    engine or connection.

    :param bind: a :class:`~sqlalchemy.engine.Connectable`,
      which is typically an instance of
      :class:`~sqlalchemy.engine.Engine` or
      :class:`~sqlalchemy.engine.Connection`.

    This method differs from a direct constructor call of
    :class:`.Inspector` in that the
    :class:`~sqlalchemy.engine.interfaces.Dialect` is given a chance
    to provide a dialect-specific :class:`.Inspector` instance, which
    may provide additional methods.

    See the example at :class:`.Inspector`.
    """
    # A dialect may publish its own Inspector factory; prefer it so
    # dialect-specific reflection methods become available.
    if hasattr(bind.dialect, 'inspector'):
        return bind.dialect.inspector(bind)
    return Inspector(bind)
Example #2
Source File: compare.py From jbox with MIT License | 6 votes |
def _produce_net_changes(autogen_context, upgrade_ops):
    """Inspect the target database and dispatch schema-level comparators.

    Builds the set of schemas to compare (``None`` standing in for the
    default schema) and hands control to the registered "schema"
    comparators, which populate *upgrade_ops*.
    """
    conn = autogen_context.connection
    insp = Inspector.from_engine(conn)

    if autogen_context.opts.get('include_schemas', False):
        schemas = set(insp.get_schema_names())
        schemas.discard("information_schema")
        # Represent the "default" schema as None rather than by name.
        schemas.add(None)
        schemas.discard(conn.dialect.default_schema_name)
    else:
        schemas = [None]

    dispatcher = comparators.dispatch("schema", autogen_context.dialect.name)
    dispatcher(autogen_context, upgrade_ops, schemas)
Example #3
Source File: 082_unique_ns_record.py From designate with Apache License 2.0 | 6 votes |
def upgrade(migrate_engine):
    """Add a unique (pool_id, hostname) constraint to pool_ns_records.

    The constraint is skipped when already present (it was backported to
    the L release).  If existing duplicate rows make the constraint
    impossible, log the error and exit with status 1 rather than dumping
    a huge traceback.
    """
    meta.bind = migrate_engine
    pool_ns_records_table = Table('pool_ns_records', meta, autoload=True)

    # Only apply the constraint when it is not there already.
    insp = Inspector.from_engine(migrate_engine)
    existing_names = [c['name']
                      for c in insp.get_unique_constraints('pool_ns_records')]
    if CONSTRAINT_NAME in existing_names:
        return

    # Define the constraint here only because it was absent above.
    constraint = UniqueConstraint('pool_id', 'hostname',
                                  name=CONSTRAINT_NAME,
                                  table=pool_ns_records_table)
    try:
        constraint.create()
    except exc.IntegrityError as e:
        LOG.error(explanation, e)
        # Use sys.exit so we don't blow up with a huge trace
        sys.exit(1)
Example #4
Source File: reflection.py From jarvis with GNU General Public License v2.0 | 6 votes |
def from_engine(cls, bind):
    """Construct a new dialect-specific Inspector object from the given
    engine or connection.

    :param bind: a :class:`~sqlalchemy.engine.Connectable`,
      which is typically an instance of
      :class:`~sqlalchemy.engine.Engine` or
      :class:`~sqlalchemy.engine.Connection`.

    This method differs from a direct constructor call of
    :class:`.Inspector` in that the
    :class:`~sqlalchemy.engine.interfaces.Dialect` is given a chance
    to provide a dialect-specific :class:`.Inspector` instance, which
    may provide additional methods.

    See the example at :class:`.Inspector`.
    """
    # A dialect may publish its own Inspector factory; prefer it so
    # dialect-specific reflection methods become available.
    if hasattr(bind.dialect, 'inspector'):
        return bind.dialect.inspector(bind)
    return Inspector(bind)
Example #5
Source File: reflection.py From sqlalchemy with MIT License | 6 votes |
def from_engine(cls, bind):
    """Construct a new dialect-specific Inspector object from the given
    engine or connection.

    :param bind: a :class:`~sqlalchemy.engine.Connectable`,
      which is typically an instance of
      :class:`~sqlalchemy.engine.Engine` or
      :class:`~sqlalchemy.engine.Connection`.

    This method differs from a direct constructor call of
    :class:`_reflection.Inspector` in that the
    :class:`~sqlalchemy.engine.interfaces.Dialect` is given a chance
    to provide a dialect-specific :class:`_reflection.Inspector`
    instance, which may provide additional methods.

    See the example at :class:`_reflection.Inspector`.
    """
    # Delegate to the common constructor path using the legacy
    # (bind-based) initializer.
    return cls._construct(cls._init_legacy, bind)
Example #6
Source File: 1507a7289a2f_create_is_encrypted.py From airflow with Apache License 2.0 | 6 votes |
def upgrade():  # noqa: D103
    """Add the ``is_encrypted`` column to the ``connection`` table."""
    # First check whether the user already has this column.  That is
    # only the case for users upgrading from a version of Airflow that
    # predates the Alembic integration.
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)

    # 'connection' shows up here only when it already existed in the db,
    # not when Alembic created it in a previous migration.
    if 'connection' in inspector.get_table_names():
        col_names = [c['name'] for c in inspector.get_columns('connection')]
        if 'is_encrypted' in col_names:
            return

    op.add_column(
        'connection',
        sa.Column('is_encrypted', sa.Boolean, unique=False, default=False))

    conn = op.get_bind()
    conn.execute(
        connectionhelper.update().values(is_encrypted=False)
    )
Example #7
Source File: reflection.py From planespotter with MIT License | 6 votes |
def from_engine(cls, bind):
    """Construct a new dialect-specific Inspector object from the given
    engine or connection.

    :param bind: a :class:`~sqlalchemy.engine.Connectable`,
      which is typically an instance of
      :class:`~sqlalchemy.engine.Engine` or
      :class:`~sqlalchemy.engine.Connection`.

    This method differs from a direct constructor call of
    :class:`.Inspector` in that the
    :class:`~sqlalchemy.engine.interfaces.Dialect` is given a chance
    to provide a dialect-specific :class:`.Inspector` instance, which
    may provide additional methods.

    See the example at :class:`.Inspector`.
    """
    # A dialect may publish its own Inspector factory; prefer it so
    # dialect-specific reflection methods become available.
    if hasattr(bind.dialect, 'inspector'):
        return bind.dialect.inspector(bind)
    return Inspector(bind)
Example #8
Source File: reflection.py From planespotter with MIT License | 5 votes |
def __init__(self, bind):
    """Initialize a new :class:`.Inspector`.

    :param bind: a :class:`~sqlalchemy.engine.Connectable`, which is
      typically an instance of :class:`~sqlalchemy.engine.Engine` or
      :class:`~sqlalchemy.engine.Connection`.

    For a dialect-specific instance of :class:`.Inspector`, see
    :meth:`.Inspector.from_engine`
    """
    # ``bind`` may be either an Engine or a Connection.
    self.bind = bind

    # Resolve the underlying engine in either case.
    self.engine = getattr(bind, 'engine', bind)

    if self.engine is bind:
        # We were handed an engine directly; make sure it is initialized.
        bind.connect().close()

    self.dialect = self.engine.dialect
    self.info_cache = {}
Example #9
Source File: api.py From android_universal with MIT License | 5 votes |
def inspector(self):
    """A dialect-aware :class:`Inspector` for the current connection."""
    bind = self.connection
    return Inspector.from_engine(bind)
Example #10
Source File: database.py From android_universal with MIT License | 5 votes |
def inspect(self):
    """Get a SQLAlchemy inspector bound to this database's executable."""
    target = self.executable
    return Inspector.from_engine(target)
Example #11
Source File: reflection.py From android_universal with MIT License | 5 votes |
def _insp(bind):
    """Build a dialect-aware :class:`Inspector` for *bind*."""
    return Inspector.from_engine(bind)
Example #12
Source File: reflection.py From android_universal with MIT License | 5 votes |
def __init__(self, bind):
    """Initialize a new :class:`.Inspector`.

    :param bind: a :class:`~sqlalchemy.engine.Connectable`, which is
      typically an instance of :class:`~sqlalchemy.engine.Engine` or
      :class:`~sqlalchemy.engine.Connection`.

    For a dialect-specific instance of :class:`.Inspector`, see
    :meth:`.Inspector.from_engine`
    """
    # ``bind`` may be either an Engine or a Connection.
    self.bind = bind

    # Resolve the underlying engine in either case.
    self.engine = getattr(bind, 'engine', bind)

    if self.engine is bind:
        # We were handed an engine directly; make sure it is initialized.
        bind.connect().close()

    self.dialect = self.engine.dialect
    self.info_cache = {}
Example #13
Source File: reflection.py From jarvis with GNU General Public License v2.0 | 5 votes |
def _insp(bind):
    """Build a dialect-aware :class:`Inspector` for *bind*."""
    return Inspector.from_engine(bind)
Example #14
Source File: reflection.py From jarvis with GNU General Public License v2.0 | 5 votes |
def __init__(self, bind):
    """Initialize a new :class:`.Inspector`.

    :param bind: a :class:`~sqlalchemy.engine.Connectable`, which is
      typically an instance of :class:`~sqlalchemy.engine.Engine` or
      :class:`~sqlalchemy.engine.Connection`.

    For a dialect-specific instance of :class:`.Inspector`, see
    :meth:`.Inspector.from_engine`
    """
    # ``bind`` may be either an Engine or a Connection.
    self.bind = bind

    # Resolve the underlying engine in either case.
    self.engine = getattr(bind, 'engine', bind)

    if self.engine is bind:
        # We were handed an engine directly; make sure it is initialized.
        bind.connect().close()

    self.dialect = self.engine.dialect
    self.info_cache = {}
Example #15
Source File: reflection.py From sqlalchemy with MIT License | 5 votes |
def __init__(self, bind):
    """Initialize a new :class:`_reflection.Inspector`.

    :param bind: a :class:`~sqlalchemy.engine.Connectable`, which is
      typically an instance of :class:`~sqlalchemy.engine.Engine` or
      :class:`~sqlalchemy.engine.Connection`.

    For a dialect-specific instance of
    :class:`_reflection.Inspector`, see
    :meth:`_reflection.Inspector.from_engine`
    """
    # Delegate to the legacy (bind-based) initializer.  The ``return``
    # is kept from the original — presumably ``_init_legacy`` returns
    # None, but confirm before removing it.
    return self._init_legacy(bind)
Example #16
Source File: reflection.py From planespotter with MIT License | 5 votes |
def _insp(bind):
    """Build a dialect-aware :class:`Inspector` for *bind*."""
    return Inspector.from_engine(bind)
Example #17
Source File: fb9ad4a050f8_drop_container_actions_foreign_key.py From zun with Apache License 2.0 | 5 votes |
def upgrade():
    """Recreate the container-actions FK with ``ON DELETE CASCADE``.

    Drops every foreign key on the container-actions table that points at
    the container table, then re-creates a single cascading FK.
    """
    bind = op.get_bind()
    inspector = insp.from_engine(bind)

    for fk in inspector.get_foreign_keys(CONTAINER_ACTIONS):
        if fk.get('referred_table') == CONTAINER:
            op.drop_constraint(fk.get('name'), CONTAINER_ACTIONS,
                               type_="foreignkey")

    op.create_foreign_key(
        None, CONTAINER_ACTIONS, CONTAINER,
        ['container_uuid'], ['uuid'], ondelete='CASCADE')
Example #18
Source File: cf5dc11e79ad_drop_user_and_chart.py From airflow with Apache License 2.0 | 5 votes |
def upgrade():  # noqa: D103
    """Drop the leftover known_event FK plus the chart and users tables."""
    # We previously had a KnownEvent's table, but we deleted the table
    # without a down migration to remove it (so we didn't delete
    # anyone's data if they were happening to use the feature).
    #
    # But before we can delete the users table we need to drop the FK.
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)

    tables = inspector.get_table_names()
    # NOTE(review): the original chained comparison
    # ``'known_event' in tables != 'sqlite'`` means
    # ``('known_event' in tables) and (tables != 'sqlite')``; the second
    # half is always true for a list, so only the membership test
    # matters.  It likely intended a dialect-name check — confirm
    # upstream before simplifying.
    if 'known_event' in tables and tables != 'sqlite':
        op.drop_constraint('known_event_user_id_fkey', 'known_event')

    op.drop_table("chart")
    op.drop_table("users")
Example #19
Source File: views.py From pdfhook with MIT License | 5 votes |
def make_sure_there_is_a_working_database(*args, **kwargs):
    """In the dev environment, create all tables when required ones
    are missing."""
    if current_app.config.get('ENV') != 'dev':
        return

    inspector = Inspector.from_engine(db.engine)
    existing = set(inspector.get_table_names())
    required_tables = [models.PDFForm.__tablename__]

    # NOTE(review): this is a *strict* subset test, kept from the
    # original; ``<=`` may have been intended, but ``db.create_all()``
    # is idempotent either way.
    if not (set(required_tables) < existing):
        current_app.logger.warning(
            "database tables {} not found. Creating tables".format(
                required_tables))
        db.create_all()
Example #20
Source File: api.py From jbox with MIT License | 5 votes |
def inspector(self):
    """A dialect-aware :class:`Inspector` for the current connection."""
    bind = self.connection
    return Inspector.from_engine(bind)
Example #21
Source File: webapp.py From ara-archive with GNU General Public License v3.0 | 5 votes |
def configure_db(app):
    """
    0.10 is the first version of ARA that ships with a stable database
    schema. We can identify a database that originates from before this
    by checking if there is an alembic revision available.
    If there is no alembic revision available, assume we are running the
    first revision which contains the latest state of the database prior
    to this.
    """
    db.init_app(app)
    log = logging.getLogger(app.logger_name)

    if not app.config.get('ARA_AUTOCREATE_DATABASE'):
        return

    with app.app_context():
        migrations = app.config['DB_MIGRATIONS']
        flask_migrate.Migrate(app, db, directory=migrations)
        config = app.extensions['migrate'].migrate.get_config(migrations)

        # Verify whether any database tables have been created at all.
        inspector = Inspector.from_engine(db.engine)
        if len(inspector.get_table_names()) == 0:
            log.info('Initializing new DB from scratch')
            flask_migrate.upgrade(directory=migrations)

        # Current alembic head revision.
        script = ScriptDirectory.from_config(config)
        head = script.get_current_head()

        # Current database revision, when one has been stamped.
        connection = db.engine.connect()
        context = MigrationContext.configure(connection)
        current = context.get_current_revision()

        if not current:
            log.info('Unstable DB schema, stamping original revision')
            flask_migrate.stamp(directory=migrations,
                                revision='da9459a1f71c')

        if head != current:
            log.info('DB schema out of date, upgrading')
            flask_migrate.upgrade(directory=migrations)
Example #22
Source File: 50829990c965_add_ondelete_to_container_actions_.py From zun with Apache License 2.0 | 5 votes |
def upgrade():
    """Recreate the container-actions-events FK with ``ON DELETE CASCADE``.

    Drops every foreign key on the action-events table that points at the
    container-actions table, then re-creates a single cascading FK.
    """
    bind = op.get_bind()
    inspector = insp.from_engine(bind)

    for fk in inspector.get_foreign_keys(CONTAINER_ACTIONS_EVENTS):
        if fk.get('referred_table') == CONTAINER_ACTIONS:
            op.drop_constraint(fk.get('name'), CONTAINER_ACTIONS_EVENTS,
                               type_="foreignkey")

    op.create_foreign_key(
        None, CONTAINER_ACTIONS_EVENTS, CONTAINER_ACTIONS,
        ['action_id'], ['id'], ondelete='CASCADE')
Example #23
Source File: 8b0082d9e7c1_drop_foreign_key_of_container_actions.py From zun with Apache License 2.0 | 5 votes |
def upgrade():
    """Drop every FK on the container-actions table that references the
    container table."""
    bind = op.get_bind()
    inspector = insp.from_engine(bind)

    for fk in inspector.get_foreign_keys(CONTAINER_ACTIONS):
        if fk.get('referred_table') == CONTAINER:
            op.drop_constraint(fk.get('name'), CONTAINER_ACTIONS,
                               type_="foreignkey")
Example #24
Source File: cc1e65623dc7_add_max_tries_column_to_task_instance.py From airflow with Apache License 2.0 | 4 votes |
def upgrade():  # noqa: D103
    """Add ``max_tries`` to task_instance and backfill existing rows."""
    op.add_column('task_instance',
                  sa.Column('max_tries', sa.Integer, server_default="-1"))

    # Check that the task_instance table exists before the data
    # migration.  This is needed for databases that do not create the
    # table until the migration finishes, and prevents querying a
    # non-existent task_instance table.
    connection = op.get_bind()
    inspector = Inspector.from_engine(connection)
    if 'task_instance' not in inspector.get_table_names():
        return

    # Get current session
    sessionmaker = sa.orm.sessionmaker()
    session = sessionmaker(bind=connection)
    dagbag = DagBag(settings.DAGS_FOLDER)

    query = session.query(sa.func.count(TaskInstance.max_tries)).filter(
        TaskInstance.max_tries == -1
    )
    # Work in batches to avoid loading the whole table into memory and
    # running out of memory.
    while query.scalar():
        tis = session.query(TaskInstance).filter(
            TaskInstance.max_tries == -1
        ).limit(BATCH_SIZE).all()
        for ti in tis:
            dag = dagbag.get_dag(ti.dag_id)
            if not dag or not dag.has_task(ti.task_id):
                # The task_instance table may be stale: the dag or task
                # might have been modified or deleted in the dagbag
                # without being reflected here.  Do not retry tasks
                # that cannot be parsed.
                ti.max_tries = ti.try_number
            else:
                task = dag.get_task(ti.task_id)
                if task.retries:
                    ti.max_tries = task.retries
                else:
                    ti.max_tries = ti.try_number
            session.merge(ti)
        session.commit()

    # Commit the current session.
    session.commit()
Example #25
Source File: 882b2d84cb1b_attribute_constraints_relaxing.py From ironic-inspector with Apache License 2.0 | 4 votes |
def upgrade():
    """Relax the attributes-table constraints.

    Moves the old ``uuid`` PK value into a new ``node_uuid`` column,
    assigns each row a fresh UUID as its primary key, and rebuilds the
    PK/FK pair plus the name/value column nullability.
    """
    connection = op.get_bind()
    inspector = insp.from_engine(connection)

    # Fall back to the naming convention when the live constraints are
    # unnamed.
    pk_constraint = (inspector.get_pk_constraint(ATTRIBUTES).get('name')
                     or naming_convention['pk'] % {'table_name': ATTRIBUTES})
    fk_constraint = (inspector.get_foreign_keys(ATTRIBUTES)[0].get('name')
                     or naming_convention['fk'] % {'table_name': ATTRIBUTES})

    columns_meta = inspector.get_columns(ATTRIBUTES)
    name_type = {meta.get('type') for meta in columns_meta
                 if meta['name'] == NAME}.pop()
    value_type = {meta.get('type') for meta in columns_meta
                  if meta['name'] == VALUE}.pop()

    node_uuid_column = sa.Column(NODE_UUID, sa.String(36))
    op.add_column(ATTRIBUTES, node_uuid_column)

    attributes = sa.table(ATTRIBUTES, node_uuid_column,
                          sa.Column(UUID, sa.String(36)))

    with op.batch_alter_table(
            ATTRIBUTES, naming_convention=naming_convention) as batch_op:
        batch_op.drop_constraint(fk_constraint, type_='foreignkey')

    rows = connection.execute(
        sa.select([attributes.c.uuid, attributes.c.node_uuid]))
    for row in rows:
        # move uuid to node_uuid, reuse uuid as a new primary key
        connection.execute(
            attributes.update().where(attributes.c.uuid == row.uuid).
            values(node_uuid=row.uuid, uuid=uuidutils.generate_uuid())
        )

    with op.batch_alter_table(
            ATTRIBUTES, naming_convention=naming_convention) as batch_op:
        batch_op.drop_constraint(pk_constraint, type_='primary')
        batch_op.create_primary_key(pk_constraint, [UUID])
        batch_op.create_foreign_key('fk_node_attribute', NODES,
                                    [NODE_UUID], [UUID])
        batch_op.alter_column('name', nullable=False, type_=name_type)
        batch_op.alter_column('value', nullable=True, type_=value_type)
Example #26
Source File: __init__.py From daf-recipes with GNU General Public License v3.0 | 4 votes |
def setup():
    """Create or migrate the harvest tables as needed."""
    if harvest_source_table is None:
        define_harvester_tables()
        log.debug('Harvest tables defined in memory')

    if not model.package_table.exists():
        log.debug('Harvest table creation deferred')
        return

    if not harvest_source_table.exists():
        # Create each table individually rather than
        # using metadata.create_all()
        harvest_source_table.create()
        harvest_job_table.create()
        harvest_object_table.create()
        harvest_gather_error_table.create()
        harvest_object_error_table.create()
        harvest_object_extra_table.create()
        harvest_log_table.create()
        log.debug('Harvest tables created')
        return

    from ckan.model.meta import engine
    log.debug('Harvest tables already exist')

    # Check whether the existing tables need to be updated.
    inspector = Inspector.from_engine(engine)
    column_names = [column['name']
                    for column in inspector.get_columns('harvest_source')]
    if 'title' not in column_names:
        log.debug('Harvest tables need to be updated')
        migrate_v2()
    if 'frequency' not in column_names:
        log.debug('Harvest tables need to be updated')
        migrate_v3()

    # Check whether this instance has harvest source datasets.
    source_ids = Session.query(HarvestSource.id).filter_by(
        active=True).all()
    source_package_ids = Session.query(model.Package.id).filter_by(
        type=u'harvest', state='active').all()
    sources_to_migrate = set(source_ids) - set(source_package_ids)
    if sources_to_migrate:
        log.debug('Creating harvest source datasets for %i existing sources',
                  len(sources_to_migrate))
        sources_to_migrate = [s[0] for s in sources_to_migrate]
        migrate_v3_create_datasets(sources_to_migrate)

    # Check whether the harvest_log table exists — needed for
    # existing users.
    if 'harvest_log' not in inspector.get_table_names():
        harvest_log_table.create()

    # Check whether harvest_object has its index.
    index_names = [index['name']
                   for index in inspector.get_indexes("harvest_object")]
    if "harvest_job_id_idx" not in index_names:
        log.debug('Creating index for harvest_object')
        Index("harvest_job_id_idx",
              harvest_object_table.c.harvest_job_id).create()
Example #27
Source File: plugin_base.py From jbox with MIT License | 4 votes |
def _prep_testing_database(options, file_config):
    """Drop views, tables and (on PostgreSQL) enum types from every
    configured test database when ``--dropfirst`` was given."""
    from alembic.testing import config
    from alembic.testing.exclusions import against
    from sqlalchemy import schema
    from alembic import util

    if util.sqla_08:
        from sqlalchemy import inspect
    else:
        # Older SQLAlchemy: emulate inspect() with the Inspector factory.
        from sqlalchemy.engine.reflection import Inspector
        inspect = Inspector.from_engine

    if not options.dropfirst:
        return

    for cfg in config.Config.all_configs():
        e = cfg.db
        inspector = inspect(e)

        try:
            view_names = inspector.get_view_names()
        except NotImplementedError:
            pass
        else:
            for vname in view_names:
                e.execute(schema._DropView(
                    schema.Table(vname, schema.MetaData())
                ))

        if config.requirements.schemas.enabled_for_config(cfg):
            try:
                view_names = inspector.get_view_names(
                    schema="test_schema")
            except NotImplementedError:
                pass
            else:
                for vname in view_names:
                    e.execute(schema._DropView(
                        schema.Table(vname, schema.MetaData(),
                                     schema="test_schema")
                    ))

        for tname in reversed(
                inspector.get_table_names(order_by="foreign_key")):
            e.execute(schema.DropTable(
                schema.Table(tname, schema.MetaData())
            ))

        if config.requirements.schemas.enabled_for_config(cfg):
            for tname in reversed(inspector.get_table_names(
                    order_by="foreign_key", schema="test_schema")):
                e.execute(schema.DropTable(
                    schema.Table(tname, schema.MetaData(),
                                 schema="test_schema")
                ))

        if against(cfg, "postgresql") and util.sqla_100:
            from sqlalchemy.dialects import postgresql
            for enum in inspector.get_enums("*"):
                e.execute(postgresql.DropEnumType(
                    postgresql.ENUM(
                        name=enum['name'],
                        schema=enum['schema'])))