Python alembic.op 模块,batch_alter_table() 实例源码
我们从Python开源项目中,提取了以下50个代码示例,用于说明如何使用alembic.op.batch_alter_table()。
def upgrade():
    """Re-point the hierarchy-level-value FK to cascade and enforce uniqueness."""
    prefix = context.config.get_main_option('table_prefix')
    values_table = prefix + 'environment_hierarchy_level_value'
    fk_name = values_table + '_level_id_fkey'
    with op.batch_alter_table(values_table) as batch:
        # The parent_id column is obsolete.
        batch.drop_column('parent_id')
        # Recreate the level FK with ON DELETE CASCADE.
        batch.drop_constraint(fk_name, type_='foreignkey')
        batch.create_foreign_key(
            fk_name,
            prefix + 'environment_hierarchy_level',
            ['level_id'], ['id'], ondelete='CASCADE')
        # Each (level, value) pair may appear only once.
        batch.create_unique_constraint(
            values_table + '_level_id_value_unique',
            ['level_id', 'value'])
def upgrade():
    # Rename the "schema" concept to "resource_definition" and drop the
    # template/namespace machinery. Statement order matters: the FK must be
    # dropped before the column rename, and recreated only after both table
    # renames below.
    table_prefix = context.config.get_main_option('table_prefix')
    op.drop_table(table_prefix + 'template')
    table_name = table_prefix + 'environment_schema_values'
    with op.batch_alter_table(table_name) as batch:
        # Drop the FK first so schema_id can be renamed cleanly.
        batch.drop_constraint(table_name + '_schema_id_fkey', 'foreignkey')
        batch.alter_column(
            'schema_id',
            new_column_name='resource_definition_id',
            existing_type=sa.Integer(),
        )
    op.rename_table(table_name, table_prefix + 'resource_values')
    op.rename_table(table_prefix + 'schema',
                    table_prefix + 'resource_definition')
    with op.batch_alter_table(table_prefix + 'resource_definition') as batch:
        # namespace support is removed entirely (table dropped just below).
        batch.drop_column('namespace_id')
    op.drop_table(table_prefix + 'namespace')
    table_name = table_prefix + 'resource_values'
    with op.batch_alter_table(table_name) as batch:
        # Restore the FK under its new name, pointing at the renamed table.
        batch.create_foreign_key(
            table_name + '_resource_definition_id_fkey',
            table_prefix + 'resource_definition',
            ['resource_definition_id'],
            ['id'],
        )
def upgrade():
    """Move last_run_at/total_run_count off schedule_task into schedule_meta."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('schedule_info',
                    sa.Column('id', sa.Integer(), autoincrement=False,
                              nullable=False),
                    sa.Column('last_changed_at', sa.DateTime(), nullable=True),
                    sa.PrimaryKeyConstraint('id')
                    )
    # NOTE(review): the three column definitions below were truncated in the
    # original source; types reconstructed from the columns dropped from
    # schedule_task further down -- confirm against the model definitions.
    op.create_table('schedule_meta',
                    sa.Column('parent_id', sa.Integer(), autoincrement=False,
                              nullable=False),
                    sa.Column('last_run_at', sa.DateTime(), nullable=True),
                    sa.Column('total_run_count', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(['parent_id'], ['schedule_task.id'], ),
                    sa.PrimaryKeyConstraint('parent_id')
                    )
    with op.batch_alter_table('schedule_task') as batch_op:
        batch_op.drop_column('total_run_count')
        batch_op.drop_column('last_run_at')
    # ### end Alembic commands ###
def create_foreign_key(
        self, name, referent, local_cols, remote_cols, **kw):
    """Issue a "create foreign key" instruction using the
    current batch migration context.

    The batch form of this call omits the ``source`` and ``source_schema``
    arguments from the call.

    e.g.::

        with batch_alter_table("address") as batch_op:
            batch_op.create_foreign_key(
                "fk_user_address",
                "user", ["user_id"], ["id"])

    .. seealso::

        :meth:`.Operations.create_foreign_key`

    """
    # Bug fix: the original super() call dropped ``referent``,
    # ``local_cols`` and ``remote_cols``, so the parent method received
    # the wrong positional arguments entirely.
    return super(BatchOperations, self).create_foreign_key(
        name, self.impl.table_name, referent, local_cols, remote_cols,
        source_schema=self.impl.schema, **kw)
def upgrade():
    """Drop user name columns and make note/notebook author FKs cascade."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'last_name')
    op.drop_column('users', 'first_name')
    ### end Alembic commands ###
    with op.batch_alter_table("notes") as batch_op:
        batch_op.drop_constraint(
            "notes_author_id_fkey", type_="foreignkey")
    with op.batch_alter_table("notebooks") as batch_op:
        batch_op.drop_constraint(
            "notebooks_author_id_fkey", type_="foreignkey")
    op.create_foreign_key(
        "notes_author_id_fkey", "notes", "users",
        ["author_id"], ["id"], ondelete="CASCADE")
    # Bug fix: the referent table and the column lists were missing from the
    # notebooks FK; mirror the notes FK above.
    op.create_foreign_key(
        "notebooks_author_id_fkey", "notebooks", "users",
        ["author_id"], ["id"], ondelete="CASCADE")
def upgrade():
    """Require component names to be unique."""
    prefix = context.config.get_main_option('table_prefix')
    component_table = prefix + 'component'
    with op.batch_alter_table(component_table) as batch:
        batch.create_unique_constraint(
            component_table + '_component_name_unique', ['name'])
def downgrade():
    """Drop the unique-name requirement on components."""
    prefix = context.config.get_main_option('table_prefix')
    component_table = prefix + 'component'
    with op.batch_alter_table(component_table) as batch:
        batch.drop_constraint(
            component_table + '_component_name_unique', type_='unique')
def upgrade():
    """Give resource ``values`` an empty-object server default."""
    prefix = context.config.get_main_option('table_prefix')
    values_table = prefix + 'resource_values'
    with op.batch_alter_table(values_table) as batch:
        batch.alter_column('values',
                           server_default='{}',
                           existing_type=tuning_box.db.Json())
def downgrade():
    """Remove the server default from resource ``values``."""
    table_prefix = context.config.get_main_option('table_prefix')
    table_name = table_prefix + 'resource_values'
    with op.batch_alter_table(table_name) as batch:
        # existing_type added (it was omitted, unlike the matching
        # upgrade()): batch mode may recreate the table, and without the
        # type the column cannot be reproduced faithfully.
        batch.alter_column(
            'values',
            server_default=None,
            existing_type=tuning_box.db.Json(),
        )
def upgrade():
    """Recreate the environment FK with ON DELETE CASCADE."""
    table_prefix = context.config.get_main_option('table_prefix')
    table_name = table_prefix + 'environment_hierarchy_level'
    with op.batch_alter_table(table_name) as batch:
        batch.drop_constraint(
            table_prefix + 'environment_hierarchy_level_environment_id_fkey',
            type_='foreignkey'
        )
        # Bug fix: the remote column list was missing --
        # BatchOperations.create_foreign_key() takes
        # (name, referent_table, local_cols, remote_cols).
        batch.create_foreign_key(
            table_prefix + 'environment_hierarchy_level_environment_id_fkey',
            table_prefix + 'environment',
            ['environment_id'], ['id'], ondelete='CASCADE'
        )
def downgrade():
    """Restore the environment FK without ON DELETE CASCADE."""
    table_prefix = context.config.get_main_option('table_prefix')
    table_name = table_prefix + 'environment_hierarchy_level'
    fk_name = table_prefix + 'environment_hierarchy_level_environment_id_fkey'
    with op.batch_alter_table(table_name) as batch:
        # Bug fix: drop_constraint() was called with a stray ['id']
        # positional argument; it takes the constraint name plus a type_
        # keyword.
        batch.drop_constraint(fk_name, type_='foreignkey')
        # Recreate the plain FK to undo the CASCADE added by upgrade().
        batch.create_foreign_key(
            fk_name,
            table_prefix + 'environment',
            ['environment_id'], ['id']
        )
def upgrade():
    """Drop the obsolete HTML/layout flags from pages."""
    with op.batch_alter_table('pages') as pages:
        pages.drop_column('is_html')
        pages.drop_column('use_layout_header')
def downgrade():
    """Restore the HTML/layout flag columns on pages."""
    with op.batch_alter_table('pages') as batch_op:
        batch_op.add_column(
            sa.Column('is_html', sa.Boolean(), nullable=True)
        )
        # Bug fix: the column type was missing; sa.Boolean() matches the
        # sibling is_html flag restored above -- confirm against the model.
        batch_op.add_column(
            sa.Column('use_layout_header', sa.Boolean(), nullable=True)
        )
def upgrade():
    """Upgrade database: add games.played_at with a NOW() default."""
    # Force table recreation for SQLite so the server-side timestamp default
    # is applied (SQLite cannot add a column with a non-constant default via
    # plain ALTER TABLE).
    recreate = 'auto'
    migrate_context = context.get_context()
    sqlite_dialect_class = None
    if getattr(sa.dialects, 'sqlite', False):
        sqlite_dialect_class = (sa.dialects.sqlite.pysqlite
                                .SQLiteDialect_pysqlite)
    if migrate_context.dialect.__class__ == sqlite_dialect_class:
        recreate = 'always'
    with op.batch_alter_table('games', recreate=recreate) as batch_op:
        # Bug fix: the column type was missing from sa.Column();
        # sa.DateTime() matches the sa.func.now() default.
        batch_op.add_column(sa.Column('played_at', sa.DateTime(),
                                      nullable=False,
                                      server_default=sa.func.now()))
def downgrade():
    """Downgrade database: remove games.played_at."""
    with op.batch_alter_table('games') as games:
        games.drop_column('played_at')
def upgrade():
    """Upgrade database: add games.last_modified_at with a NOW() default."""
    # NOTE(review): the original text was garbled mid-function
    # ("if getattr(sa.dialects, recreate=recreate) as batch_op:"); the
    # dialect-detection boilerplate is reconstructed from the identical
    # played_at migration in this file -- confirm against the original
    # revision.
    recreate = 'auto'
    migrate_context = context.get_context()
    sqlite_dialect_class = None
    if getattr(sa.dialects, 'sqlite', False):
        sqlite_dialect_class = (sa.dialects.sqlite.pysqlite
                                .SQLiteDialect_pysqlite)
    if migrate_context.dialect.__class__ == sqlite_dialect_class:
        # Force table recreation so SQLite applies the timestamp default.
        recreate = 'always'
    with op.batch_alter_table('games', recreate=recreate) as batch_op:
        batch_op.add_column(sa.Column('last_modified_at', sa.DateTime(),
                                      server_default=sa.func.now()))
def downgrade():
    """Downgrade database: remove games.last_modified_at."""
    with op.batch_alter_table('games') as games:
        games.drop_column('last_modified_at')
def upgrade():
    """Upgrade database: add a created_at column with a NOW() default."""
    # NOTE(review): the original text was garbled mid-function; the dialect
    # detection and the target table ('games', presumably, matching the
    # sibling migrations) are reconstructed -- confirm against the original
    # revision.
    recreate = 'auto'
    migrate_context = context.get_context()
    sqlite_dialect_class = None
    if getattr(sa.dialects, 'sqlite', False):
        sqlite_dialect_class = (sa.dialects.sqlite.pysqlite
                                .SQLiteDialect_pysqlite)
    if migrate_context.dialect.__class__ == sqlite_dialect_class:
        # Force table recreation so SQLite applies the timestamp default.
        recreate = 'always'
    with op.batch_alter_table('games', recreate=recreate) as batch_op:
        batch_op.add_column(sa.Column('created_at', sa.DateTime(),
                                      server_default=sa.func.now()))
def upgrade():
    """Replace container.tty/stdin_open with a single interactive flag."""
    with op.batch_alter_table('container', schema=None) as batch_op:
        # Bug fix: the column type was missing; Boolean matches the
        # tty/stdin_open flags this column replaces -- confirm against the
        # model.
        batch_op.add_column(sa.Column('interactive', sa.Boolean(),
                                      nullable=True))
        batch_op.drop_column('tty')
        batch_op.drop_column('stdin_open')
def upgrade():
    """Add a composite uniqueness constraint over menu entries.

    batch_alter_table() is required because SQLite lacks the ALTER support
    normally emitted here: the data is copied into a rebuilt table and the
    old table is dropped once everything succeeds.
    """
    with op.batch_alter_table('menu_entries') as menu:
        menu.create_unique_constraint(
            constraint_name="unique_menu_entry_date_mensa_category_description",
            columns=['date_valid', 'mensa', 'category', 'description'])
def downgrade():
    """Drop the composite uniqueness constraint from menu entries."""
    with op.batch_alter_table('menu_entries') as menu:
        menu.drop_constraint(
            constraint_name="unique_menu_entry_date_mensa_category_description",
            type_='unique')
def upgrade():
    """Store XCom values as LargeBinary.

    Batch mode is used as the SQLite workaround. Data truncation is possible
    here, since LargeBinary can be smaller than the pickle type.
    """
    with op.batch_alter_table("xcom") as xcom:
        xcom.alter_column('value', type_=sa.LargeBinary())
def downgrade():
    """Revert XCom values to a dill-backed PickleType (batch mode for SQLite)."""
    with op.batch_alter_table("xcom") as xcom:
        xcom.alter_column('value', type_=sa.PickleType(pickler=dill))
def upgrade():
    """Widen task_instance.duration from INTEGER to Float (batch mode for SQLite)."""
    with op.batch_alter_table("task_instance") as task_instance:
        task_instance.alter_column(
            'duration',
            existing_type=mysql.INTEGER(display_width=11),
            type_=sa.Float(),
            existing_nullable=True)
def upgrade():
    """Add users.ldap_auth, defaulting to false."""
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('users', schema=None) as batch_op:
        # Bug fix: the column type was missing; Boolean matches the
        # sa.text('false') server default.
        batch_op.add_column(sa.Column('ldap_auth', sa.Boolean(),
                                      server_default=sa.text('false'),
                                      nullable=False))
    ### end Alembic commands ###
def downgrade():
    """Remove users.ldap_auth."""
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('users', schema=None) as users:
        users.drop_column('ldap_auth')
    ### end Alembic commands ###
def upgrade():
    """Drop menus.location."""
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('menus', schema=None) as menus:
        menus.drop_column('location')
    ### end Alembic commands ###
def downgrade():
    """Restore menus.location as a mandatory 256-char string."""
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('menus', schema=None) as menus:
        location = sa.Column('location', sa.VARCHAR(length=256),
                             nullable=False)
        menus.add_column(location)
    ### end Alembic commands ###
def upgrade():
    """Add an 'enabled' flag to t1 and a 'do_not_use' flag to t2."""
    with op.batch_alter_table(t1_name) as batch_op:
        batch_op.add_column(sa.Column('enabled',
                                      sa.Boolean(),
                                      nullable=False,
                                      server_default='1'))
    with op.batch_alter_table(t2_name) as batch_op:
        # Bug fix: the column type was missing; sa.Boolean() assumed by
        # symmetry with 'enabled' and the '0' default -- confirm against the
        # model.
        batch_op.add_column(sa.Column('do_not_use',
                                      sa.Boolean(),
                                      server_default='0'))
def downgrade():
    """alexm: i believe this method is never called"""
    # Reverse of upgrade(): drop the flags in the opposite order they were
    # added.
    with op.batch_alter_table(t2_name) as t2_batch:
        t2_batch.drop_column('do_not_use')
    with op.batch_alter_table(t1_name) as t1_batch:
        t1_batch.drop_column('enabled')
def upgrade():
    """Add editions.mode."""
    with op.batch_alter_table('editions', schema=None) as batch_op:
        # Bug fix: sa.Column() had no type, which would create a typeless
        # (NULL-typed) column. NOTE(review): String is assumed here --
        # confirm the declared type of editions.mode in the model.
        batch_op.add_column(sa.Column('mode', sa.String(length=255),
                                      nullable=True))
def downgrade():
    """Remove editions.mode."""
    with op.batch_alter_table('editions', schema=None) as editions:
        editions.drop_column('mode')
def upgrade():
    """Add an optional 32-char surrogate_key to products."""
    with op.batch_alter_table('products', schema=None) as products:
        surrogate = sa.Column('surrogate_key', sa.String(length=32),
                              nullable=True)
        products.add_column(surrogate)
def upgrade():
    """Convert restaurants_menuitem.price from NUMERIC(2, 0) to Integer."""
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('restaurants_menuitem', schema=None) as menuitem:
        menuitem.alter_column(
            'price',
            existing_type=sa.NUMERIC(precision=2, scale=0),
            type_=sa.Integer(),
            existing_nullable=False)
    ### end Alembic commands ###
def downgrade():
    """Convert restaurants_menuitem.price back from Integer to NUMERIC(2, 0)."""
    ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): the original text was truncated/garbled here; this body
    # is reconstructed as the exact mirror of the matching upgrade() above.
    with op.batch_alter_table('restaurants_menuitem', schema=None) as batch_op:
        batch_op.alter_column('price',
                              existing_type=sa.Integer(),
                              type_=sa.NUMERIC(precision=2, scale=0),
                              existing_nullable=False)
    ### end Alembic commands ###
def upgrade():
    # use batch_alter_table to support SQLite workaround
    # NOTE(review): this alter_column() specifies no type_ or other change
    # besides existing_nullable -- as written it is effectively a no-op.
    # It looks truncated (compare the duration migration above that
    # converts INTEGER -> Float); confirm against the original revision.
    with op.batch_alter_table("task_instance") as batch_op:
        batch_op.alter_column('duration',
                              existing_nullable=True)
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Remove the activity-tracking columns from user.
    with op.batch_alter_table('user') as user_batch:
        for column_name in ('state', 'last_active_at', 'is_active'):
            user_batch.drop_column(column_name)
    # ### end Alembic commands ###
def upgrade():
    """Add drug metadata text columns to fda_dap."""
    with op.batch_alter_table('fda_dap') as fda:
        for column_name in ('drug_name', 'active_ingredients', 'company'):
            fda.add_column(sa.Column(column_name, sa.Text))
def downgrade():
    """Drop the drug metadata columns from fda_dap."""
    with op.batch_alter_table('fda_dap') as fda:
        for column_name in ('drug_name', 'active_ingredients', 'company'):
            fda.drop_column(column_name)
def upgrade():
    """Track upgrade-task state with an enum status column."""
    with op.batch_alter_table('upgrade_tasks') as tasks:
        status = sa.Column('status', sa.Enum(*constants.UPGRADE_STATUSES))
        tasks.add_column(status)
def downgrade():
    """Remove the upgrade-task status column."""
    with op.batch_alter_table('upgrade_tasks') as tasks:
        tasks.drop_column('status')
def upgrade():
    """Replace services.host_id with a hosts<->services association table."""
    op.create_table(
        'hosts_services',
        sa.Column('host_id', sa.String(36), sa.ForeignKey('hosts.id')),
        sa.Column('service_id', sa.ForeignKey('services.id')),
        sa.UniqueConstraint('host_id', 'service_id'))
    # The direct FK from services to hosts is no longer needed.
    with op.batch_alter_table('services') as services:
        services.drop_constraint('fk_services_host_id_hosts')
        services.drop_column('host_id')
def upgrade():
    """Convert user.spam_threshold from a 0-15 scale to a percentage."""
    connection = op.get_bind()
    # spam_threshold is an X/15 based value; we're converting it to percent.
    # The per-row data rewrite must happen before the default is changed.
    for user in connection.execute(user_table.select()):
        connection.execute(
            user_table.update().where(
                user_table.c.email == user.email
            ).values(
                spam_threshold=int(100. * float(user.spam_threshold or 0.) / 15.)
            )
        )
    # set default to 80%
    # NOTE(review): alter_column() takes ``server_default``, not ``default``
    # -- as written this kwarg is likely ignored by Alembic. Confirm whether
    # server_default='80' was intended.
    with op.batch_alter_table('user') as batch:
        batch.alter_column('spam_threshold', default=80.)
def downgrade():
    """Convert user.spam_threshold from a percentage back to the 0-15 scale."""
    connection = op.get_bind()
    # spam_threshold is an X/15 based value; we're converting it from percent.
    # The per-row data rewrite must happen before the default is changed.
    for user in connection.execute(user_table.select()):
        connection.execute(
            user_table.update().where(
                user_table.c.email == user.email
            ).values(
                spam_threshold=int(15. * float(user.spam_threshold or 0.) / 100.)
            )
        )
    # set default to 10/15
    # NOTE(review): alter_column() takes ``server_default``, not ``default``
    # -- as written this kwarg is likely ignored by Alembic. Confirm whether
    # server_default='10' was intended.
    with op.batch_alter_table('user') as batch:
        batch.alter_column('spam_threshold', default=10.)
def downgrade():
    """Drop the per-domain quota cap."""
    with op.batch_alter_table('domain') as domain_batch:
        domain_batch.drop_column('max_quota_bytes')
def upgrade():
    """Add user.forward_keep, defaulting to true."""
    with op.batch_alter_table('user') as batch:
        # Bug fix: the column type was missing; Boolean matches the
        # expression.true() server default.
        batch.add_column(sa.Column('forward_keep', sa.Boolean(),
                                   nullable=False,
                                   server_default=sa.sql.expression.true()))
def downgrade():
    """Remove user.forward_keep."""
    with op.batch_alter_table('user') as user_batch:
        user_batch.drop_column('forward_keep')
def upgrade():
    """Make user and alias email columns compare case-insensitively."""
    with op.batch_alter_table('user') as batch:
        batch.alter_column('email', type_=sa.String(length=255, collation="NOCASE"))
    # Bug fix: the alias alter_column was garbled in the original (stray
    # closing paren, missing type_); mirror the user table above so both
    # email columns get the NOCASE collation.
    with op.batch_alter_table('alias') as batch:
        batch.alter_column('email', type_=sa.String(length=255, collation="NOCASE"))
def downgrade():
    """Revert user and alias email columns to plain (case-sensitive) strings."""
    for table_name in ('user', 'alias'):
        with op.batch_alter_table(table_name) as batch:
            batch.alter_column('email', type_=sa.String(length=255))
def downgrade():
    """Drop the fetch.keep flag."""
    with op.batch_alter_table('fetch') as fetch_batch:
        fetch_batch.drop_column('keep')
版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 [email protected] 举报,一经查实,本站将立刻删除。