Python sqlalchemy 模块,TEXT 实例源码
我们从Python开源项目中,提取了以下47个代码示例,用于说明如何使用sqlalchemy.TEXT。
def downgrade():
    """Recreate the legacy ``privatemessages`` table and drop the
    ``messages``/``conversations`` tables that superseded it.

    NOTE(review): truncated by extraction -- most ``sa.Column`` /
    ``sa.ForeignKeyConstraint`` calls lost their type arguments and
    closing parentheses, so this snippet is not runnable as-is.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('privatemessages',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('user_id',
        sa.Column('from_user_id', nullable=True),
        sa.Column('to_user_id',
        sa.Column('subject', sa.VARCHAR(length=255),
        sa.Column('message', sa.TEXT(),
        sa.Column('date_created', sa.DATETIME(),
        sa.Column('trash', sa.BOOLEAN(),
        sa.Column('draft',
        sa.Column('unread',
        sa.ForeignKeyConstraint(['from_user_id'], [u'users.id'], ),
        sa.ForeignKeyConstraint(['to_user_id'],
        sa.ForeignKeyConstraint(['user_id'],
        sa.PrimaryKeyConstraint('id')
    )
    op.drop_table('messages')
    op.drop_table('conversations')
    ### end Alembic commands ###
def downgrade():
    """Recreate a stub ``privatemessages`` table and drop ``messages``
    and ``conversations``.

    NOTE(review): the column list was lost during extraction; creating a
    table with only a primary-key constraint on a nonexistent ``id``
    column would fail at runtime.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('privatemessages',
        sa.PrimaryKeyConstraint('id')
    )
    op.drop_table('messages')
    op.drop_table('conversations')
    ### end Alembic commands ###
def downgrade():
    """Recreate a stub ``privatemessages`` table and drop ``messages``
    and ``conversations``.

    NOTE(review): the column list was lost during extraction; creating a
    table with only a primary-key constraint on a nonexistent ``id``
    column would fail at runtime.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('privatemessages',
        sa.PrimaryKeyConstraint('id')
    )
    op.drop_table('messages')
    op.drop_table('conversations')
    ### end Alembic commands ###
def downgrade():
    """Recreate the ``housing_evals`` table.

    NOTE(review): truncated by extraction -- most column definitions lost
    their type argument and closing paren, and the primary key references
    an ``id`` column that does not survive in the visible column list;
    not runnable as-is.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('housing_evals',
        sa.Column('uid', sa.VARCHAR(length=32),
        sa.Column('social_attended',
        sa.Column('social_hosted',
        sa.Column('technical_attended',
        sa.Column('technical_hosted',
        sa.Column('projects',
        sa.Column('comments',
        sa.Column('points',
        sa.Column('active', sa.DATE(),
        sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def _transaction_test(self):
    """Exercise transactional semantics of ``self.pandassql``.

    First forces a rollback by raising inside the ``run_transaction``
    context and asserts no rows were inserted; then commits the same
    insert and asserts exactly one row landed.
    """
    self.pandassql.execute("CREATE TABLE test_trans (A INT,B TEXT)")
    ins_sql = "INSERT INTO test_trans (A,B) VALUES (1,'blah')"

    # Make sure when transaction is rolled back, no rows get inserted.
    try:
        with self.pandassql.run_transaction() as trans:
            trans.execute(ins_sql)
            raise Exception('error')
    # Fix: was a bare ``except:``, which would also swallow
    # KeyboardInterrupt/SystemExit; only the Exception raised above
    # needs to be ignored here.
    except Exception:
        # ignore the exception deliberately raised to trigger rollback
        pass
    res = self.pandassql.read_query('SELECT * FROM test_trans')
    self.assertEqual(len(res), 0)

    # Make sure when transaction is committed, rows do get inserted.
    with self.pandassql.run_transaction() as trans:
        trans.execute(ins_sql)
    res2 = self.pandassql.read_query('SELECT * FROM test_trans')
    self.assertEqual(len(res2), 1)
# -----------------------------------------------------------------------------
# -- Testing the public API
def test_dtype(self):
    """``to_sql`` must honor the ``dtype`` mapping and reject non-SQLAlchemy
    type values.

    Writes a frame with a forced TEXT column, reflects the schema back,
    and checks both the TEXT override and a String(10) override (GH9083).
    """
    cols = ['A', 'B']
    data = [(0.8, True),
            (0.9, None)]
    df = DataFrame(data, columns=cols)
    df.to_sql('dtype_test', self.conn)
    df.to_sql('dtype_test2', self.conn, dtype={'B': sqlalchemy.TEXT})
    Meta = sqlalchemy.schema.MetaData(bind=self.conn)
    Meta.reflect()
    sqltype = Meta.tables['dtype_test2'].columns['B'].type
    self.assertTrue(isinstance(sqltype, sqlalchemy.TEXT))
    # A plain Python type (str) instead of a SQLAlchemy type must raise.
    # Fix: the ``self.conn`` argument was lost during extraction on this
    # call and the GH9083 call below; restored to match the other
    # ``to_sql`` calls in this test.
    self.assertRaises(ValueError, df.to_sql,
                      'error', self.conn, dtype={'B': str})
    # GH9083
    df.to_sql('dtype_test3', self.conn, dtype={'B': sqlalchemy.String(10)})
    Meta.reflect()
    sqltype = Meta.tables['dtype_test3'].columns['B'].type
    self.assertTrue(isinstance(sqltype, sqlalchemy.String))
    self.assertEqual(sqltype.length, 10)
def test_execute_fail(self):
    """A second insert with a duplicate (a, b) primary key must raise."""
    ddl = """
    CREATE TABLE test
    (
    a TEXT,
    b TEXT,
    c REAL,
    PRIMARY KEY (a,b)
    );
    """
    cursor = self.conn.cursor()
    cursor.execute(ddl)
    # Seed two rows sharing a='foo' but distinct b values.
    sql.execute('INSERT INTO test VALUES("foo","bar",1.234)', self.conn)
    sql.execute('INSERT INTO test VALUES("foo","baz",2.567)', self.conn)
    try:
        # Silence error output produced by the failing statement.
        sys.stdout = StringIO()
        self.assertRaises(Exception, sql.execute,
                          'INSERT INTO test VALUES("foo",7)',
                          self.conn)
    finally:
        # Always restore the real stdout.
        sys.stdout = sys.__stdout__
def test_execute_closed_connection(self):
    # NOTE(review): truncated by extraction -- the CREATE TABLE string
    # literal is never terminated (the closing triple-quote and several
    # statements are missing), so Python treats everything below as
    # string content; this snippet cannot run as-is.
    create_sql = """
    CREATE TABLE test
    (
    a TEXT, self.conn)
    self.conn.close()
    try:
    sys.stdout = StringIO()
    self.assertRaises(Exception, sql.tquery, "select * from test",
    con=self.conn)
    finally:
    sys.stdout = sys.__stdout__
    # Initialize connection again (needed for tearDown)
    self.setUp()
def _create(self, dicts, cols):
    """Grow string-column lengths in *cols* to fit the data in *dicts*.

    For each column, scans every dict's value and widens
    ``col.type.length`` (to 1.2x the value length) whenever a value
    exceeds the current length; once a column's length passes 48 it is
    promoted to ``sql.TEXT`` and scanning of that column stops.

    NOTE(review): relies on the Python 2 ``unicode`` builtin -- under
    Python 3 the isinstance check raises NameError.  Also assumes
    ``col.type`` has a comparable ``length`` attribute; a None length
    would trip the except clause below.
    """
    exclude_cols = set()
    for name, col in cols.items():
        if name in exclude_cols: continue
        for d in dicts:
            try:
                value = d.get(name, '')
                # Skip missing values and non-string data.
                if value is None or not isinstance(value, (str, unicode)): continue
                if col.type.length < len(value):
                    # col.type.length = int(1.618 * len(d[name]) + 0.5)
                    col.type.length = int(1.2 * len(value) + 0.5)
            except:
                # Log the offending column/value to stderr before re-raising.
                print(name, col.type, value, file=sys.stderr)
                raise
            if col.type.length > 48:
                # Too long for a sized string: switch to TEXT and stop
                # scanning this column.
                col.type = sql.TEXT()
                exclude_cols.add(name)
                break
def variants_gene_columns(self):
    """Column definitions for per-gene variant annotation fields.

    NOTE(review): truncated by extraction -- many ``sql.Column`` calls
    lost their type argument and closing paren, leaving the list
    unbalanced; not runnable as-is.
    """
    # all of these are also stored in the variant_impacts table.
    return [
        sql.Column("gene", sql.String(20)),
        sql.Column("transcript",
        sql.Column("is_exonic", sql.Boolean()),
        sql.Column("is_coding",
        sql.Column("is_lof",
        sql.Column("is_splicing",
        sql.Column("exon", sql.String(8)),
        sql.Column("codon_change", sql.TEXT()),
        sql.Column("aa_change",
        sql.Column("aa_length",
        sql.Column("biotype", sql.String(50)),
        sql.Column("impact",
        sql.Column("impact_so",
        sql.Column("impact_severity", sql.String(4)),
        sql.Column("polyphen_pred",
        sql.Column("polyphen_score", sql.Float()),
        sql.Column("sift_pred",
        sql.Column("sift_score",
    ]
def downgrade():
    """Recreate a stub ``privatemessages`` table and drop ``messages``
    and ``conversations``.

    NOTE(review): mangled by extraction -- a stray ``sa.DateTime()`` sits
    where the column definitions used to be; not usable as-is.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('privatemessages', sa.DateTime(),
        sa.PrimaryKeyConstraint('id')
    )
    op.drop_table('messages')
    op.drop_table('conversations')
    ### end Alembic commands ###
def _build_sql_query(self, query, query_filters):
    """Build a SQLAlchemy query over Package joined to its publisher.

    Each entry of ``query_filters`` has the form ``"<class>:<term>"``;
    only the ``publisher`` class is supported, anything else raises
    InvalidUsage.  A free-text ``query`` is matched case-insensitively
    against the ``title`` key of the latest tag's descriptor, restricted
    to active packages.
    """
    sql_query = Package.query.join(Package.publisher)
    sa_filters = []
    for f in query_filters:
        filter_class, filter_term = f.split(":")
        if filter_class == 'publisher':
            sa_filters.append(Publisher.name == filter_term)
        else:
            # NOTE(review): message casing looks scraping-garbled
            # ("right Now"); left untouched as it is runtime text.
            raise InvalidUsage("not supported any other filter right Now")
    # Publisher filters are OR-ed together.
    if len(sa_filters) > 0:
        sql_query = sql_query.filter(or_(*sa_filters))
    # NOTE(review): ``!= '*' or not query.strip()`` also applies the title
    # filter when query is empty/whitespace (ilike '%%'); confirm whether
    # ``and`` was intended here.
    if query != '*' or not query.strip():
        sql_query = sql_query.join(Package.tags)\
            .filter(PackageTag.descriptor.op('->>')('title')
                    .cast(sqlalchemy.TEXT)
                    .ilike("%{q}%".format(q=query)),
                    PackageTag.tag == 'latest',
                    Package.status == PackageStateEnum.active)
    return sql_query
def upgrade():
    """Tighten the ``translated_string`` text columns to NOT NULL.

    For each column, NULLs are first backfilled with the empty string so
    the NOT NULL constraint can be applied without violating existing
    rows.

    Deduplicated: the original repeated the execute/alter pair five
    times.  NOTE(review): ``existing_type`` survived extraction only on
    the first ``alter_column``; it is restored uniformly here (these are
    all TEXT columns per the first call) -- confirm against the model.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    columns = ('base_string', 'comment', 'context', 'translation',
               'translator_comment')
    for column in columns:
        # Column names come from the fixed tuple above, not user input,
        # so the string formatting below is not an injection risk.
        op.execute(
            "UPDATE translated_string SET {0}='' WHERE {0} is NULL".format(column))
        op.alter_column('translated_string', column,
                        existing_type=sa.TEXT(),
                        nullable=False)
    # ### end Alembic commands ###
def downgrade():
    """Relax ``translated_string`` columns back to nullable.

    NOTE(review): truncated by extraction -- both ``alter_column`` calls
    lost their column-name argument, which Alembic requires, so these
    calls fail at runtime as written.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('translated_string',
        nullable=True)
    op.alter_column('translated_string',
        nullable=True)
    # ### end Alembic commands ###
def upgrade():
    """Create the ``web_hook`` and ``web_hook_token`` tables.

    NOTE(review): truncated by extraction -- column types and closing
    parentheses are missing throughout and the parentheses are
    unbalanced; not runnable as-is.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('web_hook', postgresql.UUID(as_uuid=True),
        sa.Column('name', nullable=False, unique=True),
        sa.Column('description',
        sa.Column('url',
        sa.Column('shared_secret',
        sa.Column('status', sa.Integer(),
        sa.Column('consecutive_failure_count',
        sa.Column('register_time', sa.TIMESTAMP(),
        sa.Column('created_by_uid',
        sa.Column('permissions', server_default='"[]"'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('web_hook_token',
        sa.Column('token_id',
        sa.Column('web_hook_id', ['users.id'],
        sa.ForeignKeyConstraint(['web_hook_id'], ['web_hook.id'],
        sa.PrimaryKeyConstraint('user_id', 'web_hook_id')
    )
    # ### end Alembic commands ###
def upgrade():
    """Add the ``direct_access`` column to ``pods``.

    NOTE(review): the column's type argument was lost during extraction;
    as written the Column is created with no type.
    """
    op.add_column('pods', sa.Column('direct_access', autoincrement=False, nullable=True))
def downgrade():
    """Revert predefined-app template versioning.

    Re-adds the ``template`` column to ``predefined_apps``, migrates data
    back via ``downgrade_data``, then drops the versioning artifacts.
    """
    bind = op.get_bind()
    # Fix: declarative bases expose ``metadata`` (lowercase);
    # ``Base.Metadata`` would raise AttributeError at runtime.
    Base.metadata.bind = bind
    ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): the column's type argument was lost during
    # extraction -- confirm the original type before running.
    op.add_column(u'predefined_apps', sa.Column('template',
                                                nullable=True))
    op.drop_column(u'predefined_apps', 'is_deleted')
    downgrade_data(bind)
    # Data restored above, so the column can now be made NOT NULL.
    op.alter_column(u'predefined_apps', u'template', nullable=False)
    op.drop_column(u'pods', 'template_version_id')
    op.drop_table('predefined_app_templates')
    ### end Alembic commands ###
def downgrade():
    """Recreate the ``pages`` table, restore old ``menus``/``menus_items``
    columns and foreign keys, and drop ``menuitem_roles``.

    NOTE(review): heavily mangled by extraction -- unbalanced parentheses
    and several ``sa.Column`` calls fused onto single lines (see the
    ``menus_items`` roles line); not runnable as-is.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('pages',
        sa.Column('ts', postgresql.TIMESTAMP(),
        sa.Column('created_by_id',
        sa.Column('modified',
        sa.Column('modified_by_id',
        sa.Column('slug',
        sa.Column('title',
        sa.Column('content',
        sa.ForeignKeyConstraint(['created_by_id'], name=u'pages_created_by_id_fkey'),
        sa.ForeignKeyConstraint(['modified_by_id'], name=u'pages_modified_by_id_fkey'),
        sa.PrimaryKeyConstraint('id', name=u'pages_pkey')
    )
    op.add_column('menus_items', sa.Column('is_public', nullable=True))
    op.add_column('menus_items', sa.Column('roles', sa.Column('ts', sa.Column('page_id', sa.Column('is_group_label', sa.Column('created_by_id', nullable=True))
    op.create_foreign_key(u'menus_items_page_id_fkey', 'menus_items', 'pages', ['page_id'], ['id'])
    op.create_foreign_key(u'menus_items_created_by_id_fkey', 'users', ['created_by_id'], ['id'])
    op.add_column('menus', nullable=True))
    op.add_column('menus', nullable=True))
    op.create_foreign_key(u'menus_created_by_id_fkey', 'menus', ['id'])
    op.drop_table('menuitem_roles')
    ### end Alembic commands ###
def downgrade():
    """Re-add ``node.active_ignition_config`` and drop ``provision``.

    NOTE(review): the column's type argument was lost during extraction.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('node', sa.Column('active_ignition_config', nullable=False))
    op.drop_table('provision')
    # ### end Alembic commands ###
def upgrade():
    """Add TileDB column-offset bookkeeping to ``reference`` and
    ``reference_set``, and install a PostgreSQL trigger that assigns an
    offset to each newly inserted reference and advances the set's
    ``next_tiledb_column_offset`` by the reference length.

    NOTE(review): partially truncated by extraction -- ``create_index``
    has a stray ``]`` and lost its table name/column list, and the
    ``tiledb_column_offset`` Column lost its type.  The raw-SQL trigger
    below survived intact.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        'reference_set',
        sa.Column('next_tiledb_column_offset', sa.BigInteger(), default=0))
    op.add_column(
        'reference',
        sa.Column('tiledb_column_offset', nullable=True))
    op.alter_column('reference', 'length', existing_type=sa.BIGINT(), nullable=False)
    op.alter_column('reference', 'name', existing_type=sa.TEXT(), nullable=False)
    op.create_unique_constraint('unique_name_per_reference_set_constraint', 'reference',
                                ['reference_set_id', 'name'])
    op.create_index('unique_reference_set_id_offset_idx', 'tiledb_column_offset'],
                    unique=True)
    op.drop_column('reference', 'offset')
    # Trigger on reference insertion
    op.execute('''\
CREATE OR REPLACE FUNCTION increment_next_column_in_reference_set_pgsql()
RETURNS trigger AS $increment_next_column_in_reference_set_pgsql$
BEGIN
UPDATE reference SET tiledb_column_offset=(select next_tiledb_column_offset from reference_set where id=NEW.reference_set_id) where NEW.tiledb_column_offset IS NULL and id=NEW.id;
UPDATE reference_set SET next_tiledb_column_offset=next_tiledb_column_offset+NEW.length WHERE id = NEW.reference_set_id;
RETURN NEW;
END;
$increment_next_column_in_reference_set_pgsql$ LANGUAGE plpgsql;
CREATE TRIGGER increment_next_column_in_reference_set AFTER INSERT ON reference
FOR EACH ROW EXECUTE PROCEDURE increment_next_column_in_reference_set_pgsql();
''')
    ### end Alembic commands ###
def downgrade():
    """Remove TileDB column-offset bookkeeping and its insert trigger.

    NOTE(review): truncated by extraction -- ``drop_constraint`` lost its
    table name and both ``alter_column`` calls lost their column names,
    so several calls fail at runtime as written.
    """
    ### commands auto generated by Alembic - please adjust! ###
    # Drop trigger
    op.execute(
        'DROP TRIGGER increment_next_column_in_reference_set ON reference CASCADE')
    op.add_column('reference',
                  sa.Column('offset', sa.BIGINT(), nullable=True))
    op.drop_index('unique_reference_set_id_offset_idx', table_name='reference')
    op.drop_constraint('unique_name_per_reference_set_constraint', type_='unique')
    op.alter_column('reference', nullable=True)
    op.alter_column('reference', nullable=True)
    op.drop_column('reference', 'tiledb_column_offset')
    op.drop_column('reference_set', 'next_tiledb_column_offset')
    ### end Alembic commands ###
def upgrade():
    """Create the ``file`` and ``lookup_request`` tables.

    NOTE(review): truncated by extraction -- most column definitions lost
    their type and closing paren, leaving the parentheses unbalanced; not
    runnable as-is.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('file',
        sa.Column('file_id',
        sa.Column('sha512_hash',
        sa.Column('sha256_hash',
        sa.Column('sha1_hash',
        sa.Column('md5_hash',
        sa.Column('size', sa.FLOAT(),
        sa.Column('mime_type', sa.VARCHAR(length=120),
        nullable=True),
        sa.Column('submitted_by',
        nullable=False),
        sa.Column('status', sa.VARCHAR(length=20),
        sa.Column('last_updated',
        sa.Column('first_seen',
        sa.PrimaryKeyConstraint('file_id')
    )
    op.create_table('lookup_request',
        sa.Column('request_id',
        sa.Column('requested_at',
        sa.Column('requestor',
        sa.Column('lookup_hash',
        sa.Column('result',
        sa.ForeignKeyConstraint(['file_id'], ['file.file_id'],
        sa.PrimaryKeyConstraint('request_id')
    )
    # ### end Alembic commands ###
def test_execute_closed_connection(self):
    # NOTE(review): truncated by extraction -- the INSERT string literal
    # on the ``sql.execute`` line is unterminated and the try block that
    # exercised the closed connection is missing its opening statements;
    # not runnable as-is.
    _skip_if_no_pyMysqL()
    drop_sql = "DROP TABLE IF EXISTS test"
    create_sql = """
    CREATE TABLE test
    (
    a TEXT,
    PRIMARY KEY (a(5),b(5))
    );
    """
    cur = self.conn.cursor()
    cur.execute(drop_sql)
    cur.execute(create_sql)
    sql.execute('INSERT INTO test VALUES("foo",
    con=self.conn)
    finally:
    sys.stdout = sys.__stdout__
    # Initialize connection again (needed for tearDown)
    self.setUp()
def downgrade():
    """Downgrade the database to an older revision."""
    # NOTE(review): mangled by extraction -- several columns and
    # constraints lost their arguments and the ``create_table`` calls
    # have unbalanced parentheses; not runnable as-is.
    # commands auto generated by Alembic - please adjust! ###
    op.add_column('similar_stacks', sa.Column('appstack_id',
                                              nullable=True))
    op.add_column('similar_stacks', sa.Column('dependency_list', postgresql.JSONB(),
                                              autoincrement=False, nullable=False))
    op.drop_constraint(None, 'similar_stacks', type_='foreignkey')
    op.drop_constraint(None, type_='foreignkey')
    op.create_foreign_key('similar_stacks_appstack_id_fkey', 'app_stacks',
                          ['appstack_id'], ['id'])
    op.drop_constraint('sim_unique', type_='unique')
    op.drop_column('similar_stacks', 'stack_id')
    op.drop_column('similar_stacks', 'similarity_value')
    op.drop_column('similar_stacks', 'similar_stack_id')
    op.drop_column('similar_stacks', 'analysis')
    op.create_table('app_stacks',
        sa.Column('id',
        sa.Column('stack_json',
        sa.PrimaryKeyConstraint('id', name='app_stacks_pkey'))
    op.create_table('reference_stacks',
        sa.Column('name',
        sa.Column('version',
        sa.Column('description',
        sa.Column('dependencies', name='reference_stacks_pkey'),
        sa.UniqueConstraint('name', 'version', name='stack_unique'))
    op.drop_table('stacks')
    # end Alembic commands ###
def downgrade():
    """Downgrade the database to an older revision."""
    # NOTE(review): truncated by extraction -- the ``review`` table's
    # column definitions lost their types and closing parens, leaving the
    # call unbalanced; not runnable as-is.
    # commands auto generated by Alembic - please adjust! ###
    op.create_table('review',
        sa.Column('approved',
        sa.Column('user',
        sa.Column('timestamp',
        sa.Column('comment',
        sa.Column('epv',
        sa.ForeignKeyConstraint(['user'], ['user.id'], name='review_user_fkey'), name='review_pkey'))
    # end Alembic commands ###
def downgrade():
    """Restore the old ``tags`` indexes and recreate ``wikicomment``.

    NOTE(review): the ``wikicomment`` column definitions in this same
    snippet were truncated by extraction (unbalanced parens); the index
    operations survived intact.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_index('public_tags_value1_idx', 'tags', ['value'], unique=False)
    op.create_index('public_tags_project_id0_idx', ['project_id'], unique=False)
    op.drop_index(op.f('ix_tags_value'), table_name='tags')
    op.drop_index(op.f('ix_tags_project_id'), table_name='tags')
    op.create_table('wikicomment',
        sa.Column('link',
        sa.Column('users_id',
        sa.Column('datetimeon', name='wikicomment_pkey')
    )
    ### end Alembic commands ###
def upgrade():
    """Create the ``queue`` table and its scheduling index.

    NOTE(review): truncated by extraction -- several ``sa.Column`` calls
    lost their closing paren, leaving ``create_table`` unbalanced; not
    runnable as-is.  The ``Now()`` casing in the server default is
    scraping-garbled but harmless to SQL.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('queue',
        sa.Column('enqueued_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('Now()'),
        sa.Column('dequeued_at',
        sa.Column('expected_at',
        sa.Column('schedule_at',
        sa.Column('q_name',
        sa.Column('data', postgresql.JSON(astext_type=sa.Text()),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('priority_idx', 'queue', ['schedule_at', 'expected_at'], unique=False)
    # ### end Alembic commands ###
def variants_default_columns(self):
    """Core column definitions shared by every variants table.

    NOTE(review): truncated by extraction -- several ``sql.Column`` calls
    lost their type and closing paren, leaving the list unbalanced; not
    runnable as-is.
    """
    return [
        sql.Column("variant_id", sql.Integer(), primary_key=True),
        sql.Column("chrom", sql.String(10)),
        sql.Column("start", sql.Integer()),
        sql.Column("end",
        sql.Column("vcf_id", sql.String(12)),
        #sql.Column("anno_id",Integer()),
        sql.Column("ref",
        sql.Column("alt",
        sql.Column("qual",
        sql.Column("filter",
    ]
def downgrade():
    """Re-add ``notes.body_html``.

    NOTE(review): the column's type argument was lost during extraction.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('notes', sa.Column('body_html', nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Recreate the ``topic`` and ``topic_follow`` tables and re-link
    ``link.topic_id``.

    NOTE(review): mangled by extraction -- keyword arguments precede
    positional ones and several constraints are fused onto single lines,
    so this is a SyntaxError as written; not runnable as-is.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('topic', server_default=sa.text(u"nextval('topic_id_seq'::regclass)"), sa.VARCHAR(length=128),
        sa.Column('created_at',
        sa.Column('is_deleted', server_default=sa.text(u'false'), [u'user.id'], name=u'fk_topic_user'), name=u'topic_pkey'),
        postgresql_ignore_search_path=False
    )
    op.create_table('topic_follow',
        sa.Column('topic_id',
        sa.ForeignKeyConstraint(['topic_id'], [u'topic.id'], name=u'fk_topic_follow_topic'), name=u'fk_topic_follow_user'), name=u'topic_follow_pkey'),
        sa.UniqueConstraint('topic_id', 'user_id', name=u'ux_topic_follow_user_follow_topic')
    )
    op.add_column('link', sa.Column('topic_id', nullable=True))
    op.create_foreign_key(u'fk_link_topic', 'link', 'topic', ['topic_id'], ['id'])
    ### end Alembic commands ###
def downgrade():
    """Downgrade the database to an older revision."""
    # NOTE(review): truncated by extraction -- the ``add_column`` call
    # lost its Column definition and carries an extra closing paren; not
    # runnable as-is.
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('similar_stacks', name='stack_unique'))
    op.drop_table('stacks')
    # ### end Alembic commands ###
def downgrade():
    """Downgrade the database to an older revision."""
    # NOTE(review): truncated by extraction -- the ``review`` table's
    # column definitions are gone and an extra closing paren remains; not
    # runnable as-is.
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('review', name='review_pkey'))
    # ### end Alembic commands ###
def downgrade():
    """Recreate the ``ref`` table and drop ``web_page_cache``.

    NOTE(review): truncated by extraction -- several ``sa.Column`` calls
    lost their type and closing paren, leaving ``create_table``
    unbalanced; not runnable as-is.
    """
    op.create_table(
        'ref',
        sa.Column('id',
        sa.Column('name', sa.VARCHAR(),
        sa.Column('body',
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.drop_table('web_page_cache')
def upgrade():
    """Add a NOT NULL ``plural`` text column to ``base_string``."""
    table, column = 'base_string', 'plural'
    # Create the column nullable first so existing rows don't violate it.
    op.add_column(table, sa.Column(column, sa.Text(), nullable=True))
    # Backfill NULLs with the empty string, then tighten to NOT NULL.
    op.execute("UPDATE base_string SET plural='' WHERE plural is NULL")
    op.alter_column(table, column,
                    existing_type=sa.TEXT(),
                    nullable=False)
def downgrade():
    """Drop and recreate ``translated_string`` without the new schema,
    then drop ``base_string``.

    NOTE(review): mangled by extraction -- a keyword argument precedes
    positional column definitions and most columns lost their types, so
    this is a SyntaxError as written; not runnable as-is.
    """
    op.drop_table('translated_string')
    op.create_table('translated_string', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'),
        sa.Column('base_string',
        sa.Column('translation',
        sa.Column('comment',
        sa.Column('translator_comment',
        sa.Column('context',
        sa.Column('resource_pk',
        sa.ForeignKeyConstraint(['resource_pk'], ['resource.id'], name='translated_string_resource_pk_fkey'), name='translated_string_pkey')
    )
    op.drop_table('base_string')
def upgrade():
    """Add ``type`` and ``libyk_so`` columns to ``bangumi``.

    The ``type`` column is added with server default '2' so existing rows
    are backfilled, then the default is cleared for future inserts.

    NOTE(review): both columns lost their type argument during
    extraction.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('bangumi', sa.Column('type', server_default='2'))
    op.alter_column('bangumi', 'type', server_default=None)
    op.add_column('bangumi', sa.Column('libyk_so', nullable=True))
    ### end Alembic commands ###
def upgrade():
    """Create the ``server_session`` table.

    NOTE(review): truncated by extraction -- the ``session_id`` and
    ``expiry`` columns lost their closing parens, leaving
    ``create_table`` unbalanced; not runnable as-is.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('server_session',
        sa.Column('session_id', sa.LargeBinary(),
        sa.Column('expiry',
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('session_id')
    )
    ### end Alembic commands ###
def upgrade():
    """Create the ``task`` table and add flags to ``bangumi``/``episodes``.

    NOTE(review): truncated by extraction -- unbalanced parens in
    ``create_table`` and the ``episodes`` ``add_column`` lost its Column
    definition; not runnable as-is.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('task',
        sa.Column('type',
        sa.PrimaryKeyConstraint('id')
    )
    op.add_column(u'bangumi', sa.Column('delete_mark', nullable=True))
    op.add_column(u'episodes', nullable=True))
    # ### end Alembic commands ###
def upgrade():
    """Create the ``Feed`` table and add source flags to ``bangumi``.

    NOTE(review): truncated by extraction -- every ``Feed`` column lost
    its type and closing paren, leaving ``create_table`` unbalanced; not
    runnable as-is.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('Feed',
        sa.Column('episode_id',
        sa.Column('bangumi_id',
        sa.Column('download_url',
        sa.Column('torrent_file_id',
        sa.PrimaryKeyConstraint('episode_id')
    )
    op.add_column(u'bangumi', sa.Column('acg_rip', nullable=True))
    op.add_column(u'bangumi', sa.Column('dmhy', nullable=True))
    ### end Alembic commands ###
def upgrade():
    """Create the ``announce`` table.

    NOTE(review): truncated by extraction -- every column lost its type
    and closing paren, leaving ``create_table`` unbalanced; not runnable
    as-is.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('announce',
        sa.Column('image_url',
        sa.Column('position',
        sa.Column('sort_order',
        sa.Column('start_time',
        sa.Column('end_time',
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def upgrade():
    """Backfill ``journal.content`` from on-disk journal files, then make
    the column NOT NULL.

    Reads each content-less journal's text from the static journal tree,
    bulk-updates the rows via a single UNNEST query, marks unrecoverable
    hidden journals as '(file missing)', and finally tightens the column.

    Raises:
        IOError: when a non-hidden journal's file is missing, so the
            migration aborts instead of silently losing data.

    NOTE(review): scraping garbled the casing of several words
    ("kNown", "UNnesT", "unkNown"); restored to "known"/"UNNEST"/
    "unknown" -- SQL keywords are case-insensitive so the query behavior
    is unchanged.
    """
    journal_root = os.path.join(os.environ['WEASYL_STORAGE_ROOT'], 'static/journal')
    connection = op.get_bind()
    # A settings string containing 'h' marks the journal as hidden.
    query = connection.execute("SELECT journalid,position('h' in settings) != 0 FROM journal WHERE content IS NULL")
    updates = []
    for journal_id, is_hidden in query:
        content = _try_read([
            os.path.join(journal_root, '%d.txt' % (journal_id,)),
            _get_hash_path(journal_root, journal_id),
        ])
        if content is not None:
            updates.append((journal_id, content))
        elif not is_hidden:
            # Python 2 doesn't have FileNotFoundError
            raise IOError("Missing journal file for non-hidden journal %d; hide journals with known missing files before running this migration" % (journal_id,))
    if updates:
        # One bulk UPDATE over an UNNEST-ed (journalid, content) row set.
        connection.execute(
            """
            UPDATE journal
            SET content = t.content
            FROM UNNEST (%(updates)s) AS t (journalid integer,content unknown)
            WHERE
            journal.journalid = t.journalid AND
            journal.content IS NULL
            """,
            updates=updates,
        )
    connection.execute("UPDATE journal SET content = '(file missing)' WHERE content IS NULL AND position('h' in settings) != 0")
    op.alter_column('journal', 'content',
                    nullable=False)
def upgrade():
    """Create the id sequences and the ``endpoint_log``, ``session_token``,
    ``system_log`` and ``user_account`` tables.

    NOTE(review): truncated by extraction -- each ``create_table`` call
    has a keyword argument before positional columns (a SyntaxError) and
    all columns lost their types; the sequence creation above survived
    intact.  Not runnable as-is.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.execute(CreateSequence(Sequence("endpoint_log_id_seq")))
    op.execute(CreateSequence(Sequence("session_token_id_seq")))
    op.execute(CreateSequence(Sequence("system_log_id_seq")))
    op.execute(CreateSequence(Sequence("user_account_id_seq")))
    op.create_table('endpoint_log', server_default=sa.text("nextval('endpoint_log_id_seq')"),
        sa.Column('start_utc',
        sa.Column('duration_ms',
        sa.Column('endpoint',
        sa.Column('username',
        sa.Column('method',
        sa.Column('http_code',
        sa.Column('error_message',
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('session_token', server_default=sa.text("nextval('session_token_id_seq')"),
        sa.Column('token',
        sa.Column('created_utc',
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('system_log', server_default=sa.text("nextval('system_log_id_seq')"),
        sa.Column('event_utc',
        sa.Column('level',
        sa.Column('source',
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('user_account', server_default=sa.text("nextval('user_account_id_seq')"),
        sa.Column('email',
        sa.Column('secret',
        sa.Column('creation_utc',
        sa.Column('last_updated_utc',
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Downgrade the database to an older revision."""
    # NOTE(review): mangled by extraction -- keyword arguments precede
    # positional columns and most column definitions lost their types and
    # closing parens; not runnable as-is.
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('stacks',
        server_default=sa.text("nextval('stacks_id_seq'::regclass)"),
        sa.Column('is_ref_stack', postgresql.JSONB(astext_type=sa.Text()),
        autoincrement=False, name='stacks_pkey'),
        postgresql_ignore_search_path=False)
    op.create_table('similar_components',
        sa.Column('fromcomponent',
        sa.Column('tocomponent',
        sa.Column('similarity_distance', postgresql.DOUBLE_PRECISION(precision=53), name='similar_components_pkey'),
        sa.UniqueConstraint('fromcomponent', 'tocomponent', name='sim_comps'))
    op.create_table('similar_stacks',
        sa.Column('analysis',
        sa.Column('similar_stack_id',
        sa.Column('similarity_value',
        sa.Column('stack_id',
        sa.ForeignKeyConstraint(['similar_stack_id'], ['stacks.id'],
                                name='similar_stacks_similar_stack_id_fkey'),
        sa.ForeignKeyConstraint(['stack_id'],
                                name='similar_stacks_stack_id_fkey'), name='similar_stacks_pkey'),
        sa.UniqueConstraint('stack_id', 'similar_stack_id', name='sim_unique'))
    op.create_table('esmarker',
        sa.Column('worker_result_id',
        sa.ForeignKeyConstraint(['worker_result_id'], ['worker_results.id'],
                                name='esmarker_worker_result_id_fkey'), name='esmarker_pkey'))
    # ### end Alembic commands ###
def upgrade():
    """Create the ``publisher``, ``user``, ``package`` and
    ``publisher_user`` tables with their lookup indexes.

    NOTE(review): truncated by extraction -- most columns lost their
    types and closing parens and several ``create_index`` calls lost
    their table argument; not runnable as-is.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('publisher',
        sa.Column('private',
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_publisher_name'), 'publisher', ['name'], unique=True)
    op.create_table('user',
        sa.Column('full_name',
        sa.Column('auth0_id',
        sa.Column('sysadmin',
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_user_auth0_id'), 'user', ['auth0_id'], unique=False)
    op.create_index(op.f('ix_user_email'), ['email'], unique=False)
    op.create_index(op.f('ix_user_name'), unique=True)
    op.create_table('package',
        sa.Column('descriptor', sa.JSON(),
        sa.Column('readme',
        sa.Column('publisher_id',
        sa.ForeignKeyConstraint(['publisher_id'], [u'publisher.id'],
        sa.UniqueConstraint('name', 'publisher_id')
    )
    op.create_index(op.f('ix_package_name'), 'package', unique=False)
    op.create_index(op.f('ix_package_status'), ['status'], unique=False)
    op.create_table('publisher_user',
        sa.Column('role', 'publisher_id')
    )
    ### end Alembic commands ###
版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。