from alembic import op
import sqlalchemy as sa


def upgrade():
    with op.batch_alter_table('tables') as batch_op:
        batch_op.add_column(sa.Column('user_id', sa.Integer()))
        batch_op.create_foreign_key('user_id', 'ab_user', ['user_id'], ['id'])
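The original shows no matching downgrade(); a minimal sketch that reverses the two batch operations, reusing the constraint and column names from above:

def downgrade():
    with op.batch_alter_table('tables') as batch_op:
        batch_op.drop_constraint('user_id', type_='foreignkey')
        batch_op.drop_column('user_id')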
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.schema import CreateSchema


def upgrade():
    op.execute(CreateSchema('event_abstracts'))
    op.create_table('abstracts',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('friendly_id', sa.Integer(), nullable=False),
                    sa.Column('event_id', sa.Integer(), nullable=False, index=True),
                    sa.Column('description', sa.Text(), nullable=False),
                    sa.Column('accepted_track_id', sa.Integer(), nullable=True, index=True),
                    sa.Column('accepted_type_id', sa.Integer(), nullable=True, index=True),
                    sa.Column('type_id', sa.Integer(), nullable=True, index=True),
                    sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
                    sa.ForeignKeyConstraint(['accepted_type_id'], ['events.contribution_types.id']),
                    sa.ForeignKeyConstraint(['type_id'], ['events.contribution_types.id']),
                    sa.UniqueConstraint('friendly_id', 'event_id'),
                    sa.PrimaryKeyConstraint('id'),
                    schema='event_abstracts')
    op.create_table('abstract_field_values',
                    sa.Column('data', postgresql.JSON(), nullable=False),
                    sa.Column('abstract_id', sa.Integer(), nullable=False, index=True),
                    sa.Column('contribution_field_id', sa.Integer(), nullable=False, index=True),
                    sa.ForeignKeyConstraint(['abstract_id'], ['event_abstracts.abstracts.id']),
                    # remaining constraints truncated in the original snippet
                    schema='event_abstracts')
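The reverse migration is not part of the snippet; a hedged sketch, assuming PostgreSQL and that nothing else was added to the schema, drops the tables and then the schema itself:

from sqlalchemy.schema import DropSchema


def downgrade():
    op.drop_table('abstract_field_values', schema='event_abstracts')
    op.drop_table('abstracts', schema='event_abstracts')
    op.execute(DropSchema('event_abstracts'))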
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('roles',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=64), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_table('users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(length=64), nullable=True),
        sa.Column('role_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['role_id'], ['roles.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_users_username', 'users', ['username'], unique=True)
    ### end Alembic commands ###
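Alembic auto-generates the mirror-image downgrade() alongside this; a sketch that undoes the commands above in reverse order:

def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_index('ix_users_username', 'users')
    op.drop_table('users')
    op.drop_table('roles')
    ### end Alembic commands ###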
import sqlalchemy as sa

from buildbot.util import sautils


def rename_buildrequest_claims(migrate_engine):
    metadata = sa.MetaData()
    metadata.bind = migrate_engine
    buildrequest_claims = sautils.Table(
        "buildrequest_claims", metadata,
        sa.Column('brid', sa.Integer, index=True, unique=True),
        sa.Column('objectid', sa.Integer, index=True, nullable=True),
        sa.Column('claimed_at', sa.Integer, nullable=False),
    )
    # drop the old indexes, then move the table out of the way
    for index in buildrequest_claims.indexes:
        index.drop()
    migrate_engine.execute('alter table buildrequest_claims '
                           'rename to buildrequest_claims_old')
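Renaming to buildrequest_claims_old is typically the first step of a copy-based migration; a hedged sketch of the follow-up (the helper name is hypothetical, and it assumes the replacement table has already been created):

def copy_and_drop_old(migrate_engine):
    # copy the rows into the freshly created replacement table,
    # then drop the renamed original
    migrate_engine.execute(
        'insert into buildrequest_claims (brid, objectid, claimed_at) '
        'select brid, objectid, claimed_at from buildrequest_claims_old')
    migrate_engine.execute('drop table buildrequest_claims_old')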
    # inner decorator of a many-to-many factory; ref_name, ref_model,
    # ref_table_name, middle_table_name, backref_name and ref_model_name
    # come from the enclosing function
    def ref_table(cls):
        if not isinstance(ref_model, str):
            ref_model._readable_names = ref_name
            cls._many_to_models.append(ref_model)
            ref_model._many_to_models.append(cls)
        table_name = cls._readable_name
        my_middle_table_name = middle_table_name or '{0}_{1}'.format(table_name, ref_table_name)
        # a self-referencing table needs distinct column names
        if table_name == ref_table_name:
            left_column_name = 'left_id'
            right_column_name = 'right_id'
        else:
            left_column_name = '{0}_id'.format(table_name)
            right_column_name = '{0}_id'.format(ref_table_name)
        # association table whose composite primary key holds the two foreign keys
        middle_table = Table(
            my_middle_table_name, Database.Base.metadata,
            Column(left_column_name, Integer,
                   ForeignKey('{0}.id'.format(table_name), ondelete="CASCADE"),
                   primary_key=True),
            Column(right_column_name, Integer,
                   ForeignKey('{0}.id'.format(ref_table_name), ondelete="CASCADE"),
                   primary_key=True))
        my_backref_name = backref_name or '{0}s'.format(table_name)
        parameters = dict(secondary=middle_table, lazy='dynamic',
                          backref=backref(my_backref_name, lazy='dynamic'))
        if table_name == ref_table_name:
            # disambiguate the join directions for self-referencing relationships
            parameters['primaryjoin'] = cls.id == middle_table.c.left_id
            parameters['secondaryjoin'] = cls.id == middle_table.c.right_id
        setattr(cls, ref_name, relationship(ref_model_name, **parameters))
        return cls
    return ref_table
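The enclosing factory itself is not shown; a hypothetical usage sketch, assuming it is declared roughly as many_to_many(ref_name, ref_model, ref_table_name, middle_table_name=None, backref_name=None) and applied as a class decorator:

# hypothetical: gives Student a dynamic 'courses' relationship plus a
# 'students' backref on Course, via an auto-created students_courses table
@many_to_many('courses', Course, 'courses')
class Student(Database.Base):
    __tablename__ = 'students'
    id = Column(Integer, primary_key=True)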
    end_date_micro = Column(Integer)
    # TODO: use these two new fields
    max_error_in_millis = Column(Integer, nullable=True)
    # NULL = unknown; 0 = actively finished; 1 = timed out (client);
    # 2 = kicked by scheduler; 3 = batch.
    finish_reason = Column(Integer, nullable=True)
    #
    # The following data is used for optimized analytics (optimized queries based on this data).
    #
    start_date_date = Column(Date, index=True)
    start_date_weekday = Column(Integer, index=True)  # 0..6, as in datetime.datetime.weekday()
    start_date_hour = Column(Integer, index=True)  # 0..23
    start_date_year = Column(Integer, index=True)  # e.g., 2015
    start_date_month = Column(Integer, index=True)  # 1..12, as in datetime.date.month
    # number of weeks since January 5, 1970 (the first Monday after the epoch)
    start_date_week_monday = Column(Integer, index=True)
    # number of weeks since January 4, 1970 (the first Sunday after the epoch)
    start_date_week_sunday = Column(Integer, index=True)
    session_time_micro = Column(BigInteger, index=True)  # should take finish_reason into account
    session_time_seconds = Column(Integer, index=True)  # should take finish_reason into account
    #
    # Who accessed the experiment?
    #
    permission_permanent_id = Column(Unicode(255), nullable=True, index=True)
    group_permission_id = Column(Integer, ForeignKey('GroupPermission.id'), nullable=True)
    user_permission_id = Column(Integer, ForeignKey('UserPermission.id'), nullable=True)
    role_permission_id = Column(Integer, ForeignKey('RolePermission.id'), nullable=True)
    origin = Column(Unicode(255), nullable=False, index=True)
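The two week counters are easy to get wrong; a small sketch (not from the original model) of how they could be derived from a start date:

from datetime import datetime

EPOCH_FIRST_MONDAY = datetime(1970, 1, 5)  # first Monday after the epoch
EPOCH_FIRST_SUNDAY = datetime(1970, 1, 4)  # first Sunday after the epoch


def weeks_since(start_date, anchor):
    # whole weeks elapsed between the anchor and start_date
    return (start_date - anchor).days // 7


start_date = datetime(2015, 6, 10, 14, 30)
start_date_week_monday = weeks_since(start_date, EPOCH_FIRST_MONDAY)
start_date_week_sunday = weeks_since(start_date, EPOCH_FIRST_SUNDAY)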
    tts = relationship(TTS, back_populates="configs")
    sounds = relationship(Sounds, back_populates="configs")
    skills = relationship(Skill, back_populates="configs")
    lang = Column(String)
    system_unit = Column(String, default="metric")
    time_format = Column(String, default="full")
    date_format = Column(String, default="DMY")
    opt_in = Column(Boolean, default=False)
    confirm_listening = Column(Boolean, default=False)
    play_wav_cmdline = Column(String, default="paplay %1 --stream-name=mycroft-voice")
    play_mp3_cmdline = Column(String, default="mpg123 %1")
    skills_dir = Column(String, default="/opt/mycroft/skills")
    skills_auto_update = Column(Boolean, default=False)
    listener_sample_rate = Column(Integer, default=16000)
    listener_channels = Column(Integer, default=1)
    record_wake_words = Column(Boolean, default=False)
    record_utterances = Column(Boolean, default=False)
    wake_word_upload = Column(Boolean, default=False)
    phoneme_duration = Column(Integer, default=120)
    listener_multiplier = Column(Float, default=1.0)
    listener_energy_ratio = Column(Float, default=1.5)
    wake_word = Column(String, default="hey mycroft")
    stand_up_word = Column(String, default="wake up")

    def __repr__(self):
        return self.uuid


class Sounds(Base):
    __tablename__ = "sounds"
    status_is_retweet = Column(Boolean)


class User(Base):
    '''Class to store individual user traits.'''
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    user_name = Column(String)
    user_followers_count = Column(Integer)
    user_friends_count = Column(Integer)
    user_statuses_count = Column(Integer)
    user_favourites_count = Column(Integer)
    user_listed_count = Column(Integer)
    user_mention_count = Column(Integer)
    user_retweet_count = Column(Integer)


# create all of the tables on the bound engine
Base.metadata.create_all(db)
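create_all expects db to be a SQLAlchemy engine; a minimal, assumed setup (the SQLite URL is illustrative, not from the original):

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

db = create_engine('sqlite:///tweets.db')  # hypothetical database URL

# once the tables exist, rows can be added through a session
Session = sessionmaker(bind=db)
session = Session()
session.add(User(user_name='example'))
session.commit()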
    workflow_id : int
        The ID of the associated workflow.
    workflow : :class:`ramp_database.model.Workflow`
        The workflow instance.
    path_ramp_kit : str
        The path where the kit is located.
    path_ramp_data : str
        The path where the data are located.
    events : list of :class:`ramp_database.model.Event`
        A back-reference to the events using this problem.
    keywords : list of :class:`ramp_database.model.ProblemKeyword`
        A back-reference to the keywords associated with the problem.
    """
    __tablename__ = 'problems'

    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False, unique=True)
    workflow_id = Column(Integer, ForeignKey('workflows.id'), nullable=False)
    workflow = relationship('Workflow', backref=backref('problems'))
    # XXX: big change in the database
    path_ramp_kit = Column(String, nullable=False, unique=False)
    path_ramp_data = Column(String, nullable=False, unique=False)

    def __init__(self, name, path_ramp_kit, path_ramp_data, session=None):
        self.name = name
        self.path_ramp_kit = path_ramp_kit
        self.path_ramp_data = path_ramp_data
        self.reset(session)

    def __repr__(self):
        # body truncated in the original snippet; a name-based repr is assumed
        return 'Problem({})'.format(self.name)
def upgrade(migrate_engine):
    # Upgrade operations go here. Don't create your own engine;
    # bind migrate_engine to your metadata.
    meta = MetaData()
    meta.bind = migrate_engine
    osd_states = Table(
        'osd_states', meta,
        Column('id', Integer, primary_key=True, nullable=False),
        Column('osd_name', String(length=255), nullable=False),
        Column('device_id', Integer, ForeignKey(models.Device.id),
               nullable=False),
        Column('storage_group_id', Integer, nullable=False),
        Column('service_id', Integer, nullable=False),
        Column('cluster_id', Integer),
        Column('state', String(length=255), nullable=False),
        Column('operation_status', String(length=255), nullable=False),
        Column('weight', Float, default=1.0, nullable=False),
        Column('public_ip', String(length=255)),
        Column('cluster_ip', String(length=255)),
        Column('created_at', DateTime(timezone=False)),
        Column('updated_at', DateTime(timezone=False)),
        Column('deleted_at', DateTime(timezone=False)),
        Column('deleted', Boolean(create_constraint=True, name=None)),
    )
    # assumed: the original snippet was truncated before the create call
    osd_states.create()
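sqlalchemy-migrate scripts conventionally pair this with a downgrade(); a minimal sketch that reflects the table and drops it again:

def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    osd_states = Table('osd_states', meta, autoload=True)
    osd_states.drop()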