# **** CubicPower OpenStack Study ****
def define_task_info_table(meta):
    """Build and return the definition of the new ``task_info`` table.

    The existing ``tasks`` table is reflected into *meta* first so the
    foreign key on ``tasks.id`` below can be resolved.

    :param meta: SQLAlchemy ``MetaData`` bound to the target engine
    :returns: the ``task_info`` ``Table`` object (not yet created in the DB)
    """
    # Reflect 'tasks' so the ForeignKey('tasks.id') reference resolves.
    Table('tasks', meta, autoload=True)

    # NOTE(nikhil): input and result are stored as text in the DB.
    # SQLAlchemy marshals the data to/from JSON using custom type
    # JSONEncodedDict. It uses simplejson underneath.
    columns = [
        Column('task_id', String(36), ForeignKey('tasks.id'),
               primary_key=True, nullable=False),
        Column('input', Text()),
        Column('result', Text()),
        Column('message', Text()),
    ]
    return Table('task_info', meta, *columns, mysql_engine='InnoDB')
# **** CubicPower OpenStack Study ****
def upgrade(migrate_engine):
    """Create ``task_info`` and move per-task columns into it.

    Copies ``input``/``result``/``message`` from every ``tasks`` row into
    the new table, then drops those columns from ``tasks``.

    :param migrate_engine: SQLAlchemy engine for the database to migrate
    """
    meta = MetaData()
    meta.bind = migrate_engine

    # Create the new table before copying any data into it.
    create_tables([define_task_info_table(meta)])

    tasks_table = Table('tasks', meta, autoload=True)
    task_info_table = Table('task_info', meta, autoload=True)

    # Copy each task's migrating column values into task_info.
    for task in tasks_table.select().execute().fetchall():
        row = {
            'task_id': task.id,
            'input': task.input,
            'result': task.result,
            'message': task.message,
        }
        task_info_table.insert(values=row).execute()

    # The data now lives in task_info; drop the duplicated columns
    # (Column.drop is provided by sqlalchemy-migrate).
    for name in TASKS_MIGRATE_COLUMNS:
        tasks_table.columns[name].drop()
# **** CubicPower OpenStack Study ****
def downgrade(migrate_engine):
    """Reverse :func:`upgrade`: fold ``task_info`` back into ``tasks``.

    Re-adds the migrated columns to ``tasks``, copies each ``task_info``
    row back onto its parent task, then drops the ``task_info`` table.

    :param migrate_engine: SQLAlchemy engine for the database to migrate
    """
    meta = MetaData()
    meta.bind = migrate_engine

    tasks_table = Table('tasks', meta, autoload=True)
    task_info_table = Table('task_info', meta, autoload=True)

    # Recreate the columns on tasks (Column.create is provided by
    # sqlalchemy-migrate's monkey-patching).
    for name in TASKS_MIGRATE_COLUMNS:
        Column(name, Text()).create(tasks_table)

    # Write each task_info row back to the matching tasks row.
    for info in task_info_table.select().execute().fetchall():
        payload = {
            'input': info.input,
            'result': info.result,
            'message': info.message,
        }
        (tasks_table
            .update(values=payload)
            .where(tasks_table.c.id == info.task_id)
            .execute())

    drop_tables([task_info_table])