/lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py

https://bitbucket.org/cistrome/cistrome-harvard/ · Python · 95 lines · 94 code · 0 blank · 1 comment · 2 complexity · 1643e6439ed3e21eea77c16ec63c028d MD5 · raw file

"""
Migration script to add the sample_dataset table and remove the 'dataset_files' column
from the 'sample' table
"""
# NOTE: star imports are the historical convention for these migrate scripts;
# they supply MetaData/Table/Column/types and the migrate changeset helpers.
from sqlalchemy import *
from sqlalchemy.orm import *
from migrate import *
from migrate.changeset import *
from sqlalchemy.exc import *
from galaxy.model.custom_types import *
from galaxy.util.json import from_json_string, to_json_string
import datetime
# Callable (not a fixed timestamp) so column defaults evaluate per-row.
now = datetime.datetime.utcnow
import logging
log = logging.getLogger( __name__ )
# Fresh MetaData; bound to the live engine inside upgrade()/downgrade().
metadata = MetaData()
  17. def nextval( table, col='id' ):
  18. if migrate_engine.name == 'postgres':
  19. return "nextval('%s_%s_seq')" % ( table, col )
  20. elif migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
  21. return "null"
  22. else:
  23. raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
  24. def localtimestamp():
  25. if migrate_engine.name == 'postgres' or migrate_engine.name == 'mysql':
  26. return "LOCALTIMESTAMP"
  27. elif migrate_engine.name == 'sqlite':
  28. return "current_date || ' ' || current_time"
  29. else:
  30. raise Exception( 'Unable to convert data for unknown database type: %s' % db )
# New table holding one row per dataset associated with a sample
# (previously stored as a JSON-encoded list in sample.dataset_files).
SampleDataset_table = Table('sample_dataset', metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "create_time", DateTime, default=now ),
    Column( "update_time", DateTime, default=now, onupdate=now ),
    # Owning sample row.
    Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True ),
    Column( "name", TrimmedString( 255 ), nullable=False ),
    Column( "file_path", TrimmedString( 255 ), nullable=False ),
    Column( "status", TrimmedString( 255 ), nullable=False ),
    Column( "error_msg", TEXT ),
    # Size is stored as display text, not bytes — see upgrade() below.
    Column( "size", TrimmedString( 255 ) ) )
  41. def upgrade(migrate_engine):
  42. metadata.bind = migrate_engine
  43. print __doc__
  44. metadata.reflect()
  45. try:
  46. SampleDataset_table.create()
  47. except Exception, e:
  48. log.debug( "Creating sample_dataset table failed: %s" % str( e ) )
  49. cmd = "SELECT id, dataset_files FROM sample"
  50. result = migrate_engine.execute( cmd )
  51. for r in result:
  52. sample_id = r[0]
  53. if r[1]:
  54. dataset_files = from_json_string(r[1])
  55. for df in dataset_files:
  56. if type(df) == type(dict()):
  57. cmd = "INSERT INTO sample_dataset VALUES (%s, %s, %s, %s, '%s', '%s', '%s', '%s', '%s')"
  58. cmd = cmd % ( nextval('sample_dataset'),
  59. localtimestamp(),
  60. localtimestamp(),
  61. str(sample_id),
  62. df.get('name', ''),
  63. df.get('filepath', ''),
  64. df.get('status', '').replace('"', '').replace("'", ""),
  65. "",
  66. df.get('size', '').replace('"', '').replace("'", "").replace(df.get('filepath', ''), '').strip() )
  67. migrate_engine.execute( cmd )
  68. # Delete the dataset_files column in the Sample table
  69. try:
  70. Sample_table = Table( "sample", metadata, autoload=True )
  71. except NoSuchTableError:
  72. Sample_table = None
  73. log.debug( "Failed loading table sample" )
  74. if Sample_table is not None:
  75. try:
  76. Sample_table.c.dataset_files.drop()
  77. except Exception, e:
  78. log.debug( "Deleting column 'dataset_files' from the 'sample' table failed: %s" % ( str( e ) ) )
  79. def downgrade(migrate_engine):
  80. metadata.bind = migrate_engine
  81. pass