"""
Migration script 0020: library upload jobs.

Creates the job_to_output_library_dataset table so that library uploads can
run as regular jobs, adds a library_folder_id column to the job table, and
adds an index on dataset.state.

Origin: lib/galaxy/model/migrate/versions/0020_library_upload_job.py
(cistrome-harvard fork of Galaxy, https://bitbucket.org/cistrome/cistrome-harvard/)
"""
  1. from sqlalchemy import *
  2. from sqlalchemy.orm import *
  3. from sqlalchemy.exc import *
  4. from migrate import *
  5. from migrate.changeset import *
  6. import datetime
  7. now = datetime.datetime.utcnow
  8. import sys, logging
  9. # Need our custom types, but don't import anything else from model
  10. from galaxy.model.custom_types import *
  11. log = logging.getLogger( __name__ )
  12. log.setLevel(logging.DEBUG)
  13. handler = logging.StreamHandler( sys.stdout )
  14. format = "%(name)s %(levelname)s %(asctime)s %(message)s"
  15. formatter = logging.Formatter( format )
  16. handler.setFormatter( formatter )
  17. log.addHandler( handler )
  18. metadata = MetaData()
  19. def display_migration_details():
  20. print ""
  21. print "========================================"
  22. print """This script creates a job_to_output_library_dataset table for allowing library
  23. uploads to run as regular jobs. To support this, a library_folder_id column is
  24. added to the job table, and library_folder/output_library_datasets relations
  25. are added to the Job object. An index is also added to the dataset.state
  26. column."""
  27. print "========================================"
# Association table linking a job to the library dataset(s) it produced,
# so library uploads can run as regular jobs.  Created by upgrade(),
# dropped by downgrade().
JobToOutputLibraryDatasetAssociation_table = Table( "job_to_output_library_dataset", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
    Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True ),
    # Display name of the output dataset.
    Column( "name", String(255) ) )
  33. def upgrade(migrate_engine):
  34. metadata.bind = migrate_engine
  35. display_migration_details()
  36. # Load existing tables
  37. metadata.reflect()
  38. # Create the job_to_output_library_dataset table
  39. try:
  40. JobToOutputLibraryDatasetAssociation_table.create()
  41. except Exception, e:
  42. print "Creating job_to_output_library_dataset table failed: %s" % str( e )
  43. log.debug( "Creating job_to_output_library_dataset table failed: %s" % str( e ) )
  44. # Create the library_folder_id column
  45. try:
  46. Job_table = Table( "job", metadata, autoload=True )
  47. except NoSuchTableError:
  48. Job_table = None
  49. log.debug( "Failed loading table job" )
  50. if Job_table is not None:
  51. try:
  52. col = Column( "library_folder_id", Integer, index=True )
  53. col.create( Job_table, index_name='ix_job_library_folder_id')
  54. assert col is Job_table.c.library_folder_id
  55. except Exception, e:
  56. log.debug( "Adding column 'library_folder_id' to job table failed: %s" % ( str( e ) ) )
  57. try:
  58. LibraryFolder_table = Table( "library_folder", metadata, autoload=True )
  59. except NoSuchTableError:
  60. LibraryFolder_table = None
  61. log.debug( "Failed loading table library_folder" )
  62. # Add 1 foreign key constraint to the job table
  63. if migrate_engine.name != 'sqlite':
  64. #Sqlite can't alter-table-add-foreign-key
  65. if Job_table is not None and LibraryFolder_table is not None:
  66. try:
  67. cons = ForeignKeyConstraint( [Job_table.c.library_folder_id],
  68. [LibraryFolder_table.c.id],
  69. name='job_library_folder_id_fk' )
  70. # Create the constraint
  71. cons.create()
  72. except Exception, e:
  73. log.debug( "Adding foreign key constraint 'job_library_folder_id_fk' to table 'library_folder' failed: %s" % ( str( e ) ) )
  74. # Create the ix_dataset_state index
  75. try:
  76. Dataset_table = Table( "dataset", metadata, autoload=True )
  77. except NoSuchTableError:
  78. Dataset_table = None
  79. log.debug( "Failed loading table dataset" )
  80. i = Index( "ix_dataset_state", Dataset_table.c.state )
  81. try:
  82. i.create()
  83. except Exception, e:
  84. print str(e)
  85. log.debug( "Adding index 'ix_dataset_state' to dataset table failed: %s" % str( e ) )
  86. def downgrade(migrate_engine):
  87. metadata.bind = migrate_engine
  88. metadata.reflect()
  89. # Drop the library_folder_id column
  90. try:
  91. Job_table = Table( "job", metadata, autoload=True )
  92. except NoSuchTableError:
  93. Job_table = None
  94. log.debug( "Failed loading table job" )
  95. if Job_table is not None:
  96. try:
  97. col = Job_table.c.library_folder_id
  98. col.drop()
  99. except Exception, e:
  100. log.debug( "Dropping column 'library_folder_id' from job table failed: %s" % ( str( e ) ) )
  101. # Drop the job_to_output_library_dataset table
  102. try:
  103. JobToOutputLibraryDatasetAssociation_table.drop()
  104. except Exception, e:
  105. print str(e)
  106. log.debug( "Dropping job_to_output_library_dataset table failed: %s" % str( e ) )
  107. # Drop the ix_dataset_state index
  108. try:
  109. Dataset_table = Table( "dataset", metadata, autoload=True )
  110. except NoSuchTableError:
  111. Dataset_table = None
  112. log.debug( "Failed loading table dataset" )
  113. i = Index( "ix_dataset_state", Dataset_table.c.state )
  114. try:
  115. i.drop()
  116. except Exception, e:
  117. print str(e)
  118. log.debug( "Dropping index 'ix_dataset_state' from dataset table failed: %s" % str( e ) )