
/lib/galaxy/model/migrate/versions/0020_library_upload_job.py

https://bitbucket.org/cistrome/cistrome-harvard/
Python | 124 lines
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.exc import *
from migrate import *
from migrate.changeset import *
import datetime
now = datetime.datetime.utcnow
import sys, logging
# Need our custom types, but don't import anything else from model
from galaxy.model.custom_types import *

log = logging.getLogger( __name__ )
log.setLevel( logging.DEBUG )
handler = logging.StreamHandler( sys.stdout )
format = "%(name)s %(levelname)s %(asctime)s %(message)s"
formatter = logging.Formatter( format )
handler.setFormatter( formatter )
log.addHandler( handler )

metadata = MetaData()

def display_migration_details():
    print ""
    print "========================================"
    print """This script creates a job_to_output_library_dataset table for allowing library
uploads to run as regular jobs.  To support this, a library_folder_id column is
added to the job table, and library_folder/output_library_datasets relations
are added to the Job object.  An index is also added to the dataset.state
column."""
    print "========================================"
JobToOutputLibraryDatasetAssociation_table = Table( "job_to_output_library_dataset", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
    Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True ),
    Column( "name", String(255) ) )
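# Roughly the DDL the table definition above emits (illustrative only; exact
# column types, constraint syntax and index names vary by dialect):
#
#   CREATE TABLE job_to_output_library_dataset (
#       id INTEGER NOT NULL PRIMARY KEY,
#       job_id INTEGER REFERENCES job (id),
#       ldda_id INTEGER REFERENCES library_dataset_dataset_association (id),
#       name VARCHAR(255)
#   );
#   CREATE INDEX ix_job_to_output_library_dataset_job_id ON job_to_output_library_dataset (job_id);
#   CREATE INDEX ix_job_to_output_library_dataset_ldda_id ON job_to_output_library_dataset (ldda_id);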
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    display_migration_details()
    # Load existing tables
    metadata.reflect()
    # Create the job_to_output_library_dataset table
    try:
        JobToOutputLibraryDatasetAssociation_table.create()
    except Exception, e:
        print "Creating job_to_output_library_dataset table failed: %s" % str( e )
        log.debug( "Creating job_to_output_library_dataset table failed: %s" % str( e ) )
    # Add the library_folder_id column to the job table
    try:
        Job_table = Table( "job", metadata, autoload=True )
    except NoSuchTableError:
        Job_table = None
        log.debug( "Failed loading table job" )
    if Job_table is not None:
        try:
            col = Column( "library_folder_id", Integer, index=True )
            col.create( Job_table, index_name='ix_job_library_folder_id' )
            assert col is Job_table.c.library_folder_id
        except Exception, e:
            log.debug( "Adding column 'library_folder_id' to job table failed: %s" % str( e ) )
        try:
            LibraryFolder_table = Table( "library_folder", metadata, autoload=True )
        except NoSuchTableError:
            LibraryFolder_table = None
            log.debug( "Failed loading table library_folder" )
        # Add a foreign key constraint from job.library_folder_id to library_folder.id
        if migrate_engine.name != 'sqlite':
            # SQLite can't alter-table-add-foreign-key
            if LibraryFolder_table is not None:
                try:
                    cons = ForeignKeyConstraint( [Job_table.c.library_folder_id],
                                                 [LibraryFolder_table.c.id],
                                                 name='job_library_folder_id_fk' )
                    # Create the constraint
                    cons.create()
                except Exception, e:
                    log.debug( "Adding foreign key constraint 'job_library_folder_id_fk' to table 'job' failed: %s" % str( e ) )
    # Create the ix_dataset_state index
    try:
        Dataset_table = Table( "dataset", metadata, autoload=True )
    except NoSuchTableError:
        Dataset_table = None
        log.debug( "Failed loading table dataset" )
    if Dataset_table is not None:
        i = Index( "ix_dataset_state", Dataset_table.c.state )
        try:
            i.create()
        except Exception, e:
            print str(e)
            log.debug( "Adding index 'ix_dataset_state' to dataset table failed: %s" % str( e ) )
def downgrade(migrate_engine):
    metadata.bind = migrate_engine
    metadata.reflect()
    # Drop the library_folder_id column from the job table
    try:
        Job_table = Table( "job", metadata, autoload=True )
    except NoSuchTableError:
        Job_table = None
        log.debug( "Failed loading table job" )
    if Job_table is not None:
        try:
            col = Job_table.c.library_folder_id
            col.drop()
        except Exception, e:
            log.debug( "Dropping column 'library_folder_id' from job table failed: %s" % str( e ) )
    # Drop the job_to_output_library_dataset table
    try:
        JobToOutputLibraryDatasetAssociation_table.drop()
    except Exception, e:
        print str(e)
        log.debug( "Dropping job_to_output_library_dataset table failed: %s" % str( e ) )
    # Drop the ix_dataset_state index
    try:
        Dataset_table = Table( "dataset", metadata, autoload=True )
    except NoSuchTableError:
        Dataset_table = None
        log.debug( "Failed loading table dataset" )
    if Dataset_table is not None:
        i = Index( "ix_dataset_state", Dataset_table.c.state )
        try:
            i.drop()
        except Exception, e:
            print str(e)
            log.debug( "Dropping index 'ix_dataset_state' from dataset table failed: %s" % str( e ) )
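
A minimal sketch of how this migration could be exercised in isolation, assuming the sqlalchemy-migrate era stack this file targets; Galaxy normally applies it through its migrate repository rather than by running the file directly, and the SQLite URL below is purely illustrative:

if __name__ == "__main__":
    # create_engine comes from the module-level "from sqlalchemy import *" above.
    # On SQLite the foreign key branch in upgrade() is skipped, and tables that
    # do not exist in a scratch database are simply reported and skipped.
    engine = create_engine( "sqlite:///scratch_migration_test.sqlite" )
    upgrade( engine )      # create the table, column and index
    # downgrade( engine )  # would reverse the changes made by upgrade()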