
/lib/galaxy/web/controllers/tool_runner.py

https://bitbucket.org/cistrome/cistrome-harvard/
  1"""
  2Upload class
  3"""
  4
  5from galaxy.web.base.controller import *
  6from galaxy.util.bunch import Bunch
  7from galaxy.tools import DefaultToolState
  8from galaxy.tools.parameters.basic import UnvalidatedValue
  9from galaxy.tools.actions import upload_common
 10
 11import logging
 12log = logging.getLogger( __name__ )

class AddFrameData:
    def __init__( self ):
        self.wiki_url = None
        self.debug = None
        self.from_noframe = None

class ToolRunner( BaseUIController ):

    # Hack to get biomart to work; ideally we could pass tool_id to biomart and receive it back
    @web.expose
    def biomart(self, trans, tool_id='biomart', **kwd):
        """Catches the tool id and redirects as needed"""
        return self.index(trans, tool_id=tool_id, **kwd)

    # Test to get hapmapmart to work; ideally we could pass tool_id to hapmapmart and receive it back
    @web.expose
    def hapmapmart(self, trans, tool_id='hapmapmart', **kwd):
        """Catches the tool id and redirects as needed"""
        return self.index(trans, tool_id=tool_id, **kwd)

    @web.expose
    def default(self, trans, tool_id=None, **kwd):
        """Catches the tool id and redirects as needed"""
        return self.index(trans, tool_id=tool_id, **kwd)

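    # Example of the catch-all flow (hypothetical host): Galaxy hands a
    # datasource site a GALAXY_URL such as
    #     http://galaxy.example.org/tool_runner?tool_id=ucsc_table_direct1
    # and the remote site redirects the user's browser back to it with its own
    # query parameters appended. Those requests, and the legacy biomart /
    # hapmapmart entry points above, all funnel into index() below.
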
    @web.expose
    def index(self, trans, tool_id=None, from_noframe=None, **kwd):
        # No tool id passed, redirect to main page
        if tool_id is None:
            return trans.response.send_redirect( url_for( "/static/welcome.html" ) )
        # Load the tool
        toolbox = self.get_toolbox()
        # Backwards compatibility for datasource tools that have a default tool_id configured, but which are now using only GALAXY_URL
        if isinstance( tool_id, list ):
            tool_ids = tool_id
        else:
            tool_ids = [ tool_id ]
        for tool_id in tool_ids:
            tool = toolbox.tools_by_id.get( tool_id, None )
            if tool:
                break
        # No tool matching the tool id, display an error (shouldn't happen)
        if not tool:
            tool_id = ','.join( tool_ids )
            log.error( "index called with tool id '%s' but no such tool exists", tool_id )
            trans.log_event( "Tool id '%s' does not exist" % tool_id )
            return "Tool '%s' does not exist, kwd=%s " % ( tool_id, kwd )
        params = util.Params( kwd, sanitize=False ) # Sanitization happens when parameters are substituted into the command line via input wrappers
        # Do parameter translation here, used by datasource tools
        if tool.input_translator:
            tool.input_translator.translate( params )
        # We may be visiting Galaxy for the first time ( e.g., sending data from UCSC ),
        # so make sure to create a new history if we've never had one before.
        history = trans.get_history( create=True )
        template, vars = tool.handle_input( trans, params.__dict__ )
        if len( params ) > 0:
            trans.log_event( "Tool params: %s" % ( str( params ) ), tool_id=tool_id )
        add_frame = AddFrameData()
        add_frame.debug = trans.debug
        if from_noframe is not None:
            add_frame.wiki_url = trans.app.config.wiki_url
            add_frame.from_noframe = True
        return trans.fill_template( template, history=history, toolbox=toolbox, tool=tool, util=util, add_frame=add_frame, **vars )

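    # A minimal sketch of what input_translator does for a datasource tool
    # (hypothetical parameter names): the remote site posts its own names, e.g.
    #     { 'db': 'hg19', 'hgta_outputType': 'bed', 'URL': 'http://...' }
    # and the tool XML's <request_param_translation> rules map them onto the
    # names the tool form expects, e.g.
    #     { 'dbkey': 'hg19', 'data_type': 'bed', 'URL': 'http://...' }
    # before handle_input() validates them and renders the tool template.
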
    @web.expose
    def rerun( self, trans, id=None, from_noframe=None, **kwd ):
        """
        Given a HistoryDatasetAssociation id, find the job that created
        the dataset, extract the parameters, and display the appropriate tool
        form with parameters already filled in.
        """
        if not id:
            error( "'id' parameter is required" )
        try:
            id = int( id )
        except:
            error( "Invalid value for 'id' parameter" )
        # Get the dataset object
        data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id )
        # Only allow rerunning if the user is allowed access to the dataset.
        if not trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset ):
            error( "You are not allowed to access this dataset" )
        # Get the associated job, if any. If this hda was copied from another,
        # we need to find the job that created the original hda
        job_hda = data
        while job_hda.copied_from_history_dataset_association: # should this check library datasets as well?
            job_hda = job_hda.copied_from_history_dataset_association
        if not job_hda.creating_job_associations:
            error( "Could not find the job for this dataset" )
        # Get the job object
        job = None
        for assoc in job_hda.creating_job_associations:
            job = assoc.job
            break
        if not job:
            raise Exception( "Failed to get job information for dataset hid %d" % data.hid )
        # Get the tool object
        tool_id = job.tool_id
        try:
            # Load the tool
            toolbox = self.get_toolbox()
            tool = toolbox.tools_by_id.get( tool_id, None )
            assert tool is not None, 'Requested tool has not been loaded.'
        except:
            # This is expected when the tool is no longer installed, so not an exception
            error( "This dataset was created by an obsolete tool (%s). Can't re-run." % tool_id )
        # Can't rerun upload, external data sources, et cetera. Workflow
        # compatibility will proxy this for now
        if not tool.is_workflow_compatible:
            error( "The '%s' tool does not currently support rerunning." % tool.name )
        # Get the job's parameters
        try:
            params_objects = job.get_param_values( trans.app, ignore_errors=True )
        except:
            raise Exception( "Failed to get parameters for dataset id %d " % data.id )
        upgrade_messages = tool.check_and_update_param_values( params_objects, trans )
        # Need to remap dataset parameters. Job parameters point to the original
        # dataset used; the parameter should be the analogous dataset in the
        # current history.
        history = trans.get_history()
        hda_source_dict = {} # Mapping from source HDA to the analogous HDA in the current history.
        for hda in history.datasets:
            source_hda = hda.copied_from_history_dataset_association
            while source_hda: # should this check library datasets as well?
                # FIXME: there could be multiple copies of an hda in a single history; this does a better job of matching on cloned histories,
                # but is still less than perfect when, e.g., individual datasets are copied between histories
                if source_hda not in hda_source_dict or source_hda.hid == hda.hid:
                    hda_source_dict[ source_hda ] = hda
                source_hda = source_hda.copied_from_history_dataset_association
        # Unpack unvalidated values to strings, they'll be validated when the
        # form is submitted (this happens when re-running a job that was
        # initially run by a workflow)
        # This needs to be done recursively through grouping parameters
        def rerun_callback( input, value, prefixed_name, prefixed_label ):
            if isinstance( value, UnvalidatedValue ):
                return str( value )
            if isinstance( input, DataToolParameter ):
                if value not in history.datasets and value in hda_source_dict:
                    return hda_source_dict[ value ]
        visit_input_values( tool.inputs, params_objects, rerun_callback )
        # Create a fake tool_state for the tool, with the parameter values
        state = tool.new_state( trans )
        state.inputs = params_objects
        tool_state_string = util.object_to_string( state.encode( tool, trans.app ) ) # FIXME: built but not used below
        # Set up context for the template
        vars = dict( tool_state=state, errors=upgrade_messages )
        # Is the "add frame" stuff necessary here?
        add_frame = AddFrameData()
        add_frame.debug = trans.debug
        if from_noframe is not None:
            add_frame.wiki_url = trans.app.config.wiki_url
            add_frame.from_noframe = True
        return trans.fill_template( "tool_form.mako", history=history, toolbox=toolbox, tool=tool, util=util, add_frame=add_frame, **vars )
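
    # A minimal sketch of the remapping rerun() performs (hypothetical data):
    # if the job ran on hda A in another history, and the current history holds
    # hda B with B.copied_from_history_dataset_association == A, then walking
    # each current dataset's copy chain builds hda_source_dict = { A: B }, and
    # rerun_callback swaps A for B so the re-rendered form points at the
    # analogous dataset in the current history.
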
    @web.expose
    def redirect( self, trans, redirect_url=None, **kwd ):
        if not redirect_url:
            return trans.show_error_message( "Required URL for redirection missing" )
        trans.log_event( "Redirecting to: %s" % redirect_url )
        return trans.fill_template( 'root/redirect.mako', redirect_url=redirect_url )
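
    # Example request (hypothetical host):
    #     GET http://galaxy.example.org/tool_runner/redirect?redirect_url=http://genome.ucsc.edu/
    # logs the event and renders root/redirect.mako, which forwards the browser
    # to the given URL.
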
    @web.json
    def upload_async_create( self, trans, tool_id=None, **kwd ):
        """
        Precreate datasets for asynchronous uploading.
        """
        cntrller = kwd.get( 'cntrller', '' )
        roles = kwd.get( 'roles', False )
        if roles:
            # The user associated the DATASET_ACCESS permission on the uploaded datasets with 1 or more roles.
            # We need to ensure that the roles are legitimately derived from the roles associated with the LIBRARY_ACCESS
            # permission if the library is not public ( this should always be the case since any illegitimate roles
            # were filtered out of the roles displayed on the upload form ).  In addition, we need to ensure that the user
            # did not associate roles that would make the dataset inaccessible to everyone.
            library_id = trans.app.security.decode_id( kwd.get( 'library_id', '' ) )
            vars = dict( DATASET_ACCESS_in=roles )
            permissions, in_roles, error, msg = trans.app.security_agent.derive_roles_from_access( trans, library_id, cntrller, library=True, **vars )
            if error:
                return [ 'error', msg ]
        def create_dataset( name ):
            ud = Bunch( name=name, file_type=None, dbkey=None )
            if nonfile_params.get( 'folder_id', False ):
                replace_id = nonfile_params.get( 'replace_id', None )
                if replace_id not in [ None, 'None' ]:
                    replace_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( trans.security.decode_id( replace_id ) )
                else:
                    replace_dataset = None
                # FIXME: instead of passing params here ( which have been processed by util.Params() ), the original kwd
                # should be passed so that complex objects that may have been included in the initial request remain.
                library_bunch = upload_common.handle_library_params( trans, nonfile_params, nonfile_params.folder_id, replace_dataset )
            else:
                library_bunch = None
            return upload_common.new_upload( trans, cntrller, ud, library_bunch=library_bunch, state=trans.app.model.HistoryDatasetAssociation.states.UPLOAD )
        tool = self.get_toolbox().tools_by_id.get( tool_id, None )
        if not tool:
            return False # bad tool_id
        nonfile_params = util.Params( kwd, sanitize=False )
        if kwd.get( 'tool_state', None ) not in ( None, 'None' ):
            encoded_state = util.string_to_object( kwd["tool_state"] )
            tool_state = DefaultToolState()
            tool_state.decode( encoded_state, tool, trans.app )
        else:
            tool_state = tool.new_state( trans )
        errors = tool.update_state( trans, tool.inputs, tool_state.inputs, kwd, update_only=True )
        datasets = []
        dataset_upload_inputs = []
        for input_name, input in tool.inputs.iteritems():
            if input.type == "upload_dataset":
                dataset_upload_inputs.append( input )
        assert dataset_upload_inputs, "No dataset upload groups were found."
        for dataset_upload_input in dataset_upload_inputs:
            d_type = dataset_upload_input.get_datatype( trans, kwd )
            if d_type.composite_type is not None:
                datasets.append( create_dataset( dataset_upload_input.get_composite_dataset_name( kwd ) ) )
            else:
                params = Bunch( **tool_state.inputs[ dataset_upload_input.name ][0] )
                if params.file_data not in [ None, "" ]:
                    name = params.file_data
                    if name.count('/'):
                        name = name.rsplit('/',1)[1]
                    if name.count('\\'):
                        name = name.rsplit('\\',1)[1]
                    datasets.append( create_dataset( name ) )
                if params.url_paste not in [ None, "" ]:
                    url_paste = params.url_paste.replace( '\r', '' ).split( '\n' )
                    url = False
                    for line in url_paste:
                        line = line.rstrip( '\r\n' ).strip()
                        if not line:
                            continue
                        elif line.lower().startswith( 'http://' ) or line.lower().startswith( 'ftp://' ) or line.lower().startswith( 'https://' ):
                            url = True
                            datasets.append( create_dataset( line ) )
                        else:
                            if url:
                                continue # non-url when we've already processed some urls
                            else:
                                # pasted data
                                datasets.append( create_dataset( 'Pasted Entry' ) )
                                break
        return [ d.id for d in datasets ]

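    # A sketch of what the url_paste branch above precreates (hypothetical
    # input): pasting
    #     http://example.org/a.bed
    #     ftp://example.org/b.bed
    # yields one dataset per URL line, each named after its line, while pasting
    # anything that is not a URL yields a single dataset named 'Pasted Entry'.
    # The JSON response is the list of new dataset ids, which the asynchronous
    # upload client can use to track each upload.
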
    @web.expose
    def upload_async_message( self, trans, **kwd ):
        # This might be more appropriate in a different controller
        msg = """<p>Your upload has been queued.  History entries that are still uploading will be blue, and turn green upon completion.</p>
        <p><b>Please do not use your browser's "stop" or "reload" buttons until the upload is complete, or it may be interrupted.</b></p>
        <p>You may safely continue to use Galaxy while the upload is in progress.  Using "stop" and "reload" on pages other than Galaxy is also safe.</p>
        """
        return trans.show_message( msg, refresh_frames='history' )