/lib/galaxy/web/controllers/tool_runner.py

https://bitbucket.org/cistrome/cistrome-harvard/

  1. """
  2. Upload class
  3. """
  4. from galaxy.web.base.controller import *
  5. from galaxy.util.bunch import Bunch
  6. from galaxy.tools import DefaultToolState
  7. from galaxy.tools.parameters.basic import UnvalidatedValue
  8. from galaxy.tools.actions import upload_common
  9. import logging
  10. log = logging.getLogger( __name__ )

class AddFrameData:
    def __init__( self ):
        self.wiki_url = None
        self.debug = None
        self.from_noframe = None

class ToolRunner( BaseUIController ):

    # Hack to get biomart to work; ideally we could pass tool_id to biomart and receive it back
    @web.expose
    def biomart( self, trans, tool_id='biomart', **kwd ):
        """Catches the tool id and redirects as needed"""
        return self.index( trans, tool_id=tool_id, **kwd )

    # Test to get hapmap to work; ideally we could pass tool_id to hapmapmart and receive it back
    @web.expose
    def hapmapmart( self, trans, tool_id='hapmapmart', **kwd ):
        """Catches the tool id and redirects as needed"""
        return self.index( trans, tool_id=tool_id, **kwd )

    @web.expose
    def default( self, trans, tool_id=None, **kwd ):
        """Catches the tool id and redirects as needed"""
        return self.index( trans, tool_id=tool_id, **kwd )

    @web.expose
    def index( self, trans, tool_id=None, from_noframe=None, **kwd ):
        # No tool id passed, redirect to main page
        if tool_id is None:
            return trans.response.send_redirect( url_for( "/static/welcome.html" ) )
        # Load the tool
        toolbox = self.get_toolbox()
        # Backwards compatibility for datasource tools that have a default tool_id configured, but which are now using only GALAXY_URL
        if isinstance( tool_id, list ):
            tool_ids = tool_id
        else:
            tool_ids = [ tool_id ]
        for tool_id in tool_ids:
            tool = toolbox.tools_by_id.get( tool_id, None )
            if tool:
                break
        # No tool matching the tool id, display an error (shouldn't happen)
        if not tool:
            tool_id = ','.join( tool_ids )
            log.error( "index called with tool id '%s' but no such tool exists", tool_id )
            trans.log_event( "Tool id '%s' does not exist" % tool_id )
            return "Tool '%s' does not exist, kwd=%s " % ( tool_id, kwd )
        # Parameters are sanitized later, when substituted into the command line via input wrappers
        params = util.Params( kwd, sanitize=False )
        # Do param translation here, used by datasource tools
        if tool.input_translator:
            tool.input_translator.translate( params )
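            # Illustrative example: a datasource tool's XML can declare a
            # <request_param_translation> block mapping the remote site's parameter
            # names onto the ones Galaxy expects (e.g. a remote "db" value becoming
            # Galaxy's "dbkey") before the inputs are processed.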
        # We may be visiting Galaxy for the first time ( e.g., sending data from UCSC ),
        # so make sure to create a new history if we've never had one before.
        history = trans.get_history( create=True )
        template, vars = tool.handle_input( trans, params.__dict__ )
        if len( params ) > 0:
            trans.log_event( "Tool params: %s" % ( str( params ) ), tool_id=tool_id )
        add_frame = AddFrameData()
        add_frame.debug = trans.debug
        if from_noframe is not None:
            add_frame.wiki_url = trans.app.config.wiki_url
            add_frame.from_noframe = True
        return trans.fill_template( template, history=history, toolbox=toolbox, tool=tool, util=util, add_frame=add_frame, **vars )
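
    # Typical entry point for index() above (illustrative): an external datasource
    # such as UCSC redirects the browser back to
    #   <galaxy_url>/tool_runner?tool_id=ucsc_table_direct1&db=hg19&...
    # after which index() loads the tool, translates the parameters, and handles the input.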

    @web.expose
    def rerun( self, trans, id=None, from_noframe=None, **kwd ):
        """
        Given a HistoryDatasetAssociation id, find the job that created
        the dataset, extract the parameters, and display the appropriate tool
        form with parameters already filled in.
        """
        if not id:
            error( "'id' parameter is required" )
        try:
            id = int( id )
        except:
            error( "Invalid value for 'id' parameter" )
        # Get the dataset object
        data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id )
        # Only allow rerunning if the user is allowed access to the dataset.
        if not trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset ):
            error( "You are not allowed to access this dataset" )
        # Get the associated job, if any. If this hda was copied from another,
        # we need to find the job that created the original hda
        job_hda = data
        while job_hda.copied_from_history_dataset_association: # should this check library datasets as well?
            job_hda = job_hda.copied_from_history_dataset_association
        if not job_hda.creating_job_associations:
            error( "Could not find the job for this dataset" )
        # Get the job object
        job = None
        for assoc in job_hda.creating_job_associations:
            job = assoc.job
            break
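        # NOTE: only the first creating-job association is used; an output HDA normally
        # has exactly one creating job, so the loop breaks immediately.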
        if not job:
            raise Exception( "Failed to get job information for dataset hid %d" % data.hid )
        # Get the tool object
        tool_id = job.tool_id
        try:
            # Load the tool
            toolbox = self.get_toolbox()
            tool = toolbox.tools_by_id.get( tool_id, None )
            assert tool is not None, 'Requested tool has not been loaded.'
        except:
            # This is an expected failure mode, so show an error message rather than raising
            error( "This dataset was created by an obsolete tool (%s). Can't re-run." % tool_id )
        # Can't rerun upload, external data sources, et cetera. Workflow
        # compatibility will proxy this check for now.
        if not tool.is_workflow_compatible:
            error( "The '%s' tool does not currently support rerunning." % tool.name )
        # Get the job's parameters
        try:
            params_objects = job.get_param_values( trans.app, ignore_errors=True )
        except:
            raise Exception( "Failed to get parameters for dataset id %d " % data.id )
        upgrade_messages = tool.check_and_update_param_values( params_objects, trans )
        # Need to remap dataset parameters. Job parameters point to the original
        # dataset used; the parameter should be the analogous dataset in the
        # current history.
        history = trans.get_history()
        hda_source_dict = {} # Mapping from HDA in history to source HDAs.
        for hda in history.datasets:
            source_hda = hda.copied_from_history_dataset_association
            while source_hda: # should this check library datasets as well?
                # FIXME: there could be multiple copies of an hda in a single history; this does a better
                # job of matching on cloned histories, but is still less than perfect when e.g. individual
                # datasets are copied between histories
                if source_hda not in hda_source_dict or source_hda.hid == hda.hid:
                    hda_source_dict[ source_hda ] = hda
                source_hda = source_hda.copied_from_history_dataset_association
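        # For example (illustrative): if dataset 5 in the current history was copied from
        # dataset 2 of another history, hda_source_dict maps that original HDA to local
        # dataset 5, so the rebuilt form can substitute the local copy below.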
        # Unpack unvalidated values to strings; they'll be validated when the
        # form is submitted (this happens when re-running a job that was
        # initially run by a workflow).
        # This needs to be done recursively through grouping parameters.
        def rerun_callback( input, value, prefixed_name, prefixed_label ):
            if isinstance( value, UnvalidatedValue ):
                return str( value )
            if isinstance( input, DataToolParameter ):
                if value not in history.datasets and value in hda_source_dict:
                    return hda_source_dict[ value ]
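        # visit_input_values walks the tool's entire input tree (including repeat and
        # conditional groups) and, wherever the callback returns a replacement value,
        # stores it in place of the original parameter value.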
        visit_input_values( tool.inputs, params_objects, rerun_callback )
        # Create a fake tool_state for the tool, with the parameter values
        state = tool.new_state( trans )
        state.inputs = params_objects
        tool_state_string = util.object_to_string( state.encode( tool, trans.app ) )
        # Set up context for template
        vars = dict( tool_state=state, errors=upgrade_messages )
        # Is the "add frame" stuff necessary here?
        add_frame = AddFrameData()
        add_frame.debug = trans.debug
        if from_noframe is not None:
            add_frame.wiki_url = trans.app.config.wiki_url
            add_frame.from_noframe = True
        return trans.fill_template( "tool_form.mako", history=history, toolbox=toolbox, tool=tool, util=util, add_frame=add_frame, **vars )
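
    # Typical usage of rerun() above (illustrative): the "rerun" icon on a history
    # dataset links to /tool_runner/rerun?id=<hda id>, which rebuilds the original
    # tool form with the creating job's parameters pre-filled.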

    @web.expose
    def redirect( self, trans, redirect_url=None, **kwd ):
        if not redirect_url:
            return trans.show_error_message( "Required URL for redirection missing" )
        trans.log_event( "Redirecting to: %s" % redirect_url )
        return trans.fill_template( 'root/redirect.mako', redirect_url=redirect_url )

    @web.json
    def upload_async_create( self, trans, tool_id=None, **kwd ):
        """
        Precreate datasets for asynchronous uploading.
        """
        cntrller = kwd.get( 'cntrller', '' )
        roles = kwd.get( 'roles', False )
        if roles:
            # The user associated the DATASET_ACCESS permission on the uploaded datasets with 1 or more roles.
            # We need to ensure that the roles are legitimately derived from the roles associated with the
            # LIBRARY_ACCESS permission if the library is not public ( this should always be the case since
            # any illegitimate roles were filtered out of the roles displayed on the upload form ). In
            # addition, we need to ensure that the user did not associate roles that would make the dataset
            # inaccessible to everyone.
            library_id = trans.app.security.decode_id( kwd.get( 'library_id', '' ) )
            vars = dict( DATASET_ACCESS_in=roles )
            permissions, in_roles, error, msg = trans.app.security_agent.derive_roles_from_access( trans, library_id, cntrller, library=True, **vars )
            if error:
                return [ 'error', msg ]
        def create_dataset( name ):
            ud = Bunch( name=name, file_type=None, dbkey=None )
            if nonfile_params.get( 'folder_id', False ):
                replace_id = nonfile_params.get( 'replace_id', None )
                if replace_id not in [ None, 'None' ]:
                    replace_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( trans.security.decode_id( replace_id ) )
                else:
                    replace_dataset = None
                # FIXME: instead of passing params here ( which have been processed by util.Params() ), the
                # original kwd should be passed so that complex objects that may have been included in the
                # initial request remain.
                library_bunch = upload_common.handle_library_params( trans, nonfile_params, nonfile_params.folder_id, replace_dataset )
            else:
                library_bunch = None
            return upload_common.new_upload( trans, cntrller, ud, library_bunch=library_bunch, state=trans.app.model.HistoryDatasetAssociation.states.UPLOAD )
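        # new_upload precreates the dataset in the UPLOAD state and returns it; only
        # the dataset ids are reported back to the asynchronous uploader below.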
        tool = self.get_toolbox().tools_by_id.get( tool_id, None )
        if not tool:
            return False # bad tool_id
        nonfile_params = util.Params( kwd, sanitize=False )
        if kwd.get( 'tool_state', None ) not in ( None, 'None' ):
            encoded_state = util.string_to_object( kwd["tool_state"] )
            tool_state = DefaultToolState()
            tool_state.decode( encoded_state, tool, trans.app )
        else:
            tool_state = tool.new_state( trans )
        errors = tool.update_state( trans, tool.inputs, tool_state.inputs, kwd, update_only=True )
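        # With update_only=True, only parameters actually present in kwd are merged into
        # the existing state; all other inputs keep their current or default values.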
        datasets = []
        dataset_upload_inputs = []
        for input_name, input in tool.inputs.iteritems():
            if input.type == "upload_dataset":
                dataset_upload_inputs.append( input )
        assert dataset_upload_inputs, "No dataset upload groups were found."
        for dataset_upload_input in dataset_upload_inputs:
            d_type = dataset_upload_input.get_datatype( trans, kwd )
            if d_type.composite_type is not None:
                datasets.append( create_dataset( dataset_upload_input.get_composite_dataset_name( kwd ) ) )
            else:
                params = Bunch( **tool_state.inputs[ dataset_upload_input.name ][0] )
                if params.file_data not in [ None, "" ]:
                    name = params.file_data
                    if name.count( '/' ):
                        name = name.rsplit( '/', 1 )[1]
                    if name.count( '\\' ):
                        name = name.rsplit( '\\', 1 )[1]
                    datasets.append( create_dataset( name ) )
                if params.url_paste not in [ None, "" ]:
                    url_paste = params.url_paste.replace( '\r', '' ).split( '\n' )
                    url = False
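                    # The url flag records whether a URL line has been seen: every URL
                    # becomes its own dataset, while a paste containing no URLs becomes
                    # a single 'Pasted Entry' dataset (non-URL lines after a URL are skipped).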
                    for line in url_paste:
                        line = line.rstrip( '\r\n' ).strip()
                        if not line:
                            continue
                        elif line.lower().startswith( 'http://' ) or line.lower().startswith( 'ftp://' ) or line.lower().startswith( 'https://' ):
                            url = True
                            datasets.append( create_dataset( line ) )
                        else:
                            if url:
                                continue # non-url when we've already processed some urls
                            else:
                                # pasted data
                                datasets.append( create_dataset( 'Pasted Entry' ) )
                                break
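        # The JSON response is the list of precreated dataset ids, e.g. [ 101, 102 ]
        # (illustrative ids), or [ 'error', msg ] if role derivation failed above.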
        return [ d.id for d in datasets ]

    @web.expose
    def upload_async_message( self, trans, **kwd ):
        # Might be more appropriate in a different controller
        msg = """<p>Your upload has been queued. History entries that are still uploading will be blue, and turn green upon completion.</p>
<p><b>Please do not use your browser's "stop" or "reload" buttons until the upload is complete, or it may be interrupted.</b></p>
<p>You may safely continue to use Galaxy while the upload is in progress. Using "stop" and "reload" on pages other than Galaxy is also safe.</p>
"""
        return trans.show_message( msg, refresh_frames='history' )