
/lib/galaxy/web/controllers/dataset.py

https://bitbucket.org/cistrome/cistrome-harvard/
import logging, os, string, shutil, re, socket, mimetypes, urllib, tempfile, zipfile, glob, sys

from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy import util, datatypes, jobs, web, model
from cgi import escape, FieldStorage
from galaxy.datatypes.display_applications.util import encode_dataset_user, decode_dataset_user
from galaxy.util.sanitize_html import sanitize_html
from galaxy.util import inflector
from galaxy.model.item_attrs import *
from galaxy.model import LibraryDatasetDatasetAssociation, HistoryDatasetAssociation
from galaxy.web.framework.helpers import to_unicode

import pkg_resources
pkg_resources.require( "Paste" )
import paste.httpexceptions

# The logger must exist before the zip-capability probe below, whose except
# clauses log through it.
log = logging.getLogger( __name__ )

if sys.version_info[:2] < ( 2, 6 ):
    zipfile.BadZipFile = zipfile.error
if sys.version_info[:2] < ( 2, 5 ):
    zipfile.LargeZipFile = zipfile.error
# Probe this Python's zipfile module for ZIP64 support (archives > 2GB).
tmpd = tempfile.mkdtemp()
comptypes = []
ziptype = '32'
tmpf = os.path.join( tmpd, 'compression_test.zip' )
try:
    archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
    archive.close()
    comptypes.append( 'zip' )
    ziptype = '64'
except RuntimeError:
    log.exception( "Compression error when testing zip compression. This option will be disabled for library downloads." )
except ( TypeError, zipfile.LargeZipFile ):
    # ZIP64 is only in Python 2.5+. Remove TypeError when 2.4 support is dropped.
    log.warning( 'Max zip file size is 2GB, ZIP64 not supported' )
    comptypes.append( 'zip' )
try:
    os.unlink( tmpf )
except OSError:
    pass
os.rmdir( tmpd )
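# A note on the probe above (our observation, not original to this file): on
# pre-2.5 Pythons ZipFile() has no allowZip64 argument, so passing True as the
# fourth positional argument raises TypeError and ziptype stays '32'; on 2.5+
# the call succeeds and ziptype becomes '64'.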
error_report_template = """
GALAXY TOOL ERROR REPORT
------------------------
This error report was sent from the Galaxy instance hosted on the server
"${host}"
-----------------------------------------------------------------------------
This is in reference to dataset id ${dataset_id} from history id ${history_id}
-----------------------------------------------------------------------------
You should be able to view the history containing the related history item
${hid}: ${history_item_name}
by logging in as a Galaxy admin user to the Galaxy instance referenced above
and pointing your browser to the following link.
${history_view_link}
-----------------------------------------------------------------------------
The user '${email}' provided the following information:
${message}
-----------------------------------------------------------------------------
job id: ${job_id}
tool id: ${job_tool_id}
-----------------------------------------------------------------------------
job command line:
${job_command_line}
-----------------------------------------------------------------------------
job stderr:
${job_stderr}
-----------------------------------------------------------------------------
job stdout:
${job_stdout}
-----------------------------------------------------------------------------
job info:
${job_info}
-----------------------------------------------------------------------------
job traceback:
${job_traceback}
-----------------------------------------------------------------------------
(This is an automated message).
"""
class HistoryDatasetAssociationListGrid( grids.Grid ):
    # Custom columns for grid.
    class HistoryColumn( grids.GridColumn ):
        def get_value( self, trans, grid, hda ):
            return hda.history.name
    class StatusColumn( grids.GridColumn ):
        def get_value( self, trans, grid, hda ):
            if hda.deleted:
                return "deleted"
            return ""
        def get_accepted_filters( self ):
            """ Returns a list of accepted filters for this column. """
            accepted_filter_labels_and_vals = { "Active" : "False", "Deleted" : "True", "All": "All" }
            accepted_filters = []
            for label, val in accepted_filter_labels_and_vals.items():
                args = { self.key: val }
                accepted_filters.append( grids.GridColumnFilter( label, args ) )
            return accepted_filters
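        # For example, since this column is keyed on "deleted" (see the grid
        # definition below), the filters produced here carry args of
        # { "deleted": "False" }, { "deleted": "True" } and { "deleted": "All" }.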
    # Grid definition
    title = "Saved Datasets"
    model_class = model.HistoryDatasetAssociation
    template = '/dataset/grid.mako'
    default_sort_key = "-update_time"
    columns = [
        grids.TextColumn( "Name", key="name",
                          # Link name to dataset's history.
                          link=( lambda item: iff( item.history.deleted, None, dict( operation="switch", id=item.id ) ) ),
                          filterable="advanced", attach_popup=True ),
        HistoryColumn( "History", key="history",
                       link=( lambda item: iff( item.history.deleted, None, dict( operation="switch_history", id=item.id ) ) ) ),
        grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.HistoryDatasetAssociationTagAssociation, filterable="advanced", grid_name="HistoryDatasetAssocationListGrid" ),
        StatusColumn( "Status", key="deleted", attach_popup=False ),
        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
    ]
    columns.append(
        grids.MulticolFilterColumn(
            "Search",
            cols_to_filter=[ columns[0], columns[2] ],
            key="free-text-search", visible=False, filterable="standard" )
    )
    operations = [
        grids.GridOperation( "Copy to current history", condition=( lambda item: not item.deleted ), async_compatible=False ),
    ]
    standard_filters = []
    default_filter = dict( name="All", deleted="False", tags="All" )
    preserve_state = False
    use_paging = True
    num_rows_per_page = 50
    def build_initial_query( self, trans, **kwargs ):
        # Show user's datasets that are not deleted, not in deleted histories, and not hidden.
        # To filter HDAs by user, need to join model class/HDA and History table so that it is
        # possible to filter by user. However, for dictionary-based filtering to work, need a
        # primary table for the query.
        return trans.sa_session.query( self.model_class ).select_from( self.model_class.table.join( model.History.table ) ) \
                .filter( model.History.user == trans.user ) \
                .filter( self.model_class.deleted == False ) \
                .filter( model.History.deleted == False ) \
                .filter( self.model_class.visible == True )
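    # Roughly the SQL this query builds (table and column names illustrative,
    # not taken from this file):
    #   SELECT hda.* FROM history_dataset_association hda
    #   JOIN history h ON hda.history_id = h.id
    #   WHERE h.user_id = :user_id AND NOT hda.deleted
    #     AND NOT h.deleted AND hda.visible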
class DatasetInterface( BaseUIController, UsesAnnotations, UsesHistory, UsesHistoryDatasetAssociation, UsesItemRatings ):

    stored_list_grid = HistoryDatasetAssociationListGrid()

    @web.expose
    def errors( self, trans, id ):
        hda = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id )
        return trans.fill_template( "dataset/errors.mako", hda=hda )

    @web.expose
    def stderr( self, trans, id ):
        dataset = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id )
        job = dataset.creating_job_associations[0].job
        trans.response.set_content_type( 'text/plain' )
        return job.stderr

    @web.expose
    def report_error( self, trans, id, email='', message="" ):
        smtp_server = trans.app.config.smtp_server
        if smtp_server is None:
            return trans.show_error_message( "Mail is not configured for this Galaxy instance" )
        to_address = trans.app.config.error_email_to
        if to_address is None:
            return trans.show_error_message( "Error reporting has been disabled for this Galaxy instance" )
        # Get the dataset and associated job
        hda = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id )
        job = hda.creating_job_associations[0].job
        # Get the name of the server hosting the Galaxy instance from which this report originated
        host = trans.request.host
        history_view_link = "%s/ap/history/view?id=%s" % ( str( host ), trans.security.encode_id( hda.history_id ) )
        # Build the email message
        body = string.Template( error_report_template ) \
            .safe_substitute( host=host,
                              dataset_id=hda.dataset_id,
                              history_id=hda.history_id,
                              hid=hda.hid,
                              history_item_name=hda.get_display_name(),
                              history_view_link=history_view_link,
                              job_id=job.id,
                              job_tool_id=job.tool_id,
                              job_command_line=job.command_line,
                              job_stderr=job.stderr,
                              job_stdout=job.stdout,
                              job_info=job.info,
                              job_traceback=job.traceback,
                              email=email,
                              message=message )
        frm = to_address
        # Check the user-supplied email a bit: only accept it if it is a
        # single, non-empty token.
        email = email.strip()
        parts = email.split()
        if len( parts ) == 1 and len( email ) > 0:
            to = to_address + ", " + email
        else:
            to = to_address
        subject = "Galaxy tool error report from " + email
        # Send it
        try:
            util.send_mail( frm, to, subject, body, trans.app.config )
            return trans.show_ok_message( "Your error report has been sent" )
        except Exception, e:
            return trans.show_error_message( "An error occurred sending the report by email: %s" % str( e ) )

    @web.expose
    def default( self, trans, dataset_id=None, **kwd ):
        return 'This link may not be followed from within Galaxy.'
    @web.expose
    def archive_composite_dataset( self, trans, data=None, **kwd ):
        # Save a composite object into a compressed archive for downloading
        params = util.Params( kwd )
        valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
        outfname = data.name[0:150]
        outfname = ''.join( c in valid_chars and c or '_' for c in outfname )
        if params.do_action is None:
            params.do_action = 'zip'  # default
        msg = util.restore_text( params.get( 'msg', '' ) )
        messagetype = params.get( 'messagetype', 'done' )
        if not data:
            msg = "You must select at least one dataset"
            messagetype = 'error'
        else:
            error = False
            try:
                if params.do_action == 'zip':
                    # Can't use mkstemp - the file must not exist first
                    tmpd = tempfile.mkdtemp()
                    tmpf = os.path.join( tmpd, 'library_download.' + params.do_action )
                    if ziptype == '64':
                        archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
                    else:
                        archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED )
                    archive.add = lambda x, y: archive.write( x, y.encode( 'CP437' ) )
                elif params.do_action == 'tgz':
                    archive = util.streamball.StreamBall( 'w|gz' )
                elif params.do_action == 'tbz':
                    archive = util.streamball.StreamBall( 'w|bz2' )
            except ( OSError, zipfile.BadZipFile ):
                error = True
                log.exception( "Unable to create archive for download" )
                msg = "Unable to create archive for %s for download, please report this error" % outfname
                messagetype = 'error'
            if not error:
                current_user_roles = trans.get_current_user_roles()
                ext = data.extension
                path = data.file_name
                fname = os.path.split( path )[-1]
                efp = data.extra_files_path
                htmlname = os.path.splitext( outfname )[0]
                if not htmlname.endswith( ext ):
                    htmlname = '%s_%s' % ( htmlname, ext )
                archname = '%s.html' % htmlname  # fake the real nature of the html file
                try:
                    archive.add( data.file_name, archname )
                except IOError:
                    error = True
                    log.exception( "Unable to add composite parent %s to temporary library download archive" % data.file_name )
                    msg = "Unable to create archive for download, please report this error"
                    messagetype = 'error'
                for root, dirs, files in os.walk( efp ):
                    for fname in files:
                        fpath = os.path.join( root, fname )
                        rpath = os.path.relpath( fpath, efp )
                        try:
                            archive.add( fpath, rpath )
                        except IOError:
                            error = True
                            log.exception( "Unable to add %s to temporary library download archive" % rpath )
                            msg = "Unable to create archive for download, please report this error"
                            messagetype = 'error'
                            continue
                if not error:
                    if params.do_action == 'zip':
                        archive.close()
                        tmpfh = open( tmpf )
                        # Clean up now
                        try:
                            os.unlink( tmpf )
                            os.rmdir( tmpd )
                        except OSError:
                            error = True
                            msg = "Unable to remove temporary library download archive and directory"
                            log.exception( msg )
                            messagetype = 'error'
                        if not error:
                            trans.response.set_content_type( "application/x-zip-compressed" )
                            trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.zip" % outfname
                            return tmpfh
                    else:
                        trans.response.set_content_type( "application/x-tar" )
                        outext = 'tgz'
                        if params.do_action == 'tbz':
                            outext = 'tbz'
                        trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % ( outfname, outext )
                        archive.wsgi_status = trans.response.wsgi_status()
                        archive.wsgi_headeritems = trans.response.wsgi_headeritems()
                        return archive.stream
        return trans.show_error_message( msg )
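    # The filename sanitizer used above (and in get_metadata_file/display
    # below) is plain Python, so its behaviour is easy to check, e.g.:
    #
    #   >>> valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
    #   >>> ''.join( c in valid_chars and c or '_' for c in 'my data!.txt' )
    #   'my_data_.txt'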
    @web.expose
    def get_metadata_file( self, trans, hda_id, metadata_name ):
        """ Allows the downloading of metadata files associated with datasets (e.g. the bai index for bam files) """
        data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( hda_id ) )
        if not data or not trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset ):
            return trans.show_error_message( "You are not allowed to access this dataset" )
        valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
        fname = ''.join( c in valid_chars and c or '_' for c in data.name )[0:150]
        file_ext = data.metadata.spec.get( metadata_name ).get( "file_ext", metadata_name )
        trans.response.headers["Content-Type"] = "application/octet-stream"
        trans.response.headers["Content-Disposition"] = "attachment; filename=Galaxy%s-[%s].%s" % ( data.hid, fname, file_ext )
        return open( data.metadata.get( metadata_name ).file_name )
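    # A sketch of a request this serves (URL shape and metadata name are
    # illustrative, not taken from this file):
    #
    #   GET /dataset/get_metadata_file?hda_id=<encoded id>&metadata_name=bam_index
    #
    # which streams the HDA's index file back with an attachment
    # Content-Disposition header built from the sanitized dataset name.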
    @web.expose
    def display( self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, **kwd ):
        """Catches the dataset id and displays file contents as directed"""
        composite_extensions = trans.app.datatypes_registry.get_composite_extensions()
        composite_extensions.append( 'html' )  # for archiving composite datatypes
        # DEPRECATION: We still support unencoded ids for backward compatibility
        try:
            data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
            if data is None:
                raise ValueError( 'Invalid reference dataset id: %s.' % dataset_id )
        except:
            try:
                data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( int( dataset_id ) )
            except:
                data = None
        if not data:
            raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
        if not trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset ):
            return trans.show_error_message( "You are not allowed to access this dataset" )
        if data.state == trans.model.Dataset.states.UPLOAD:
            return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to view it." )
        if filename and filename != "index":
            # For files in extra_files_path
            file_path = os.path.join( data.extra_files_path, filename )
            if os.path.exists( file_path ):
                if os.path.isdir( file_path ):
                    return trans.show_error_message( "Directory listing is not allowed." )  # TODO: Reconsider allowing listing of directories?
                mime, encoding = mimetypes.guess_type( file_path )
                if not mime:
                    try:
                        mime = trans.app.datatypes_registry.get_mimetype_by_extension( file_path.split( "." )[-1] )
                    except:
                        mime = "text/plain"
                trans.response.set_content_type( mime )
                return open( file_path )
            else:
                return trans.show_error_message( "Could not find '%s' on the extra files path %s." % ( filename, file_path ) )
        trans.response.set_content_type( data.get_mime() )
        trans.log_event( "Display dataset id: %s" % str( dataset_id ) )
        if to_ext or isinstance( data.datatype, datatypes.binary.Binary ):  # Saving the file, or binary file
            if data.extension in composite_extensions:
                return self.archive_composite_dataset( trans, data, **kwd )
            else:
                trans.response.headers['Content-Length'] = int( os.stat( data.file_name ).st_size )
                if not to_ext:
                    to_ext = data.extension
                valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
                fname = ''.join( c in valid_chars and c or '_' for c in data.name )[0:150]
                trans.response.headers["Content-Disposition"] = "attachment; filename=Galaxy%s-[%s].%s" % ( data.hid, fname, to_ext )
                return open( data.file_name )
        if not os.path.exists( data.file_name ):
            raise paste.httpexceptions.HTTPNotFound( "File Not Found (%s)." % data.file_name )
        max_peek_size = 1000000  # 1 MB
        if not preview or isinstance( data.datatype, datatypes.images.Image ) or os.stat( data.file_name ).st_size < max_peek_size:
            return open( data.file_name )
        else:
            trans.response.set_content_type( "text/html" )
            return trans.stream_template_mako( "/dataset/large_file.mako",
                                               truncated_data=open( data.file_name ).read( max_peek_size ),
                                               data=data )
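    # Preview behaviour, summarized (our reading of the branches above):
    # images and anything under max_peek_size (1 MB) stream whole; larger
    # text datasets render through large_file.mako with only the first
    # megabyte inlined as truncated_data.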
    @web.expose
    def edit( self, trans, dataset_id=None, filename=None, hid=None, **kwd ):
        """Allows user to modify parameters of an HDA."""
        message = None
        status = 'done'
        refresh_frames = []
        error = False

        def __ok_to_edit_metadata( dataset_id ):
            # Prevent modifying metadata when the dataset is queued or running as input/output.
            # This code could be more efficient, i.e. by using mappers, but to prevent slowing
            # down loading a History panel, we'll leave the code here for now.
            for job_to_dataset_association in trans.sa_session.query( self.app.model.JobToInputDatasetAssociation ) \
                                                              .filter_by( dataset_id=dataset_id ) \
                                                              .all() \
                                            + trans.sa_session.query( self.app.model.JobToOutputDatasetAssociation ) \
                                                              .filter_by( dataset_id=dataset_id ) \
                                                              .all():
                if job_to_dataset_association.job.state not in [ job_to_dataset_association.job.states.OK, job_to_dataset_association.job.states.ERROR, job_to_dataset_association.job.states.DELETED ]:
                    return False
            return True

        if hid is not None:
            history = trans.get_history()
            # TODO: hid handling
            data = history.datasets[ int( hid ) - 1 ]
            id = None
        elif dataset_id is not None:
            id = trans.app.security.decode_id( dataset_id )
            data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
        else:
            trans.log_event( "dataset_id and hid are both None, cannot load a dataset to edit" )
            return trans.show_error_message( "You must provide a history dataset id to edit" )
        if data is None:
            trans.log_event( "Problem retrieving dataset (encoded: %s, decoded: %s) with history id %s." % ( str( dataset_id ), str( id ), str( hid ) ) )
            return trans.show_error_message( "History dataset id is invalid" )
        if dataset_id is not None and data.history.user is not None and data.history.user != trans.user:
            trans.log_event( "User attempted to edit an HDA they do not own (encoded: %s, decoded: %s)" % ( dataset_id, id ) )
            # Do not reveal the dataset's existence
            return trans.show_error_message( "History dataset id is invalid" )
        current_user_roles = trans.get_current_user_roles()
        if data.history.user and not data.dataset.has_manage_permissions_roles( trans ):
            # Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time,
            # so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS
            # permission. In this case, we'll reset this permission to the hda user's private role.
            manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
            permissions = { manage_permissions_action : [ trans.app.security_agent.get_private_user_role( data.history.user ) ] }
            trans.app.security_agent.set_dataset_permission( data.dataset, permissions )
        if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
            if data.state == trans.model.Dataset.states.UPLOAD:
                return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to edit its metadata." )
            params = util.Params( kwd, sanitize=False )
            if params.change:
                # The user clicked the Save button on the 'Change data type' form
                if data.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
                    # Prevent modifying datatype when the dataset is queued or running as input/output
                    if not __ok_to_edit_metadata( data.id ):
                        message = "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them."
                        error = True
                    else:
                        trans.app.datatypes_registry.change_datatype( data, params.datatype, set_meta=not trans.app.config.set_metadata_externally )
                        trans.sa_session.flush()
                        if trans.app.config.set_metadata_externally:
                            trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming={ 'input1': data }, overwrite=False )  # overwrite is False as per existing behavior
                        message = "Changed the type of dataset '%s' to %s" % ( to_unicode( data.name ), params.datatype )
                        refresh_frames = ['history']
                else:
                    message = "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype )
                    error = True
            elif params.save:
                # The user clicked the Save button on the 'Edit Attributes' form
                data.name = params.name
                data.info = params.info
                message = ''
                if __ok_to_edit_metadata( data.id ):
                    # The following for loop will save all metadata_spec items
                    for name, spec in data.datatype.metadata_spec.items():
                        if spec.get( "readonly" ):
                            continue
                        optional = params.get( "is_" + name, None )
                        other = params.get( "or_" + name, None )
                        if optional and optional == 'true':
                            # optional element... == 'true' actually means it is NOT checked (and therefore omitted)
                            setattr( data.metadata, name, None )
                        else:
                            if other:
                                setattr( data.metadata, name, other )
                            else:
                                setattr( data.metadata, name, spec.unwrap( params.get( name, None ) ) )
                    data.datatype.after_setting_metadata( data )
                    # Sanitize annotation before adding it.
                    if params.annotation:
                        annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' )
                        self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation )
                    # If setting metadata previously failed and all required elements have now been set, clear the failed state.
                    if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta():
                        data._state = None
                    trans.sa_session.flush()
                    message = "Attributes updated%s" % message
                    refresh_frames = ['history']
                else:
                    trans.sa_session.flush()
                    message = "Attributes updated, but metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata."
                    status = "warning"
                    refresh_frames = ['history']
            elif params.detect:
                # The user clicked the Auto-detect button on the 'Edit Attributes' form
                # Prevent modifying metadata when the dataset is queued or running as input/output
                if not __ok_to_edit_metadata( data.id ):
                    message = "This dataset is currently being used as input or output. You cannot change metadata until the jobs have completed or you have canceled them."
                    error = True
                else:
                    for name, spec in data.metadata.spec.items():
                        # We need to be careful about the attributes we are resetting
                        if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
                            if spec.get( 'default' ):
                                setattr( data.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
                    if trans.app.config.set_metadata_externally:
                        message = 'Attributes have been queued to be updated'
                        trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming={ 'input1': data } )
                    else:
                        message = 'Attributes updated'
                        data.set_meta()
                        data.datatype.after_setting_metadata( data )
                    trans.sa_session.flush()
                    refresh_frames = ['history']
            elif params.convert_data:
                target_type = kwd.get( "target_type", None )
                if target_type:
                    message = data.datatype.convert_dataset( trans, data, target_type )
                    refresh_frames = ['history']
            elif params.update_roles_button:
                if not trans.user:
                    return trans.show_error_message( "You must be logged in if you want to change permissions." )
                if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ):
                    access_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action )
                    manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
                    # The user associated the DATASET_ACCESS permission on the dataset with 1 or more roles. We
                    # need to ensure that they did not associate roles that would cause accessibility problems.
                    permissions, in_roles, error, message = \
                        trans.app.security_agent.derive_roles_from_access( trans, data.dataset.id, 'root', **kwd )
                    if error:
                        # Keep the original role associations for the DATASET_ACCESS permission on the dataset.
                        permissions[ access_action ] = data.dataset.get_access_roles( trans )
                        status = 'error'
                    else:
                        error = trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
                        if error:
                            message += error
                            status = 'error'
                        else:
                            message = 'Your changes completed successfully.'
                    trans.sa_session.refresh( data.dataset )
                else:
                    message = "You are not authorized to change this dataset's permissions"
                    error = True
            else:
                if "dbkey" in data.datatype.metadata_spec and not data.metadata.dbkey:
                    # Copy dbkey into metadata, for backwards compatibility.
                    # This looks like it does nothing, but getting the dbkey
                    # returns the metadata dbkey unless it is None, in which
                    # case it resorts to the old dbkey. Setting the dbkey
                    # sets it properly in the metadata.
                    #### This is likely no longer required, since the dbkey exists entirely within metadata (the old_dbkey field is gone): REMOVE ME?
                    data.metadata.dbkey = data.dbkey
            # Let's not overwrite the imported datatypes module with the variable datatypes?
            # The built-in 'id' is overwritten in lots of places as well.
            ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
            ldatatypes.sort()
            all_roles = trans.app.security_agent.get_legitimate_roles( trans, data.dataset, 'root' )
            if error:
                status = 'error'
            return trans.fill_template( "/dataset/edit_attributes.mako",
                                        data=data,
                                        data_annotation=self.get_item_annotation_str( trans.sa_session, trans.user, data ),
                                        datatypes=ldatatypes,
                                        current_user_roles=current_user_roles,
                                        all_roles=all_roles,
                                        message=message,
                                        status=status,
                                        dataset_id=dataset_id,
                                        refresh_frames=refresh_frames )
        else:
            return trans.show_error_message( "You do not have permission to edit this dataset's ( id: %s ) information." % str( dataset_id ) )
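    # A note on __ok_to_edit_metadata() above: it treats a dataset as locked
    # while any job that reads or writes it is in a non-terminal state; only
    # jobs in the OK, ERROR or DELETED states release the lock, which is why
    # both the datatype-change and auto-detect branches check it first.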
    @web.expose
    @web.require_login( "see all available datasets" )
    def list( self, trans, **kwargs ):
        """List all available datasets"""
        status = message = None
        if 'operation' in kwargs:
            operation = kwargs['operation'].lower()
            hda_ids = util.listify( kwargs.get( 'id', [] ) )
            # Display no message by default
            status, message = None, None
            # Load the HDAs and ensure they all belong to the current user
            hdas = []
            for encoded_hda_id in hda_ids:
                hda_id = trans.security.decode_id( encoded_hda_id )
                hda = trans.sa_session.query( model.HistoryDatasetAssociation ).filter_by( id=hda_id ).first()
                if hda:
                    # Ensure history is owned by current user
                    if hda.history.user_id is not None and trans.user:
                        assert trans.user.id == hda.history.user_id, "HistoryDatasetAssociation does not belong to current user"
                    hdas.append( hda )
                else:
                    log.warn( "Invalid history_dataset_association id '%r' passed to list", hda_id )
            if hdas:
                if operation == "switch" or operation == "switch_history":
                    # Switch to a history that the HDA resides in.
                    # Convert HDAs to histories.
                    histories = []
                    for hda in hdas:
                        histories.append( hda.history )
                    # Use history controller to switch the history. TODO: is this reasonable?
                    status, message = trans.webapp.controllers['history']._list_switch( trans, histories )
                    # Current history changed, refresh history frame; if switching to a dataset, set hda seek.
                    trans.template_context['refresh_frames'] = ['history']
                    if operation == "switch":
                        hda_ids = [ trans.security.encode_id( hda.id ) for hda in hdas ]
                        trans.template_context[ 'seek_hda_ids' ] = hda_ids
                elif operation == "copy to current history":
                    # Copy a dataset to the current history.
                    target_histories = [ trans.get_history() ]
                    status, message = self._copy_datasets( trans, hda_ids, target_histories )
                    # Current history changed, refresh history frame.
                    trans.template_context['refresh_frames'] = ['history']
        # Render the list view
        return self.stored_list_grid( trans, status=status, message=message, **kwargs )
    @web.expose
    def imp( self, trans, dataset_id=None, **kwd ):
        """ Import another user's dataset via a shared URL; the dataset is added to the user's current history. """
        msg = ""
        # Set referer message.
        referer = trans.request.referer
        if referer:
            referer_message = "<a href='%s'>return to the previous page</a>" % referer
        else:
            referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
        # Error checking.
        if not dataset_id:
            return trans.show_error_message( "You must specify a dataset to import. You can %s." % referer_message, use_panels=True )
        # Do import.
        cur_history = trans.get_history( create=True )
        status, message = self._copy_datasets( trans, [ dataset_id ], [ cur_history ], imported=True )
        message = "Dataset imported. <br>You can <a href='%s'>start using the dataset</a> or %s." % ( url_for( '/' ), referer_message )
        return trans.show_message( message, type=status, use_panels=True )
    @web.expose
    @web.json
    @web.require_login( "use Galaxy datasets" )
    def get_name_and_link_async( self, trans, id=None ):
        """ Returns dataset's name and link. """
        dataset = self.get_dataset( trans, id, False, True )
        return_dict = { "name": dataset.name, "link": url_for( action="display_by_username_and_slug", username=dataset.history.user.username, slug=trans.security.encode_id( dataset.id ) ) }
        return return_dict

    @web.expose
    def get_embed_html_async( self, trans, id ):
        """ Returns HTML for embedding a dataset in a page. """
        dataset = self.get_dataset( trans, id, False, True )
        if dataset:
            return "Embedded Dataset '%s'" % dataset.name

    @web.expose
    @web.require_login( "use Galaxy datasets" )
    def set_accessible_async( self, trans, id=None, accessible=False ):
        """ Does nothing because datasets do not have an importable/accessible attribute. This method could potentially set another attribute. """
        return

    @web.expose
    @web.require_login( "rate items" )
    @web.json
    def rate_async( self, trans, id, rating ):
        """ Rate a dataset asynchronously and return updated community data. """
        dataset = self.get_dataset( trans, id, check_ownership=False, check_accessible=True )
        if not dataset:
            return trans.show_error_message( "The specified dataset does not exist." )
        # Rate dataset.
        dataset_rating = self.rate_item( trans.sa_session, trans.get_user(), dataset, rating )
        return self.get_ave_item_rating_data( trans.sa_session, dataset )
    @web.expose
    def display_by_username_and_slug( self, trans, username, slug, preview=True ):
        """ Display dataset by username and slug; because datasets do not yet have slugs, the slug is the dataset's id. """
        dataset = self.get_dataset( trans, slug, False, True )
        if dataset:
            truncated, dataset_data = self.get_data( dataset, preview )
            dataset.annotation = self.get_item_annotation_str( trans.sa_session, dataset.history.user, dataset )
            # If data is binary or an image, stream without template; otherwise, use display template.
            # TODO: figure out a way to display images in display template.
            if isinstance( dataset.datatype, datatypes.binary.Binary ) or isinstance( dataset.datatype, datatypes.images.Image ) or isinstance( dataset.datatype, datatypes.images.Html ):
                trans.response.set_content_type( dataset.get_mime() )
                return open( dataset.file_name )
            else:
                # Get rating data.
                user_item_rating = 0
                if trans.get_user():
                    user_item_rating = self.get_user_item_rating( trans.sa_session, trans.get_user(), dataset )
                    if user_item_rating:
                        user_item_rating = user_item_rating.rating
                    else:
                        user_item_rating = 0
                ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, dataset )
                return trans.fill_template_mako( "/dataset/display.mako", item=dataset, item_data=dataset_data, truncated=truncated,
                                                 user_item_rating=user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings )
        else:
            raise web.httpexceptions.HTTPNotFound()
    @web.expose
    def get_item_content_async( self, trans, id ):
        """ Returns item content in HTML format. """
        dataset = self.get_dataset( trans, id, False, True )
        if dataset is None:
            raise web.httpexceptions.HTTPNotFound()
        truncated, dataset_data = self.get_data( dataset, preview=True )
        # Get annotation.
        dataset.annotation = self.get_item_annotation_str( trans.sa_session, trans.user, dataset )
        return trans.stream_template_mako( "/dataset/item_content.mako", item=dataset, item_data=dataset_data, truncated=truncated )

    @web.expose
    def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
        dataset = self.get_dataset( trans, id, False, True )
        if not dataset:
            raise web.httpexceptions.HTTPNotFound()
        if dataset and new_annotation:
            # Sanitize annotation before adding it.
            new_annotation = sanitize_html( new_annotation, 'utf-8', 'text/html' )
            self.add_item_annotation( trans.sa_session, trans.get_user(), dataset, new_annotation )
            trans.sa_session.flush()
            return new_annotation

    @web.expose
    def get_annotation_async( self, trans, id ):
        dataset = self.get_dataset( trans, id, False, True )
        if not dataset:
            raise web.httpexceptions.HTTPNotFound()
        return self.get_item_annotation_str( trans.sa_session, trans.user, dataset )
    @web.expose
    def display_at( self, trans, dataset_id, filename=None, **kwd ):
        """Sets up a dataset's permissions so it is viewable at an external site"""
        site = filename
        data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dataset_id )
        if not data:
            raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
        if 'display_url' not in kwd or 'redirect_url' not in kwd:
            return trans.show_error_message( 'Invalid parameters specified for "display at" link, please contact a Galaxy administrator' )
        try:
            redirect_url = kwd['redirect_url'] % urllib.quote_plus( kwd['display_url'] )
        except:
            redirect_url = kwd['redirect_url']  # not all will need custom text
        current_user_roles = trans.get_current_user_roles()
        if trans.app.security_agent.dataset_is_public( data.dataset ):
            return trans.response.send_redirect( redirect_url )  # anon access already permitted by rbac
        if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
            trans.app.host_security_agent.set_dataset_permissions( data, trans.user, site )
            return trans.response.send_redirect( redirect_url )
        else:
            return trans.show_error_message( "You are not allowed to view this dataset at external sites. Please contact your Galaxy administrator to acquire management permissions for this dataset." )
    @web.expose
    def display_application( self, trans, dataset_id=None, user_id=None, app_name=None, link_name=None, app_action=None, action_param=None, **kwds ):
        """Access to external display applications"""
        if kwds:
            log.debug( "Unexpected keywords passed to display_application: %s" % kwds )  # route memory?
        # Decode ids
        data, user = decode_dataset_user( trans, dataset_id, user_id )
        if not data:
            raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
        if user is None:
            user = trans.user
        if user:
            user_roles = user.all_roles()
        else:
            user_roles = []
        if None in [ app_name, link_name ]:
            return trans.show_error_message( "A display application name and link name must be provided." )
        if trans.app.security_agent.can_access_dataset( user_roles, data.dataset ):
            msg = []
            refresh = False
            display_app = trans.app.datatypes_registry.display_applications.get( app_name )
            assert display_app, "Unknown display application has been requested: %s" % app_name
            dataset_hash, user_hash = encode_dataset_user( trans, data, user )
            display_link = display_app.get_link( link_name, data, dataset_hash, user_hash, trans )
            assert display_link, "Unknown display link has been requested: %s" % link_name
            if data.state == data.states.ERROR:
                msg.append( ( 'This dataset is in an error state, you cannot view it at an external display application.', 'error' ) )
            elif data.deleted:
                msg.append( ( 'This dataset has been deleted, you cannot view it at an external display application.', 'error' ) )
            elif data.state != data.states.OK:
                msg.append( ( 'You must wait for this dataset to be created before you can view it at an external display application.', 'info' ) )
                refresh = True
            else:
                # We have permissions, the dataset is not deleted and is in the OK state, allow access
                if display_link.display_ready():
                    if app_action in [ 'data', 'param' ]:
                        assert action_param, "An action param must be provided for a data or param action"
                        # 'data' is used for things with filenames that could be passed off to a proxy,
                        # in case some display app wants all files to be in the same 'directory';
                        # 'data' can be forced to 'param', but not the other way (no filename for the other direction).
                        # Get the param name from the url param name.
                        action_param = display_link.get_param_name_by_url( action_param )
                        value = display_link.get_param_value( action_param )
                        assert value, "An invalid parameter name was provided: %s" % action_param
                        assert value.parameter.viewable, "This parameter is not viewable."
                        if value.parameter.type == 'data':
                            content_length = os.path.getsize( value.file_name )
                            rval = open( value.file_name )
                        else:
                            rval = str( value )
                            content_length = len( rval )
                        trans.response.set_content_type( value.mime_type() )
                        trans.response.headers[ 'Content-Length' ] = content_length
                        return rval
                    elif app_action is None:
                        # Redirect user to the url generated by the display link
                        return trans.response.send_redirect( display_link.display_url() )
                    else:
                        msg.append( ( 'Invalid action provided: %s' % app_action, 'error' ) )
                else:
                    if app_action is None:
                        if trans.history != data.history:
                            msg.append( ( 'You must import this dataset into your current history before you can view it at the desired display application.', 'error' ) )
                        else:
                            refresh = True
                            msg.append( ( 'This display application is being prepared.', 'info' ) )
                            if not display_link.preparing_display():
                                display_link.prepare_display()
                    else:
                        raise Exception( 'Attempted a view action (%s) on a non-ready display application' % app_action )
            return trans.fill_template_mako( "dataset/display_application/display.mako", msg=msg, display_app=display_app, display_link=display_link, refresh=refresh )
        return trans.show_error_message( 'You do not have permission to view this dataset at an external display application.' )
    def _delete( self, trans, dataset_id ):
        message = None
        status = 'done'
        id = None
        try:
            id = trans.app.security.decode_id( dataset_id )
            history = trans.get_history()
            hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
            assert hda, 'Invalid HDA: %s' % id
            # Walk up parent datasets to find the containing history
            topmost_parent = hda
            while topmost_parent.parent:
                topmost_parent = topmost_parent.parent
            assert topmost_parent in history.datasets, "Data does not belong to current history"
            # Mark deleted and cleanup
            hda.mark_deleted()
            hda.clear_associated_files()
            trans.log_event( "Dataset id %s marked as deleted" % str( id ) )
            if hda.parent_id is None and len( hda.creating_job_associations ) > 0:
                # Mark associated job for deletion
                job = hda.creating_job_associations[0].job
                if job.state in [ self.app.model.Job.states.QUEUED, self.app.model.Job.states.RUNNING, self.app.model.Job.states.NEW ]:
                    # Are *all* of the job's other output datasets deleted?
                    if job.check_if_output_datasets_deleted():
                        job.mark_deleted( self.app.config.get_bool( 'enable_job_running', True ),
                                          self.app.config.get_bool( 'track_jobs_in_database', False ) )
                        self.app.job_manager.job_stop_queue.put( job.id )
            trans.sa_session.flush()
        except Exception, e:
            msg = 'HDA deletion failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
            log.exception( msg )
            trans.log_event( msg )
            message = 'Dataset deletion failed'
            status = 'error'
        return ( message, status )
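    # The _delete helper above and the _undelete/_purge helpers below share a
    # small protocol: each returns a ( message, status ) tuple where status is
    # 'done' on success and 'error' on failure, and the @web.expose wrappers
    # further down turn that tuple into either a redirect or an exception.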
    def _undelete( self, trans, dataset_id ):
        message = None
        status = 'done'
        id = None
        try:
            id = trans.app.security.decode_id( dataset_id )
            history = trans.get_history()
            hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
            assert hda and hda.undeletable, 'Invalid HDA: %s' % id
            # Walk up parent datasets to find the containing history
            topmost_parent = hda
            while topmost_parent.parent:
                topmost_parent = topmost_parent.parent
            assert topmost_parent in history.datasets, "Data does not belong to current history"
            # Mark undeleted
            hda.mark_undeleted()
            trans.sa_session.flush()
            trans.log_event( "Dataset id %s has been undeleted" % str( id ) )
        except Exception, e:
            msg = 'HDA undeletion failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
            log.exception( msg )
            trans.log_event( msg )
            message = 'Dataset undeletion failed'
            status = 'error'
        return ( message, status )

    def _unhide( self, trans, dataset_id ):
        try:
            id = trans.app.security.decode_id( dataset_id )
        except:
            return False
        history = trans.get_history()
        hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
        if hda:
            # Walk up parent datasets to find the containing history
            topmost_parent = hda
            while topmost_parent.parent:
                topmost_parent = topmost_parent.parent
            assert topmost_parent in history.datasets, "Data does not belong to current history"
            # Mark unhidden
            hda.mark_unhidden()
            trans.sa_session.flush()
            trans.log_event( "Dataset id %s has been unhidden" % str( id ) )
            return True
        return False
    def _purge( self, trans, dataset_id ):
        message = None
        status = 'done'
        try:
            id = trans.app.security.decode_id( dataset_id )
            history = trans.get_history()
            user = trans.get_user()
            hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
            # Invalid HDA
            assert hda, 'Invalid history dataset ID'
            # Walk up parent datasets to find the containing history
            topmost_parent = hda
            while topmost_parent.parent:
                topmost_parent = topmost_parent.parent
            assert topmost_parent in history.datasets, "Data does not belong to current history"
            # If the user is anonymous, make sure the HDA is owned by the current session.
            if not user:
                assert trans.galaxy_session.current_history_id == trans.history.id, 'Invalid history dataset ID'
            # If the user is known, make sure the HDA is owned by the current user.
            else:
                assert topmost_parent.history.user == trans.user, 'Invalid history dataset ID'
            # The HDA must already be marked deleted before it can be purged
            assert hda.deleted, 'History dataset is not marked as deleted'
            # HDA is purgeable
            # Decrease disk usage first
            if user:
                user.total_disk_usage -= hda.quota_amount( user )
            # Mark purged
            hda.purged = True
            trans.sa_session.add( hda )
            trans.log_event( "HDA id %s has been purged" % hda.id )
            trans.sa_session.flush()
            # Don't delete anything if there are active HDAs or any LDDAs, even if
            # the LDDAs are deleted. Let the cleanup scripts get it in the latter
            # case.
            if hda.dataset.user_can_purge:
                try:
                    hda.dataset.full_delete()
                    trans.log_event( "Dataset id %s has been purged upon the purge of HDA id %s" % ( hda.dataset.id, hda.id ) )
                    trans.sa_session.add( hda.dataset )
                except:
                    log.exception( 'Unable to purge dataset (%s) on purge of HDA (%s):' % ( hda.dataset.id, hda.id ) )
            trans.sa_session.flush()
        except Exception, e:
            msg = 'HDA purge failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
            log.exception( msg )
            trans.log_event( msg )
            message = 'Dataset removal from disk failed'
            status = 'error'
        return ( message, status )
    @web.expose
    def delete( self, trans, dataset_id, filename, show_deleted_on_refresh=False ):
        message, status = self._delete( trans, dataset_id )
        return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=show_deleted_on_refresh, message=message, status=status ) )

    @web.expose
    def delete_async( self, trans, dataset_id, filename ):
        message, status = self._delete( trans, dataset_id )
        if status == 'done':
            return "OK"
        else:
            raise Exception( message )

    @web.expose
    def undelete( self, trans, dataset_id, filename ):
        message, status = self._undelete( trans, dataset_id )
        return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=True, message=message, status=status ) )

    @web.expose
    def undelete_async( self, trans, dataset_id, filename ):
        message, status = self._undelete( trans, dataset_id )
        if status == 'done':
            return "OK"
        else:
            raise Exception( message )

    @web.expose
    def unhide( self, trans, dataset_id, filename ):
        if self._unhide( trans, dataset_id ):
            return trans.response.send_redirect( web.url_for( controller='root', action='history', show_hidden=True ) )
        raise Exception( "Error unhiding" )

    @web.expose
    def purge( self, trans, dataset_id, filename, show_deleted_on_refresh=False ):
        if trans.app.config.allow_user_dataset_purge:
            message, status = self._purge( trans, dataset_id )
        else:
            message = "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator."
            status = 'error'
        return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=show_deleted_on_refresh, message=message, status=status ) )

    @web.expose
    def purge_async( self, trans, dataset_id, filename ):
        if trans.app.config.allow_user_dataset_purge:
            message, status = self._purge( trans, dataset_id )
        else:
            message = "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator."
            status = 'error'
        if status == 'done':
            return "OK"
        else:
            raise Exception( message )
    @web.expose
    def show_params( self, trans, dataset_id=None, from_noframe=None, **kwd ):
        """
        Show the parameters used for an HDA
        """
        def source_dataset_chain( dataset, lst ):
            try:
                cp_from_ldda = dataset.copied_from_library_dataset_dataset_association
                cp_from_hda = dataset.copied_from_history_dataset_association
                if cp_from_ldda:
                    lst.append( ( cp_from_ldda, "(Data Library)" ) )
                    return source_dataset_chain( cp_from_ldda, lst )
                elif cp_from_hda:
                    lst.append( ( cp_from_hda, cp_from_hda.history.name ) )
                    return source_dataset_chain( cp_from_hda, lst )
            except:
                pass
            return lst

        hda = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
        if not hda:
            raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
        if not trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), hda.dataset ):
            return trans.show_error_message( "You are not allowed to access this dataset" )
        # Get the associated job, if any. If this HDA was copied from another,
        # we need to find the job that created the original HDA.
        params_objects = None
        tool = None
        job_hda = hda
        while job_hda.copied_from_history_dataset_association:
            job_hda = job_hda.copied_from_history_dataset_association
        if job_hda.creating_job_associations:
            job = None
            for assoc in job_hda.creating_job_associations:
                job = assoc.job
                break
            if job:
                # Get the tool object
                try:
                    # Load the tool
                    toolbox = self.get_toolbox()
                    tool = toolbox.tools_by_id.get( job.tool_id, None )
                    assert tool is not None, 'Requested tool has not been loaded.'
                    params_objects = job.get_param_values( trans.app )
                except:
                    pass
        inherit_chain = source_dataset_chain( hda, [] )
        return trans.fill_template( "show_params.mako", inherit_chain=inherit_chain, history=trans.get_history(), hda=hda, tool=tool, params_objects=params_objects )
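    # source_dataset_chain() walks the copied_from_* links upward, so for an
    # HDA copied from another history that was itself copied from a data
    # library it returns something like (names illustrative):
    #   [ ( hda_in_other_history, "Some history" ), ( ldda, "(Data Library)" ) ]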
    @web.expose
    def copy_datasets( self, trans, source_history=None, source_dataset_ids="", target_history_id=None, target_history_ids="", new_history_name="", do_copy=False, **kwd ):
        params = util.Params( kwd )
        user = trans.get_user()
        if source_history is not None:
            history = self.get_history( trans, source_history )
        else:
            history = trans.get_history()
        refresh_frames = []
        if source_dataset_ids:
            if not isinstance( source_dataset_ids, list ):
                source_dataset_ids = source_dataset_ids.split( "," )
            source_dataset_ids = set( map( trans.security.decode_id, source_dataset_ids ) )
        else:
            source_dataset_ids = []
        if target_history_id:
            target_history_ids = [ trans.security.decode_id( target_history_id ) ]
        elif target_history_ids:
            if not isinstance( target_history_ids, list ):
                target_history_ids = target_history_ids.split( "," )
            # Deduplicate but keep a list, since a new history's id may be appended below.
            target_history_ids = list( set( [ trans.security.decode_id( h ) for h in target_history_ids if h ] ) )
        else:
            target_history_ids = []
        done_msg = error_msg = ""
        new_history = None
        if do_copy:
            invalid_datasets = 0
            if not source_dataset_ids or not ( target_history_ids or new_history_name ):
                error_msg = "You must provide both source datasets and target histories. "
            else:
                if new_history_name:
                    new_history = trans.app.model.History()
                    new_history.name = new_history_name
                    new_history.user = user
                    trans.sa_session.add( new_history )
                    trans.sa_session.flush()
                    target_history_ids.append( new_history.id )
                if user:
                    target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if ( hist is not None and hist.user == user ) ]
                else:
                    target_histories = [ history ]
                if len( target_histories ) != len( target_history_ids ):
                    error_msg = error_msg + "You do not have permission to add datasets to %i requested histories. " % ( len( target_history_ids ) - len( target_histories ) )
                source_hdas = map( trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get, source_dataset_ids )
                source_hdas.sort( key=lambda hda: hda.hid )
                for hda in source_hdas:
                    if hda is None:
                        error_msg = error_msg + "You tried to copy a dataset that does not exist. "
                        invalid_datasets += 1
                    elif hda.history != history:
                        error_msg = error_msg + "You tried to copy a dataset which is not in your current history. "
                        invalid_datasets += 1
                    else:
                        for hist in target_histories:
                            hist.add_dataset( hda.copy( copy_children=True ) )
                if history in target_histories:
                    refresh_frames = ['history']
                trans.sa_session.flush()
                hist_names_str = ", ".join( [ hist.name for hist in target_histories ] )
                num_source = len( source_dataset_ids ) - invalid_datasets
                num_target = len( target_histories )
                done_msg = "%i %s copied to %i %s: %s." % ( num_source, inflector.cond_plural( num_source, "dataset" ), num_target, inflector.cond_plural( num_target, "history" ), hist_names_str )
                if new_history is not None:
                    done_msg += " <a href=\"%s\" target=\"_top\">Switch to the new history.</a>" % url_for(
                        controller="history", action="switch_to_history", hist_id=trans.security.encode_id( new_history.id ) )
                trans.sa_session.refresh( history )
        source_datasets = history.visible_datasets
        target_histories = [ history ]
        if user:
            target_histories = user.active_histories
        return trans.fill_template( "/dataset/copy_view.mako",
                                    source_history=history,
                                    current_history=trans.get_history(),
                                    source_dataset_ids=source_dataset_ids,
                                    target_history_id=target_history_id,
                                    target_history_ids=target_history_ids,
                                    source_datasets=source_datasets,
                                    target_histories=target_histories,
                                    new_history_name=new_history_name,
                                    done_msg=done_msg,
                                    error_msg=error_msg,
                                    refresh_frames=refresh_frames )
    def _copy_datasets( self, trans, dataset_ids, target_histories, imported=False ):
        """ Helper method for copying datasets. """
        user = trans.get_user()
        done_msg = error_msg = ""
        invalid_datasets = 0
        if not dataset_ids or not target_histories:
            error_msg = "You must provide both source datasets and target histories."
        else:
            # User must own target histories to copy datasets to them.
            for history in target_histories:
                if user