PageRenderTime 35ms CodeModel.GetById 17ms RepoModel.GetById 0ms app.codeStats 1ms

/lib/galaxy/webapps/community/controllers/common.py

https://bitbucket.org/cistrome/cistrome-harvard/
Python | 610 lines | 608 code | 2 blank | 0 comment | 9 complexity | cc695534265982bd809e9a07df3dcb00 MD5 | raw file
import os, string, socket, logging, tempfile
from time import strftime
from datetime import *
from galaxy.tools import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.web.base.controller import *
from galaxy.webapps.community import model
from galaxy.model.orm import *
from galaxy.model.item_attrs import UsesItemRatings
from mercurial import hg, ui, commands
log = logging.getLogger( __name__ )

# string.Template body for the e-mail sent to users who subscribed to update
# alerts for a repository; filled in by handle_email_alerts() below.
email_alert_template = """
GALAXY TOOL SHED REPOSITORY UPDATE ALERT
-----------------------------------------------------------------------------
You received this alert because you registered to receive email whenever
changes were made to the repository named "${repository_name}".
-----------------------------------------------------------------------------
Date of change: ${display_date}
Changed by: ${username}
Revision: ${revision}
Change description:
${description}
-----------------------------------------------------------------------------
This change alert was sent from the Galaxy tool shed hosted on the server
"${host}"
"""
# string.Template body for the "contact repository owner" message.
# NOTE(review): not referenced in this module - presumably substituted by a
# contact-owner controller action elsewhere; confirm before changing keys.
contact_owner_template = """
GALAXY TOOL SHED REPOSITORY MESSAGE
------------------------
The user '${username}' sent you the following message regarding your tool shed
repository named '${repository_name}'. You can respond by sending a reply to
the user's email address: ${email}.
-----------------------------------------------------------------------------
${message}
-----------------------------------------------------------------------------
This message was sent from the Galaxy Tool Shed instance hosted on the server
'${host}'
"""
# States for passing messages
SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
# User-facing messages for changesets flagged as malicious; the second form
# is the variant shown to users who have push access to the repository.
malicious_error = " This changeset cannot be downloaded because it potentially produces malicious behavior or contains inappropriate content."
malicious_error_can_push = " Correct this changeset as soon as possible, it potentially produces malicious behavior or contains inappropriate content."
  43. class ItemRatings( UsesItemRatings ):
  44. """Overrides rate_item method since we also allow for comments"""
  45. def rate_item( self, trans, user, item, rating, comment='' ):
  46. """ Rate an item. Return type is <item_class>RatingAssociation. """
  47. item_rating = self.get_user_item_rating( trans.sa_session, user, item, webapp_model=trans.model )
  48. if not item_rating:
  49. # User has not yet rated item; create rating.
  50. item_rating_assoc_class = self._get_item_rating_assoc_class( item, webapp_model=trans.model )
  51. item_rating = item_rating_assoc_class()
  52. item_rating.user = trans.user
  53. item_rating.set_item( item )
  54. item_rating.rating = rating
  55. item_rating.comment = comment
  56. trans.sa_session.add( item_rating )
  57. trans.sa_session.flush()
  58. elif item_rating.rating != rating or item_rating.comment != comment:
  59. # User has previously rated item; update rating.
  60. item_rating.rating = rating
  61. item_rating.comment = comment
  62. trans.sa_session.add( item_rating )
  63. trans.sa_session.flush()
  64. return item_rating
  65. ## ---- Utility methods -------------------------------------------------------
  66. def get_categories( trans ):
  67. """Get all categories from the database"""
  68. return trans.sa_session.query( trans.model.Category ) \
  69. .filter( trans.model.Category.table.c.deleted==False ) \
  70. .order_by( trans.model.Category.table.c.name ).all()
  71. def get_category( trans, id ):
  72. """Get a category from the database"""
  73. return trans.sa_session.query( trans.model.Category ).get( trans.security.decode_id( id ) )
  74. def get_repository( trans, id ):
  75. """Get a repository from the database via id"""
  76. return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
  77. def get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ):
  78. """Get metadata for a specified repository change set from the database"""
  79. return trans.sa_session.query( trans.model.RepositoryMetadata ) \
  80. .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ),
  81. trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
  82. .first()
  83. def get_repository_metadata_by_id( trans, id ):
  84. """Get repository metadata from the database"""
  85. return trans.sa_session.query( trans.model.RepositoryMetadata ).get( trans.security.decode_id( id ) )
  86. def get_revision_label( trans, repository, changeset_revision ):
  87. """
  88. Return a string consisting of the human read-able
  89. changeset rev and the changeset revision string.
  90. """
  91. repo = hg.repository( get_configured_ui(), repository.repo_path )
  92. ctx = get_changectx_for_changeset( trans, repo, changeset_revision )
  93. if ctx:
  94. return "%s:%s" % ( str( ctx.rev() ), changeset_revision )
  95. else:
  96. return "-1:%s" % changeset_revision
  97. def get_latest_repository_metadata( trans, id ):
  98. """Get last metadata defined for a specified repository from the database"""
  99. return trans.sa_session.query( trans.model.RepositoryMetadata ) \
  100. .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
  101. .order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \
  102. .first()
  103. def generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict ):
  104. """
  105. Update the received metadata_dict with changes that have been applied
  106. to the received exported_workflow_dict. Store everything except the
  107. workflow steps in the database.
  108. """
  109. workflow_dict = { 'a_galaxy_workflow' : exported_workflow_dict[ 'a_galaxy_workflow' ],
  110. 'name' :exported_workflow_dict[ 'name' ],
  111. 'annotation' : exported_workflow_dict[ 'annotation' ],
  112. 'format-version' : exported_workflow_dict[ 'format-version' ] }
  113. if 'workflows' in metadata_dict:
  114. metadata_dict[ 'workflows' ].append( workflow_dict )
  115. else:
  116. metadata_dict[ 'workflows' ] = [ workflow_dict ]
  117. return metadata_dict
  118. def new_workflow_metadata_required( trans, id, metadata_dict ):
  119. """
  120. TODO: Currently everything about an exported workflow except the name is hard-coded, so
  121. there's no real way to differentiate versions of exported workflows. If this changes at
  122. some future time, this method should be enhanced accordingly...
  123. """
  124. if 'workflows' in metadata_dict:
  125. repository_metadata = get_latest_repository_metadata( trans, id )
  126. if repository_metadata:
  127. if repository_metadata.metadata:
  128. # The repository has metadata, so update the workflows value - no new record is needed.
  129. return False
  130. else:
  131. # There is no saved repository metadata, so we need to create a new repository_metadata table record.
  132. return True
  133. # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table
  134. # record is not needed.
  135. return False
  136. def generate_clone_url( trans, repository_id ):
  137. repository = get_repository( trans, repository_id )
  138. protocol, base = trans.request.base.split( '://' )
  139. if trans.user:
  140. username = '%s@' % trans.user.username
  141. else:
  142. username = ''
  143. return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
  144. def generate_tool_guid( trans, repository, tool ):
  145. """
  146. Generate a guid for the received tool. The form of the guid is
  147. <tool shed host>/repos/<tool shed username>/<tool shed repo name>/<tool id>/<tool version>
  148. """
  149. return '%s/repos/%s/%s/%s/%s' % ( trans.request.host,
  150. repository.user.username,
  151. repository.name,
  152. tool.id,
  153. tool.version )
def check_tool_input_params( trans, name, tool, sample_files, invalid_files ):
    """
    Check all of the tool's input parameters, looking for any that are dynamically generated
    using external data files to make sure the files exist.

    Returns ( can_set_metadata, invalid_files ): can_set_metadata is False if any
    required .sample / tool_data_table entry is missing, and invalid_files is the
    received list extended with ( file name, correction message ) tuples.
    """
    can_set_metadata = True
    correction_msg = ''
    for input_param in tool.input_params:
        # Only dynamic select parameters can reference external data files.
        # NOTE(review): the 'galaxy' name presumably comes in via the
        # 'from galaxy.tools import *' at the top of this module - confirm.
        if isinstance( input_param, galaxy.tools.parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
            # If the tool refers to .loc files or requires an entry in the
            # tool_data_table_conf.xml, make sure all requirements exist.
            options = input_param.dynamic_options or input_param.options
            if options:
                if options.tool_data_table or options.missing_tool_data_table_name:
                    # Make sure the repository contains a tool_data_table_conf.xml.sample file.
                    sample_found = False
                    for sample_file in sample_files:
                        head, tail = os.path.split( sample_file )
                        if tail == 'tool_data_table_conf.xml.sample':
                            sample_found = True
                            error, correction_msg = handle_sample_tool_data_table_conf_file( trans, sample_file )
                            if error:
                                can_set_metadata = False
                                invalid_files.append( ( tail, correction_msg ) )
                            else:
                                options.missing_tool_data_table_name = None
                            break
                    if not sample_found:
                        can_set_metadata = False
                        correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. "
                        correction_msg += "Upload a file named tool_data_table_conf.xml.sample to the repository "
                        correction_msg += "that includes the required entry to resolve this issue.<br/>"
                        invalid_files.append( ( name, correction_msg ) )
                if options.index_file or options.missing_index_file:
                    # Make sure the repository contains the required xxx.loc.sample file.
                    index_file = options.index_file or options.missing_index_file
                    index_head, index_tail = os.path.split( index_file )
                    sample_found = False
                    for sample_file in sample_files:
                        sample_head, sample_tail = os.path.split( sample_file )
                        if sample_tail == '%s.sample' % index_tail:
                            # A matching sample exists: install it and clear the
                            # "missing" markers on the options.
                            copy_sample_loc_file( trans, sample_file )
                            options.index_file = index_tail
                            options.missing_index_file = None
                            # NOTE(review): this assumes options.tool_data_table is
                            # not None whenever an index file is referenced - confirm.
                            options.tool_data_table.missing_index_file = None
                            sample_found = True
                            break
                    if not sample_found:
                        can_set_metadata = False
                        correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
                        correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_tail )
                        invalid_files.append( ( name, correction_msg ) )
    return can_set_metadata, invalid_files
  207. def generate_tool_metadata( trans, id, changeset_revision, tool_config, tool, metadata_dict ):
  208. """
  209. Update the received metadata_dict with changes that have been
  210. applied to the received tool.
  211. """
  212. repository = get_repository( trans, id )
  213. # Handle tool.requirements.
  214. tool_requirements = []
  215. for tr in tool.requirements:
  216. name=tr.name
  217. type=tr.type
  218. if type == 'fabfile':
  219. version = None
  220. fabfile = tr.fabfile
  221. method = tr.method
  222. else:
  223. version = tr.version
  224. fabfile = None
  225. method = None
  226. requirement_dict = dict( name=name,
  227. type=type,
  228. version=version,
  229. fabfile=fabfile,
  230. method=method )
  231. tool_requirements.append( requirement_dict )
  232. # Handle tool.tests.
  233. tool_tests = []
  234. if tool.tests:
  235. for ttb in tool.tests:
  236. test_dict = dict( name=ttb.name,
  237. required_files=ttb.required_files,
  238. inputs=ttb.inputs,
  239. outputs=ttb.outputs )
  240. tool_tests.append( test_dict )
  241. tool_dict = dict( id=tool.id,
  242. guid = generate_tool_guid( trans, repository, tool ),
  243. name=tool.name,
  244. version=tool.version,
  245. description=tool.description,
  246. version_string_cmd = tool.version_string_cmd,
  247. tool_config=tool_config,
  248. requirements=tool_requirements,
  249. tests=tool_tests )
  250. if 'tools' in metadata_dict:
  251. metadata_dict[ 'tools' ].append( tool_dict )
  252. else:
  253. metadata_dict[ 'tools' ] = [ tool_dict ]
  254. return metadata_dict
  255. def new_tool_metadata_required( trans, id, metadata_dict ):
  256. """
  257. Compare the last saved metadata for each tool in the repository with the new metadata
  258. in metadata_dict to determine if a new repository_metadata table record is required, or
  259. if the last saved metadata record can updated instead.
  260. """
  261. if 'tools' in metadata_dict:
  262. repository_metadata = get_latest_repository_metadata( trans, id )
  263. if repository_metadata:
  264. metadata = repository_metadata.metadata
  265. if metadata and 'tools' in metadata:
  266. saved_tool_ids = []
  267. # The metadata for one or more tools was successfully generated in the past
  268. # for this repository, so we first compare the version string for each tool id
  269. # in metadata_dict with what was previously saved to see if we need to create
  270. # a new table record or if we can simply update the existing record.
  271. for new_tool_metadata_dict in metadata_dict[ 'tools' ]:
  272. for saved_tool_metadata_dict in metadata[ 'tools' ]:
  273. if saved_tool_metadata_dict[ 'id' ] not in saved_tool_ids:
  274. saved_tool_ids.append( saved_tool_metadata_dict[ 'id' ] )
  275. if new_tool_metadata_dict[ 'id' ] == saved_tool_metadata_dict[ 'id' ]:
  276. if new_tool_metadata_dict[ 'version' ] != saved_tool_metadata_dict[ 'version' ]:
  277. return True
  278. # So far, a new metadata record is not required, but we still have to check to see if
  279. # any new tool ids exist in metadata_dict that are not in the saved metadata. We do
  280. # this because if a new tarball was uploaded to a repository that included tools, it
  281. # may have removed existing tool files if they were not included in the uploaded tarball.
  282. for new_tool_metadata_dict in metadata_dict[ 'tools' ]:
  283. if new_tool_metadata_dict[ 'id' ] not in saved_tool_ids:
  284. return True
  285. else:
  286. # We have repository metadata that does not include metadata for any tools in the
  287. # repository, so we can update the existing repository metadata.
  288. return False
  289. else:
  290. # There is no saved repository metadata, so we need to create a new repository_metadata
  291. # table record.
  292. return True
  293. # The received metadata_dict includes no metadata for tools, so a new repository_metadata table
  294. # record is not needed.
  295. return False
  296. def set_repository_metadata( trans, id, changeset_revision, **kwd ):
  297. """Set repository metadata"""
  298. message = ''
  299. status = 'done'
  300. repository = get_repository( trans, id )
  301. repo_dir = repository.repo_path
  302. repo = hg.repository( get_configured_ui(), repo_dir )
  303. invalid_files = []
  304. sample_files = []
  305. ctx = get_changectx_for_changeset( trans, repo, changeset_revision )
  306. if ctx is not None:
  307. metadata_dict = {}
  308. if changeset_revision == repository.tip:
  309. for root, dirs, files in os.walk( repo_dir ):
  310. if not root.find( '.hg' ) >= 0 and not root.find( 'hgrc' ) >= 0:
  311. if '.hg' in dirs:
  312. # Don't visit .hg directories - should be impossible since we don't
  313. # allow uploaded archives that contain .hg dirs, but just in case...
  314. dirs.remove( '.hg' )
  315. if 'hgrc' in files:
  316. # Don't include hgrc files in commit.
  317. files.remove( 'hgrc' )
  318. # Find all special .sample files first.
  319. for name in files:
  320. if name.endswith( '.sample' ):
  321. sample_files.append( os.path.abspath( os.path.join( root, name ) ) )
  322. for name in files:
  323. # Find all tool configs.
  324. if name.endswith( '.xml' ):
  325. try:
  326. full_path = os.path.abspath( os.path.join( root, name ) )
  327. tool = load_tool( trans, full_path )
  328. if tool is not None:
  329. can_set_metadata, invalid_files = check_tool_input_params( trans, name, tool, sample_files, invalid_files )
  330. if can_set_metadata:
  331. # Update the list of metadata dictionaries for tools in metadata_dict.
  332. tool_config = os.path.join( root, name )
  333. metadata_dict = generate_tool_metadata( trans, id, changeset_revision, tool_config, tool, metadata_dict )
  334. except Exception, e:
  335. invalid_files.append( ( name, str( e ) ) )
  336. # Find all exported workflows
  337. elif name.endswith( '.ga' ):
  338. try:
  339. full_path = os.path.abspath( os.path.join( root, name ) )
  340. # Convert workflow data from json
  341. fp = open( full_path, 'rb' )
  342. workflow_text = fp.read()
  343. fp.close()
  344. exported_workflow_dict = from_json_string( workflow_text )
  345. # Update the list of metadata dictionaries for workflows in metadata_dict.
  346. metadata_dict = generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict )
  347. except Exception, e:
  348. invalid_files.append( ( name, str( e ) ) )
  349. else:
  350. # Find all special .sample files first.
  351. for filename in ctx:
  352. if filename.endswith( '.sample' ):
  353. sample_files.append( os.path.abspath( os.path.join( root, filename ) ) )
  354. # Get all tool config file names from the hgweb url, something like:
  355. # /repos/test/convert_chars1/file/e58dcf0026c7/convert_characters.xml
  356. for filename in ctx:
  357. # Find all tool configs - should not have to update metadata for workflows for now.
  358. if filename.endswith( '.xml' ):
  359. fctx = ctx[ filename ]
  360. # Write the contents of the old tool config to a temporary file.
  361. fh = tempfile.NamedTemporaryFile( 'w' )
  362. tmp_filename = fh.name
  363. fh.close()
  364. fh = open( tmp_filename, 'w' )
  365. fh.write( fctx.data() )
  366. fh.close()
  367. try:
  368. tool = load_tool( trans, tmp_filename )
  369. if tool is not None:
  370. can_set_metadata, invalid_files = check_tool_input_params( trans, filename, tool, sample_files, invalid_files )
  371. if can_set_metadata:
  372. # Update the list of metadata dictionaries for tools in metadata_dict. Note that filename
  373. # here is the relative path to the config file within the change set context, something
  374. # like filtering.xml, but when the change set was the repository tip, the value was
  375. # something like database/community_files/000/repo_1/filtering.xml. This shouldn't break
  376. # anything, but may result in a bit of confusion when maintaining the code / data over time.
  377. metadata_dict = generate_tool_metadata( trans, id, changeset_revision, filename, tool, metadata_dict )
  378. except Exception, e:
  379. invalid_files.append( ( name, str( e ) ) )
  380. try:
  381. os.unlink( tmp_filename )
  382. except:
  383. pass
  384. if metadata_dict:
  385. if changeset_revision == repository.tip:
  386. if new_tool_metadata_required( trans, id, metadata_dict ) or new_workflow_metadata_required( trans, id, metadata_dict ):
  387. # Create a new repository_metadata table row.
  388. repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict )
  389. trans.sa_session.add( repository_metadata )
  390. trans.sa_session.flush()
  391. else:
  392. # Update the last saved repository_metadata table row.
  393. repository_metadata = get_latest_repository_metadata( trans, id )
  394. repository_metadata.changeset_revision = changeset_revision
  395. repository_metadata.metadata = metadata_dict
  396. trans.sa_session.add( repository_metadata )
  397. trans.sa_session.flush()
  398. else:
  399. # We're re-generating metadata for an old repository revision.
  400. repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
  401. repository_metadata.metadata = metadata_dict
  402. trans.sa_session.add( repository_metadata )
  403. trans.sa_session.flush()
  404. else:
  405. message = "Change set revision '%s' includes no tools or exported workflows for which metadata can be set." % str( changeset_revision )
  406. status = "error"
  407. else:
  408. # change_set is None
  409. message = "Repository does not include change set revision '%s'." % str( changeset_revision )
  410. status = 'error'
  411. if invalid_files:
  412. if metadata_dict:
  413. message = "Metadata was defined for some items in change set revision '%s'. " % str( changeset_revision )
  414. message += "Correct the following problems if necessary and reset metadata.<br/>"
  415. else:
  416. message = "Metadata cannot be defined for change set revision '%s'. Correct the following problems and reset metadata.<br/>" % str( changeset_revision )
  417. for itc_tup in invalid_files:
  418. tool_file, exception_msg = itc_tup
  419. if exception_msg.find( 'No such file or directory' ) >= 0:
  420. exception_items = exception_msg.split()
  421. missing_file_items = exception_items[7].split( '/' )
  422. missing_file = missing_file_items[-1].rstrip( '\'' )
  423. if missing_file.endswith( '.loc' ):
  424. sample_ext = '%s.sample' % missing_file
  425. else:
  426. sample_ext = missing_file
  427. correction_msg = "This file refers to a missing file <b>%s</b>. " % str( missing_file )
  428. correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % sample_ext
  429. else:
  430. correction_msg = exception_msg
  431. message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg )
  432. status = 'error'
  433. return message, status
  434. def get_repository_by_name( trans, name ):
  435. """Get a repository from the database via name"""
  436. return trans.sa_session.query( trans.model.Repository ).filter_by( name=name ).one()
  437. def get_changectx_for_changeset( trans, repo, changeset_revision, **kwd ):
  438. """Retrieve a specified changectx from a repository"""
  439. for changeset in repo.changelog:
  440. ctx = repo.changectx( changeset )
  441. if str( ctx ) == changeset_revision:
  442. return ctx
  443. return None
  444. def change_set_is_malicious( trans, id, changeset_revision, **kwd ):
  445. """Check the malicious flag in repository metadata for a specified change set"""
  446. repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
  447. if repository_metadata:
  448. return repository_metadata.malicious
  449. return False
  450. def get_configured_ui():
  451. # Configure any desired ui settings.
  452. _ui = ui.ui()
  453. # The following will suppress all messages. This is
  454. # the same as adding the following setting to the repo
  455. # hgrc file' [ui] section:
  456. # quiet = True
  457. _ui.setconfig( 'ui', 'quiet', True )
  458. return _ui
  459. def get_user( trans, id ):
  460. """Get a user from the database"""
  461. return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) )
  462. def handle_email_alerts( trans, repository ):
  463. repo_dir = repository.repo_path
  464. repo = hg.repository( get_configured_ui(), repo_dir )
  465. smtp_server = trans.app.config.smtp_server
  466. if smtp_server and repository.email_alerts:
  467. # Send email alert to users that want them.
  468. if trans.app.config.email_from is not None:
  469. email_from = trans.app.config.email_from
  470. elif trans.request.host.split( ':' )[0] == 'localhost':
  471. email_from = 'galaxy-no-reply@' + socket.getfqdn()
  472. else:
  473. email_from = 'galaxy-no-reply@' + trans.request.host.split( ':' )[0]
  474. tip_changeset = repo.changelog.tip()
  475. ctx = repo.changectx( tip_changeset )
  476. t, tz = ctx.date()
  477. date = datetime( *time.gmtime( float( t ) - tz )[:6] )
  478. display_date = date.strftime( "%Y-%m-%d" )
  479. try:
  480. username = ctx.user().split()[0]
  481. except:
  482. username = ctx.user()
  483. # Build the email message
  484. body = string.Template( email_alert_template ) \
  485. .safe_substitute( host=trans.request.host,
  486. repository_name=repository.name,
  487. revision='%s:%s' %( str( ctx.rev() ), ctx ),
  488. display_date=display_date,
  489. description=ctx.description(),
  490. username=username )
  491. frm = email_from
  492. subject = "Galaxy tool shed repository update alert"
  493. email_alerts = from_json_string( repository.email_alerts )
  494. for email in email_alerts:
  495. to = email.strip()
  496. # Send it
  497. try:
  498. util.send_mail( frm, to, subject, body, trans.app.config )
  499. except Exception, e:
  500. log.exception( "An error occurred sending a tool shed repository update alert by email." )
def update_for_browsing( trans, repository, current_working_dir, commit_message='' ):
    # Make a copy of a repository's files for browsing, remove from disk all files that
    # are not tracked, and commit all added, modified or removed files that have not yet
    # been committed.
    repo_dir = repository.repo_path
    repo = hg.repository( get_configured_ui(), repo_dir )
    # The following will delete the disk copy of only the files in the repository.
    #os.system( 'hg update -r null > /dev/null 2>&1' )
    # Capture the output of "hg status --all" via the ui buffer instead of
    # shelling out.
    repo.ui.pushbuffer()
    commands.status( repo.ui, repo, all=True )
    status_and_file_names = repo.ui.popbuffer().strip().split( "\n" )
    # status_and_file_names looks something like:
    # ['? README', '? tmap_tool/tmap-0.0.9.tar.gz', '? dna_filtering.py', 'C filtering.py', 'C filtering.xml']
    # The codes used to show the status of files are:
    # M = modified
    # A = added
    # R = removed
    # C = clean
    # ! = deleted, but still tracked
    # ? = not tracked
    # I = ignored
    files_to_remove_from_disk = []
    files_to_commit = []
    # NOTE(review): split()[1] truncates file names containing spaces - confirm
    # uploads disallow such names.
    for status_and_file_name in status_and_file_names:
        if status_and_file_name.startswith( '?' ) or status_and_file_name.startswith( 'I' ):
            files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
        elif status_and_file_name.startswith( 'M' ) or status_and_file_name.startswith( 'A' ) or status_and_file_name.startswith( 'R' ):
            files_to_commit.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
    for full_path in files_to_remove_from_disk:
        # We'll remove all files that are not tracked or ignored.
        if os.path.isdir( full_path ):
            try:
                os.rmdir( full_path )
            except OSError, e:
                # The directory is not empty
                pass
        elif os.path.isfile( full_path ):
            os.remove( full_path )
            # Also remove the containing directory if the removal emptied it.
            dir = os.path.split( full_path )[0]
            try:
                os.rmdir( dir )
            except OSError, e:
                # The directory is not empty
                pass
    if files_to_commit:
        if not commit_message:
            commit_message = 'Committed changes to: %s' % ', '.join( files_to_commit )
        repo.dirstate.write()
        repo.commit( user=trans.user.username, text=commit_message )
    # Refresh the working copy, then restore the caller's working directory.
    os.chdir( repo_dir )
    os.system( 'hg update > /dev/null 2>&1' )
    os.chdir( current_working_dir )
  553. def load_tool( trans, config_file ):
  554. """
  555. Load a single tool from the file named by `config_file` and return
  556. an instance of `Tool`.
  557. """
  558. # Parse XML configuration file and get the root element
  559. tree = util.parse_xml( config_file )
  560. root = tree.getroot()
  561. if root.tag == 'tool':
  562. # Allow specifying a different tool subclass to instantiate
  563. if root.find( "type" ) is not None:
  564. type_elem = root.find( "type" )
  565. module = type_elem.get( 'module', 'galaxy.tools' )
  566. cls = type_elem.get( 'class' )
  567. mod = __import__( module, globals(), locals(), [cls])
  568. ToolClass = getattr( mod, cls )
  569. elif root.get( 'tool_type', None ) is not None:
  570. ToolClass = tool_types.get( root.get( 'tool_type' ) )
  571. else:
  572. ToolClass = Tool
  573. return ToolClass( config_file, root, trans.app )
  574. return None
  575. def build_changeset_revision_select_field( trans, repository, selected_value=None, add_id_to_name=True ):
  576. """
  577. Build a SelectField whose options are the changeset_revision
  578. strings of all downloadable_revisions of the received repository.
  579. """
  580. repo = hg.repository( get_configured_ui(), repository.repo_path )
  581. options = []
  582. refresh_on_change_values = []
  583. for repository_metadata in repository.downloadable_revisions:
  584. changeset_revision = repository_metadata.changeset_revision
  585. revision_label = get_revision_label( trans, repository, changeset_revision )
  586. options.append( ( revision_label, changeset_revision ) )
  587. refresh_on_change_values.append( changeset_revision )
  588. if add_id_to_name:
  589. name = 'changeset_revision_%d' % repository.id
  590. else:
  591. name = 'changeset_revision'
  592. select_field = SelectField( name=name,
  593. refresh_on_change=True,
  594. refresh_on_change_values=refresh_on_change_values )
  595. for option_tup in options:
  596. selected = selected_value and option_tup[1] == selected_value
  597. select_field.add_option( option_tup[0], option_tup[1], selected=selected )
  598. return select_field