
/lib/galaxy/web/base/controller.py

https://bitbucket.org/cistrome/cistrome-harvard/
Python | 2973 lines | 2914 code | 31 blank | 28 comment | 60 complexity

Large files are truncated in this view; see the repository link above for the full file.

   1"""
   2Contains functionality needed in every web interface
   3"""
   4import logging
   5import operator
   6import os
   7import re
   8from gettext import gettext
   9
  10import pkg_resources
  11pkg_resources.require("SQLAlchemy >= 0.4")
  12from sqlalchemy import func, and_, select
  13
  14from paste.httpexceptions import HTTPBadRequest, HTTPInternalServerError
  15from paste.httpexceptions import HTTPNotImplemented, HTTPRequestRangeNotSatisfiable
  16from galaxy import exceptions
  17from galaxy.exceptions import ItemAccessibilityException, ItemDeletionException, ItemOwnershipException
  18from galaxy.exceptions import MessageException
  19
  20from galaxy import web
  21from galaxy import model
  22from galaxy import security
  23from galaxy import util
  24from galaxy import objectstore
  25
  26from galaxy.web import error, url_for
  27from galaxy.web.form_builder import AddressField, CheckboxField, SelectField, TextArea, TextField
  28from galaxy.web.form_builder import build_select_field, HistoryField, PasswordField, WorkflowField, WorkflowMappingField
  29from galaxy.workflow.modules import module_factory
  30from galaxy.model.orm import eagerload, eagerload_all, desc
  31from galaxy.security.validate_user_input import validate_publicname
  32from galaxy.util.sanitize_html import sanitize_html
  33from galaxy.model.item_attrs import Dictifiable, UsesAnnotations
  34
  35from galaxy.datatypes.interval import ChromatinInteractions
  36from galaxy.datatypes.data import Text
  37
  38from galaxy.model import ExtendedMetadata, ExtendedMetadataIndex, LibraryDatasetDatasetAssociation, HistoryDatasetAssociation
  39
  40from galaxy.datatypes.metadata import FileParameter
  41from galaxy.tools.parameters import RuntimeValue, visit_input_values
  42from galaxy.tools.parameters.basic import DataToolParameter
  43from galaxy.util.json import to_json_string
  44from galaxy.workflow.modules import ToolModule
  45from galaxy.workflow.steps import attach_ordered_steps
  46
  47
  48log = logging.getLogger( __name__ )
  49
  50# States for passing messages
  51SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
  52
  53def _is_valid_slug( slug ):
  54    """ Returns true if slug is valid. """
  55
  56    VALID_SLUG_RE = re.compile( "^[a-z0-9\-]+$" )
  57    return VALID_SLUG_RE.match( slug )
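     # A hypothetical doctest-style sketch (values invented): the pattern only
     # accepts lowercase alphanumerics and dashes.
     #
     #     >>> bool( _is_valid_slug( 'my-history-2' ) )
     #     True
     #     >>> bool( _is_valid_slug( 'My History' ) )
     #     False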
  58
  59
  60class BaseController( object ):
  61    """
  62    Base class for Galaxy web application controllers.
  63    """
  64
  65    def __init__( self, app ):
  66        """Initialize an interface for application 'app'"""
  67        self.app = app
  68        self.sa_session = app.model.context
  69
  70    def get_toolbox(self):
  71        """Returns the application toolbox"""
  72        return self.app.toolbox
  73
  74    def get_class( self, class_name ):
  75        """ Returns the class object that a string denotes. Without this method, we'd have to do eval(<class_name>). """
  76        if class_name == 'History':
  77            item_class = self.app.model.History
  78        elif class_name == 'HistoryDatasetAssociation':
  79            item_class = self.app.model.HistoryDatasetAssociation
  80        elif class_name == 'Page':
  81            item_class = self.app.model.Page
  82        elif class_name == 'StoredWorkflow':
  83            item_class = self.app.model.StoredWorkflow
  84        elif class_name == 'Visualization':
  85            item_class = self.app.model.Visualization
  86        elif class_name == 'Tool':
  87            item_class = self.app.model.Tool
  88        elif class_name == 'Job':
  89            item_class = self.app.model.Job
  90        elif class_name == 'User':
  91            item_class = self.app.model.User
  92        elif class_name == 'Group':
  93            item_class = self.app.model.Group
  94        elif class_name == 'Role':
  95            item_class = self.app.model.Role
  96        elif class_name == 'Quota':
  97            item_class = self.app.model.Quota
  98        elif class_name == 'Library':
  99            item_class = self.app.model.Library
 100        elif class_name == 'LibraryFolder':
 101            item_class = self.app.model.LibraryFolder
 102        elif class_name == 'LibraryDatasetDatasetAssociation':
 103            item_class = self.app.model.LibraryDatasetDatasetAssociation
 104        elif class_name == 'LibraryDataset':
 105            item_class = self.app.model.LibraryDataset
 106        elif class_name == 'ToolShedRepository':
 107            item_class = self.app.install_model.ToolShedRepository
 108        else:
 109            item_class = None
 110        return item_class
 111
 112    def get_object( self, trans, id, class_name, check_ownership=False, check_accessible=False, deleted=None ):
 113        """
 114        Convenience method to get a model object with the specified checks.
 115        """
 116        try:
 117            decoded_id = trans.security.decode_id( id )
 118        except:
 119            raise MessageException( "Malformed %s id ( %s ) specified, unable to decode"
 120                                    % ( class_name, str( id ) ), type='error' )
 121        try:
 122            item_class = self.get_class( class_name )
 123            assert item_class is not None
 124            item = trans.sa_session.query( item_class ).get( decoded_id )
 125            assert item is not None
 126        except Exception, exc:
 127            log.exception( "Invalid %s id ( %s ) specified: %s" % ( class_name, id, str( exc ) ) )
 128            raise MessageException( "Invalid %s id ( %s ) specified" % ( class_name, id ), type="error" )
 129
 130        if check_ownership or check_accessible:
 131            self.security_check( trans, item, check_ownership, check_accessible )
 132        if deleted == True and not item.deleted:
 133            raise ItemDeletionException( '%s "%s" is not deleted'
 134                                         % ( class_name, getattr( item, 'name', id ) ), type="warning" )
 135        elif deleted == False and item.deleted:
 136            raise ItemDeletionException( '%s "%s" is deleted'
 137                                         % ( class_name, getattr( item, 'name', id ) ), type="warning" )
 138        return item
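         # A hypothetical helper (illustrative only, not an existing method) showing a
         # typical call to get_object(); check_ownership=True assumes the concrete
         # controller also inherits a security_check() implementation, e.g. from
         # SharableItemSecurityMixin further down in this file.
         def _example_get_owned_history( self, trans, encoded_id ):
             return self.get_object( trans, encoded_id, 'History',
                                     check_ownership=True, check_accessible=False, deleted=False )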
 139
  140    # this should be here - but catching errors from sharable item controllers that *should* have SharableItemSecurityMixin
 141    #   but *don't* then becomes difficult
 142    #def security_check( self, trans, item, check_ownership=False, check_accessible=False ):
 143    #    log.warn( 'BaseController.security_check: %s, %b, %b', str( item ), check_ownership, check_accessible )
 144    #    # meant to be overridden in SharableSecurityMixin
 145    #    return item
 146
 147    def get_user( self, trans, id, check_ownership=False, check_accessible=False, deleted=None ):
 148        return self.get_object( trans, id, 'User', check_ownership=False, check_accessible=False, deleted=deleted )
 149
 150    def get_group( self, trans, id, check_ownership=False, check_accessible=False, deleted=None ):
 151        return self.get_object( trans, id, 'Group', check_ownership=False, check_accessible=False, deleted=deleted )
 152
 153    def get_role( self, trans, id, check_ownership=False, check_accessible=False, deleted=None ):
 154        return self.get_object( trans, id, 'Role', check_ownership=False, check_accessible=False, deleted=deleted )
 155
 156    def encode_all_ids( self, trans, rval, recursive=False ):
 157        """
 158        Encodes all integer values in the dict rval whose keys are 'id' or end with '_id'
 159
  160        It might be useful to turn this into a decorator
 161        """
 162        if type( rval ) != dict:
 163            return rval
 164        for k, v in rval.items():
 165            if (k == 'id' or k.endswith( '_id' )) and v is not None and k not in ['tool_id']:
 166                try:
 167                    rval[k] = trans.security.encode_id( v )
 168                except:
 169                    pass # probably already encoded
 170            if (k.endswith("_ids") and type(v) == list):
 171                try:
 172                    o = []
 173                    for i in v:
 174                        o.append(trans.security.encode_id( i ))
 175                    rval[k] = o
 176                except:
 177                    pass
 178            else:
 179                if recursive and type(v) == dict:
 180                    rval[k] = self.encode_all_ids(trans, v, recursive)
 181        return rval
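         # A hypothetical before/after sketch (encoded values are placeholders for
         # whatever trans.security.encode_id() returns): keys named 'id' or ending
         # in '_id' are encoded, '_ids' lists are encoded element-wise, and
         # 'tool_id' plus everything else passes through untouched.
         #
         #     rval = { 'id': 42, 'history_id': 7, 'tool_id': 'cat1', 'input_ids': [ 1, 2 ] }
         #     self.encode_all_ids( trans, rval )
         #     # -> { 'id': '<enc 42>', 'history_id': '<enc 7>', 'tool_id': 'cat1',
         #     #      'input_ids': [ '<enc 1>', '<enc 2>' ] }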
 182
 183    # incoming param validation
 184    # should probably be in sep. serializer class/object _used_ by controller
 185    def validate_and_sanitize_basestring( self, key, val ):
 186        if not isinstance( val, basestring ):
 187            raise exceptions.RequestParameterInvalidException( '%s must be a string or unicode: %s'
 188                                                               %( key, str( type( val ) ) ) )
 189        return unicode( sanitize_html( val, 'utf-8', 'text/html' ), 'utf-8' )
 190
 191    def validate_and_sanitize_basestring_list( self, key, val ):
 192        try:
 193            assert isinstance( val, list )
 194            return [ unicode( sanitize_html( t, 'utf-8', 'text/html' ), 'utf-8' ) for t in val ]
 195        except ( AssertionError, TypeError ), err:
 196            raise exceptions.RequestParameterInvalidException( '%s must be a list of strings: %s'
 197                                                               %( key, str( type( val ) ) ) )
 198
 199    def validate_boolean( self, key, val ):
 200        if not isinstance( val, bool ):
 201            raise exceptions.RequestParameterInvalidException( '%s must be a boolean: %s'
 202                                                               %( key, str( type( val ) ) ) )
 203        return val
 204
 205    #TODO:
 206    #def validate_integer( self, key, val, min, max ):
 207    #def validate_float( self, key, val, min, max ):
 208    #def validate_number( self, key, val, min, max ):
 209    #def validate_genome_build( self, key, val ):
 210
 211Root = BaseController
 212
 213
 214class BaseUIController( BaseController ):
 215
 216    def get_object( self, trans, id, class_name, check_ownership=False, check_accessible=False, deleted=None ):
 217        try:
 218            return BaseController.get_object( self, trans, id, class_name,
 219                check_ownership=check_ownership, check_accessible=check_accessible, deleted=deleted )
 220
 221        except MessageException:
 222            raise       # handled in the caller
 223        except:
  224            log.exception( "Exception in get_object check for %s %s:" % ( class_name, str( id ) ) )
 225            raise Exception( 'Server error retrieving %s id ( %s ).' % ( class_name, str( id ) ) )
 226
 227
 228class BaseAPIController( BaseController ):
 229
 230    def get_object( self, trans, id, class_name, check_ownership=False, check_accessible=False, deleted=None ):
 231        try:
 232            return BaseController.get_object( self, trans, id, class_name,
 233                check_ownership=check_ownership, check_accessible=check_accessible, deleted=deleted )
 234
 235        except ItemDeletionException, e:
 236            raise HTTPBadRequest( detail="Invalid %s id ( %s ) specified: %s" % ( class_name, str( id ), str( e ) ) )
 237        except MessageException, e:
 238            raise HTTPBadRequest( detail=e.err_msg )
 239        except Exception, e:
  240            log.exception( "Exception in get_object check for %s %s: %s" % ( class_name, str( id ), str( e ) ) )
 241            raise HTTPInternalServerError( comment=str( e ) )
 242
 243    def validate_in_users_and_groups( self, trans, payload ):
 244        """
 245        For convenience, in_users and in_groups can be encoded IDs or emails/group names in the API.
 246        """
 247        def get_id( item, model_class, column ):
 248            try:
 249                return trans.security.decode_id( item )
 250            except:
 251                pass # maybe an email/group name
 252            # this will raise if the item is invalid
 253            return trans.sa_session.query( model_class ).filter( column == item ).first().id
 254        new_in_users = []
 255        new_in_groups = []
 256        invalid = []
 257        for item in util.listify( payload.get( 'in_users', [] ) ):
 258            try:
 259                new_in_users.append( get_id( item, trans.app.model.User, trans.app.model.User.table.c.email ) )
 260            except:
 261                invalid.append( item )
 262        for item in util.listify( payload.get( 'in_groups', [] ) ):
 263            try:
 264                new_in_groups.append( get_id( item, trans.app.model.Group, trans.app.model.Group.table.c.name ) )
 265            except:
 266                invalid.append( item )
 267        if invalid:
 268            msg = "The following value(s) for associated users and/or groups could not be parsed: %s." % ', '.join( invalid )
 269            msg += "  Valid values are email addresses of users, names of groups, or IDs of both."
 270            raise Exception( msg )
 271        payload['in_users'] = map( str, new_in_users )
 272        payload['in_groups'] = map( str, new_in_groups )
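         # A hypothetical payload (names and ids invented): entries may be encoded
         # ids or emails/group names; both lists are rewritten in place as string
         # versions of the decoded database ids.
         #
         #     payload = { 'in_users': [ 'alice@example.org' ], 'in_groups': [ 'my-lab' ] }
         #     self.validate_in_users_and_groups( trans, payload )
         #     # payload['in_users'] / payload['in_groups'] now hold decoded ids as strings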
 273
 274    def not_implemented( self, trans, **kwd ):
 275        raise HTTPNotImplemented()
 276
 277
 278class Datatype( object ):
 279    """Used for storing in-memory list of datatypes currently in the datatypes registry."""
 280
 281    def __init__( self, extension, dtype, type_extension, mimetype, display_in_upload ):
 282        self.extension = extension
 283        self.dtype = dtype
 284        self.type_extension = type_extension
 285        self.mimetype = mimetype
 286        self.display_in_upload = display_in_upload
 287
 288#
 289# -- Mixins for working with Galaxy objects. --
 290#
 291
 292
 293class CreatesUsersMixin:
 294    """
 295    Mixin centralizing logic for user creation between web and API controller.
 296
  297    Web controller handles additional features such as e-mail subscription, activation,
  298    user forms, etc. API-created users are much more vanilla for the time being.
 299    """
 300
 301    def create_user( self, trans, email, username, password ):
 302        user = trans.app.model.User( email=email )
 303        user.set_password_cleartext( password )
 304        user.username = username
 305        if trans.app.config.user_activation_on:
 306            user.active = False
 307        else:
 308            user.active = True  # Activation is off, every new user is active by default.
 309        trans.sa_session.add( user )
 310        trans.sa_session.flush()
 311        trans.app.security_agent.create_private_user_role( user )
 312        if trans.webapp.name == 'galaxy':
 313            # We set default user permissions, before we log in and set the default history permissions
 314            trans.app.security_agent.user_set_default_permissions( user,
 315                                                                   default_access_private=trans.app.config.new_user_dataset_access_role_default_private )
 316        return user
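         # A hypothetical call (credentials invented): the account is created active
         # or inactive depending on the user_activation_on config handled above, and
         # a private role is always created for it.
         #
         #     user = self.create_user( trans, 'alice@example.org', 'alice', 'not-a-real-password' )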
 317
 318
 319class CreatesApiKeysMixin:
 320    """
  321    Mixin centralizing logic for creating API keys for user objects.
 322    """
 323
 324    def create_api_key( self, trans, user ):
 325        guid = trans.app.security.get_new_guid()
 326        new_key = trans.app.model.APIKeys()
 327        new_key.user_id = user.id
 328        new_key.key = guid
 329        trans.sa_session.add( new_key )
 330        trans.sa_session.flush()
 331        return guid
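         # A hypothetical call: generates, stores, and returns a new key string for
         # the given user (here the currently logged-in user).
         #
         #     api_key = self.create_api_key( trans, trans.user )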
 332
 333
 334class SharableItemSecurityMixin:
 335    """ Mixin for handling security for sharable items. """
 336
 337    def security_check( self, trans, item, check_ownership=False, check_accessible=False ):
 338        """ Security checks for an item: checks if (a) user owns item or (b) item is accessible to user. """
 339        # all items are accessible to an admin
 340        if trans.user and trans.user_is_admin():
 341            return item
 342
 343        # Verify ownership: there is a current user and that user is the same as the item's
 344        if check_ownership:
 345            if not trans.user:
 346                raise ItemOwnershipException( "Must be logged in to manage Galaxy items", type='error' )
 347            if item.user != trans.user:
 348                raise ItemOwnershipException( "%s is not owned by the current user" % item.__class__.__name__, type='error' )
 349
 350        # Verify accessible:
 351        #   if it's part of a lib - can they access via security
 352        #   if it's something else (sharable) have they been added to the item's users_shared_with_dot_users
 353        if check_accessible:
 354            if type( item ) in ( trans.app.model.LibraryFolder, trans.app.model.LibraryDatasetDatasetAssociation, trans.app.model.LibraryDataset ):
 355                if not trans.app.security_agent.can_access_library_item( trans.get_current_user_roles(), item, trans.user ):
 356                    raise ItemAccessibilityException( "%s is not accessible to the current user" % item.__class__.__name__, type='error' )
 357            else:
 358                if ( item.user != trans.user ) and ( not item.importable ) and ( trans.user not in item.users_shared_with_dot_users ):
 359                    raise ItemAccessibilityException( "%s is not accessible to the current user" % item.__class__.__name__, type='error' )
 360        return item
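         # A hypothetical pair of calls showing the two modes: ownership for editing,
         # accessibility for viewing; failures raise ItemOwnershipException /
         # ItemAccessibilityException rather than returning None.
         #
         #     item = self.security_check( trans, item, check_ownership=True )
         #     item = self.security_check( trans, item, check_accessible=True )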
 361
 362
 363class UsesHistoryMixin( SharableItemSecurityMixin ):
 364    """ Mixin for controllers that use History objects. """
 365
 366    def get_history( self, trans, id, check_ownership=True, check_accessible=False, deleted=None ):
 367        """
 368        Get a History from the database by id, verifying ownership.
 369        """
 370        history = self.get_object( trans, id, 'History',
 371            check_ownership=check_ownership, check_accessible=check_accessible, deleted=deleted )
 372        history = self.security_check( trans, history, check_ownership, check_accessible )
 373        return history
 374
 375    def get_user_histories( self, trans, user=None, include_deleted=False, only_deleted=False ):
 376        """
 377        Get all the histories for a given user (defaulting to `trans.user`)
 378        ordered by update time and filtered on whether they've been deleted.
 379        """
 380        # handle default and/or anonymous user (which still may not have a history yet)
 381        user = user or trans.user
 382        if not user:
 383            current_history = trans.get_history()
 384            return [ current_history ] if current_history else []
 385
 386        history_model = trans.model.History
 387        query = ( trans.sa_session.query( history_model )
 388            .filter( history_model.user == user )
 389            .order_by( desc( history_model.table.c.update_time ) ) )
 390        if only_deleted:
 391            query = query.filter( history_model.deleted == True )
 392        elif not include_deleted:
 393            query = query.filter( history_model.deleted == False )
 394
 395        return query.all()
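         # Hypothetical calls: the current user's non-deleted histories newest first,
         # or only the deleted ones (e.g. for a purge/restore view).
         #
         #     histories = self.get_user_histories( trans )
         #     deleted_only = self.get_user_histories( trans, only_deleted=True )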
 396
 397    def get_history_datasets( self, trans, history, show_deleted=False, show_hidden=False, show_purged=False ):
 398        """ Returns history's datasets. """
 399        query = trans.sa_session.query( trans.model.HistoryDatasetAssociation ) \
 400            .filter( trans.model.HistoryDatasetAssociation.history == history ) \
 401            .options( eagerload( "children" ) ) \
 402            .join( "dataset" ) \
 403            .options( eagerload_all( "dataset.actions" ) ) \
 404            .order_by( trans.model.HistoryDatasetAssociation.hid )
 405        if not show_deleted:
 406            query = query.filter( trans.model.HistoryDatasetAssociation.deleted == False )
 407        if not show_purged:
 408            query = query.filter( trans.model.Dataset.purged == False )
 409        return query.all()
 410
 411    def get_hda_state_counts( self, trans, history, include_deleted=False, include_hidden=False ):
 412        """
 413        Returns a dictionary with state counts for history's HDAs. Key is a
  414        dataset state, value is the number of HDAs in that state.
 415        """
 416        # Build query to get (state, count) pairs.
 417        cols_to_select = [ trans.app.model.Dataset.table.c.state, func.count( '*' ) ]
 418        from_obj = trans.app.model.HistoryDatasetAssociation.table.join( trans.app.model.Dataset.table )
 419
 420        conditions = [ trans.app.model.HistoryDatasetAssociation.table.c.history_id == history.id ]
 421        if not include_deleted:
 422            # Only count datasets that have not been deleted.
 423            conditions.append( trans.app.model.HistoryDatasetAssociation.table.c.deleted == False )
 424        if not include_hidden:
 425            # Only count datasets that are visible.
 426            conditions.append( trans.app.model.HistoryDatasetAssociation.table.c.visible == True )
 427
 428        group_by = trans.app.model.Dataset.table.c.state
 429        query = select( columns=cols_to_select,
 430                        from_obj=from_obj,
 431                        whereclause=and_( *conditions ),
 432                        group_by=group_by )
 433
 434        # Initialize count dict with all states.
 435        state_count_dict = {}
 436        for k, state in trans.app.model.Dataset.states.items():
 437            state_count_dict[ state ] = 0
 438
 439        # Process query results, adding to count dict.
 440        for row in trans.sa_session.execute( query ):
 441            state, count = row
 442            state_count_dict[ state ] = count
 443
 444        return state_count_dict
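         # A hypothetical result shape (counts invented): one key per dataset state,
         # zero-filled for states with no matching HDAs.
         #
         #     self.get_hda_state_counts( trans, history )
         #     # -> { 'ok': 14, 'running': 1, 'queued': 2, 'error': 0, 'new': 0, ... }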
 445
 446    def get_hda_summary_dicts( self, trans, history ):
 447        """Returns a list of dictionaries containing summary information
 448        for each HDA in the given history.
 449        """
 450        hda_model = trans.model.HistoryDatasetAssociation
 451
 452        # get state, name, etc.
 453        columns = ( hda_model.name, hda_model.hid, hda_model.id, hda_model.deleted,
 454                    trans.model.Dataset.state )
 455        column_keys = [ "name", "hid", "id", "deleted", "state" ]
 456
 457        query = ( trans.sa_session.query( *columns )
 458                    .enable_eagerloads( False )
 459                    .filter( hda_model.history == history )
 460                    .join( trans.model.Dataset )
 461                    .order_by( hda_model.hid ) )
 462
 463        # build dictionaries, adding history id and encoding all ids
 464        hda_dicts = []
 465        for hda_tuple in query.all():
 466            hda_dict = dict( zip( column_keys, hda_tuple ) )
 467            hda_dict[ 'history_id' ] = history.id
 468            trans.security.encode_dict_ids( hda_dict )
 469            hda_dicts.append( hda_dict )
 470        return hda_dicts
 471
 472    def _get_hda_state_summaries( self, trans, hda_dict_list ):
 473        """Returns two dictionaries (in a tuple): state_counts and state_ids.
 474        Each is keyed according to the possible hda states:
 475            _counts contains a sum of the datasets in each state
 476            _ids contains a list of the encoded ids for each hda in that state
 477
 478        hda_dict_list should be a list of hda data in dictionary form.
 479        """
 480        #TODO: doc to rst
 481        # init counts, ids for each state
 482        state_counts = {}
 483        state_ids = {}
 484        for key, state in trans.app.model.Dataset.states.items():
 485            state_counts[ state ] = 0
 486            state_ids[ state ] = []
 487
 488        for hda_dict in hda_dict_list:
 489            item_state = hda_dict['state']
 490            if not hda_dict['deleted']:
 491                state_counts[ item_state ] = state_counts[ item_state ] + 1
 492            # needs to return all ids (no deleted check)
 493            state_ids[ item_state ].append( hda_dict['id'] )
 494
 495        return ( state_counts, state_ids )
 496
 497    def _get_history_state_from_hdas( self, trans, history, hda_state_counts ):
 498        """Returns the history state based on the states of the HDAs it contains.
 499        """
 500        states = trans.app.model.Dataset.states
 501
 502        num_hdas = sum( hda_state_counts.values() )
 503        # (default to ERROR)
 504        state = states.ERROR
 505        if num_hdas == 0:
 506            state = states.NEW
 507
 508        else:
 509            if( ( hda_state_counts[ states.RUNNING ] > 0 )
 510            or  ( hda_state_counts[ states.SETTING_METADATA ] > 0 )
 511            or  ( hda_state_counts[ states.UPLOAD ] > 0 ) ):
 512                state = states.RUNNING
 513
 514            elif hda_state_counts[ states.QUEUED ] > 0:
 515                state = states.QUEUED
 516
 517            elif( ( hda_state_counts[ states.ERROR ] > 0 )
 518            or    ( hda_state_counts[ states.FAILED_METADATA ] > 0 ) ):
 519                state = states.ERROR
 520
 521            elif hda_state_counts[ states.OK ] == num_hdas:
 522                state = states.OK
 523
 524        return state
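         # A hypothetical walk-through (counts invented): a single running HDA
         # outranks queued and ok ones, so the history as a whole reports RUNNING.
         #
         #     counts = dict.fromkeys( trans.app.model.Dataset.states.values(), 0 )
         #     counts.update({ 'running': 1, 'queued': 2, 'ok': 14 })
         #     self._get_history_state_from_hdas( trans, history, counts )  # -> states.RUNNING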
 525
 526    def get_history_dict( self, trans, history, hda_dictionaries=None ):
 527        """Returns history data in the form of a dictionary.
 528        """
 529        history_dict = history.to_dict( view='element', value_mapper={ 'id':trans.security.encode_id })
 530        history_dict[ 'user_id' ] = None
 531        if history.user_id:
 532            history_dict[ 'user_id' ] = trans.security.encode_id( history.user_id )
 533
 534        history_dict[ 'nice_size' ] = history.get_disk_size( nice_size=True )
 535        history_dict[ 'annotation' ] = history.get_item_annotation_str( trans.sa_session, trans.user, history )
 536        if not history_dict[ 'annotation' ]:
 537            history_dict[ 'annotation' ] = ''
 538        #TODO: item_slug url
 539        if history_dict[ 'importable' ] and history_dict[ 'slug' ]:
 540            #TODO: this should be in History (or a superclass of)
 541            username_and_slug = ( '/' ).join(( 'u', history.user.username, 'h', history_dict[ 'slug' ] ))
 542            history_dict[ 'username_and_slug' ] = username_and_slug
 543
 544        hda_summaries = hda_dictionaries if hda_dictionaries else self.get_hda_summary_dicts( trans, history )
 545        #TODO remove the following in v2
 546        ( state_counts, state_ids ) = self._get_hda_state_summaries( trans, hda_summaries )
 547        history_dict[ 'state_details' ] = state_counts
 548        history_dict[ 'state_ids' ] = state_ids
 549        history_dict[ 'state' ] = self._get_history_state_from_hdas( trans, history, state_counts )
 550
 551        return history_dict
 552
 553    def set_history_from_dict( self, trans, history, new_data ):
 554        """
 555        Changes history data using the given dictionary new_data.
 556        """
 557        #precondition: ownership of the history has already been checked
 558        #precondition: user is not None (many of these attributes require a user to set properly)
 559        user = trans.get_user()
 560
 561        # published histories should always be importable
 562        if 'published' in new_data and new_data[ 'published' ] and not history.importable:
 563            new_data[ 'importable' ] = True
 564        # send what we can down into the model
 565        changed = history.set_from_dict( new_data )
 566
 567        # the rest (often involving the trans) - do here
 568        #TODO: the next two could be an aspect/mixin
 569        #TODO: also need a way to check whether they've changed - assume they have for now
 570        if 'annotation' in new_data:
 571            history.add_item_annotation( trans.sa_session, user, history, new_data[ 'annotation' ] )
 572            changed[ 'annotation' ] = new_data[ 'annotation' ]
 573
 574        if 'tags' in new_data:
 575            self.set_tags_from_list( trans, history, new_data[ 'tags' ], user=user )
 576            changed[ 'tags' ] = new_data[ 'tags' ]
 577
 578        #TODO: sharing with user/permissions?
 579
 580        if changed.keys():
 581            trans.sa_session.flush()
 582
 583            # create a slug if none exists (setting importable to false should not remove the slug)
 584            if 'importable' in changed and changed[ 'importable' ] and not history.slug:
 585                self._create_history_slug( trans, history )
 586
 587        return changed
 588
 589    def _create_history_slug( self, trans, history ):
 590        #TODO: mixins need to die a quick, horrible death
 591        #   (this is duplicate from SharableMixin which can't be added to UsesHistory without exposing various urls)
 592        cur_slug = history.slug
 593
 594        # Setup slug base.
 595        if cur_slug is None or cur_slug == "":
 596            # Item can have either a name or a title.
 597            item_name = history.name
 598            slug_base = util.ready_name_for_url( item_name.lower() )
 599        else:
 600            slug_base = cur_slug
 601
 602        # Using slug base, find a slug that is not taken. If slug is taken,
 603        # add integer to end.
 604        new_slug = slug_base
 605        count = 1
 606        while ( trans.sa_session.query( trans.app.model.History )
 607                    .filter_by( user=history.user, slug=new_slug, importable=True )
 608                    .count() != 0 ):
 609            # Slug taken; choose a new slug based on count. This approach can
 610            # handle numerous items with the same name gracefully.
 611            new_slug = '%s-%i' % ( slug_base, count )
 612            count += 1
 613
 614        # Set slug and return.
 615        trans.sa_session.add( history )
 616        history.slug = new_slug
 617        trans.sa_session.flush()
 618        return history.slug == cur_slug
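         # A hypothetical collision sequence (names invented): publishing three
         # histories all named 'RNA Seq' for the same user yields the slugs
         # 'rna-seq', 'rna-seq-1', 'rna-seq-2'.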
 619
 620
 621class ExportsHistoryMixin:
 622
 623    def serve_ready_history_export( self, trans, jeha ):
 624        assert jeha.ready
 625        if jeha.compressed:
 626            trans.response.set_content_type( 'application/x-gzip' )
 627        else:
 628            trans.response.set_content_type( 'application/x-tar' )
 629        disposition = 'attachment; filename="%s"' % jeha.export_name
 630        trans.response.headers["Content-Disposition"] = disposition
 631        return open( trans.app.object_store.get_filename( jeha.dataset ) )
 632
 633    def queue_history_export( self, trans, history, gzip=True, include_hidden=False, include_deleted=False ):
 634        # Convert options to booleans.
 635        #
 636        if isinstance( gzip, basestring ):
 637            gzip = ( gzip in [ 'True', 'true', 'T', 't' ] )
 638        if isinstance( include_hidden, basestring ):
 639            include_hidden = ( include_hidden in [ 'True', 'true', 'T', 't' ] )
 640        if isinstance( include_deleted, basestring ):
 641            include_deleted = ( include_deleted in [ 'True', 'true', 'T', 't' ] )
 642
 643        # Run job to do export.
 644        history_exp_tool = trans.app.toolbox.get_tool( '__EXPORT_HISTORY__' )
 645        params = {
 646            'history_to_export': history,
 647            'compress': gzip,
 648            'include_hidden': include_hidden,
 649            'include_deleted': include_deleted
 650        }
 651
 652        history_exp_tool.execute( trans, incoming=params, history=history, set_output_hid=True )
 653
 654
 655class ImportsHistoryMixin:
 656
 657    def queue_history_import( self, trans, archive_type, archive_source ):
 658        # Run job to do import.
 659        history_imp_tool = trans.app.toolbox.get_tool( '__IMPORT_HISTORY__' )
 660        incoming = { '__ARCHIVE_SOURCE__' : archive_source, '__ARCHIVE_TYPE__' : archive_type }
 661        history_imp_tool.execute( trans, incoming=incoming )
 662
 663
 664class UsesHistoryDatasetAssociationMixin:
 665    """
 666    Mixin for controllers that use HistoryDatasetAssociation objects.
 667    """
 668
 669    def get_dataset( self, trans, dataset_id, check_ownership=True, check_accessible=False, check_state=True ):
 670        """
 671        Get an HDA object by id performing security checks using
 672        the current transaction.
 673        """
 674        try:
 675            dataset_id = trans.security.decode_id( dataset_id )
 676        except ( AttributeError, TypeError ):
 677            # DEPRECATION: We still support unencoded ids for backward compatibility
 678            try:
 679                dataset_id = int( dataset_id )
 680            except ValueError, v_err:
 681                raise HTTPBadRequest( "Invalid dataset id: %s." % str( dataset_id ) )
 682
 683        try:
 684            data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( int( dataset_id ) )
 685        except:
 686            raise HTTPRequestRangeNotSatisfiable( "Invalid dataset id: %s." % str( dataset_id ) )
 687
 688        if check_ownership:
 689            # Verify ownership.
 690            user = trans.get_user()
 691            if not user:
 692                error( "Must be logged in to manage Galaxy items" )
 693            if data.history.user != user:
 694                error( "%s is not owned by current user" % data.__class__.__name__ )
 695
 696        if check_accessible:
 697            current_user_roles = trans.get_current_user_roles()
 698
 699            if not trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
 700                error( "You are not allowed to access this dataset" )
 701
 702            if check_state and data.state == trans.model.Dataset.states.UPLOAD:
 703                    return trans.show_error_message( "Please wait until this dataset finishes uploading "
 704                                                   + "before attempting to view it." )
 705        return data
 706
 707    def get_history_dataset_association( self, trans, history, dataset_id,
 708                                         check_ownership=True, check_accessible=False, check_state=False ):
 709        """
 710        Get a HistoryDatasetAssociation from the database by id, verifying ownership.
 711        """
  712        #TODO: duplicate of above? alias to above (or vice versa)
 713        self.security_check( trans, history, check_ownership=check_ownership, check_accessible=check_accessible )
 714        hda = self.get_object( trans, dataset_id, 'HistoryDatasetAssociation',
 715                               check_ownership=False, check_accessible=False )
 716
 717        if check_accessible:
 718            if( not trans.user_is_admin()
 719            and not trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), hda.dataset ) ):
 720                error( "You are not allowed to access this dataset" )
 721
 722            if check_state and hda.state == trans.model.Dataset.states.UPLOAD:
 723                error( "Please wait until this dataset finishes uploading before attempting to view it." )
 724        return hda
 725
 726    def get_history_dataset_association_from_ids( self, trans, id, history_id ):
 727        # Just to echo other TODOs, there seems to be some overlap here, still
  728        # this block appears in multiple places (dataset show, history_contents
  729        # show, upcoming history job show) so I am consolidating it here.
 730        # Someone smarter than me should determine if there is some redundancy here.
 731
 732        # for anon users:
 733        #TODO: check login_required?
 734        #TODO: this isn't actually most_recently_used (as defined in histories)
 735        if( ( trans.user == None )
 736        and ( history_id == trans.security.encode_id( trans.history.id ) ) ):
 737            history = trans.history
 738            #TODO: dataset/hda by id (from history) OR check_ownership for anon user
 739            hda = self.get_history_dataset_association( trans, history, id,
 740                check_ownership=False, check_accessible=True )
 741        else:
 742            #TODO: do we really need the history?
 743            history = self.get_history( trans, history_id,
 744                check_ownership=False, check_accessible=True, deleted=False )
 745            hda = self.get_history_dataset_association( trans, history, id,
 746                check_ownership=False, check_accessible=True )
 747        return hda
 748
 749    def get_hda_list( self, trans, hda_ids, check_ownership=True, check_accessible=False, check_state=True ):
 750        """
 751        Returns one or more datasets in a list.
 752
 753        If a dataset is not found or is inaccessible to trans.user,
 754        add None in its place in the list.
 755        """
  756        # precondition: hda_ids is a list of encoded id strings
 757        hdas = []
 758        for id in hda_ids:
 759            hda = None
 760            try:
 761                hda = self.get_dataset( trans, id,
 762                    check_ownership=check_ownership,
 763                    check_accessible=check_accessible,
 764                    check_state=check_state )
 765            except Exception, exception:
 766                pass
 767            hdas.append( hda )
 768        return hdas
 769
 770    def get_data( self, dataset, preview=True ):
 771        """
 772        Gets a dataset's data.
 773        """
 774        # Get data from file, truncating if necessary.
 775        truncated = False
 776        dataset_data = None
 777        if os.path.exists( dataset.file_name ):
 778            if isinstance( dataset.datatype, Text ):
 779                max_peek_size = 1000000 # 1 MB
 780                if preview and os.stat( dataset.file_name ).st_size > max_peek_size:
 781                    dataset_data = open( dataset.file_name ).read(max_peek_size)
 782                    truncated = True
 783                else:
  784                    dataset_data = open( dataset.file_name ).read()
 785                    truncated = False
 786            else:
 787                # For now, cannot get data from non-text datasets.
 788                dataset_data = None
 789        return truncated, dataset_data
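         # Hypothetical outcomes: a >1 MB text dataset previewed returns
         # (True, <first 1000000 bytes>); non-text datatypes return (False, None).
         #
         #     truncated, data = self.get_data( hda, preview=True )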
 790
 791    def check_dataset_state( self, trans, dataset ):
 792        """
 793        Returns a message if dataset is not ready to be used in visualization.
 794        """
 795        if not dataset:
 796            return dataset.conversion_messages.NO_DATA
 797        if dataset.state == trans.app.model.Job.states.ERROR:
 798            return dataset.conversion_messages.ERROR
 799        if dataset.state != trans.app.model.Job.states.OK:
 800            return dataset.conversion_messages.PENDING
 801        return None
 802
 803    def get_hda_dict( self, trans, hda ):
 804        """Return full details of this HDA in dictionary form.
 805        """
 806        #precondition: the user's access to this hda has already been checked
 807        #TODO:?? postcondition: all ids are encoded (is this really what we want at this level?)
 808        expose_dataset_path = trans.user_is_admin() or trans.app.config.expose_dataset_path
 809        hda_dict = hda.to_dict( view='element', expose_dataset_path=expose_dataset_path )
 810        hda_dict[ 'api_type' ] = "file"
 811
  812        # Add additional attributes that depend on trans and hence must be added here rather than at the model level.
 813        can_access_hda = trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), hda.dataset )
 814        can_access_hda = ( trans.user_is_admin() or can_access_hda )
 815        if not can_access_hda:
 816            return self.get_inaccessible_hda_dict( trans, hda )
 817        hda_dict[ 'accessible' ] = True
 818
 819        #TODO: I'm unclear as to which access pattern is right
 820        hda_dict[ 'annotation' ] = hda.get_item_annotation_str( trans.sa_session, trans.user, hda )
 821        #annotation = getattr( hda, 'annotation', hda.get_item_annotation_str( trans.sa_session, trans.user, hda ) )
 822
 823        # ---- return here if deleted AND purged OR can't access
 824        purged = ( hda.purged or hda.dataset.purged )
 825        if ( hda.deleted and purged ):
 826            #TODO: to_dict should really go AFTER this - only summary data
 827            return trans.security.encode_dict_ids( hda_dict )
 828
 829        if expose_dataset_path:
 830            try:
 831                hda_dict[ 'file_name' ] = hda.file_name
 832            except objectstore.ObjectNotFound, onf:
 833                log.exception( 'objectstore.ObjectNotFound, HDA %s: %s', hda.id, onf )
 834
 835        hda_dict[ 'download_url' ] = url_for( 'history_contents_display',
 836            history_id = trans.security.encode_id( hda.history.id ),
 837            history_content_id = trans.security.encode_id( hda.id ) )
 838
  839        # indices, assoc. metadata files, etc.
 840        meta_files = []
 841        for meta_type in hda.metadata.spec.keys():
 842            if isinstance( hda.metadata.spec[ meta_type ].param, FileParameter ):
 843                meta_files.append( dict( file_type=meta_type ) )
 844        if meta_files:
 845            hda_dict[ 'meta_files' ] = meta_files
 846
 847        # currently, the viz reg is optional - handle on/off
 848        if trans.app.visualizations_registry:
 849            hda_dict[ 'visualizations' ] = trans.app.visualizations_registry.get_visualizations( trans, hda )
 850        else:
 851            hda_dict[ 'visualizations' ] = hda.get_visualizations()
 852        #TODO: it may also be wiser to remove from here and add as API call that loads the visualizations
 853        #           when the visualizations button is clicked (instead of preloading/pre-checking)
 854
 855        # ---- return here if deleted
 856        if hda.deleted and not purged:
 857            return trans.security.encode_dict_ids( hda_dict )
 858
 859        return trans.security.encode_dict_ids( hda_dict )
 860
 861    def get_inaccessible_hda_dict( self, trans, hda ):
 862        return trans.security.encode_dict_ids({
 863            'id'        : hda.id,
 864            'history_id': hda.history.id,
 865            'hid'       : hda.hid,
 866            'name'      : hda.name,
 867            'state'     : hda.state,
 868            'deleted'   : hda.deleted,
 869            'visible'   : hda.visible,
 870            'accessible': False
 871        })
 872
 873    def get_hda_dict_with_error( self, trans, hda=None, history_id=None, id=None, error_msg='Error' ):
 874        return trans.security.encode_dict_ids({
 875            'id'        : hda.id if hda else id,
 876            'history_id': hda.history.id if hda else history_id,
 877            'hid'       : hda.hid if hda else '(unknown)',
 878            'name'      : hda.name if hda else '(unknown)',
 879            'error'     : error_msg,
 880            'state'     : trans.model.Dataset.states.NEW
 881        })
 882
 883    def get_display_apps( self, trans, hda ):
 884        display_apps = []
 885        for display_app in hda.get_display_applications( trans ).itervalues():
 886
 887            app_links = []
 888            for link_app in display_app.links.itervalues():
 889                app_links.append({
 890                    'target': link_app.url.get( 'target_frame', '_blank' ),
 891                    'href'  : link_app.get_display_url( hda, trans ),
 892                    'text'  : gettext( link_app.name )
 893                })
 894            if app_links:
 895                display_apps.append( dict( label=display_app.name, links=app_links ) )
 896
 897        return display_apps
 898
 899    def get_old_display_applications( self, trans, hda ):
 900        display_apps = []
 901        if not trans.app.config.enable_old_display_applications:
 902            return display_apps
 903
 904        for display_app in hda.datatype.get_display_types():
 905            target_frame, display_links = hda.datatype.get_display_links( hda,
 906                display_app, trans.app, trans.request.base )
 907
 908            if len( display_links ) > 0:
 909                display_label = hda.datatype.get_display_label( display_app )
 910
 911                app_links = []
 912                for display_name, display_link in display_links:
 913                    app_links.append({
 914                        'target': target_frame,
 915                        'href'  : display_link,
 916                        'text'  : gettext( display_name )
 917                    })
 918                if app_links:
 919                    display_apps.append( dict( label=display_label, links=app_links ) )
 920
 921        return display_apps
 922
 923    def set_hda_from_dict( self, trans, hda, new_data ):
 924        """
 925        Changes HDA data using the given dictionary new_data.
 926        """
 927        # precondition: access of the hda has already been checked
 928
 929        # send what we can down into the model
 930        changed = hda.set_from_dict( new_data )
 931        # the rest (often involving the trans) - do here
 932        if 'annotation' in new_data.keys() and trans.get_user():
 933            hda.add_item_annotation( trans.sa_session, trans.get_user(), hda, new_data[ 'annotation' ] )
 934            changed[ 'annotation' ] = new_data[ 'annotation' ]
 935        if 'tags' in new_data.keys() and trans.get_user():
 936            self.set_tags_from_list( trans, hda, new_data[ 'tags' ], user=trans.user )
 937        # sharing/permissions?
 938        # purged
 939
 940        if changed.keys():
 941            trans.sa_session.flush()
 942
 943        return changed
 944
 945    def get_hda_job( self, hda ):
 946        # Get dataset's job.
 947        job = None
 948        for job_output_assoc in hda.creating_job_associations:
 949            job = job_output_assoc.job
 950            break
 951        return job
 952
 953    def stop_hda_creating_job( self, hda ):
 954        """
 955        Stops an HDA's creating job if all the job's other outputs are deleted.
 956        """
 957        if hda.parent_id is None and len( hda.creating_job_associations ) > 0:
 958            # Mark associated job for deletion
 959            job = hda.creating_job_associations[0].job
 960            if job.state in [ self.app.model.Job.states.QUEUED, self.app.model.Job.states.RUNNING, self.app.model.Job.states.NEW ]:
 961                # Are *all* of the job's other output datasets deleted?
 962                if job.check_if_output_datasets_deleted():
 963                    job.mark_deleted( self.app.config.track_jobs_in_database )
 964                    self.app.job_manager.job_stop_queue.put( job.id )
 965
 966
 967class UsesLibraryMixin:
 968
 969    def get_library( self, trans, id, check_ownership=False, check_accessible=True ):
 970        l = self.get_object( trans, id, 'Library' )
 971        if check_accessible and not ( trans.user_is_admin() or trans.app.security_agent.can_access_library( trans.get_current_user_roles(), l ) ):
  972            error( "Library is not accessible to the current user" )
 973        return l
 974
 975
 976class UsesLibraryMixinItems( SharableItemSecurityMixin ):
 977
 978    def get_library_folder( self, trans, id, check_ownership=False, check_accessible=True ):
 979        return self.get_object( trans, id, 'LibraryFolder',
 980                                check_ownership=False, check_accessible=check_accessible )
 981
 982    def get_library_dataset_dataset_association( self, trans, id, check_ownership=False, check_accessible=True ):
 983        return self.get_object( trans, id, 'LibraryDatasetDatasetAssociation',
 984                                check_ownership=False, check_accessible=check_accessible )
 985
 986    def get_library_dataset( self, trans, id, check_ownership=False, check_accessible=True ):
 987        return self.get_object( trans, id, 'LibraryDataset',
 988                                check_ownership=False, check_accessible=check_accessible )
 989
 990    #TODO: it makes no sense that I can get roles from a user but not user.is_admin()
 991    #def can_user_add_to_library_item( self, trans, user, item ):
 992    #    if not user: return False
 993    #    return (  ( user.is_admin() )
 994    #           or ( trans.app.security_agent.can_add_library_item( user.all_roles(), item ) ) )
 995
 996    def can_current_user_add_to_library_item( self, trans, item ):
 997        if not trans.user: return False
 998        return (  ( trans.user_is_admin() )
 999               or ( trans.app.security_agent.can_add_library_item( trans.get_current_user_roles(), item ) ) )
1000
1001    def copy_hda_to_library_folder( self, trans, hda, library_folder, roles=None, ldda_message='' ):
1002        #PRECONDITION: permissions for this action on hda and library_folder have been checked
1003        roles = roles or []
1004
1005        # this code was extracted from library_common.add_history_datasets_to_library
1006        #TODO: refactor library_common.add_history_datasets_to_library to use this for each hda to copy
1007
1008        # create the new ldda and apply the folder perms to it
1009        ldda = hda.to_library_dataset_dataset_association( trans, target_folder=library_folder,
1010                                                           roles=roles, ldda_message=ldda_message )
1011        self._apply_library_folder_permissions_to_ldda( trans, library_folder, ldda )
1012        self._apply_hda_permissions_to_ldda( trans, hda, ldda )
1013        #TODO:?? not really clear on how permissions are being traded here
1014        #   seems like hda -> ldda permissions should be set in to_library_dataset_dataset_association
1015        #   then they get reset in _apply_library_folder_permissions_to_ldda
1016        #   then finally, re-applies hda -> ldda for missing actions in _apply_hda_permissions_to_ldda??
1017        return ldda
1018
1019    def _apply_library_folder_permissions_to_ldda( self, trans, library_folder, ldda ):
1020        """
 1021        Copy actions/roles from library folder to an ldda (and its library_dataset).
1022        """
1023        #PRECONDITION: permissions for this action on library_folder and ldda have been checked
1024        security_agent = trans.app.security_agent
1025        security_agent.copy_library_permissions( trans, library_folder, ldda )
1026        security_agent.copy_library_permissions( trans, library_folder, ldda.library_dataset )
1027        return security_agent.get_permissions( ldda )
1028
1029    def _apply_hda_permissions_to_ldda( self, trans, hda, ldda ):
1030        """
1031        Copy actions/roles from hda to ldda.library_dataset (and then ldda) if ldda
1032        doesn't already have roles for the given action.
1033        """
1034        #PRECONDITION: permissions for this action on hda and ldda have been checked
1035        # Make sure to apply any defined dataset permissions, allowing the permissions inherited from the
1036        #   library_dataset to over-ride the same permissions on the dataset, if they exist.
1037        security_agent = trans.app.security_agent
1038        dataset_permissions_dict = security_agent.get_permissions( hda.dataset )
1039        library_dataset = ldda.library_dataset
1040        library_dataset_actions = [ permission.action for permission in library_dataset.actions ]
1041
1042        # except that: if DATASET_MANAGE_PERMISSIONS exists in the hda.dataset permissions,
1043        #   we need to instead apply those roles to the LIBRARY_MANAGE permission to the library dataset
1044        dataset_manage_permissions_action = security_agent.get_action( 'DATASET_MANAGE_PERMISSIONS' ).action
1045        library_manage_permissions_action = security_agent.get_action( 'LIBRARY_MANAGE' ).action
1046        #TODO: test this and remove if in loop below
1047        #TODO: doesn't handle action.action
1048        #if dataset_manage_permissions_action in dataset_permissions_dict:
1049        #    managing_roles = dataset_permissions_dict.pop( dataset_manage_permissions_action )
1050        #    dataset_permissions_dict[ library_manage_permissions_action ] = managing_roles
1051
1052        flush_needed = False
1053        for action, dataset_permissions_roles in dataset_permissions_dict.items():
1054            if isinstance( action, security.Action ):
1055                action = action.action
1056
1057            # alter : DATASET_MANAGE_PERMISSIONS -> LIBRARY_MANAGE (see above)
1058            if action == dataset_manage_permissions_action:
1059                action = library_manage_permissions_action
1060
1061            #TODO: generalize to util.update_dict_without_overwrite
1062            # add the hda actions & roles to the library_dataset
1063            #NOTE: only apply an hda perm if it's NOT set in the library_dataset perms (don't overwrite)
1064            if action not in library_dataset_actions:
1065                for role in dataset_permissions_roles:
1066                    ldps = trans.model.LibraryDatasetPermissions( action, library_dataset, role )
1067                    ldps = [ ldps ] if not isinstance( ldps, list ) else ldps
1068                    for ldp in ldps:
1069                        trans.sa_session.add( ldp )
1070                        flush_needed = True
1071
1072        if flush_needed:
1073            trans.sa_session.flush()
1074
 1075        # finally, apply the new library_dataset to its associated ldda (must be the same)
1076        security_agent.copy_library_permissions( trans, library_dataset, ldda )
1077        return security_agent.get_permissions( ldda )
1078
1079
1080class UsesVisualizationMixin( UsesHistoryDatasetAssociationMixin, UsesLibraryMixinItems ):
1081    """
1082    Mixin for controllers that use Visualization objects.
1083    """
1084
1085    viz_types = 

Large files are truncated; the listing ends here, partway through the UsesVisualizationMixin class.