
/lib/galaxy/config.py

https://bitbucket.org/cistrome/cistrome-harvard/
"""
Universe configuration builder.
"""
# absolute_import needed for tool_shed package.
from __future__ import absolute_import

import sys, os, tempfile, re
import logging, logging.config
import ConfigParser
from datetime import timedelta
from galaxy.web.formatting import expand_pretty_datetime_format
from galaxy.util import string_as_bool
from galaxy.util import listify
from galaxy.util import parse_xml
from galaxy import eggs
import pkg_resources

log = logging.getLogger( __name__ )

def resolve_path( path, root ):
    """If 'path' is relative make absolute by prepending 'root'"""
    if not( os.path.isabs( path ) ):
        path = os.path.join( root, path )
    return path
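
# A quick sketch of resolve_path behavior (illustrative paths, not from any
# real deployment): relative inputs are joined onto root, absolute inputs
# pass through untouched.
#
#   resolve_path( "database/files", "/srv/galaxy" )  # -> "/srv/galaxy/database/files"
#   resolve_path( "/data/files", "/srv/galaxy" )     # -> "/data/files"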

class ConfigurationError( Exception ):
    pass

class Configuration( object ):
    deprecated_options = ( 'database_file', )
    def __init__( self, **kwargs ):
        self.config_dict = kwargs
        self.root = kwargs.get( 'root_dir', '.' )
        # Collect the umask and primary gid from the environment
        self.umask = os.umask( 077 ) # get the current umask
        os.umask( self.umask ) # can't get w/o set, so set it back
        self.gid = os.getgid() # if running under newgrp(1) we'll need to fix the group of data created on the cluster

        # Database related configuration
        self.database = resolve_path( kwargs.get( "database_file", "database/universe.sqlite" ), self.root )
        self.database_connection = kwargs.get( "database_connection", False )
        self.database_engine_options = get_database_engine_options( kwargs )
        self.database_create_tables = string_as_bool( kwargs.get( "database_create_tables", "True" ) )
        self.database_query_profiling_proxy = string_as_bool( kwargs.get( "database_query_profiling_proxy", "False" ) )

        # Don't set this to true for production databases, but probably should
        # default to True for sqlite databases.
        self.database_auto_migrate = string_as_bool( kwargs.get( "database_auto_migrate", "False" ) )

        # Install database related configuration (if different).
        self.install_database_connection = kwargs.get( "install_database_connection", None )
        self.install_database_engine_options = get_database_engine_options( kwargs, model_prefix="install_" )

        # Where dataset files are stored
        self.file_path = resolve_path( kwargs.get( "file_path", "database/files" ), self.root )
        self.new_file_path = resolve_path( kwargs.get( "new_file_path", "database/tmp" ), self.root )
        tempfile.tempdir = self.new_file_path
        self.openid_consumer_cache_path = resolve_path( kwargs.get( "openid_consumer_cache_path", "database/openid_consumer_cache" ), self.root )
        self.cookie_path = kwargs.get( "cookie_path", "/" )
        self.genome_data_path = kwargs.get( "genome_data_path", "tool-data/genome" )
        self.rsync_url = kwargs.get( "rsync_url", "rsync://datacache.galaxyproject.org/indexes" )
        # Galaxy OpenID settings
        self.enable_openid = string_as_bool( kwargs.get( 'enable_openid', False ) )
        self.openid_config = kwargs.get( 'openid_config_file', 'openid_conf.xml' )
        self.enable_quotas = string_as_bool( kwargs.get( 'enable_quotas', False ) )
        self.tool_sheds_config = kwargs.get( 'tool_sheds_config_file', 'tool_sheds_conf.xml' )
        self.enable_unique_workflow_defaults = string_as_bool( kwargs.get( 'enable_unique_workflow_defaults', False ) )
        self.tool_path = resolve_path( kwargs.get( "tool_path", "tools" ), self.root )
        self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "tool-data" ), os.getcwd() )
        self.len_file_path = resolve_path( kwargs.get( "len_file_path", os.path.join( self.tool_data_path, 'shared', 'ucsc', 'chrom' ) ), self.root )
        self.test_conf = resolve_path( kwargs.get( "test_conf", "" ), self.root )
        # The value of migrated_tools_config is the file reserved for containing only those tools that have been eliminated from the distribution
        # and moved to the tool shed.
        self.migrated_tools_config = resolve_path( kwargs.get( 'migrated_tools_config', 'migrated_tools_conf.xml' ), self.root )
        if 'tool_config_file' in kwargs:
            tcf = kwargs[ 'tool_config_file' ]
        elif 'tool_config_files' in kwargs:
            tcf = kwargs[ 'tool_config_files' ]
        else:
            tcf = 'tool_conf.xml,shed_tool_conf.xml'
        self.tool_filters = listify( kwargs.get( "tool_filters", [] ), do_strip=True )
        self.tool_label_filters = listify( kwargs.get( "tool_label_filters", [] ), do_strip=True )
        self.tool_section_filters = listify( kwargs.get( "tool_section_filters", [] ), do_strip=True )

        self.user_tool_filters = listify( kwargs.get( "user_tool_filters", [] ), do_strip=True )
        self.user_label_filters = listify( kwargs.get( "user_tool_label_filters", [] ), do_strip=True )
        self.user_section_filters = listify( kwargs.get( "user_tool_section_filters", [] ), do_strip=True )

        self.tool_configs = [ resolve_path( p, self.root ) for p in listify( tcf ) ]
        self.shed_tool_data_path = kwargs.get( "shed_tool_data_path", None )
        if self.shed_tool_data_path:
            self.shed_tool_data_path = resolve_path( self.shed_tool_data_path, self.root )
        else:
            self.shed_tool_data_path = self.tool_data_path
        self.tool_data_table_config_path = resolve_path( kwargs.get( 'tool_data_table_config_path', 'tool_data_table_conf.xml' ), self.root )
        self.shed_tool_data_table_config = resolve_path( kwargs.get( 'shed_tool_data_table_config', 'shed_tool_data_table_conf.xml' ), self.root )
        self.enable_tool_shed_check = string_as_bool( kwargs.get( 'enable_tool_shed_check', False ) )
        self.manage_dependency_relationships = string_as_bool( kwargs.get( 'manage_dependency_relationships', False ) )
        self.running_functional_tests = string_as_bool( kwargs.get( 'running_functional_tests', False ) )
        self.hours_between_check = kwargs.get( 'hours_between_check', 12 )
        try:
            if isinstance( self.hours_between_check, int ):
                if self.hours_between_check < 1 or self.hours_between_check > 24:
                    self.hours_between_check = 12
            elif isinstance( self.hours_between_check, float ):
                # If we're running functional tests, the minimum hours between check should be reduced to 0.001, or 3.6 seconds.
                if self.running_functional_tests:
                    if self.hours_between_check < 0.001 or self.hours_between_check > 24.0:
                        self.hours_between_check = 12.0
                else:
                    if self.hours_between_check < 1.0 or self.hours_between_check > 24.0:
                        self.hours_between_check = 12.0
            else:
                self.hours_between_check = 12
        except:
            self.hours_between_check = 12
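        # Net effect of the clamp above: hours_between_check stays within
        # [1, 24] (or [0.001, 24.0] when running_functional_tests is set) and
        # anything unparseable falls back to 12.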
        self.update_integrated_tool_panel = kwargs.get( "update_integrated_tool_panel", True )
        self.enable_data_manager_user_view = string_as_bool( kwargs.get( "enable_data_manager_user_view", "False" ) )
        self.data_manager_config_file = resolve_path( kwargs.get( 'data_manager_config_file', 'data_manager_conf.xml' ), self.root )
        self.shed_data_manager_config_file = resolve_path( kwargs.get( 'shed_data_manager_config_file', 'shed_data_manager_conf.xml' ), self.root )
        self.galaxy_data_manager_data_path = kwargs.get( 'galaxy_data_manager_data_path', self.tool_data_path )
        self.tool_secret = kwargs.get( "tool_secret", "" )
        self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
        self.retry_metadata_internally = string_as_bool( kwargs.get( "retry_metadata_internally", "True" ) )
        self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) )
        self.normalize_remote_user_email = string_as_bool( kwargs.get( "normalize_remote_user_email", "False" ) )
        self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
        self.remote_user_header = kwargs.get( "remote_user_header", 'HTTP_REMOTE_USER' )
        self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None )
        self.require_login = string_as_bool( kwargs.get( "require_login", "False" ) )
        self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
        self.allow_user_deletion = string_as_bool( kwargs.get( "allow_user_deletion", "False" ) )
        self.allow_user_dataset_purge = string_as_bool( kwargs.get( "allow_user_dataset_purge", "False" ) )
        self.allow_user_impersonation = string_as_bool( kwargs.get( "allow_user_impersonation", "False" ) )
        self.new_user_dataset_access_role_default_private = string_as_bool( kwargs.get( "new_user_dataset_access_role_default_private", "False" ) )
        self.collect_outputs_from = [ x.strip() for x in kwargs.get( 'collect_outputs_from', 'new_file_path,job_working_directory' ).lower().split(',') ]
        self.template_path = resolve_path( kwargs.get( "template_path", "templates" ), self.root )
        self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates" ), self.root )
        self.dependency_resolvers_config_file = resolve_path( kwargs.get( 'dependency_resolvers_config_file', 'dependency_resolvers_conf.xml' ), self.root )
        self.job_config_file = resolve_path( kwargs.get( 'job_config_file', 'job_conf.xml' ), self.root )
        self.local_job_queue_workers = int( kwargs.get( "local_job_queue_workers", "5" ) )
        self.cluster_job_queue_workers = int( kwargs.get( "cluster_job_queue_workers", "3" ) )
        self.job_queue_cleanup_interval = int( kwargs.get( "job_queue_cleanup_interval", "5" ) )
        self.cluster_files_directory = os.path.abspath( kwargs.get( "cluster_files_directory", "database/pbs" ) )
        self.job_working_directory = resolve_path( kwargs.get( "job_working_directory", "database/job_working_directory" ), self.root )
        self.cleanup_job = kwargs.get( "cleanup_job", "always" )
        self.outputs_to_working_directory = string_as_bool( kwargs.get( 'outputs_to_working_directory', False ) )
        self.output_size_limit = int( kwargs.get( 'output_size_limit', 0 ) )
        self.retry_job_output_collection = int( kwargs.get( 'retry_job_output_collection', 0 ) )
        self.job_walltime = kwargs.get( 'job_walltime', None )
        self.job_walltime_delta = None
        if self.job_walltime is not None:
            h, m, s = [ int( v ) for v in self.job_walltime.split( ':' ) ]
            self.job_walltime_delta = timedelta( 0, s, 0, 0, m, h )
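        # Illustrative value (hypothetical): job_walltime = "36:00:00" parses to
        # h=36, m=0, s=0, and timedelta( 0, s, 0, 0, m, h ) maps those onto the
        # (days, seconds, microseconds, milliseconds, minutes, hours) positional
        # arguments, giving a 36 hour delta.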
        self.admin_users = kwargs.get( "admin_users", "" )
        self.reset_password_length = int( kwargs.get( 'reset_password_length', '15' ) )
        self.mailing_join_addr = kwargs.get( 'mailing_join_addr', "galaxy-announce-join@bx.psu.edu" )
        self.error_email_to = kwargs.get( 'error_email_to', None )
        self.activation_email = kwargs.get( 'activation_email', None )
        self.user_activation_on = string_as_bool( kwargs.get( 'user_activation_on', False ) )
        self.activation_grace_period = kwargs.get( 'activation_grace_period', None )
        self.inactivity_box_content = kwargs.get( 'inactivity_box_content', None )
        self.terms_url = kwargs.get( 'terms_url', None )
        self.instance_resource_url = kwargs.get( 'instance_resource_url', None )
        self.registration_warning_message = kwargs.get( 'registration_warning_message', None )
        # Get the disposable email domains blacklist file and its contents
        self.blacklist_location = kwargs.get( 'blacklist_file', None )
        self.blacklist_content = None
        if self.blacklist_location is not None:
            self.blacklist_file = resolve_path( kwargs.get( 'blacklist_file', None ), self.root )
            try:
                with open( self.blacklist_file ) as blacklist:
                    self.blacklist_content = [ line.rstrip() for line in blacklist.readlines() ]
            except IOError:
                print ( "CONFIGURATION ERROR: Can't open supplied blacklist file from path: " + str( self.blacklist_file ) )
        self.smtp_server = kwargs.get( 'smtp_server', None )
        self.smtp_username = kwargs.get( 'smtp_username', None )
        self.smtp_password = kwargs.get( 'smtp_password', None )
        self.smtp_ssl = kwargs.get( 'smtp_ssl', None )
        self.track_jobs_in_database = kwargs.get( 'track_jobs_in_database', 'None' )
        self.start_job_runners = listify( kwargs.get( 'start_job_runners', '' ) )
        self.expose_dataset_path = string_as_bool( kwargs.get( 'expose_dataset_path', 'False' ) )
        # External Service types used in sample tracking
        self.external_service_type_config_file = resolve_path( kwargs.get( 'external_service_type_config_file', 'external_service_types_conf.xml' ), self.root )
        self.external_service_type_path = resolve_path( kwargs.get( 'external_service_type_path', 'external_service_types' ), self.root )
        # Tasked job runner.
        self.use_tasked_jobs = string_as_bool( kwargs.get( 'use_tasked_jobs', False ) )
        self.local_task_queue_workers = int( kwargs.get( "local_task_queue_workers", 2 ) )
        # The transfer manager and deferred job queue
        self.enable_beta_job_managers = string_as_bool( kwargs.get( 'enable_beta_job_managers', 'False' ) )
        # Per-user Job concurrency limitations
        self.cache_user_job_count = string_as_bool( kwargs.get( 'cache_user_job_count', False ) )
        self.user_job_limit = int( kwargs.get( 'user_job_limit', 0 ) )
        self.registered_user_job_limit = int( kwargs.get( 'registered_user_job_limit', self.user_job_limit ) )
        self.anonymous_user_job_limit = int( kwargs.get( 'anonymous_user_job_limit', self.user_job_limit ) )
        self.default_cluster_job_runner = kwargs.get( 'default_cluster_job_runner', 'local:///' )
        self.pbs_application_server = kwargs.get( 'pbs_application_server', "" )
        self.pbs_dataset_server = kwargs.get( 'pbs_dataset_server', "" )
        self.pbs_dataset_path = kwargs.get( 'pbs_dataset_path', "" )
        self.pbs_stage_path = kwargs.get( 'pbs_stage_path', "" )
        self.drmaa_external_runjob_script = kwargs.get( 'drmaa_external_runjob_script', None )
        self.drmaa_external_killjob_script = kwargs.get( 'drmaa_external_killjob_script', None )
        self.external_chown_script = kwargs.get( 'external_chown_script', None )
        self.environment_setup_file = kwargs.get( 'environment_setup_file', None )
        self.use_heartbeat = string_as_bool( kwargs.get( 'use_heartbeat', 'False' ) )
        self.use_memdump = string_as_bool( kwargs.get( 'use_memdump', 'False' ) )
        self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
        self.log_events = string_as_bool( kwargs.get( 'log_events', 'False' ) )
        self.sanitize_all_html = string_as_bool( kwargs.get( 'sanitize_all_html', True ) )
        self.serve_xss_vulnerable_mimetypes = string_as_bool( kwargs.get( 'serve_xss_vulnerable_mimetypes', False ) )
        self.enable_old_display_applications = string_as_bool( kwargs.get( "enable_old_display_applications", "True" ) )
        self.ucsc_display_sites = kwargs.get( 'ucsc_display_sites', "main,test,archaea,ucla" ).lower().split(",")
        self.gbrowse_display_sites = kwargs.get( 'gbrowse_display_sites', "modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225" ).lower().split(",")
        self.brand = kwargs.get( 'brand', None )
        self.welcome_url = kwargs.get( 'welcome_url', '/static/welcome.html' )
        # Configuration for the message box directly below the masthead.
        self.message_box_visible = kwargs.get( 'message_box_visible', False )
        self.message_box_content = kwargs.get( 'message_box_content', None )
        self.message_box_class = kwargs.get( 'message_box_class', 'info' )
        self.support_url = kwargs.get( 'support_url', 'http://wiki.g2.bx.psu.edu/Support' )
        self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.galaxyproject.org/' )
        self.blog_url = kwargs.get( 'blog_url', None )
        self.screencasts_url = kwargs.get( 'screencasts_url', None )
        self.library_import_dir = kwargs.get( 'library_import_dir', None )
        self.user_library_import_dir = kwargs.get( 'user_library_import_dir', None )
        # Searching data libraries
        self.enable_lucene_library_search = string_as_bool( kwargs.get( 'enable_lucene_library_search', False ) )
        self.enable_whoosh_library_search = string_as_bool( kwargs.get( 'enable_whoosh_library_search', False ) )
        self.whoosh_index_dir = resolve_path( kwargs.get( "whoosh_index_dir", "database/whoosh_indexes" ), self.root )
        self.ftp_upload_dir = kwargs.get( 'ftp_upload_dir', None )
        self.ftp_upload_dir_identifier = kwargs.get( 'ftp_upload_dir_identifier', 'email' )  # attribute on user - email, username, id, etc...
        self.ftp_upload_site = kwargs.get( 'ftp_upload_site', None )
        self.allow_library_path_paste = kwargs.get( 'allow_library_path_paste', False )
        self.disable_library_comptypes = kwargs.get( 'disable_library_comptypes', '' ).lower().split( ',' )

        # Cistrome Static libraries
        # added by TL
        self.cistrome_static_library_path = resolve_path( kwargs.get( "cistrome_static_library_path", "tool-data/" ), self.root )
        # end

        # Location for dependencies
        if 'tool_dependency_dir' in kwargs:
            self.tool_dependency_dir = resolve_path( kwargs.get( "tool_dependency_dir" ), self.root )
            # Setting the following flag to true will ultimately cause tool dependencies
            # to be located in the shell environment and used by the job that is executing
            # the tool.
            self.use_tool_dependencies = True
        else:
            self.tool_dependency_dir = None
            self.use_tool_dependencies = False
        # Configuration options for taking advantage of nginx features
        self.upstream_gzip = string_as_bool( kwargs.get( 'upstream_gzip', False ) )
        self.apache_xsendfile = string_as_bool( kwargs.get( 'apache_xsendfile', False ) )
        self.nginx_x_accel_redirect_base = kwargs.get( 'nginx_x_accel_redirect_base', False )
        self.nginx_x_archive_files_base = kwargs.get( 'nginx_x_archive_files_base', False )
        self.nginx_upload_store = kwargs.get( 'nginx_upload_store', False )
        self.nginx_upload_path = kwargs.get( 'nginx_upload_path', False )
        if self.nginx_upload_store:
            self.nginx_upload_store = os.path.abspath( self.nginx_upload_store )
        self.object_store = kwargs.get( 'object_store', 'disk' )
        self.object_store_check_old_style = string_as_bool( kwargs.get( 'object_store_check_old_style', False ) )
        self.object_store_cache_path = resolve_path( kwargs.get( "object_store_cache_path", "database/object_store_cache" ), self.root )
        # Handle AWS-specific config options for backward compatibility
        if kwargs.get( 'aws_access_key', None ) is not None:
            self.os_access_key = kwargs.get( 'aws_access_key', None )
            self.os_secret_key = kwargs.get( 'aws_secret_key', None )
            self.os_bucket_name = kwargs.get( 's3_bucket', None )
            self.os_use_reduced_redundancy = kwargs.get( 'use_reduced_redundancy', False )
        else:
            self.os_access_key = kwargs.get( 'os_access_key', None )
            self.os_secret_key = kwargs.get( 'os_secret_key', None )
            self.os_bucket_name = kwargs.get( 'os_bucket_name', None )
            self.os_use_reduced_redundancy = kwargs.get( 'os_use_reduced_redundancy', False )
        self.os_host = kwargs.get( 'os_host', None )
        self.os_port = kwargs.get( 'os_port', None )
        self.os_is_secure = string_as_bool( kwargs.get( 'os_is_secure', True ) )
        self.os_conn_path = kwargs.get( 'os_conn_path', '/' )
        self.object_store_cache_size = float( kwargs.get( 'object_store_cache_size', -1 ) )
        self.object_store_config_file = kwargs.get( 'object_store_config_file', None )
        if self.object_store_config_file is not None:
            self.object_store_config_file = resolve_path( self.object_store_config_file, self.root )
        self.distributed_object_store_config_file = kwargs.get( 'distributed_object_store_config_file', None )
        if self.distributed_object_store_config_file is not None:
            self.distributed_object_store_config_file = resolve_path( self.distributed_object_store_config_file, self.root )
        self.irods_root_collection_path = kwargs.get( 'irods_root_collection_path', None )
        self.irods_default_resource = kwargs.get( 'irods_default_resource', None )
        # Parse global_conf and save the parser
        global_conf = kwargs.get( 'global_conf', None )
        global_conf_parser = ConfigParser.ConfigParser()
        self.config_file = None
        self.global_conf_parser = global_conf_parser
        if global_conf and "__file__" in global_conf:
            self.config_file = global_conf[ '__file__' ]
            global_conf_parser.read( global_conf[ '__file__' ] )
        # Heartbeat log file name override
        if global_conf is not None:
            self.heartbeat_log = global_conf.get( 'heartbeat_log', 'heartbeat.log' )
        # Determine which 'server:' this is
        self.server_name = 'main'
        for arg in sys.argv:
            # Crummy, but PasteScript does not give you a way to determine this
            if arg.lower().startswith( '--server-name=' ):
                self.server_name = arg.split( '=', 1 )[-1]
        # Store all configured server names
        self.server_names = []
        for section in global_conf_parser.sections():
            if section.startswith( 'server:' ):
                self.server_names.append( section.replace( 'server:', '', 1 ) )
        # Store advanced job management config
        self.job_manager = kwargs.get( 'job_manager', self.server_name ).strip()
        self.job_handlers = [ x.strip() for x in kwargs.get( 'job_handlers', self.server_name ).split( ',' ) ]
        self.default_job_handlers = [ x.strip() for x in kwargs.get( 'default_job_handlers', ','.join( self.job_handlers ) ).split( ',' ) ]
        # Use database for job running IPC unless this is a standalone server or explicitly set in the config
        if self.track_jobs_in_database == 'None':
            self.track_jobs_in_database = False
            if len( self.server_names ) > 1:
                self.track_jobs_in_database = True
        else:
            self.track_jobs_in_database = string_as_bool( self.track_jobs_in_database )
        # Store per-tool runner configs
        self.tool_handlers = self.__read_tool_job_config( global_conf_parser, 'galaxy:tool_handlers', 'name' )
        self.tool_runners = self.__read_tool_job_config( global_conf_parser, 'galaxy:tool_runners', 'url' )
        self.datatypes_config = kwargs.get( 'datatypes_config_file', 'datatypes_conf.xml' )
        # Cloud configuration options
        self.enable_cloud_launch = string_as_bool( kwargs.get( 'enable_cloud_launch', False ) )
        self.cloudlaunch_default_ami = kwargs.get( 'cloudlaunch_default_ami', 'ami-a7dbf6ce' )
        # Galaxy messaging (AMQP) configuration options
        self.amqp = {}
        try:
            amqp_config = global_conf_parser.items( "galaxy_amqp" )
        except ConfigParser.NoSectionError:
            amqp_config = {}
        for k, v in amqp_config:
            self.amqp[ k ] = v
        self.biostar_url = kwargs.get( 'biostar_url', None )
        self.biostar_key_name = kwargs.get( 'biostar_key_name', None )
        self.biostar_key = kwargs.get( 'biostar_key', None )
        self.biostar_enable_bug_reports = string_as_bool( kwargs.get( 'biostar_enable_bug_reports', True ) )
        self.biostar_never_authenticate = string_as_bool( kwargs.get( 'biostar_never_authenticate', False ) )
        self.pretty_datetime_format = expand_pretty_datetime_format( kwargs.get( 'pretty_datetime_format', '$locale (UTC)' ) )
        self.master_api_key = kwargs.get( 'master_api_key', None )
        if self.master_api_key == "changethis":  # default in sample config file
            raise Exception( "Insecure configuration, please change master_api_key to something other than default (changethis)" )

        # Experimental: This will not be enabled by default and will hide
        # nonproduction code.
        # The api_folders option refers to whether the API exposes the /folders section.
        self.api_folders = string_as_bool( kwargs.get( 'api_folders', False ) )
        # This is for testing new library browsing capabilities.
        self.new_lib_browse = string_as_bool( kwargs.get( 'new_lib_browse', False ) )
        # Error logging with sentry
        self.sentry_dsn = kwargs.get( 'sentry_dsn', None )
        # Logging with fluentd
        self.fluent_log = string_as_bool( kwargs.get( 'fluent_log', False ) )
        self.fluent_host = kwargs.get( 'fluent_host', 'localhost' )
        self.fluent_port = int( kwargs.get( 'fluent_port', 24224 ) )
        # visualization plugin framework
        self.visualization_plugins_directory = kwargs.get( 'visualization_plugins_directory', None )

    @property
    def sentry_dsn_public( self ):
        """
        Sentry URL with private key removed for use in client-side scripts;
        the Sentry server will need to be configured to accept events.
        """
        if self.sentry_dsn:
            return re.sub( r"^([^:/?#]+:)?//(\w+):(\w+)", r"\1//\2", self.sentry_dsn )
        else:
            return None
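
    # A sketch of the transformation above with a made-up DSN: the value
    # "https://pub:secret@sentry.example.org/1" would come back as
    # "https://pub@sentry.example.org/1", i.e. the private half of the
    # credential pair is dropped while everything else is preserved.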

    def __read_tool_job_config( self, global_conf_parser, section, key ):
        try:
            tool_runners_config = global_conf_parser.items( section )

            # Process config to group multiple configs for the same tool.
            rval = {}
            for entry in tool_runners_config:
                tool_config, val = entry
                tool = None
                runner_dict = {}
                if tool_config.find( "[" ) != -1:
                    # Found tool with additional params; put params in dict.
                    tool, params = tool_config[:-1].split( "[" )
                    param_dict = {}
                    for param in params.split( "," ):
                        name, value = param.split( "@" )
                        param_dict[ name ] = value
                    runner_dict[ 'params' ] = param_dict
                else:
                    tool = tool_config

                # Add runner URL.
                runner_dict[ key ] = val

                # Create tool entry if necessary.
                if tool not in rval:
                    rval[ tool ] = []

                # Add entry to runners.
                rval[ tool ].append( runner_dict )

            return rval
        except ConfigParser.NoSectionError:
            return {}
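
    # Illustrative ini entry (hypothetical tool id and runner URL) and the
    # structure __read_tool_job_config builds from it when called with
    # key='url':
    #
    #   [galaxy:tool_runners]
    #   mytool[source@files] = pbs:///galaxy_queue
    #
    # -> { 'mytool': [ { 'params': { 'source': 'files' }, 'url': 'pbs:///galaxy_queue' } ] }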

    def get( self, key, default ):
        return self.config_dict.get( key, default )

    def get_bool( self, key, default ):
        if key in self.config_dict:
            return string_as_bool( self.config_dict[ key ] )
        else:
            return default

    def check( self ):
        paths_to_check = [ self.root, self.tool_path, self.tool_data_path, self.template_path ]
        # Check that required directories exist
        for path in paths_to_check:
            if path not in [ None, False ] and not os.path.isdir( path ):
                try:
                    os.makedirs( path )
                except Exception, e:
                    raise ConfigurationError( "Unable to create missing directory: %s\n%s" % ( path, e ) )
        # Create the directories that it makes sense to create
        for path in self.file_path, \
                    self.new_file_path, \
                    self.job_working_directory, \
                    self.cluster_files_directory, \
                    self.template_cache, \
                    self.ftp_upload_dir, \
                    self.library_import_dir, \
                    self.user_library_import_dir, \
                    self.nginx_upload_store, \
                    './static/genetrack/plots', \
                    self.whoosh_index_dir, \
                    self.object_store_cache_path, \
                    os.path.join( self.tool_data_path, 'shared', 'jars' ):
            if path not in [ None, False ] and not os.path.isdir( path ):
                try:
                    os.makedirs( path )
                except Exception, e:
                    raise ConfigurationError( "Unable to create missing directory: %s\n%s" % ( path, e ) )
        # Check that required files exist
        tool_configs = self.tool_configs
        if self.migrated_tools_config not in tool_configs:
            tool_configs.append( self.migrated_tools_config )
        for path in tool_configs:
            if not os.path.exists( path ):
                raise ConfigurationError( "File not found: %s" % path )
        if not os.path.isfile( self.datatypes_config ):
            raise ConfigurationError( "File not found: %s" % self.datatypes_config )
        # Check for deprecated options.
        for key in self.config_dict.keys():
            if key in self.deprecated_options:
                log.warning( "Config option '%s' is deprecated and will be removed in a future release.  Please consult the latest version of the sample configuration file." % key )

    def is_admin_user( self, user ):
        """
        Determine if the provided user is listed in `admin_users`.

        NOTE: This is temporary, admin users will likely be specified in the
              database in the future.
        """
        admin_users = [ x.strip() for x in self.get( "admin_users", "" ).split( "," ) ]
        return ( user is not None and user.email in admin_users )

def get_database_engine_options( kwargs, model_prefix='' ):
    """
    Allow options for the SQLAlchemy database engine to be passed by using
    the prefix "database_engine_option".
    """
    conversions = {
        'convert_unicode': string_as_bool,
        'pool_timeout': int,
        'echo': string_as_bool,
        'echo_pool': string_as_bool,
        'pool_recycle': int,
        'pool_size': int,
        'max_overflow': int,
        'pool_threadlocal': string_as_bool,
        'server_side_cursors': string_as_bool
    }
    prefix = "%sdatabase_engine_option_" % model_prefix
    prefix_len = len( prefix )
    rval = {}
    for key, value in kwargs.iteritems():
        if key.startswith( prefix ):
            key = key[ prefix_len: ]
            if key in conversions:
                value = conversions[ key ]( value )
            rval[ key ] = value
    return rval
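
# A minimal usage sketch with hypothetical values: keys carrying the prefix
# are stripped of it and converted, everything else is ignored.
#
#   get_database_engine_options( { 'database_engine_option_pool_size': '10',
#                                  'database_engine_option_echo': 'True',
#                                  'unrelated_key': 'x' } )
#   # -> { 'pool_size': 10, 'echo': True }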


def configure_logging( config ):
    """
    Allow some basic logging configuration to be read from ini file.
    """
    # Get root logger
    root = logging.getLogger()
    # PasteScript will have already configured the logger if the
    # 'loggers' section was found in the config file, otherwise we do
    # some simple setup using the 'log_*' values from the config.
    if not config.global_conf_parser.has_section( "loggers" ):
        format = config.get( "log_format", "%(name)s %(levelname)s %(asctime)s %(message)s" )
        level = logging._levelNames[ config.get( "log_level", "DEBUG" ) ]
        destination = config.get( "log_destination", "stdout" )
        log.info( "Logging at '%s' level to '%s'" % ( level, destination ) )
        # Set level
        root.setLevel( level )
        # Turn down paste httpserver logging
        if level <= logging.DEBUG:
            logging.getLogger( "paste.httpserver.ThreadPool" ).setLevel( logging.WARN )
        # Remove old handlers
        for h in root.handlers[:]:
            root.removeHandler( h )
        # Create handler
        if destination == "stdout":
            handler = logging.StreamHandler( sys.stdout )
        else:
            handler = logging.FileHandler( destination )
        # Create formatter
        formatter = logging.Formatter( format )
        # Hook everything up
        handler.setFormatter( formatter )
        root.addHandler( handler )
    # If sentry is configured, also log to it
    if config.sentry_dsn:
        pkg_resources.require( "raven" )
        from raven.handlers.logging import SentryHandler
        sentry_handler = SentryHandler( config.sentry_dsn )
        sentry_handler.setLevel( logging.WARN )
        root.addHandler( sentry_handler )
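
# For reference, the ini values configure_logging consumes when PasteScript
# has not already installed a 'loggers' section look like this (illustrative
# values; these also happen to be the defaults used above):
#
#   log_format = %(name)s %(levelname)s %(asctime)s %(message)s
#   log_level = DEBUG
#   log_destination = stdout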


class ConfiguresGalaxyMixin:
    """ Shared code for configuring Galaxy-like app objects.
    """

    def _configure_toolbox( self ):
        # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
        tool_configs = self.config.tool_configs
        if self.config.migrated_tools_config not in tool_configs:
            tool_configs.append( self.config.migrated_tools_config )
        from galaxy import tools
        self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
        # Search support for tools
        import galaxy.tools.search
        self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )

    def _configure_tool_data_tables( self, from_shed_config ):
        from galaxy.tools.data import ToolDataTableManager

        # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
        self.tool_data_tables = ToolDataTableManager( tool_data_path=self.config.tool_data_path,
                                                      config_filename=self.config.tool_data_table_config_path )
        # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
        self.tool_data_tables.load_from_config_file( config_filename=self.config.shed_tool_data_table_config,
                                                     tool_data_path=self.tool_data_tables.tool_data_path,
                                                     from_shed_config=from_shed_config )

    def _configure_datatypes_registry( self, installed_repository_manager=None ):
        from galaxy.datatypes import registry
        # Create an empty datatypes registry.
        self.datatypes_registry = registry.Registry()
        if installed_repository_manager:
            # Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed repositories.  We
            # load proprietary datatypes before datatypes in the distribution because Galaxy's default sniffers include some
            # generic sniffers (e.g. text, xml) which catch anything, so it's impossible for proprietary sniffers to be used.
            # However, if there is a conflict (2 datatypes with the same extension) between a proprietary datatype and a datatype
            # in the Galaxy distribution, the datatype in the Galaxy distribution will take precedence.  If there is a conflict
            # between 2 proprietary datatypes, the datatype from the repository that was installed earliest will take precedence.
            installed_repository_manager.load_proprietary_datatypes()
        # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
        self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )

    def _configure_object_store( self, **kwds ):
        from galaxy.objectstore import build_object_store_from_config
        self.object_store = build_object_store_from_config( self.config, **kwds )

    def _configure_security( self ):
        from galaxy.web import security
        self.security = security.SecurityHelper( id_secret=self.config.id_secret )

    def _configure_tool_shed_registry( self ):
        import tool_shed.tool_shed_registry

        # Set up the tool sheds registry
        if os.path.isfile( self.config.tool_sheds_config ):
            self.tool_shed_registry = tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
        else:
            self.tool_shed_registry = None

    def _configure_models( self, check_migrate_databases=False, check_migrate_tools=False, config_file=None ):
        """
        Preconditions: object_store must be set on self.
        """
        if self.config.database_connection:
            db_url = self.config.database_connection
        else:
            db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
        install_db_url = self.config.install_database_connection
        # TODO: Consider more aggressive check here that this is not the same
        # database file under the hood.
        combined_install_database = not( install_db_url and install_db_url != db_url )
        install_db_url = install_db_url or db_url
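        # combined_install_database is True when no separate install database is
        # configured (install_database_connection unset or identical to the
        # Galaxy database URL); in that case the tool shed install models live
        # in the Galaxy database itself.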

        if check_migrate_databases:
            # Initialize database / check for appropriate schema version.  If this
            # is a new installation, we'll restrict the tool migration messaging.
            from galaxy.model.migrate.check import create_or_verify_database
            create_or_verify_database( db_url, config_file, self.config.database_engine_options, app=self )
            if not combined_install_database:
                from galaxy.model.tool_shed_install.migrate.check import create_or_verify_database as tsi_create_or_verify_database
                tsi_create_or_verify_database( install_db_url, self.config.install_database_engine_options, app=self )

        if check_migrate_tools:
            # Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
            from tool_shed.galaxy_install.migrate.check import verify_tools
            if combined_install_database:
                install_database_options = self.config.database_engine_options
            else:
                install_database_options = self.config.install_database_engine_options
            verify_tools( self, install_db_url, config_file, install_database_options )

        from galaxy.model import mapping
        self.model = mapping.init( self.config.file_path,
                                   db_url,
                                   self.config.database_engine_options,
                                   map_install_models=combined_install_database,
                                   database_query_profiling_proxy=self.config.database_query_profiling_proxy,
                                   object_store=self.object_store,
                                   trace_logger=getattr( self, "trace_logger", None ),
                                   use_pbkdf2=self.config.get_bool( 'use_pbkdf2', True ) )

        if combined_install_database:
            log.info( "Install database targeting Galaxy's database configuration." )
            self.install_model = self.model
        else:
            from galaxy.model.tool_shed_install import mapping as install_mapping
            install_db_url = self.config.install_database_connection
            log.info( "Install database using its own connection %s" % install_db_url )
            install_db_engine_options = self.config.install_database_engine_options
            self.install_model = install_mapping.init( install_db_url,
                                                       install_db_engine_options )