Merge branch 'dev' into ui_pref_routing
guerler committed Mar 1, 2017
2 parents 8a8603b + 0b4e52c commit cc6fa6a
Showing 20 changed files with 360 additions and 77 deletions.
1 change: 1 addition & 0 deletions .ci/flake8_blacklist.txt
@@ -5,3 +5,4 @@ database/
doc/source/conf.py
eggs/
lib/galaxy/util/jstree.py
lib/galaxy/web/proxy/js/node_modules/
12 changes: 0 additions & 12 deletions client/galaxy/scripts/mvc/grid/grid-view.js
@@ -155,18 +155,6 @@ return Backbone.View.extend({
});
});

// Initialize autocomplete for text inputs in search UI.
var t1 = this.$el.find('#input-tags-filter');
if (t1.length) {
t1.autocomplete(this.grid.history_tag_autocomplete_url,
{ selectFirst: false, autoFill: false, highlight: false, mustMatch: false });
}
var t2 = this.$el.find('#input-name-filter');
if (t2.length) {
t2.autocomplete(this.grid.history_name_autocomplete_url,
{ selectFirst: false, autoFill: false, highlight: false, mustMatch: false });
}

// Initialize standard, advanced search toggles.
this.$el.find('.advanced-search-toggle').each( function() {
$(this).off();
1 change: 1 addition & 0 deletions config/datatypes_conf.xml.sample
@@ -218,6 +218,7 @@
<datatype extension="mgf" type="galaxy.datatypes.proteomics:Mgf" display_in_upload="true" />
<datatype extension="wiff" type="galaxy.datatypes.proteomics:Wiff" display_in_upload="true" />
<datatype extension="mzxml" type="galaxy.datatypes.proteomics:MzXML" mimetype="application/xml" display_in_upload="true" />
<datatype extension="mzdata" type="galaxy.datatypes.proteomics:MzData" mimetype="application/xml" display_in_upload="true" />
<datatype extension="ms2" type="galaxy.datatypes.proteomics:Ms2" display_in_upload="true" />
<datatype extension="mzq" type="galaxy.datatypes.proteomics:MzQuantML" mimetype="application/xml" display_in_upload="true" />
<datatype extension="mz.sqlite" type="galaxy.datatypes.binary:MzSQlite" mimetype="application/octet-stream" display_in_upload="true" />
@@ -6,7 +6,7 @@
# appropriate `apt-get/pip install` statements.
---
-
image: bgruening/docker-jupyter-notebook:16.01
image: bgruening/docker-jupyter-notebook:16.01.1
description: |
The Jupyter notebook is the next iteration of IPython, allowing
analysis in many different languages. This image features the Python,
10 changes: 6 additions & 4 deletions doc/source/releases/17.01_announce.rst
@@ -29,15 +29,17 @@ Highlights
Thanks to `@abretaud <https://github.com/abretaud>`__, `@ashvark <https://github.com/ashvark>`__, `@jvolkening <https://github.com/jvolkening>`__, and `@mvdbeek <https://github.com/mvdbeek>`__.
Implemented in `Pull Request 3145`_, `Pull Request 3510`_ and `Pull Request 3514`_.

`Github <https://github.com/galaxyproject/galaxy>`__
===========================================================
Get Galaxy
==========

The code lives at `Github <https://github.com/galaxyproject/galaxy>`__ and you should have `Git <https://git-scm.com/>`__ to obtain it.

New Galaxy repository
To get a new Galaxy repository run:

.. code-block:: shell

    $ git clone -b release_17.01 https://github.com/galaxyproject/galaxy.git

Update of existing Galaxy repository
To update an existing Galaxy repository run:

.. code-block:: shell

    $ git checkout release_17.01 && git pull --ff-only origin release_17.01
36 changes: 16 additions & 20 deletions lib/galaxy/auth/providers/ldap_ad.py
@@ -15,7 +15,7 @@


def _get_subs(d, k, params):
if k not in d:
if k not in d or not d[k]:
raise ConfigurationError("Missing '%s' parameter in LDAP options" % k)
return str(d[k]).format(**params)

@@ -25,17 +25,17 @@ def _parse_ldap_options(ldap, options_unparsed):
if not options_unparsed:
return []

if "=" not in options_unparsed:
log.error("LDAP authenticate: Invalid syntax in <ldap-options>. Syntax should be option1=value1,option2=value2")
return []

ldap_options = []

# Valid options must start with this prefix. See help(ldap)
prefix = "OPT_"

for opt in options_unparsed.split(","):
key, value = opt.split("=")
try:
key, value = opt.split("=")
except ValueError:
log.warning("LDAP authenticate: Invalid syntax '%s' inside <ldap-options> element. Syntax should be option1=value1,option2=value2" % opt)
continue

try:
pair = []
@@ -109,14 +109,18 @@ def authenticate(self, email, username, password, options):
else:
ldap_options = _parse_ldap_options(ldap, ldap_options_raw)

if 'search-fields' in options:
try:
# setup connection
ldap.set_option(ldap.OPT_REFERRALS, 0)
try:
# setup connection
ldap.set_option(ldap.OPT_REFERRALS, 0)

for opt in ldap_options:
ldap.set_option(*opt)
for opt in ldap_options:
ldap.set_option(*opt)
except Exception:
log.exception('LDAP authenticate: set_option exception')
return (failure_mode, '', '')

if 'search-fields' in options:
try:
l = ldap.initialize(_get_subs(options, 'server', params))
l.protocol_version = 3

@@ -155,17 +159,9 @@ def authenticate(self, email, username, password, options):

# bind as user to check their credentials
try:
# setup connection
ldap.set_option(ldap.OPT_REFERRALS, 0)

for opt in ldap_options:
ldap.set_option(*opt)

l = ldap.initialize(_get_subs(options, 'server', params))
l.protocol_version = 3
bind_password = _get_subs(options, 'bind-password', params)
if not bind_password:
raise RuntimeError('LDAP authenticate: empty password')
l.simple_bind_s(_get_subs(
options, 'bind-user', params), bind_password)
try:
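For reference, the reworked _parse_ldap_options above now skips a malformed pair instead of discarding the whole <ldap-options> string. A minimal standalone sketch of that behavior (the option values are made-up examples, not taken from any Galaxy configuration):

import logging

log = logging.getLogger(__name__)

options_unparsed = "OPT_REFERRALS=0,bogus,OPT_X_TLS_REQUIRE_CERT=OPT_X_TLS_ALLOW"

kept = []
for opt in options_unparsed.split(","):
    try:
        key, value = opt.split("=")
    except ValueError:
        # mirrors the log.warning branch above: skip "bogus" and keep going
        log.warning("Invalid syntax '%s', expected option1=value1,option2=value2", opt)
        continue
    kept.append((key, value))

print(kept)  # [('OPT_REFERRALS', '0'), ('OPT_X_TLS_REQUIRE_CERT', 'OPT_X_TLS_ALLOW')]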
8 changes: 8 additions & 0 deletions lib/galaxy/datatypes/proteomics.py
@@ -152,6 +152,14 @@ class MzXML(ProteomicsXml):
root = "mzXML"


class MzData(ProteomicsXml):
"""mzData data"""
edam_format = "format_3245"
file_ext = "mzdata"
blurb = "mzData Mass Spectrometry data"
root = "mzData"


class MzIdentML(ProteomicsXml):
edam_format = "format_3247"
file_ext = "mzid"
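The new MzData class above follows the existing ProteomicsXml pattern, where the root attribute names the document element the datatype is recognized by. A rough standalone sketch of that idea (illustrative only, not the actual ProteomicsXml sniffing code):

def looks_like_mzdata(path, root="mzData"):
    """Return True if the expected root element appears near the top of the file."""
    with open(path, "r", errors="ignore") as handle:
        head = handle.read(2048)
    return ("<%s" % root) in head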
12 changes: 6 additions & 6 deletions lib/galaxy/jobs/__init__.py
@@ -1322,6 +1322,12 @@ def finish(
dataset.info = dataset.info.rstrip() + "\n" + context['stderr'].strip()
dataset.tool_version = self.version_string
dataset.set_size()
if 'uuid' in context:
dataset.dataset.uuid = context['uuid']
# Update (non-library) job output datasets through the object store
if dataset not in job.output_library_datasets:
self.app.object_store.update_from_file(dataset.dataset, create=True)
self._collect_extra_files(dataset.dataset, self.working_directory)
# Handle composite datatypes of auto_primary_file type
if dataset.datatype.composite_type == 'auto_primary_file' and not dataset.has_data():
try:
@@ -1332,12 +1338,6 @@ def finish(
dataset.set_size()
except Exception as e:
log.warning( 'Unable to generate primary composite file automatically for %s: %s', dataset.dataset.id, e )
if 'uuid' in context:
dataset.dataset.uuid = context['uuid']
# Update (non-library) job output datasets through the object store
if dataset not in job.output_library_datasets:
self.app.object_store.update_from_file(dataset.dataset, create=True)
self._collect_extra_files(dataset.dataset, self.working_directory)
if job.states.ERROR == final_job_state:
dataset.blurb = "error"
dataset.mark_unhidden()
2 changes: 1 addition & 1 deletion lib/galaxy/model/orm/engine_factory.py
@@ -29,7 +29,7 @@ def after_cursor_execute(conn, cursor, statement,
parameters, context, executemany):
total = time.time() - conn.info['query_start_time'].pop(-1)
if total > slow_query_log_threshold:
log.debug("Slow query: %f(s) for %s" % (total, statement))
log.debug("Slow query: %f(s)\n%s\nParameters: %s" % (total, statement, parameters))

# Create the database engine
engine = create_engine( url, proxy=proxy, **engine_options )
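The slow-query change above only extends the log message; the timing itself uses the standard SQLAlchemy cursor-event pattern. A self-contained sketch of that pattern (the engine URL and threshold are assumed values):

import logging
import time

from sqlalchemy import create_engine, event

log = logging.getLogger(__name__)
slow_query_log_threshold = 2.0  # seconds; assumed value

engine = create_engine("sqlite://")  # placeholder URL for illustration

@event.listens_for(engine, "before_cursor_execute")
def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
    # push a start time so nested executions each get their own entry
    conn.info.setdefault("query_start_time", []).append(time.time())

@event.listens_for(engine, "after_cursor_execute")
def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
    total = time.time() - conn.info["query_start_time"].pop(-1)
    if total > slow_query_log_threshold:
        # the statement and its bound parameters are now both logged
        log.debug("Slow query: %f(s)\n%s\nParameters: %s" % (total, statement, parameters))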
43 changes: 42 additions & 1 deletion lib/galaxy/queue_worker.py
@@ -9,6 +9,7 @@

import galaxy.queues
from galaxy import util
from galaxy.model.util import pgcalc

from kombu import Connection
from kombu.mixins import ConsumerMixin
@@ -18,7 +19,31 @@
log = logging.getLogger(__name__)


def send_local_control_task(app, task, kwargs={}):
"""
This sends a message to the process-local control worker, which is useful
for one-time asynchronous tasks like recalculating user disk usage.
"""
log.info("Queuing async task %s." % task)
payload = {'task': task,
'kwargs': kwargs}
try:
c = Connection(app.config.amqp_internal_connection)
with producers[c].acquire(block=True) as producer:
producer.publish(payload,
exchange=galaxy.queues.galaxy_exchange,
declare=[galaxy.queues.galaxy_exchange] + [galaxy.queues.control_queue_from_config(app.config)],
routing_key='control')
except Exception:
log.exception("Error queueing async task: %s." % payload)


def send_control_task(app, task, noop_self=False, kwargs={}):
"""
This sends a control task out to all processes, useful for things like
reloading a data table, which needs to happen individually in all
processes.
"""
log.info("Sending %s control task." % task)
payload = {'task': task,
'kwargs': kwargs}
@@ -112,6 +137,21 @@ def reload_sanitize_whitelist(app):
app.config.reload_sanitize_whitelist()


def recalculate_user_disk_usage(app, **kwargs):
user_id = kwargs.get('user_id', None)
sa_session = app.model.context
if user_id:
user = sa_session.query( app.model.User ).get( app.security.decode_id( user_id ) )
if user:
if sa_session.get_bind().dialect.name not in ( 'postgres', 'postgresql' ):
new = user.calculate_disk_usage()
else:
new = pgcalc(sa_session, user.id)
user.set_disk_usage(new)
sa_session.add(user)
sa_session.flush()


def reload_tool_data_tables(app, **kwargs):
params = util.Params(kwargs)
log.debug("Executing tool data table reload for %s" % params.get('table_names', 'all tables'))
@@ -135,7 +175,8 @@ def admin_job_lock(app, **kwargs):
'reload_display_application': reload_display_application,
'reload_tool_data_tables': reload_tool_data_tables,
'admin_job_lock': admin_job_lock,
'reload_sanitize_whitelist': reload_sanitize_whitelist}
'reload_sanitize_whitelist': reload_sanitize_whitelist,
'recalculate_user_disk_usage': recalculate_user_disk_usage}


class GalaxyQueueWorker(ConsumerMixin, threading.Thread):
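A hedged usage sketch for the new task: send_local_control_task queues a one-shot message handled by the local process, while the existing send_control_task broadcasts to every process. The wrapper function and the encoded id below are assumptions; only send_local_control_task and the recalculate_user_disk_usage handler come from this change.

from galaxy.queue_worker import send_local_control_task

def queue_disk_usage_recalculation(trans):
    """Queue a one-time, process-local recalculation for the current user."""
    # user_id is passed encoded because the handler above decodes it with
    # app.security.decode_id before looking the user up.
    send_local_control_task(
        trans.app,
        'recalculate_user_disk_usage',
        kwargs={'user_id': trans.security.encode_id(trans.user.id)},
    )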
14 changes: 7 additions & 7 deletions lib/galaxy/tools/search/__init__.py
@@ -49,8 +49,9 @@ def __init__( self, toolbox, index_help=True ):
self.build_index( index_help )

def build_index( self, index_help=True ):
# Works around https://bitbucket.org/mchaput/whoosh/issues/391/race-conditions-with-temp-storage
"""Prepare search index for tools loaded in toolbox."""
RamStorage.temp_storage = _temp_storage
# Works around https://bitbucket.org/mchaput/whoosh/issues/391/race-conditions-with-temp-storage
self.storage = RamStorage()
self.index = self.storage.create_index( self.schema )
writer = self.index.writer()
@@ -66,16 +67,15 @@ def build_index( self, index_help=True ):
"section": to_unicode( tool.get_panel_section()[1] if len( tool.get_panel_section() ) == 2 else '' ),
"help": to_unicode( "" )
}
# Hyphens are wildcards in Whoosh causing bad things
if tool.name.find( '-' ) != -1:
# Hyphens are wildcards in Whoosh causing bad things
add_doc_kwds['name'] = (' ').join( [ token.text for token in self.rex( to_unicode( tool.name ) ) ] )
else:
add_doc_kwds['name'] = to_unicode( tool.name )
# We do not want to search Tool Shed or version parts
# of the long ids
if id.find( '/' ) != -1:
slash_indexes = [ m.start() for m in re.finditer( '/', id ) ]
id_stub = id[ ( slash_indexes[1] + 1 ): slash_indexes[4] ]
if tool.guid:
# Create a stub consisting of owner, repo, and tool from guid
slash_indexes = [ m.start() for m in re.finditer( '/', tool.guid ) ]
id_stub = tool.guid[ ( slash_indexes[1] + 1 ): slash_indexes[4] ]
add_doc_kwds['stub'] = (' ').join( [ token.text for token in self.rex( to_unicode( id_stub ) ) ] )
else:
add_doc_kwds['stub'] = to_unicode( id )
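The stub slicing above indexes only the owner/repository/tool portion of a Tool Shed guid, dropping the host and version. A small standalone illustration (the guid value is made up):

import re

guid = "toolshed.g2.bx.psu.edu/repos/devteam/bwa/bwa_mem/0.7.15"  # made-up guid
slash_indexes = [m.start() for m in re.finditer('/', guid)]
id_stub = guid[slash_indexes[1] + 1: slash_indexes[4]]
print(id_stub)  # -> devteam/bwa/bwa_mem (owner, repository, tool)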
11 changes: 10 additions & 1 deletion lib/galaxy/visualization/plugins/plugin.py
@@ -274,7 +274,16 @@ def get_api_key():
render_vars[ 'plugin_path' ] = os.path.abspath( self.path )

if self.config.get( 'plugin_type', 'visualization' ) == "interactive_environment":
request = self.INTENV_REQUEST_FACTORY( trans, self )
try:
request = self.INTENV_REQUEST_FACTORY( trans, self )
except:
log.exception("IE plugin request handling failed")
return trans.fill_template( 'message.mako',
message='Loading the interactive environment failed, please contact the {admin_tag} for assistance'.format(
admin_tag='<a href="mailto:{admin_mail}">Galaxy administrator</a>'.format(
admin_mail=trans.app.config.error_email_to)
if trans.app.config.error_email_to else 'Galaxy administrator'),
status='error')
render_vars[ "ie_request" ] = request

template_filename = self.config[ 'entry_point' ][ 'file' ]
15 changes: 0 additions & 15 deletions lib/galaxy/webapps/galaxy/controllers/history.py
@@ -1261,21 +1261,6 @@ def set_accessible_async( self, trans, id=None, accessible=False ):
return
# TODO: used in page/editor.mako

@web.expose
def name_autocomplete_data( self, trans, q=None, limit=None, timestamp=None ):
"""Return autocomplete data for history names"""
user = trans.get_user()
if not user:
return

ac_data = ""
for history in ( trans.sa_session.query( model.History )
.filter_by( user=user )
.filter( func.lower( model.History.name ).like(q.lower() + "%") ) ):
ac_data = ac_data + history.name + "\n"
return ac_data
# TODO: used in grid_base.mako

@web.expose
@web.require_login( "rename histories" )
def rename( self, trans, id=None, name=None, **kwd ):
