Skip to content

Commit

Permalink
Merge branch 'release_17.05'
Browse files Browse the repository at this point in the history
  • Loading branch information
nsoranzo committed Sep 15, 2017
2 parents 4a7045c + 97e8e36 commit 0a39bfc
Show file tree
Hide file tree
Showing 26 changed files with 95 additions and 47 deletions.
Expand Up @@ -279,7 +279,11 @@ var InfinitelyScrollingCollection = ControlledFetchCollection.extend({

// TODO: this fails in the edge case where
// the first fetch offset === limit (limit 4, offset 4, collection.length 4)
options.offset = options.reset? 0 : ( options.offset || collection.lastFetched );
if (options.reset){
options.offset = 0;
} else if (options.offset === undefined){
options.offset = collection.lastFetched;
}
var limit = options.limit = options.limit || collection.limitPerFetch || null;
// console.log( 'fetchMore, limit:', limit, 'offset:', options.offset );

Expand Down
Expand Up @@ -162,7 +162,11 @@ var FolderToolbarView = Backbone.View.extend({
if (this.validate_new_folder(folderDetails)){
var folder = new mod_library_model.FolderAsModel();
url_items = Backbone.history.fragment.split('/');
current_folder_id = url_items[url_items.length-1];
if(url_items.indexOf('page') > -1){
current_folder_id = url_items[url_items.length-3];
}else {
current_folder_id = url_items[url_items.length-1];
}
folder.url = folder.urlRoot + current_folder_id ;

folder.save(folderDetails, {
Expand Down
1 change: 1 addition & 0 deletions client/galaxy/scripts/mvc/ui/ui-select-content.js
Expand Up @@ -301,6 +301,7 @@ var View = Backbone.View.extend({
} catch( e ) {
this._handleDropStatus( 'danger' );
}
ev.preventDefault();
},

/** Highlight drag result */
Expand Down
4 changes: 4 additions & 0 deletions config/datatypes_conf.xml.sample
Expand Up @@ -548,8 +548,12 @@
</datatype>
<datatype extension="fps" type="galaxy.datatypes.molecules:FPS" mimetype="text/html" display_in_upload="true" />
<datatype extension="obfs" type="galaxy.datatypes.molecules:OBFS" mimetype="text/html" display_in_upload="true" />
<datatype extension="drf" type="galaxy.datatypes.molecules:DRF" display_in_upload="true" />
<datatype extension="phar" type="galaxy.datatypes.molecules:PHAR" display_in_upload="false" />
<datatype extension="pdb" type="galaxy.datatypes.molecules:PDB" display_in_upload="true" />
<datatype extension="pdbqt" type="galaxy.datatypes.molecules:PDBQT" display_in_upload="true" />
<datatype extension="grd" type="galaxy.datatypes.molecules:grd" display_in_upload="true" />
<datatype extension="grd.tgz" type="galaxy.datatypes.molecules:grdtgz" display_in_upload="true" />
<!-- mothur formats -->
<datatype extension="mothur.otu" type="galaxy.datatypes.mothur:Otu" display_in_upload="true"/>
<datatype extension="mothur.list" type="galaxy.datatypes.mothur:Otu" subclass="true" display_in_upload="true"/>
Expand Down
2 changes: 1 addition & 1 deletion create_db.sh
@@ -1,5 +1,6 @@
#!/bin/sh

# Run Galaxy's database-creation script from the repository root,
# using the virtualenv in $GALAXY_VIRTUAL_ENV (default: .venv) if present.

# Quote dirname/$0 so the script works from a checkout path containing spaces.
cd "`dirname "$0"`"

: ${GALAXY_VIRTUAL_ENV:=.venv}

if [ -d "$GALAXY_VIRTUAL_ENV" ];
then
    . "$GALAXY_VIRTUAL_ENV/bin/activate"
fi

# "$@" (quoted) forwards each argument intact; bare $@ re-splits on whitespace.
python ./scripts/create_db.py "$@"
8 changes: 8 additions & 0 deletions extract_dataset_parts.sh
@@ -1,6 +1,14 @@
#!/bin/sh

cd `dirname $0`
: ${GALAXY_VIRTUAL_ENV:=.venv}

if [ -d "$GALAXY_VIRTUAL_ENV" ];
then
printf "Activating virtualenv at $GALAXY_VIRTUAL_ENV\n"
. "$GALAXY_VIRTUAL_ENV/bin/activate"
fi

for file in $1/split_info*.json
do
# echo processing $file
Expand Down
30 changes: 18 additions & 12 deletions lib/galaxy/datatypes/data.py
Expand Up @@ -347,7 +347,7 @@ def display_data(self, trans, data, preview=False, filename=None, to_ext=None, *
except:
mime = "text/plain"
self._clean_and_set_mime_type( trans, mime )
return open( file_path )
return self._yield_user_file_content( trans, data, file_path )
else:
return paste.httpexceptions.HTTPNotFound( "Could not find '%s' on the extra files path %s." % ( filename, file_path ) )
self._clean_and_set_mime_type( trans, data.get_mime() )
Expand All @@ -370,23 +370,29 @@ def display_data(self, trans, data, preview=False, filename=None, to_ext=None, *
max_peek_size = 10000000 # 10 MB for html
preview = util.string_as_bool( preview )
if not preview or isinstance(data.datatype, datatypes.images.Image) or os.stat( data.file_name ).st_size < max_peek_size:
if trans.app.config.sanitize_all_html and trans.response.get_content_type() == "text/html":
# Sanitize anytime we respond with plain text/html content.
# Check to see if this dataset's parent job is whitelisted
# We cannot currently trust imported datasets for rendering.
if not data.creating_job.imported and data.creating_job.tool_id in trans.app.config.sanitize_whitelist:
return open(data.file_name).read()
# This is returning to the browser, it needs to be encoded.
# TODO Ideally this happens a layer higher, but this is a bad
# issue affecting many tools
return sanitize_html(open( data.file_name ).read()).encode('utf-8')
return open( data.file_name )
return self._yield_user_file_content(trans, data, data.file_name)
else:
trans.response.set_content_type( "text/html" )
return trans.stream_template_mako( "/dataset/large_file.mako",
truncated_data=open( data.file_name ).read(max_peek_size),
data=data)

def _yield_user_file_content(self, trans, from_dataset, filename):
"""This method is responsible for sanitizing the HTML if needed."""
if trans.app.config.sanitize_all_html and trans.response.get_content_type() == "text/html":
# Sanitize anytime we respond with plain text/html content.
# Check to see if this dataset's parent job is whitelisted
# We cannot currently trust imported datasets for rendering.
if not from_dataset.creating_job.imported and from_dataset.creating_job.tool_id in trans.app.config.sanitize_whitelist:
return open(filename)

# This is returning to the browser, it needs to be encoded.
# TODO Ideally this happens a layer higher, but this is a bad
# issue affecting many tools
return sanitize_html(open(filename).read()).encode('utf-8')

return open(filename)

def _download_filename(self, dataset, to_ext, hdca=None, element_identifier=None):
def escape(raw_identifier):
return ''.join(c in FILENAME_VALID_CHARS and c or '_' for c in raw_identifier)[0:150]
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/datatypes/molecules.py
Expand Up @@ -350,7 +350,7 @@ def merge(split_files, output_file):

class OBFS(Binary):
"""OpenBabel Fastsearch format (fs)."""
file_ext = 'fs'
file_ext = 'obfs'
composite_type = 'basic'
allow_datatype_change = False

Expand Down
24 changes: 19 additions & 5 deletions lib/galaxy/model/tool_shed_install/__init__.py
@@ -1,10 +1,13 @@
import logging
import os

from urlparse import urljoin
from sqlalchemy.orm.exc import DetachedInstanceError

from galaxy.util.dictifiable import Dictifiable
from galaxy.util.bunch import Bunch
from galaxy.util import asbool
from tool_shed.util import common_util
from urlparse import urljoin

log = logging.getLogger( __name__ )

Expand Down Expand Up @@ -624,10 +627,21 @@ def __descendants( app, tool_version ):
tool_versions.append( next_version )
__descendants( app, next_version )

__ancestors( app, self )
if self not in tool_versions:
tool_versions.append( self )
__descendants( app, self )
try:
__ancestors( app, self )
if self not in tool_versions:
tool_versions.append( self )
__descendants( app, self )
except DetachedInstanceError:
# This can happen when loading a tool while referencing
# an outdated tool version cache, so we build a new cache
tool_versions = []
from galaxy.tools.toolbox.lineages.tool_shed import ToolVersionCache
app.tool_version_cache = ToolVersionCache( app )
__ancestors( app, self )
if self not in tool_versions:
tool_versions.append( self )
__descendants( app, self )
return tool_versions

def get_version_ids( self, app, reverse=False ):
Expand Down
4 changes: 2 additions & 2 deletions lib/galaxy/tools/deps/installable.py
Expand Up @@ -69,9 +69,9 @@ def _check():

try:
if auto_init and os.access(parent_path, os.W_OK):
with FileLock(os.path.join(parent_path, desc.lower())):
with FileLock(os.path.join(parent_path, desc.lower()), timeout=300):
return _check()
else:
return _check()
except FileLockException:
return ensure_installed(installable_context, auto_init)
raise Exception("Failed to get file lock for %s" % os.path.join(parent_path, desc.lower()))
2 changes: 2 additions & 0 deletions lib/galaxy/web/framework/base.py
Expand Up @@ -427,6 +427,8 @@ def send_redirect( self, url ):
"""
Send an HTTP redirect response to (target `url`)
"""
if "\n" in url or "\r" in url:
raise httpexceptions.HTTPInternalServerError("Invalid redirect URL encountered.")
raise httpexceptions.HTTPFound( url.encode('utf-8'), headers=self.wsgi_headeritems() )

def wsgi_headeritems( self ):
Expand Down
14 changes: 6 additions & 8 deletions lib/galaxy/webhooks/__init__.py
Expand Up @@ -54,14 +54,12 @@ def load_webhooks(self):
log.warning('directory not found: %s', config_dir)
continue

config_file = os.listdir(config_dir)[0]
config_file = config_file \
if config_file.endswith('.yml') \
or config_file.endswith('.yaml') \
else ''

if config_file:
self.load_webhook_from_config(config_dir, config_file)
config_dir_contents = os.listdir(config_dir)
# We are assuming that all yml/yaml files in a webhooks'
# config directory are webhook config files.
for config_file in config_dir_contents:
if config_file.endswith('.yml') or config_file.endswith('.yaml'):
self.load_webhook_from_config(config_dir, config_file)

def load_webhook_from_config(self, config_dir, config_file):
try:
Expand Down
12 changes: 8 additions & 4 deletions lib/galaxy/workflow/modules.py
Expand Up @@ -560,7 +560,9 @@ def from_dict( Class, trans, d, exact_tools=False, **kwds ):
tool_id = d.get( 'content_id' ) or d.get( 'tool_id' )
if tool_id is None:
raise exceptions.RequestParameterInvalidException( "No tool id could be located for step [%s]." % d )
tool_version = str( d.get( 'tool_version' ) )
tool_version = d.get( 'tool_version' )
if tool_version:
tool_version = str(tool_version)
module = super( ToolModule, Class ).from_dict( trans, d, tool_id=tool_id, tool_version=tool_version, exact_tools=exact_tools )
module.post_job_actions = d.get( 'post_job_actions', {} )
module.workflow_outputs = d.get( 'workflow_outputs', [] )
Expand Down Expand Up @@ -1060,14 +1062,16 @@ def inject( self, step, step_args=None, steps=None ):
# Populate module.
module = step.module = module_factory.from_workflow_step( self.trans, step )

# Fix any missing parameters
step.upgrade_messages = module.check_and_update_state()

# Any connected input needs to have value DummyDataset (these
# are not persisted so we need to do it every time)
module.add_dummy_datasets( connections=step.input_connections, steps=steps )
state, step_errors = module.compute_runtime_state( self.trans, step_args )
step.state = state

# Fix any missing parameters
step.upgrade_messages = module.check_and_update_state()

# Populate subworkflow components
if step.type == "subworkflow":
subworkflow = step.subworkflow
populate_module_and_state( self.trans, subworkflow, param_map={}, )
Expand Down
2 changes: 1 addition & 1 deletion manage_db.sh
Expand Up @@ -5,6 +5,7 @@
# sh manage_db.sh downgrade --version=3 <tool_shed if using that webapp - galaxy is the default>
#######

# Quote dirname/$0 so the script works from a checkout path containing spaces.
cd "`dirname "$0"`"

: ${GALAXY_VIRTUAL_ENV:=.venv}

if [ -d "$GALAXY_VIRTUAL_ENV" ];
then
    . "$GALAXY_VIRTUAL_ENV/bin/activate"
fi

# "$@" (quoted) forwards each argument intact; bare $@ re-splits on whitespace.
python ./scripts/manage_db.py "$@"
2 changes: 1 addition & 1 deletion manage_tools.sh
@@ -1,5 +1,6 @@
#!/bin/sh

# Run Galaxy's tool-migration database script from the repository root,
# using the virtualenv in $GALAXY_VIRTUAL_ENV (default: .venv) if present.

# Quote dirname/$0 so the script works from a checkout path containing spaces.
cd "`dirname "$0"`"

: ${GALAXY_VIRTUAL_ENV:=.venv}

if [ -d "$GALAXY_VIRTUAL_ENV" ];
then
    . "$GALAXY_VIRTUAL_ENV/bin/activate"
fi

# "$@" (quoted) forwards each argument intact; bare $@ re-splits on whitespace.
python ./scripts/manage_tools.py "$@"

0 comments on commit 0a39bfc

Please sign in to comment.