
Commit
Merge branch 'dev' into replace_history_import
guerler committed Nov 13, 2017
2 parents 4fd3021 + c83a826 commit cf66742
Showing 79 changed files with 723 additions and 6,487 deletions.
18 changes: 15 additions & 3 deletions .ci/jenkins/selenium/run_tests.sh
@@ -2,10 +2,22 @@

 # Enable retries on tests to reduce chances of transient failures.
 : ${GALAXY_TEST_SELENIUM_RETRIES:=1}
-: ${GALAXY_TEST_ERRORS_DIRECTORY:=database/test-errors}
+
+# If in Jenkins environment, use it for artifacts.
+if [ -n "$BUILD_NUMBER" ];
+then
+    : ${GALAXY_TEST_ERRORS_DIRECTORY:=${BUILD_NUMBER}-test-errors}
+    : ${GALAXY_TEST_SCREENSHOTS_DIRECTORY:=${BUILD_NUMBER}-test-screenshots}
+else
+    : ${GALAXY_TEST_ERRORS_DIRECTORY:=database/test-errors}
+    : ${GALAXY_TEST_SCREENSHOTS_DIRECTORY:=database/test-screenshots}
+fi

 mkdir -p "$GALAXY_TEST_ERRORS_DIRECTORY"
+mkdir -p "$GALAXY_TEST_SCREENSHOTS_DIRECTORY"

 # Start Selenium server in the test Docker container.
-DOCKER_RUN_EXTRA_ARGS="-e USE_SELENIUM=1 -e GALAXY_TEST_SELENIUM_RETRIES=${GALAXY_TEST_SELENIUM_RETRIES} -e GALAXY_TEST_ERRORS_DIRECTORY=${GALAXY_TEST_ERRORS_DIRECTORY} ${DOCKER_RUN_EXTRA_ARGS}"
+DOCKER_RUN_EXTRA_ARGS="-e USE_SELENIUM=1 -e GALAXY_TEST_SELENIUM_RETRIES=${GALAXY_TEST_SELENIUM_RETRIES} -e GALAXY_TEST_ERRORS_DIRECTORY=${GALAXY_TEST_ERRORS_DIRECTORY} -e GALAXY_TEST_SCREENSHOTS_DIRECTORY=${GALAXY_TEST_SCREENSHOTS_DIRECTORY} ${DOCKER_RUN_EXTRA_ARGS}"
 export DOCKER_RUN_EXTRA_ARGS

-./run_tests.sh --dockerize --db postgres --external_tmp --clean_pyc --selenium "$@"
+./run_tests.sh --dockerize --db postgres --external_tmp --clean_pyc --skip_flakey_fails --selenium "$@"
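
The `: ${VAR:=default}` lines assign a fallback only when the variable is unset, so values exported by Jenkins take precedence over the script's defaults. A rough Python sketch of the resulting directory selection, for illustration only (artifact_dirs is a hypothetical helper, not part of this commit):

    import os

    def artifact_dirs(env=os.environ):
        # Prefer Jenkins BUILD_NUMBER-based paths; otherwise fall back to database/
        # paths. Explicitly exported values always win, as with : ${VAR:=default}.
        build = env.get("BUILD_NUMBER")
        if build:
            errors = env.get("GALAXY_TEST_ERRORS_DIRECTORY", "%s-test-errors" % build)
            screenshots = env.get("GALAXY_TEST_SCREENSHOTS_DIRECTORY", "%s-test-screenshots" % build)
        else:
            errors = env.get("GALAXY_TEST_ERRORS_DIRECTORY", "database/test-errors")
            screenshots = env.get("GALAXY_TEST_SCREENSHOTS_DIRECTORY", "database/test-screenshots")
        return errors, screenshots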
4 changes: 0 additions & 4 deletions client/galaxy/scripts/apps/panels/admin-panel.js
@@ -24,10 +24,6 @@ var AdminPanel = Backbone.View.extend({
             title: "Data tables",
             url: "admin/view_tool_data_tables"
         },
-        {
-            title: "Data libraries",
-            url: "library_admin/browse_libraries"
-        },
         {
             title: "Display applications",
             url: "admin/display_applications"
5 changes: 5 additions & 0 deletions client/galaxy/scripts/utils/ajax-queue.js
@@ -129,6 +129,11 @@ class AjaxQueue {
  *      fn: the deferring fn or ajax call }
  */
 class NamedAjaxQueue extends AjaxQueue {
+    constructor(initialFunctions) {
+        super(initialFunctions);
+        this.names = {};
+    }
+
     /** add the obj.fn to the queue if obj.name hasn't been used before */
     add(obj) {
         if (!(obj.hasOwnProperty("name") && obj.hasOwnProperty("fn"))) {
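
The added constructor gives each instance its own `names` registry before `add` consults it. The same run-once-per-name pattern, sketched in Python for illustration (NamedQueue is hypothetical, not Galaxy code):

    class NamedQueue(object):
        """Queue callables, accepting each registered name at most once."""

        def __init__(self):
            self.names = {}
            self.queue = []

        def add(self, name, fn):
            if name in self.names:
                return False  # duplicate name: skipped, as in NamedAjaxQueue.add
            self.names[name] = True
            self.queue.append(fn)
            return True

        def run(self):
            return [fn() for fn in self.queue]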
13 changes: 0 additions & 13 deletions config/galaxy.ini.sample
@@ -951,19 +951,6 @@ use_interactive = True
 # communicate with this manager over the port specified here.
 #transfer_manager_port = 8163

-# Search data libraries with whoosh
-#enable_whoosh_library_search = True
-# Whoosh indexes are stored in this directory.
-#whoosh_index_dir = database/whoosh_indexes
-
-# Search data libraries with lucene
-#enable_lucene_library_search = False
-# maximum file size to index for searching, in MB
-#fulltext_max_size = 500
-#fulltext_noindex_filetypes = bam,sam,wig,bigwig,fasta,fastq,fastqsolexa,fastqillumina,fastqsanger
-# base URL of server providing search functionality using lucene
-#fulltext_url = http://localhost:8081
-
 # -- Toolbox Search

 # The following boosts are used to customize this instance's toolbox search.
2 changes: 1 addition & 1 deletion doc/source/releases/17.09_announce.rst
@@ -143,7 +143,7 @@ Disclosed on the `mailing list <https://lists.galaxyproject.org/pipermail/galaxy

 Vulnerabilities were found by Eric Rasche and Manabu Ishii respectively. Detailed descriptions of these categories of vulnerabilities can be found at:

-- https://www.owasp.org/index.php/Cross-site_Scripting_(XSS)
+- `<https://www.owasp.org/index.php/Cross-site_Scripting_(XSS)>`__
 - https://www.owasp.org/index.php/Session_fixation

 The fix for these issues has been applied to Galaxy releases back to 16.10 and can be found in this `diff <https://gist.githubusercontent.com/jmchilton/760bf8ba6055b9a47a48529fcc49a493/raw/01bc98e5a8067a435f38d7cf4fda4e304c4425a2/2017augsecurity_1610.patch>`__
264 changes: 264 additions & 0 deletions lib/galaxy/actions/library.py
@@ -0,0 +1,264 @@
"""
Contains library functions
"""
import json
import logging
import os.path
from markupsafe import escape
from galaxy import util
from galaxy.tools.actions import upload_common
from galaxy.tools.parameters import populate_state
from galaxy.util.path import (
safe_contains,
safe_relpath,
unsafe_walk
)

log = logging.getLogger(__name__)


class LibraryActions(object):
"""
Mixin for controllers that provide library functionality.
"""

def _upload_dataset(self, trans, library_id, folder_id, replace_dataset=None, **kwd):
# Set up the traditional tool state/params
cntrller = 'api'
tool_id = 'upload1'
message = None
tool = trans.app.toolbox.get_tool(tool_id)
state = tool.new_state(trans)
populate_state(trans, tool.inputs, kwd, state.inputs)
tool_params = state.inputs
dataset_upload_inputs = []
for input_name, input in tool.inputs.items():
if input.type == "upload_dataset":
dataset_upload_inputs.append(input)
# Library-specific params
server_dir = kwd.get('server_dir', '')
upload_option = kwd.get('upload_option', 'upload_file')
response_code = 200
if upload_option == 'upload_directory':
if server_dir in [None, 'None', '']:
response_code = 400
if trans.user_is_admin():
import_dir = trans.app.config.library_import_dir
import_dir_desc = 'library_import_dir'
else:
import_dir = trans.app.config.user_library_import_dir
if server_dir != trans.user.email:
import_dir = os.path.join(import_dir, trans.user.email)
import_dir_desc = 'user_library_import_dir'
full_dir = os.path.join(import_dir, server_dir)
unsafe = None
if safe_relpath(server_dir):
if import_dir_desc == 'user_library_import_dir' and safe_contains(import_dir, full_dir, whitelist=trans.app.config.user_library_import_symlink_whitelist):
for unsafe in unsafe_walk(full_dir, whitelist=[import_dir] + trans.app.config.user_library_import_symlink_whitelist):
log.error('User attempted to import a path that resolves to a path outside of their import dir: %s -> %s', unsafe, os.path.realpath(unsafe))
else:
log.error('User attempted to import a directory path that resolves to a path outside of their import dir: %s -> %s', server_dir, os.path.realpath(full_dir))
unsafe = True
if unsafe:
response_code = 403
message = 'Invalid server_dir'
if import_dir:
message = 'Select a directory'
else:
response_code = 403
message = '"%s" is not defined in the Galaxy configuration file' % import_dir_desc
elif upload_option == 'upload_paths':
if not trans.app.config.allow_library_path_paste:
response_code = 403
message = '"allow_library_path_paste" is not defined in the Galaxy configuration file'
# Some error handling should be added to this method.
try:
# FIXME: instead of passing params here ( which have been processed by util.Params(), the original kwd
# should be passed so that complex objects that may have been included in the initial request remain.
library_bunch = upload_common.handle_library_params(trans, kwd, folder_id, replace_dataset)
except Exception:
response_code = 500
message = "Unable to parse upload parameters, please report this error."
# Proceed with (mostly) regular upload processing if we're still errorless
if response_code == 200:
precreated_datasets = upload_common.get_precreated_datasets(trans, tool_params, trans.app.model.LibraryDatasetDatasetAssociation, controller=cntrller)
if upload_option == 'upload_file':
tool_params = upload_common.persist_uploads(tool_params, trans)
uploaded_datasets = upload_common.get_uploaded_datasets(trans, cntrller, tool_params, precreated_datasets, dataset_upload_inputs, library_bunch=library_bunch)
elif upload_option == 'upload_directory':
uploaded_datasets, response_code, message = self._get_server_dir_uploaded_datasets(trans, kwd, full_dir, import_dir_desc, library_bunch, response_code, message)
elif upload_option == 'upload_paths':
uploaded_datasets, response_code, message = self._get_path_paste_uploaded_datasets(trans, kwd, library_bunch, response_code, message)
upload_common.cleanup_unused_precreated_datasets(precreated_datasets)
if upload_option == 'upload_file' and not uploaded_datasets:
response_code = 400
message = 'Select a file, enter a URL or enter text'
if response_code != 200:
return (response_code, message)
json_file_path = upload_common.create_paramfile(trans, uploaded_datasets)
data_list = [ud.data for ud in uploaded_datasets]
job_params = {}
job_params['link_data_only'] = json.dumps(kwd.get('link_data_only', 'copy_files'))
job_params['uuid'] = json.dumps(kwd.get('uuid', None))
job, output = upload_common.create_job(trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params)
trans.sa_session.add(job)
trans.sa_session.flush()
return output

def _get_server_dir_uploaded_datasets(self, trans, params, full_dir, import_dir_desc, library_bunch, response_code, message):
dir_response = self._get_server_dir_files(params, full_dir, import_dir_desc)
files = dir_response[0]
if not files:
return dir_response
uploaded_datasets = []
for file in files:
name = os.path.basename(file)
uploaded_datasets.append(self._make_library_uploaded_dataset(trans, params, name, file, 'server_dir', library_bunch))
return uploaded_datasets, 200, None

def _get_path_paste_uploaded_datasets(self, trans, params, library_bunch, response_code, message):
preserve_dirs = util.string_as_bool(params.get('preserve_dirs', False))
uploaded_datasets = []
(files_and_folders, _response_code, _message) = self._get_path_files_and_folders(params, preserve_dirs)
if _response_code:
return (uploaded_datasets, _response_code, _message)
for (path, name, folder) in files_and_folders:
uploaded_datasets.append(self._make_library_uploaded_dataset(trans, params, name, path, 'path_paste', library_bunch, folder))
return uploaded_datasets, 200, None

def _get_path_files_and_folders(self, params, preserve_dirs):
problem_response = self._check_path_paste_params(params)
if problem_response:
return problem_response
files_and_folders = []
for (line, path) in self._paths_list(params):
line_files_and_folders = self._get_single_path_files_and_folders(line, path, preserve_dirs)
files_and_folders.extend(line_files_and_folders)
return files_and_folders, None, None

def _get_single_path_files_and_folders(self, line, path, preserve_dirs):
files_and_folders = []
if os.path.isfile(path):
name = os.path.basename(path)
files_and_folders.append((path, name, None))
for basedir, dirs, files in os.walk(line):
for file in files:
file_path = os.path.abspath(os.path.join(basedir, file))
if preserve_dirs:
in_folder = os.path.dirname(file_path.replace(path, '', 1).lstrip('/'))
else:
in_folder = None
files_and_folders.append((file_path, file, in_folder))
return files_and_folders

def _paths_list(self, params):
return [(l.strip(), os.path.abspath(l.strip())) for l in params.get('filesystem_paths', '').splitlines() if l.strip()]

def _check_path_paste_params(self, params):
if params.get('filesystem_paths', '') == '':
message = "No paths entered in the upload form"
response_code = 400
return None, response_code, message
bad_paths = []
for (_, path) in self._paths_list(params):
if not os.path.exists(path):
bad_paths.append(path)
if bad_paths:
message = 'Invalid paths: "%s".' % '", "'.join(bad_paths)
response_code = 400
return None, response_code, message
return None

def _make_library_uploaded_dataset(self, trans, params, name, path, type, library_bunch, in_folder=None):
link_data_only = params.get('link_data_only', 'copy_files')
uuid_str = params.get('uuid', None)
file_type = params.get('file_type', None)
library_bunch.replace_dataset = None # not valid for these types of upload
uploaded_dataset = util.bunch.Bunch()
new_name = name
# Remove compressed file extensions, if any, but only if
# we're copying files into Galaxy's file space.
        if link_data_only == 'copy_files':
            if new_name.endswith('.gz'):
                new_name = new_name[:-len('.gz')]
            elif new_name.endswith('.zip'):
                new_name = new_name[:-len('.zip')]
        uploaded_dataset.name = new_name
        uploaded_dataset.path = path
        uploaded_dataset.type = type
        uploaded_dataset.ext = None
        uploaded_dataset.file_type = file_type
        uploaded_dataset.dbkey = params.get('dbkey', None)
        uploaded_dataset.to_posix_lines = params.get('to_posix_lines', None)
        uploaded_dataset.space_to_tab = params.get('space_to_tab', None)
        uploaded_dataset.tag_using_filenames = params.get('tag_using_filenames', True)
        if in_folder:
            uploaded_dataset.in_folder = in_folder
        uploaded_dataset.data = upload_common.new_upload(trans, 'api', uploaded_dataset, library_bunch)
        uploaded_dataset.link_data_only = link_data_only
        uploaded_dataset.uuid = uuid_str
        if link_data_only == 'link_to_files':
            uploaded_dataset.data.file_name = os.path.abspath(path)
            # Since we are not copying the file into Galaxy's managed
            # default file location, the dataset should never be purgable.
            uploaded_dataset.data.dataset.purgable = False
        trans.sa_session.add_all((uploaded_dataset.data, uploaded_dataset.data.dataset))
        trans.sa_session.flush()
        return uploaded_dataset

    def _create_folder(self, trans, parent_id, library_id, **kwd):
        is_admin = trans.user_is_admin()
        current_user_roles = trans.get_current_user_roles()
        try:
            parent_folder = trans.sa_session.query(trans.app.model.LibraryFolder).get(trans.security.decode_id(parent_id))
        except Exception:
            parent_folder = None
        # Check the library which actually contains the user-supplied parent folder, not the user-supplied
        # library, which could be anything.
        self._check_access(trans, is_admin, parent_folder, current_user_roles)
        self._check_add(trans, is_admin, parent_folder, current_user_roles)
        new_folder = trans.app.model.LibraryFolder(name=kwd.get('name', ''),
                                                   description=kwd.get('description', ''))
        # We are associating the last used genome build with folders, so we will always
        # initialize a new folder with the first dbkey in genome builds list which is currently
        # ? unspecified (?)
        new_folder.genome_build = trans.app.genome_builds.default_value
        parent_folder.add_folder(new_folder)
        trans.sa_session.add(new_folder)
        trans.sa_session.flush()
        # New folders default to having the same permissions as their parent folder
        trans.app.security_agent.copy_library_permissions(trans, parent_folder, new_folder)
        return 200, dict(created=new_folder)

    def _check_access(self, trans, is_admin, item, current_user_roles):
        can_access = True
        if isinstance(item, trans.model.HistoryDatasetAssociation):
            # Make sure the user has the DATASET_ACCESS permission on the history_dataset_association.
            if not item:
                message = "Invalid history dataset (%s) specified." % escape(str(item))
                can_access = False
            elif not trans.app.security_agent.can_access_dataset(current_user_roles, item.dataset) and item.history.user == trans.user:
                message = "You do not have permission to access the history dataset with id (%s)." % str(item.id)
                can_access = False
        else:
            # Make sure the user has the LIBRARY_ACCESS permission on the library item.
            if not item:
                message = "Invalid library item (%s) specified." % escape(str(item))
                can_access = False
            elif not (is_admin or trans.app.security_agent.can_access_library_item(current_user_roles, item, trans.user)):
                if isinstance(item, trans.model.Library):
                    item_type = 'data library'
                elif isinstance(item, trans.model.LibraryFolder):
                    item_type = 'folder'
                else:
                    item_type = '(unknown item type)'
                message = "You do not have permission to access the %s with id (%s)." % (escape(item_type), str(item.id))
                can_access = False
        if not can_access:
            return 400, message

    def _check_add(self, trans, is_admin, item, current_user_roles):
        # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission.
        if not (is_admin or trans.app.security_agent.can_add_library_item(current_user_roles, item)):
            message = "You are not authorized to add an item to (%s)." % escape(item.name)
            return 403, message
6 changes: 1 addition & 5 deletions lib/galaxy/config.py
@@ -403,9 +403,6 @@ def __init__(self, **kwargs):
         self.user_library_import_dir = kwargs.get('user_library_import_dir', None)
         self.user_library_import_symlink_whitelist = listify(kwargs.get('user_library_import_symlink_whitelist', []), do_strip=True)
         # Searching data libraries
-        self.enable_lucene_library_search = string_as_bool(kwargs.get('enable_lucene_library_search', False))
-        self.enable_whoosh_library_search = string_as_bool(kwargs.get('enable_whoosh_library_search', False))
-        self.whoosh_index_dir = resolve_path(kwargs.get("whoosh_index_dir", "database/whoosh_indexes"), self.root)
         self.ftp_upload_dir = kwargs.get('ftp_upload_dir', None)
         self.ftp_upload_dir_identifier = kwargs.get('ftp_upload_dir_identifier', 'email')  # attribute on user - email, username, id, etc...
         self.ftp_upload_dir_template = kwargs.get('ftp_upload_dir_template', '${ftp_upload_dir}%s${ftp_upload_dir_identifier}' % os.path.sep)

@@ -759,8 +756,7 @@ def check(self):
         # Create the directories that it makes sense to create
         for path in (self.new_file_path, self.template_cache, self.ftp_upload_dir,
                      self.library_import_dir, self.user_library_import_dir,
-                     self.nginx_upload_store, self.whoosh_index_dir,
-                     self.object_store_cache_path):
+                     self.nginx_upload_store, self.object_store_cache_path):
             self._ensure_directory(path)
         # Check that required files exist
         tool_configs = self.tool_configs
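
`_ensure_directory` itself is outside this hunk; its role is simply to create each configured directory when one is set and missing. A sketch of that assumed behavior, for orientation only (not the actual implementation):

    import os

    def ensure_directory(path):
        # Skip unset (None or empty) paths; create missing directories otherwise,
        # as the loop over configured paths above expects.
        if path and not os.path.exists(path):
            os.makedirs(path)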
4 changes: 2 additions & 2 deletions lib/galaxy/datatypes/converters/bgzip.py
@@ -9,7 +9,7 @@
 import subprocess
 import tempfile

-from pysam import ctabix
+import pysam


 def main():

@@ -44,7 +44,7 @@ def main():
     grepped.stdout.close()
     output, err = after_sort.communicate()

-    ctabix.tabix_compress(tmpfile.name, output_fname, force=True)
+    pysam.tabix_compress(tmpfile.name, output_fname, force=True)


 if __name__ == "__main__":
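
The change moves off the internal `pysam.ctabix` module to pysam's module-level functions, which are the documented surface. For reference, a minimal standalone use of that API (file names are illustrative):

    import pysam

    # bgzip-compress a sorted, tab-delimited file, then build a tabix index for it.
    pysam.tabix_compress("regions.bed", "regions.bed.gz", force=True)
    pysam.tabix_index("regions.bed.gz", preset="bed", force=True)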
