From e4518ff3e3d628f2cd47ea48217da6352ef11e30 Mon Sep 17 00:00:00 2001 From: E Rasche Date: Wed, 13 Sep 2017 06:03:55 +0000 Subject: [PATCH 01/12] Rebase hardening commits from dev --- cron/build_chrom_db.py | 6 ++--- cron/parse_builds.py | 5 ++-- cron/parse_builds_3_sites.py | 6 ++--- lib/galaxy/datatypes/binary.py | 13 ++++------ .../converters/interval_to_coverage.py | 5 ++-- .../converters/lped_to_pbed_converter.py | 6 ++--- .../converters/pbed_ldreduced_converter.py | 3 +-- .../converters/pbed_to_lped_converter.py | 4 +--- lib/galaxy/datatypes/sequence.py | 4 ++-- lib/galaxy/datatypes/tabular.py | 13 ++++------ lib/galaxy/datatypes/text.py | 11 +++++---- lib/galaxy/external_services/actions.py | 4 ++-- .../pacific_biosciences_smrt_portal.py | 8 ++++--- lib/galaxy/jobs/runners/pulsar.py | 6 ++--- .../jobs/runners/util/job_script/__init__.py | 5 ++-- lib/galaxy/jobs/runners/util/kill.py | 5 ++-- lib/galaxy/jobs/transfer_manager.py | 12 ++++++---- lib/galaxy/managers/citations.py | 8 +++---- lib/galaxy/objectstore/s3.py | 9 ++++--- lib/galaxy/tools/data/__init__.py | 5 ++-- .../tools/imp_exp/unpack_tar_gz_archive.py | 18 +++++++------- lib/galaxy/tools/toolbox/parser.py | 2 +- lib/galaxy/tours/__init__.py | 2 +- lib/galaxy/util/plugin_config.py | 2 +- .../web/base/interactive_environments.py | 2 +- lib/galaxy/web/proxy/__init__.py | 15 +++++------- .../webapps/galaxy/controllers/async.py | 5 ++-- .../galaxy/controllers/library_common.py | 14 +++++------ lib/galaxy/webapps/galaxy/controllers/root.py | 6 ++--- .../galaxy/controllers/visualization.py | 2 +- .../webapps/galaxy/controllers/workflow.py | 4 ++-- .../webapps/reports/controllers/system.py | 9 ++++--- .../webapps/tool_shed/controllers/upload.py | 13 +++++----- lib/tool_shed/capsule/capsule_manager.py | 23 ++++++++---------- scripts/data_libraries/build_lucene_index.py | 7 +++--- scripts/edam_mapping.py | 5 ++-- scripts/microbes/harvest_bacteria.py | 6 ++--- scripts/tool_shed/api/export.py | 24 +++++-------------- scripts/transfer.py | 1 + test/api/test_workflows.py | 2 +- test/api/test_workflows_from_yaml.py | 6 ++--- test/api/workflows_format_2/converter.py | 4 ++-- test/api/workflows_format_2/main.py | 2 +- test/unit/workflows/workflow_support.py | 2 +- 44 files changed, 144 insertions(+), 170 deletions(-) diff --git a/cron/build_chrom_db.py b/cron/build_chrom_db.py index 4cb7f34c8957..91183c79d92e 100644 --- a/cron/build_chrom_db.py +++ b/cron/build_chrom_db.py @@ -17,8 +17,8 @@ import os import sys +import requests from six.moves.urllib.parse import urlencode -from six.moves.urllib.request import urlopen import parse_builds @@ -36,8 +36,8 @@ def getchrominfo(url, db): "hgta_regionType": "", "position": "", "hgta_doTopSubmit": "get info"}) - page = urlopen(URL) - for line in page: + page = requests.get(URL).text + for line in page.split('\n'): line = line.rstrip( "\r\n" ) if line.startswith("#"): continue diff --git a/cron/parse_builds.py b/cron/parse_builds.py index 06566e55cb06..5d93a0281c70 100644 --- a/cron/parse_builds.py +++ b/cron/parse_builds.py @@ -9,18 +9,17 @@ import sys import xml.etree.ElementTree as ElementTree -from six.moves.urllib.request import urlopen +import requests def getbuilds(url): try: - page = urlopen(url) + text = requests.get(url).text except: print("#Unable to open " + url) print("?\tunspecified (?)") sys.exit(1) - text = page.read() try: tree = ElementTree.fromstring(text) except: diff --git a/cron/parse_builds_3_sites.py b/cron/parse_builds_3_sites.py index 2a9e41533c37..1968af268086 100644 --- 
a/cron/parse_builds_3_sites.py +++ b/cron/parse_builds_3_sites.py @@ -6,7 +6,7 @@ import xml.etree.ElementTree as ElementTree -from six.moves.urllib.request import urlopen +import requests sites = ['http://genome.ucsc.edu/cgi-bin/', 'http://archaea.ucsc.edu/cgi-bin/', @@ -20,11 +20,11 @@ def main(): trackurl = sites[i] + "hgTracks?" builds = [] try: - page = urlopen(site) + text = requests.get(site).text except: print("#Unable to connect to " + site) continue - text = page.read() + try: tree = ElementTree.fromstring(text) except: diff --git a/lib/galaxy/datatypes/binary.py b/lib/galaxy/datatypes/binary.py index 9445c7875d7d..d09c6d17f94a 100644 --- a/lib/galaxy/datatypes/binary.py +++ b/lib/galaxy/datatypes/binary.py @@ -241,10 +241,8 @@ def merge(split_files, output_file): def _is_coordinate_sorted( self, file_name ): """See if the input BAM file is sorted from the header information.""" - params = [ "samtools", "view", "-H", file_name ] - output = subprocess.Popen( params, stderr=subprocess.PIPE, stdout=subprocess.PIPE ).communicate()[0] - # find returns -1 if string is not found - return output.find( "SO:coordinate" ) != -1 or output.find( "SO:sorted" ) != -1 + output = subprocess.check_output(["samtools", "view", "-H", file_name]) + return 'SO:coordinate' in output or 'SO:sorted' in output def dataset_content_needs_grooming( self, file_name ): """See if file_name is a sorted BAM file""" @@ -269,8 +267,7 @@ def dataset_content_needs_grooming( self, file_name ): return False index_name = tempfile.NamedTemporaryFile( prefix="bam_index" ).name stderr_name = tempfile.NamedTemporaryFile( prefix="bam_index_stderr" ).name - command = 'samtools index %s %s' % ( file_name, index_name ) - proc = subprocess.Popen( args=command, shell=True, stderr=open( stderr_name, 'wb' ) ) + proc = subprocess.Popen(['samtools', 'index', file_name, index_name], stderr=open(stderr_name, 'wb')) proc.wait() stderr = open( stderr_name ).read().strip() if stderr: @@ -313,8 +310,8 @@ def groom_dataset_content( self, file_name ): tmp_sorted_dataset_file_name_prefix = os.path.join( tmp_dir, 'sorted' ) stderr_name = tempfile.NamedTemporaryFile( dir=tmp_dir, prefix="bam_sort_stderr" ).name samtools_created_sorted_file_name = "%s.bam" % tmp_sorted_dataset_file_name_prefix # samtools accepts a prefix, not a filename, it always adds .bam to the prefix - command = "samtools sort %s %s" % ( file_name, tmp_sorted_dataset_file_name_prefix ) - proc = subprocess.Popen( args=command, shell=True, cwd=tmp_dir, stderr=open( stderr_name, 'wb' ) ) + proc = subprocess.Popen(['samtools', 'sort', file_name, tmp_sorted_dataset_file_name_prefix], + cwd=tmp_dir, stderr=open(stderr_name, 'wb')) exit_code = proc.wait() # Did sort succeed? 
stderr = open( stderr_name ).read().strip() diff --git a/lib/galaxy/datatypes/converters/interval_to_coverage.py b/lib/galaxy/datatypes/converters/interval_to_coverage.py index 47882f7be6a4..631473c7c332 100644 --- a/lib/galaxy/datatypes/converters/interval_to_coverage.py +++ b/lib/galaxy/datatypes/converters/interval_to_coverage.py @@ -132,8 +132,9 @@ def close(self): # Sort through a tempfile first temp_file = tempfile.NamedTemporaryFile(mode="r") environ['LC_ALL'] = 'POSIX' - commandline = "sort -f -n -k %d -k %d -k %d -o %s %s" % (chr_col_1 + 1, start_col_1 + 1, end_col_1 + 1, temp_file.name, in_fname) - subprocess.check_call(commandline, shell=True) + subprocess.check_call([ + 'sort', '-f', '-n', '-k', chr_col_1 + 1, '-k', start_col_1 + 1, '-k', end_col_1 + 1, '-o', temp_file.name, in_fname + ]) coverage = CoverageWriter( out_stream=open(out_fname, "a"), chromCol=chr_col_2, positionCol=position_col_2, diff --git a/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py b/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py index fc7ef10adbc3..6548aad1b8c6 100644 --- a/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py +++ b/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py @@ -72,9 +72,9 @@ def rgConv(inpedfilepath, outhtmlname, outfilepath, plink): if not missval: print('### lped_to_pbed_converter.py cannot identify missing value in %s' % pedf) missval = '0' - cl = '%s --noweb --file %s --make-bed --out %s --missing-genotype %s' % (plink, inpedfilepath, outroot, missval) - p = subprocess.Popen(cl, shell=True, cwd=outfilepath) - p.wait() # run plink + subprocess.check_call([plink, '--noweb', '--file', inpedfilepath, + '--make-bed', '--out', outroot, + '--missing-genotype', missval], cwd=outfilepath) def main(): diff --git a/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py b/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py index 02a6541fe428..9a913b877be6 100644 --- a/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py +++ b/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py @@ -41,8 +41,7 @@ def pruneLD(plinktasks=[], cd='./', vclbase=[]): for task in plinktasks: # each is a list vcl = vclbase + task with open(plog, 'w') as sto: - x = subprocess.Popen(' '.join(vcl), shell=True, stdout=sto, stderr=sto, cwd=cd) - x.wait() + subprocess.check_call(vcl, stdout=sto, stderr=sto, cwd=cd) try: lplog = open(plog, 'r').readlines() lplog = [elem for elem in lplog if elem.find('Pruning SNP') == -1] diff --git a/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py b/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py index ed45a204db1f..dc4bec51cb32 100644 --- a/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py +++ b/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py @@ -40,9 +40,7 @@ def rgConv(inpedfilepath, outhtmlname, outfilepath, plink): """ basename = os.path.split(inpedfilepath)[-1] # get basename outroot = os.path.join(outfilepath, basename) - cl = '%s --noweb --bfile %s --recode --out %s ' % (plink, inpedfilepath, outroot) - p = subprocess.Popen(cl, shell=True, cwd=outfilepath) - p.wait() # run plink + subprocess.check_call([plink, '--noweb', '--bfile', inpedfilepath, '--recode', '--out', outroot], cwd=outfilepath) def main(): diff --git a/lib/galaxy/datatypes/sequence.py b/lib/galaxy/datatypes/sequence.py index 1b4619d731e9..ca856975f1d4 100644 --- a/lib/galaxy/datatypes/sequence.py +++ b/lib/galaxy/datatypes/sequence.py @@ -8,6 +8,7 @@ import os import re import string +import subprocess from cgi 
import escape from six import PY3 @@ -662,8 +663,7 @@ def process_split_file(data): else: commands = Sequence.get_split_commands_sequential(is_gzip(input_name), input_name, output_name, start_sequence, sequence_count) for cmd in commands: - if 0 != os.system(cmd): - raise Exception("Executing '%s' failed" % cmd) + subprocess.check_call(cmd, shell=True) return True process_split_file = staticmethod(process_split_file) diff --git a/lib/galaxy/datatypes/tabular.py b/lib/galaxy/datatypes/tabular.py index ed08e8aac767..16990df6c9b2 100644 --- a/lib/galaxy/datatypes/tabular.py +++ b/lib/galaxy/datatypes/tabular.py @@ -515,15 +515,12 @@ def merge( split_files, output_file): Multiple SAM files may each have headers. Since the headers should all be the same, remove the headers from files 1-n, keeping them in the first file only """ - cmd = 'mv %s %s' % ( split_files[0], output_file ) - result = os.system(cmd) - if result != 0: - raise Exception('Result %s from %s' % (result, cmd)) + shutil.move(split_files[0], output_file) + if len(split_files) > 1: - cmd = 'egrep -v -h "^@" %s >> %s' % ( ' '.join(split_files[1:]), output_file ) - result = os.system(cmd) - if result != 0: - raise Exception('Result %s from %s' % (result, cmd)) + cmd = ['egrep', '-v', '-h', '^@'] + split_files[1:] + ['>>', output_file] + subprocess.check_call(cmd, shell=True) + merge = staticmethod(merge) # Dataproviders diff --git a/lib/galaxy/datatypes/text.py b/lib/galaxy/datatypes/text.py index a1dc9388d65c..0be154d9d53b 100644 --- a/lib/galaxy/datatypes/text.py +++ b/lib/galaxy/datatypes/text.py @@ -10,6 +10,8 @@ import subprocess import tempfile +from six.moves import shlex_quote + from galaxy.datatypes.data import get_file_peek, Text from galaxy.datatypes.metadata import MetadataElement, MetadataParameter from galaxy.datatypes.sniff import get_headers @@ -144,13 +146,12 @@ def _display_data_trusted(self, trans, dataset, preview=False, filename=None, to ofilename = ofile_handle.name ofile_handle.close() try: - cmd = 'ipython nbconvert --to html --template full %s --output %s' % (dataset.file_name, ofilename) - log.info("Calling command %s" % cmd) - subprocess.call(cmd, shell=True) + cmd = ['ipython', 'nbconvert', '--to', 'html', '--template', 'full', dataset.file_name, '--output', ofilename] + subprocess.check_call(cmd) ofilename = '%s.html' % ofilename - except: + except subprocess.CalledProcessError: ofilename = dataset.file_name - log.exception( 'Command "%s" failed. Could not convert the IPython Notebook to HTML, defaulting to plain text.' % cmd ) + log.exception('Command "%s" failed. 
Could not convert the IPython Notebook to HTML, defaulting to plain text.', ' '.join(map(shlex_quote, cmd))) return open( ofilename ) def set_meta( self, dataset, **kwd ): diff --git a/lib/galaxy/external_services/actions.py b/lib/galaxy/external_services/actions.py index c62dadf3e34d..239c961ed26b 100644 --- a/lib/galaxy/external_services/actions.py +++ b/lib/galaxy/external_services/actions.py @@ -1,6 +1,6 @@ # Contains actions that are used in External Services import logging -from urllib import urlopen +import requests from galaxy.web import url_for from galaxy.util.template import fill_template from result_handlers.basic import ExternalServiceActionResultHandler @@ -104,7 +104,7 @@ def __init__( self, name, param_dict, url, method, target ): # display_handler @property def content( self ): if self._content is None: - self._content = urlopen( self.url ).read() + self._content = requests.get(self.url).text return self._content diff --git a/lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py b/lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py index 3026194f1b83..099227388192 100644 --- a/lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py +++ b/lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py @@ -2,13 +2,15 @@ Module for managing jobs in Pacific Bioscience's SMRT Portal and automatically transferring files produced by SMRT Portal. """ -import json import logging import urllib2 from string import Template +import requests + from data_transfer import DataTransfer + log = logging.getLogger( __name__ ) __all__ = [ 'SMRTPortalPlugin' ] @@ -87,8 +89,8 @@ def check_job( self, job ): if self._missing_params( job.params, [ 'smrt_host', 'smrt_job_id' ] ): return self.job_states.INVALID url = 'http://' + job.params[ 'smrt_host' ] + self.api_path + '/Jobs/' + job.params[ 'smrt_job_id' ] + '/Status' - r = urllib2.urlopen( url ) - status = json.loads( r.read() ) + r = requests.get(url) + status = r.json() # TODO: error handling: unexpected json or bad response, bad url, etc. if status[ 'Code' ] == 'Completed': log.debug( "SMRT Portal job '%s' is Completed. Initiating transfer." 
% job.params[ 'smrt_job_id' ] ) diff --git a/lib/galaxy/jobs/runners/pulsar.py b/lib/galaxy/jobs/runners/pulsar.py index 94f58b6bc2c5..33b78dff5c14 100644 --- a/lib/galaxy/jobs/runners/pulsar.py +++ b/lib/galaxy/jobs/runners/pulsar.py @@ -8,6 +8,7 @@ import errno import logging import os +import subprocess from time import sleep from pulsar.client import build_client_manager @@ -210,7 +211,7 @@ def __init_pulsar_app( self, pulsar_conf_path ): else: log.info("Loading Pulsar app configuration from %s" % pulsar_conf_path) with open(pulsar_conf_path, "r") as f: - conf.update(yaml.load(f) or {}) + conf.update(yaml.safe_load(f) or {}) if "job_metrics_config_file" not in conf: conf["job_metrics"] = self.app.job_metrics if "staging_directory" not in conf: @@ -375,8 +376,7 @@ def __prepare_input_files_locally(self, job_wrapper): prepare_input_files_cmds = getattr(job_wrapper, 'prepare_input_files_cmds', None) if prepare_input_files_cmds is not None: for cmd in prepare_input_files_cmds: # run the commands to stage the input files - if 0 != os.system(cmd): - raise Exception('Error running file staging command: %s' % cmd) + subprocess.check_call(cmd, shell=True) job_wrapper.prepare_input_files_cmds = None # prevent them from being used in-line def _populate_parameter_defaults( self, job_destination ): diff --git a/lib/galaxy/jobs/runners/util/job_script/__init__.py b/lib/galaxy/jobs/runners/util/job_script/__init__.py index a08857c24626..46a0c2354a16 100644 --- a/lib/galaxy/jobs/runners/util/job_script/__init__.py +++ b/lib/galaxy/jobs/runners/util/job_script/__init__.py @@ -117,9 +117,8 @@ def _handle_script_integrity(path, config): sleep_amt = getattr(config, "check_job_script_integrity_sleep", DEFAULT_INTEGRITY_SLEEP) for i in range(count): try: - proc = subprocess.Popen([path], shell=True, env={"ABC_TEST_JOB_SCRIPT_INTEGRITY_XYZ": "1"}) - proc.wait() - if proc.returncode == 42: + returncode = subprocess.call([path], env={"ABC_TEST_JOB_SCRIPT_INTEGRITY_XYZ": "1"}) + if returncode == 42: script_integrity_verified = True break diff --git a/lib/galaxy/jobs/runners/util/kill.py b/lib/galaxy/jobs/runners/util/kill.py index 52022466552c..e12343c05362 100644 --- a/lib/galaxy/jobs/runners/util/kill.py +++ b/lib/galaxy/jobs/runners/util/kill.py @@ -1,4 +1,5 @@ import os +import subprocess from platform import system from time import sleep from subprocess import Popen @@ -41,8 +42,8 @@ def _stock_kill_pid(pid): def __kill_windows(pid): try: - Popen("taskkill /F /T /PID %i" % pid, shell=True) - except Exception: + subprocess.check_call(['taskkill', '/F', '/T', '/PID', pid]) + except subprocess.CalledProcessError: pass diff --git a/lib/galaxy/jobs/transfer_manager.py b/lib/galaxy/jobs/transfer_manager.py index ccd5c88e67fb..2d2eb162af86 100644 --- a/lib/galaxy/jobs/transfer_manager.py +++ b/lib/galaxy/jobs/transfer_manager.py @@ -9,6 +9,8 @@ import subprocess import threading +from six.moves import shlex_quote + from galaxy.util import listify, sleeper from galaxy.util.json import jsonrpc_request, validate_jsonrpc_response @@ -22,8 +24,8 @@ class TransferManager( object ): def __init__( self, app ): self.app = app self.sa_session = app.model.context.current - self.command = 'python %s' % os.path.abspath( os.path.join( os.getcwd(), 'scripts', 'transfer.py' ) ) - if app.config.get_bool( 'enable_job_recovery', True ): + self.command = ['python', os.path.abspath(os.path.join(os.getcwd(), 'scripts', 'transfer.py'))] + if app.config.get_bool('enable_job_recovery', True): # Only one Galaxy server process should be 
able to recover jobs! (otherwise you'll have nasty race conditions) self.running = True self.sleeper = sleeper.Sleeper() @@ -67,9 +69,9 @@ def run( self, transfer_jobs ): # The transfer script should daemonize fairly quickly - if this is # not the case, this process will need to be moved to a # non-blocking method. - cmd = '%s %s' % ( self.command, tj.id ) - log.debug( 'Transfer command is: %s' % cmd ) - p = subprocess.Popen( cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + cmd = self.command + [tj.id] + log.debug('Transfer command is: %s', ' '.join(map(shlex_quote, cmd))) + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) p.wait() output = p.stdout.read( 32768 ) if p.returncode != 0: diff --git a/lib/galaxy/managers/citations.py b/lib/galaxy/managers/citations.py index ef83443d67aa..9e0613648483 100644 --- a/lib/galaxy/managers/citations.py +++ b/lib/galaxy/managers/citations.py @@ -1,6 +1,6 @@ import functools import os -import urllib2 +import requests from beaker.cache import CacheManager from beaker.util import parse_cache_config_options @@ -47,10 +47,8 @@ def __init__( self, config ): def _raw_get_bibtex( self, doi ): dx_url = "http://dx.doi.org/" + doi headers = {'Accept': 'text/bibliography; style=bibtex, application/x-bibtex'} - req = urllib2.Request(dx_url, data="", headers=headers) - response = urllib2.urlopen(req) - bibtex = response.read() - return bibtex + req = requests.get(dx_url, headers=headers) + return req.text def get_bibtex( self, doi ): createfunc = functools.partial(self._raw_get_bibtex, doi) diff --git a/lib/galaxy/objectstore/s3.py b/lib/galaxy/objectstore/s3.py index 9e94c69a608f..b06309786f8b 100644 --- a/lib/galaxy/objectstore/s3.py +++ b/lib/galaxy/objectstore/s3.py @@ -13,7 +13,7 @@ from datetime import datetime from galaxy.exceptions import ObjectNotFound, ObjectInvalid -from galaxy.util import string_as_bool, umask_fix_perms, safe_relpath, directory_hash_id +from galaxy.util import string_as_bool, umask_fix_perms, safe_relpath, directory_hash_id, which from galaxy.util.sleeper import Sleeper from .s3_multipart_upload import multipart_upload from ..objectstore import ObjectStore, convert_bytes @@ -59,10 +59,9 @@ def __init__(self, config, config_xml): self.cache_monitor_thread.start() log.info("Cache cleaner manager started") # Test if 'axel' is available for parallel download and pull the key into cache - try: - subprocess.call('axel') + if which('axel'): self.use_axel = True - except OSError: + else: self.use_axel = False def _configure_connection(self): @@ -333,7 +332,7 @@ def _download(self, rel_path): log.debug("Parallel pulled key '%s' into cache to %s", rel_path, self._get_cache_path(rel_path)) ncores = multiprocessing.cpu_count() url = key.generate_url(7200) - ret_code = subprocess.call("axel -a -n %s '%s'" % (ncores, url)) + ret_code = subprocess.call(['axel', '-a', '-n', ncores, url]) if ret_code == 0: return True else: diff --git a/lib/galaxy/tools/data/__init__.py b/lib/galaxy/tools/data/__init__.py index cd287a84a328..441a6ea75ca6 100644 --- a/lib/galaxy/tools/data/__init__.py +++ b/lib/galaxy/tools/data/__init__.py @@ -15,7 +15,8 @@ from glob import glob from tempfile import NamedTemporaryFile -from urllib2 import urlopen + +import requests from galaxy import util from galaxy.util.odict import odict @@ -290,7 +291,7 @@ def configure_and_load( self, config_element, tool_data_path, from_shed_config=F if filename: tmp_file = NamedTemporaryFile( prefix='TTDT_URL_%s-' % self.name ) try: - 
tmp_file.write( urlopen( filename, timeout=url_timeout ).read() ) + tmp_file.write(requests.get(filename, timeout=url_timeout).text) except Exception as e: log.error( 'Error loading Data Table URL "%s": %s', filename, e ) continue diff --git a/lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py b/lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py index 932779c0e438..f1318644ed88 100644 --- a/lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py +++ b/lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py @@ -15,6 +15,8 @@ import math from base64 import b64decode +import requests + # Set max size of archive/file that will be handled to be 100 GB. This is # arbitrary and should be adjusted as needed. MAX_SIZE = 100 * math.pow( 2, 30 ) @@ -25,18 +27,16 @@ def url_to_file( url, dest_file ): Transfer a file from a remote URL to a temporary file. """ try: - url_reader = urllib2.urlopen( url ) + url_reader = requests.get(url, stream=True) CHUNK = 10 * 1024 # 10k total = 0 fp = open( dest_file, 'wb') - while True: - chunk = url_reader.read( CHUNK ) - if not chunk: - break - fp.write( chunk ) - total += CHUNK - if total > MAX_SIZE: - break + for chunk in url_reader.iter_content(chunk_size=CHUNK): + if chunk: + fp.write(chunk) + total += CHUNK + if total > MAX_SIZE: + break fp.close() return dest_file except Exception as e: diff --git a/lib/galaxy/tools/toolbox/parser.py b/lib/galaxy/tools/toolbox/parser.py index eafe4958fcd6..4d15f02f409f 100644 --- a/lib/galaxy/tools/toolbox/parser.py +++ b/lib/galaxy/tools/toolbox/parser.py @@ -59,7 +59,7 @@ class YamlToolConfSource(ToolConfSource): def __init__(self, config_filename): with open(config_filename, "r") as f: - as_dict = yaml.load(f) + as_dict = yaml.safe_load(f) self.as_dict = as_dict def parse_tool_path(self): diff --git a/lib/galaxy/tours/__init__.py b/lib/galaxy/tours/__init__.py index c497f2de284d..c2f517fa5195 100644 --- a/lib/galaxy/tours/__init__.py +++ b/lib/galaxy/tours/__init__.py @@ -68,7 +68,7 @@ def _load_tour_from_path(self, tour_path): tour_id = os.path.splitext(filename)[0] try: with open(tour_path) as handle: - conf = yaml.load(handle) + conf = yaml.safe_load(handle) tour = tour_loader(conf) self.tours[tour_id] = tour_loader(conf) log.info("Loaded tour '%s'" % tour_id) diff --git a/lib/galaxy/util/plugin_config.py b/lib/galaxy/util/plugin_config.py index c03633692cc9..8095d279fe9c 100644 --- a/lib/galaxy/util/plugin_config.py +++ b/lib/galaxy/util/plugin_config.py @@ -76,4 +76,4 @@ def __read_yaml(path): raise ImportError("Attempting to read YAML configuration file - but PyYAML dependency unavailable.") with open(path, "rb") as f: - return yaml.load(f) + return yaml.safe_load(f) diff --git a/lib/galaxy/web/base/interactive_environments.py b/lib/galaxy/web/base/interactive_environments.py index f9fdcf6fba3b..d6b09e45daea 100644 --- a/lib/galaxy/web/base/interactive_environments.py +++ b/lib/galaxy/web/base/interactive_environments.py @@ -93,7 +93,7 @@ def load_allowed_images(self): raise Exception("[{0}] Could not find allowed_images.yml, or image tag in {0}.ini file for ".format(self.attr.viz_id)) with open(fn, 'r') as handle: - self.allowed_images = [x['image'] for x in yaml.load(handle)] + self.allowed_images = [x['image'] for x in yaml.safe_load(handle)] if len(self.allowed_images) == 0: raise Exception("No allowed images specified for " + self.attr.viz_id) diff --git a/lib/galaxy/web/proxy/__init__.py b/lib/galaxy/web/proxy/__init__.py index d46f68c6968b..85711dd75d2d 100644 --- a/lib/galaxy/web/proxy/__init__.py +++ 
b/lib/galaxy/web/proxy/__init__.py @@ -7,9 +7,10 @@ from galaxy.util.lazy_process import LazyProcess, NoOpLazyProcess from galaxy.util import sqlite from galaxy.util import unique_id -import urllib2 import time +import requests + log = logging.getLogger( __name__ ) @@ -243,20 +244,16 @@ def handle_requests(self, authentication, proxy_requests, route_name, container_ 'ContainerIds': container_ids, } - req = urllib2.Request(self.api_url) - req.add_header('Content-Type', 'application/json') - # Sometimes it takes our poor little proxy a second or two to get # going, so if this fails, re-call ourselves with an increased timeout. try: - urllib2.urlopen(req, json.dumps(values)) - except urllib2.URLError as err: - log.debug(err) + requests.get(self.api_url, headers={'Content-Type': 'application/json'}, data=json.dumps(values)) + except requests.exceptions.ConnectionError as err: + log.exception(err) if sleep > 5: excp = "Could not contact proxy after %s seconds" % sum(range(sleep + 1)) raise Exception(excp) time.sleep(sleep) - self.handle_requests(authentication, proxy_requests, route_name, container_ids, sleep=sleep + 1) - pass + self.handle_requests(authentication, proxy_requests, route_name, container_ids, container_interface, sleep=sleep + 1) # TODO: MQ diven proxy? diff --git a/lib/galaxy/webapps/galaxy/controllers/async.py b/lib/galaxy/webapps/galaxy/controllers/async.py index c0dca96de903..708e1f3dfeb0 100644 --- a/lib/galaxy/webapps/galaxy/controllers/async.py +++ b/lib/galaxy/webapps/galaxy/controllers/async.py @@ -5,6 +5,8 @@ import logging import urllib +import requests + from galaxy import jobs, web from galaxy.util import Params from galaxy.util.hash_util import hmac_new @@ -131,8 +133,7 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd): url = "%s%s%s" % ( url, url_join_char, urllib.urlencode( params.flatten() ) ) log.debug("connecting to -> %s" % url) trans.log_event( "Async connecting to -> %s" % url ) - text = urllib.urlopen(url).read(-1) - text = text.strip() + text = requests.get(url).text.strip() if not text.endswith('OK'): raise Exception( text ) data.state = data.blurb = data.states.RUNNING diff --git a/lib/galaxy/webapps/galaxy/controllers/library_common.py b/lib/galaxy/webapps/galaxy/controllers/library_common.py index 0a79ee4f60ce..02ee7428aec9 100644 --- a/lib/galaxy/webapps/galaxy/controllers/library_common.py +++ b/lib/galaxy/webapps/galaxy/controllers/library_common.py @@ -1,4 +1,5 @@ import glob +import json import logging import operator import os @@ -8,10 +9,9 @@ import tarfile import tempfile import urllib -import urllib2 import zipfile -from json import dumps, loads +import requests from markupsafe import escape from sqlalchemy import and_, false from sqlalchemy.orm import eagerload_all @@ -554,7 +554,7 @@ def __ok_to_edit_metadata( ldda_id ): if len(em_string): payload = None try: - payload = loads(em_string) + payload = json.loads(em_string) except Exception: message = 'Invalid JSON input' status = 'error' @@ -1116,8 +1116,8 @@ def upload_dataset( self, trans, cntrller, library_id, folder_id, replace_datase json_file_path = upload_common.create_paramfile( trans, uploaded_datasets ) data_list = [ ud.data for ud in uploaded_datasets ] job_params = {} - job_params['link_data_only'] = dumps( kwd.get( 'link_data_only', 'copy_files' ) ) - job_params['uuid'] = dumps( kwd.get( 'uuid', None ) ) + job_params['link_data_only'] = json.dumps(kwd.get('link_data_only', 'copy_files')) + job_params['uuid'] = json.dumps(kwd.get('uuid', None)) job, output = 
upload_common.create_job( trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params ) trans.sa_session.add( job ) trans.sa_session.flush() @@ -2750,9 +2750,7 @@ def lucene_search( trans, cntrller, search_term, search_url, **kwd ): message = escape( kwd.get( 'message', '' ) ) status = kwd.get( 'status', 'done' ) full_url = "%s/find?%s" % ( search_url, urllib.urlencode( { "kwd" : search_term } ) ) - response = urllib2.urlopen( full_url ) - ldda_ids = loads( response.read() )[ "ids" ] - response.close() + ldda_ids = requests.get(full_url).json()['ids'] lddas = [ trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id ) for ldda_id in ldda_ids ] return status, message, get_sorted_accessible_library_items( trans, cntrller, lddas, 'name' ) diff --git a/lib/galaxy/webapps/galaxy/controllers/root.py b/lib/galaxy/webapps/galaxy/controllers/root.py index 4f6829d29277..e280a4c067b8 100644 --- a/lib/galaxy/webapps/galaxy/controllers/root.py +++ b/lib/galaxy/webapps/galaxy/controllers/root.py @@ -3,8 +3,8 @@ """ import cgi import os -import urllib +import requests from paste.httpexceptions import HTTPNotFound, HTTPBadGateway from galaxy import web @@ -482,8 +482,8 @@ def welcome( self, trans ): def bucket_proxy( self, trans, bucket=None, **kwd): if bucket: trans.response.set_content_type( 'text/xml' ) - b_list_xml = urllib.urlopen('http://s3.amazonaws.com/%s/' % bucket) - return b_list_xml.read() + b_list_xml = requests.get('http://s3.amazonaws.com/%s/' % bucket) + return b_list_xml.text raise Exception("You must specify a bucket") # ---- Debug methods ---------------------------------------------------- diff --git a/lib/galaxy/webapps/galaxy/controllers/visualization.py b/lib/galaxy/webapps/galaxy/controllers/visualization.py index 3eda0900eed5..10f9880050da 100644 --- a/lib/galaxy/webapps/galaxy/controllers/visualization.py +++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py @@ -1041,7 +1041,7 @@ def gie_list( self, trans, **kwargs ): continue with open( image_file, 'r' ) as handle: - self.gie_image_map[gie] = yaml.load( handle ) + self.gie_image_map[gie] = yaml.safe_load(handle) return trans.fill_template_mako( "visualization/gie.mako", diff --git a/lib/galaxy/webapps/galaxy/controllers/workflow.py b/lib/galaxy/webapps/galaxy/controllers/workflow.py index 63f5399297f9..71f855270d9d 100644 --- a/lib/galaxy/webapps/galaxy/controllers/workflow.py +++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py @@ -4,7 +4,7 @@ import logging import os import sgmllib -import urllib2 +import requests from sqlalchemy import and_ from sqlalchemy.sql import expression @@ -871,7 +871,7 @@ def import_workflow( self, trans, cntrller='workflow', **kwd ): # Load workflow from external URL # NOTE: blocks the web thread. try: - workflow_data = urllib2.urlopen( url ).read() + workflow_data = requests.get(url).text except Exception as e: message = "Failed to open URL: %s
Exception: %s" % ( escape( url ), escape( str( e ) ) ) status = 'error' diff --git a/lib/galaxy/webapps/reports/controllers/system.py b/lib/galaxy/webapps/reports/controllers/system.py index 419701a9a72e..72c614b92379 100644 --- a/lib/galaxy/webapps/reports/controllers/system.py +++ b/lib/galaxy/webapps/reports/controllers/system.py @@ -1,5 +1,6 @@ import logging import os +import subprocess from datetime import datetime, timedelta from decimal import Decimal @@ -148,12 +149,11 @@ def dataset_info( self, trans, **kwd ): message=message ) def get_disk_usage( self, file_path ): - df_cmd = 'df -h ' + file_path is_sym_link = os.path.islink( file_path ) file_system = disk_size = disk_used = disk_avail = disk_cap_pct = mount = None - df_file = os.popen( df_cmd ) - while True: - df_line = df_file.readline() + df_output = subprocess.check_output(['df', '-h', file_path]) + + for df_line in df_output: df_line = df_line.strip() if df_line: df_line = df_line.lower() @@ -176,7 +176,6 @@ def get_disk_usage( self, file_path ): pass else: break # EOF - df_file.close() return ( file_system, disk_size, disk_used, disk_avail, disk_cap_pct, mount ) @web.expose diff --git a/lib/galaxy/webapps/tool_shed/controllers/upload.py b/lib/galaxy/webapps/tool_shed/controllers/upload.py index 1d005c7f7f29..84aa5bafb56c 100644 --- a/lib/galaxy/webapps/tool_shed/controllers/upload.py +++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py @@ -3,7 +3,8 @@ import shutil import tarfile import tempfile -import urllib + +import requests from galaxy import util from galaxy import web @@ -74,7 +75,7 @@ def upload( self, trans, **kwd ): elif url: valid_url = True try: - stream = urllib.urlopen( url ) + stream = requests.get(url, stream=True) except Exception as e: valid_url = False message = 'Error uploading file via http: %s' % str( e ) @@ -83,11 +84,9 @@ def upload( self, trans, **kwd ): if valid_url: fd, uploaded_file_name = tempfile.mkstemp() uploaded_file = open( uploaded_file_name, 'wb' ) - while 1: - chunk = stream.read( util.CHUNK_SIZE ) - if not chunk: - break - uploaded_file.write( chunk ) + for chunk in stream.iter_content(chunk_size=util.CHUNK_SIZE): + if chunk: + uploaded_file.write(chunk) uploaded_file.flush() uploaded_file_filename = url.split( '/' )[ -1 ] isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0 diff --git a/lib/tool_shed/capsule/capsule_manager.py b/lib/tool_shed/capsule/capsule_manager.py index 02b4176281b9..1e92efa84f03 100644 --- a/lib/tool_shed/capsule/capsule_manager.py +++ b/lib/tool_shed/capsule/capsule_manager.py @@ -9,6 +9,7 @@ from time import gmtime from time import strftime +import requests from sqlalchemy import and_, false import tool_shed.repository_types.util as rt_util @@ -820,24 +821,20 @@ def upload_capsule( self, **kwd ): uploaded_file=None, capsule_file_name=None ) if url: - valid_url = True try: - stream = urllib.urlopen( url ) + stream = requests.get(url, stream=True) except Exception as e: - valid_url = False return_dict['error_message'] = 'Error importing file via http: %s' % str( e ) return_dict['status'] = 'error' return return_dict - if valid_url: - fd, uploaded_file_name = tempfile.mkstemp() - uploaded_file = open( uploaded_file_name, 'wb' ) - while 1: - chunk = stream.read( CHUNK_SIZE ) - if not chunk: - break - uploaded_file.write( chunk ) - uploaded_file.flush() - uploaded_file_filename = url.split( '/' )[ -1 ] + + fd, uploaded_file_name = tempfile.mkstemp() + uploaded_file = open( uploaded_file_name, 'wb' ) + for chunk in 
stream.iter_content(chunk_size=CHUNK_SIZE): + if chunk: + uploaded_file.write(chunk) + uploaded_file.flush() + uploaded_file_filename = url.split( '/' )[ -1 ] elif file_data not in ( '', None ): uploaded_file = file_data.file uploaded_file_name = uploaded_file.name diff --git a/scripts/data_libraries/build_lucene_index.py b/scripts/data_libraries/build_lucene_index.py index 1029c0d356ec..4bf02ce9fc8c 100644 --- a/scripts/data_libraries/build_lucene_index.py +++ b/scripts/data_libraries/build_lucene_index.py @@ -14,7 +14,8 @@ import os import sys import urllib -import urllib2 + +import requests sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'lib'))) @@ -39,9 +40,7 @@ def main( ini_file ): def build_index( search_url, dataset_file ): url = "%s/index?%s" % ( search_url, urllib.urlencode( { "docfile": dataset_file } ) ) - request = urllib2.Request( url ) - request.get_method = lambda: "PUT" - urllib2.urlopen( request ) + requests.put(url) def create_dataset_file( dataset_iter ): diff --git a/scripts/edam_mapping.py b/scripts/edam_mapping.py index f82685a91fd6..f483db8c1f5e 100644 --- a/scripts/edam_mapping.py +++ b/scripts/edam_mapping.py @@ -16,9 +16,10 @@ import os import sys -import urllib2 from xml import etree +import requests + sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'lib'))) import galaxy.model @@ -35,7 +36,7 @@ if not os.path.exists("/tmp/edam.owl"): - open("/tmp/edam.owl", "w").write( urllib2.urlopen( EDAM_OWL_URL ).read() ) + open("/tmp/edam.owl", "w").write(requests.get(EDAM_OWL_URL).text) owl_xml_tree = etree.ElementTree.parse("/tmp/edam.owl") diff --git a/scripts/microbes/harvest_bacteria.py b/scripts/microbes/harvest_bacteria.py index 3b4279069691..a09e531bb0a3 100644 --- a/scripts/microbes/harvest_bacteria.py +++ b/scripts/microbes/harvest_bacteria.py @@ -8,9 +8,9 @@ import sys import time from ftplib import FTP -from urllib2 import urlopen from urllib import urlretrieve +import requests from BeautifulSoup import BeautifulSoup from util import get_bed_from_genbank, get_bed_from_glimmer3, get_bed_from_GeneMarkHMM, get_bed_from_GeneMark @@ -26,7 +26,7 @@ # number, name, chroms, kingdom, group, genbank, refseq, info_url, ftp_url def iter_genome_projects( url="http://www.ncbi.nlm.nih.gov/genomes/lproks.cgi?view=1", info_url_base="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?db=genomeprj&cmd=Retrieve&dopt=Overview&list_uids=" ): - for row in BeautifulSoup( urlopen( url ) ).findAll( name='tr', bgcolor=["#EEFFDD", "#E8E8DD"] ): + for row in BeautifulSoup(requests.get(url).text).findAll(name='tr', bgcolor=["#EEFFDD", "#E8E8DD"]): row = str( row ).replace( "\n", "" ).replace( "\r", "" ) fields = row.split( "" ) @@ -65,7 +65,7 @@ def get_chroms_by_project_id( org_num, base_url="http://www.ncbi.nlm.nih.gov/ent html_count += 1 url = "%s%s" % ( base_url, org_num ) try: - html = urlopen( url ) + html = requests.get(url).text except: print "GENOME PROJECT FAILED:", html_count, "org:", org_num, url html = None diff --git a/scripts/tool_shed/api/export.py b/scripts/tool_shed/api/export.py index 5773354558da..aa93c06969a1 100644 --- a/scripts/tool_shed/api/export.py +++ b/scripts/tool_shed/api/export.py @@ -11,7 +11,8 @@ import os import sys import tempfile -import urllib2 + +import requests sys.path.insert( 1, os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir, os.pardir, 'lib' ) ) from tool_shed.util import basic_util @@ -98,24 +99,11 @@ def main( options ): download_url = export_dict[ 
'download_url' ] download_dir = os.path.abspath( options.download_dir ) file_path = os.path.join( download_dir, repositories_archive_filename ) - src = None - dst = None - try: - src = urllib2.urlopen( download_url ) - dst = open( file_path, 'wb' ) - while True: - chunk = src.read( CHUNK_SIZE ) + src = requests.get(download_url, stream=True) + with open(file_path, 'wb') as dst: + for chunk in src.iter_content(chunk_size=CHUNK_SIZE): if chunk: - dst.write( chunk ) - else: - break - except: - raise - finally: - if src: - src.close() - if dst: - dst.close() + dst.write(chunk) print "Successfully exported revision ", options.changeset_revision, " of repository ", options.name, " owned by ", options.owner print "to location ", file_path else: diff --git a/scripts/transfer.py b/scripts/transfer.py index 84f0c446fede..ae08e11ff808 100644 --- a/scripts/transfer.py +++ b/scripts/transfer.py @@ -215,6 +215,7 @@ def transfer( app, transfer_job_id ): def http_transfer( transfer_job ): """Plugin" for handling http(s) transfers.""" url = transfer_job.params['url'] + assert url.startswith('http://') or url.startswith('https://') try: f = urllib2.urlopen( url ) except urllib2.URLError as e: diff --git a/test/api/test_workflows.py b/test/api/test_workflows.py index 6ae340f96e1b..1f331e7506be 100644 --- a/test/api/test_workflows.py +++ b/test/api/test_workflows.py @@ -180,7 +180,7 @@ def read_test_data(test_dict): ) if jobs_descriptions is None: assert source_type != "path" - jobs_descriptions = yaml.load( has_workflow ) + jobs_descriptions = yaml.safe_load(has_workflow) test_data = jobs_descriptions.get("test_data", {}) diff --git a/test/api/test_workflows_from_yaml.py b/test/api/test_workflows_from_yaml.py index cc269146a54f..a2c066b754a2 100644 --- a/test/api/test_workflows_from_yaml.py +++ b/test/api/test_workflows_from_yaml.py @@ -55,11 +55,11 @@ def test_simple_upload(self): assert tool_count['random_lines1'] == 1 assert tool_count['cat1'] == 2 -# FIXME: This test fails on some machines due to (we're guessing) yaml loading +# FIXME: This test fails on some machines due to (we're guessing) yaml.safe_loading # order being not guaranteed and inconsistent across platforms. 
The workflow -# yaml loader probably needs to enforce order using something like the +# yaml.safe_loader probably needs to enforce order using something like the # approach described here: -# https://stackoverflow.com/questions/13297744/pyyaml-control-ordering-of-items-called-by-yaml-load +# https://stackoverflow.com/questions/13297744/pyyaml-control-ordering-of-items-called-by-yaml.safe_load # def test_multiple_input( self ): # history_id = self.dataset_populator.new_history() # self._run_jobs(""" diff --git a/test/api/workflows_format_2/converter.py b/test/api/workflows_format_2/converter.py index 2795982914d4..e8d3fd40747a 100644 --- a/test/api/workflows_format_2/converter.py +++ b/test/api/workflows_format_2/converter.py @@ -32,7 +32,7 @@ def yaml_to_workflow(has_yaml, galaxy_interface, workflow_directory): """Convert a Format 2 workflow into standard Galaxy format from supplied stream.""" - as_python = yaml.load(has_yaml) + as_python = yaml.safe_load(has_yaml) return python_to_workflow(as_python, galaxy_interface, workflow_directory) @@ -109,7 +109,7 @@ def _python_to_workflow(as_python, conversion_context): run_action_path = run_action["@import"] runnable_path = os.path.join(conversion_context.workflow_directory, run_action_path) with open(runnable_path, "r") as f: - runnable_description = yaml.load(f) + runnable_description = yaml.safe_load(f) run_action = runnable_description run_class = run_action["class"] diff --git a/test/api/workflows_format_2/main.py b/test/api/workflows_format_2/main.py index e147c52ec230..c481cc9eab7c 100644 --- a/test/api/workflows_format_2/main.py +++ b/test/api/workflows_format_2/main.py @@ -20,7 +20,7 @@ def convert_and_import_workflow(has_workflow, **kwds): if workflow_directory is None: workflow_directory = os.path.dirname(has_workflow) with open(workflow_path, "r") as f: - has_workflow = yaml.load(f) + has_workflow = yaml.safe_load(f) if workflow_directory is not None: workflow_directory = os.path.abspath(workflow_directory) diff --git a/test/unit/workflows/workflow_support.py b/test/unit/workflows/workflow_support.py index 77258905cd59..23a89a656e57 100644 --- a/test/unit/workflows/workflow_support.py +++ b/test/unit/workflows/workflow_support.py @@ -73,7 +73,7 @@ def get_tool_id( self, tool_id ): def yaml_to_model(has_dict, id_offset=100): if isinstance(has_dict, str): - has_dict = yaml.load(has_dict) + has_dict = yaml.safe_load(has_dict) workflow = model.Workflow() workflow.steps = [] From b5ded00eb50181f17b3c003e1b7c895098b85d25 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 26 Sep 2017 07:57:37 -0400 Subject: [PATCH 02/12] Fix invalid requests imports introduced in #4604. Even when we remove the requests controller the pyc files will stick around - so probably best to keep these absolute imports for these controllers indefinitely. This broke workflow import from URL coming through the GUI and so it broke a bunch of Selenium tests. 
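
As a rough sketch of the failure mode these imports guard against (the file and helper names below are illustrative only, not part of this change): under Python 2's implicit relative imports, a controllers package that once shipped a module named requests.py can keep shadowing the real requests library through a leftover requests.pyc, so "import requests" inside a controller resolves to the stale module instead of the installed package. The __future__ import added to each controller forces the absolute lookup:

    # hypothetical controller module, e.g. lib/galaxy/webapps/galaxy/controllers/example.py
    from __future__ import absolute_import  # must come before the other imports

    # Without the __future__ line, Python 2 first looks for a sibling "requests"
    # module (or its orphaned .pyc) inside this controllers package; with it,
    # this always resolves to the requests library from site-packages.
    import requests


    def fetch_remote_workflow(url):
        # Illustrative helper only; mirrors how the controllers fetch a URL.
        return requests.get(url).text
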
--- lib/galaxy/webapps/galaxy/controllers/async.py | 2 ++ lib/galaxy/webapps/galaxy/controllers/library_common.py | 2 ++ lib/galaxy/webapps/galaxy/controllers/root.py | 2 ++ lib/galaxy/webapps/galaxy/controllers/workflow.py | 2 ++ 4 files changed, 8 insertions(+) diff --git a/lib/galaxy/webapps/galaxy/controllers/async.py b/lib/galaxy/webapps/galaxy/controllers/async.py index 708e1f3dfeb0..6eee8972a1b0 100644 --- a/lib/galaxy/webapps/galaxy/controllers/async.py +++ b/lib/galaxy/webapps/galaxy/controllers/async.py @@ -2,6 +2,8 @@ Upload class """ +from __future__ import absolute_import + import logging import urllib diff --git a/lib/galaxy/webapps/galaxy/controllers/library_common.py b/lib/galaxy/webapps/galaxy/controllers/library_common.py index 02ee7428aec9..6ecc6acba5df 100644 --- a/lib/galaxy/webapps/galaxy/controllers/library_common.py +++ b/lib/galaxy/webapps/galaxy/controllers/library_common.py @@ -1,3 +1,5 @@ +from __future__ import absolute_import + import glob import json import logging diff --git a/lib/galaxy/webapps/galaxy/controllers/root.py b/lib/galaxy/webapps/galaxy/controllers/root.py index e280a4c067b8..aad9c37b9900 100644 --- a/lib/galaxy/webapps/galaxy/controllers/root.py +++ b/lib/galaxy/webapps/galaxy/controllers/root.py @@ -1,6 +1,8 @@ """ Contains the main interface in the Universe class """ +from __future__ import absolute_import + import cgi import os diff --git a/lib/galaxy/webapps/galaxy/controllers/workflow.py b/lib/galaxy/webapps/galaxy/controllers/workflow.py index 71f855270d9d..7ce86e1548f0 100644 --- a/lib/galaxy/webapps/galaxy/controllers/workflow.py +++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py @@ -1,3 +1,5 @@ +from __future__ import absolute_import + import base64 import httplib import json From 545d5556588dfe28c8a9857333366ba62dbcd2fd Mon Sep 17 00:00:00 2001 From: Nate Coraor Date: Fri, 13 Oct 2017 12:10:19 -0400 Subject: [PATCH 03/12] Fix additional uses of subprocess shell=True --- lib/galaxy/datatypes/converters/sam_to_bam.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/datatypes/converters/sam_to_bam.py b/lib/galaxy/datatypes/converters/sam_to_bam.py index ca3fecf59492..1ae40251ff34 100644 --- a/lib/galaxy/datatypes/converters/sam_to_bam.py +++ b/lib/galaxy/datatypes/converters/sam_to_bam.py @@ -33,8 +33,10 @@ def __main__(): # convert to SAM unsorted_bam_filename = os.path.join( tmp_dir, 'unsorted.bam' ) unsorted_stderr_filename = os.path.join( tmp_dir, 'unsorted.stderr' ) - cmd = 'samtools view -bS "%s" > "%s"' % ( input_filename, unsorted_bam_filename ) - proc = subprocess.Popen( args=cmd, stderr=open( unsorted_stderr_filename, 'wb' ), shell=True, cwd=tmp_dir ) + proc = subprocess.Popen(['samtools', 'view', '-bS', input_filename], + stdout=open(unsorted_bam_filename, 'wb'), + stderr=open(unsorted_stderr_filename, 'wb'), + cwd=tmp_dir) return_code = proc.wait() if return_code: stderr_target = sys.stderr @@ -52,8 +54,10 @@ def __main__(): # sort sam, so indexing will not fail sorted_stderr_filename = os.path.join( tmp_dir, 'sorted.stderr' ) sorting_prefix = os.path.join( tmp_dir, 'sorted_bam' ) - cmd = 'samtools sort -o "%s" "%s" > "%s"' % ( unsorted_bam_filename, sorting_prefix, output_filename ) - proc = subprocess.Popen( args=cmd, stderr=open( sorted_stderr_filename, 'wb' ), shell=True, cwd=tmp_dir ) + proc = subprocess.Popen(['samtools', 'sort', '-o', unsorted_bam_filename, sorting_prefix], + stdout=open(output_filename, 'wb'), + stderr=open(sorted_stderr_filename, 'wb'), + cwd=tmp_dir) 
return_code = proc.wait() if return_code: From 2f79351e3cdb1fe9705f00590038b11cba4b428e Mon Sep 17 00:00:00 2001 From: E Rasche Date: Thu, 28 Sep 2017 08:07:55 +0000 Subject: [PATCH 04/12] Rebase script integrity check improvements (#4720) from dev --- .../jobs/runners/util/job_script/__init__.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/lib/galaxy/jobs/runners/util/job_script/__init__.py b/lib/galaxy/jobs/runners/util/job_script/__init__.py index 46a0c2354a16..a41439a5af13 100644 --- a/lib/galaxy/jobs/runners/util/job_script/__init__.py +++ b/lib/galaxy/jobs/runners/util/job_script/__init__.py @@ -1,3 +1,4 @@ +import logging import os from string import Template import subprocess @@ -7,6 +8,7 @@ from six import text_type from galaxy.util import unicodify +log = logging.getLogger(__name__) DEFAULT_SHELL = '/bin/bash' DEFAULT_JOB_FILE_TEMPLATE = Template( @@ -122,6 +124,8 @@ def _handle_script_integrity(path, config): script_integrity_verified = True break + log.debug("Script integrity error: returncode was %d", returncode) + # Else we will sync and wait to see if the script becomes # executable. try: @@ -129,11 +133,13 @@ def _handle_script_integrity(path, config): # These have occurred both in Docker containers and on EC2 clusters # under high load. subprocess.check_call(INTEGRITY_SYNC_COMMAND) - except Exception: - pass - time.sleep(sleep_amt) - except Exception: - pass + except Exception as e: + log.debug("Error syncing the filesystem: %s", unicodify(e)) + + except Exception as exc: + log.debug("Script not available yet: %s", unicodify(exc)) + + time.sleep(sleep_amt) if not script_integrity_verified: raise Exception("Failed to write job script, could not verify job script integrity.") From 9fa9d42c5093ddd8f70193cc6a07ae0af0394f8c Mon Sep 17 00:00:00 2001 From: Nate Coraor Date: Fri, 13 Oct 2017 12:19:25 -0400 Subject: [PATCH 05/12] Fix backport issues --- lib/galaxy/datatypes/tabular.py | 1 + lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py | 1 - lib/galaxy/jobs/runners/util/kill.py | 1 - lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py | 1 - lib/galaxy/web/proxy/__init__.py | 2 +- lib/tool_shed/capsule/capsule_manager.py | 1 - 6 files changed, 2 insertions(+), 5 deletions(-) diff --git a/lib/galaxy/datatypes/tabular.py b/lib/galaxy/datatypes/tabular.py index 16990df6c9b2..821f3d27be7a 100644 --- a/lib/galaxy/datatypes/tabular.py +++ b/lib/galaxy/datatypes/tabular.py @@ -8,6 +8,7 @@ import logging import os import re +import shutil import subprocess import tempfile from cgi import escape diff --git a/lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py b/lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py index 099227388192..bd458c061504 100644 --- a/lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py +++ b/lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py @@ -3,7 +3,6 @@ produced by SMRT Portal. 
""" import logging -import urllib2 from string import Template import requests diff --git a/lib/galaxy/jobs/runners/util/kill.py b/lib/galaxy/jobs/runners/util/kill.py index e12343c05362..4abce160eb2e 100644 --- a/lib/galaxy/jobs/runners/util/kill.py +++ b/lib/galaxy/jobs/runners/util/kill.py @@ -2,7 +2,6 @@ import subprocess from platform import system from time import sleep -from subprocess import Popen try: from psutil import Process, NoSuchProcess diff --git a/lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py b/lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py index f1318644ed88..46e12d837566 100644 --- a/lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py +++ b/lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py @@ -11,7 +11,6 @@ import optparse import tarfile import tempfile -import urllib2 import math from base64 import b64decode diff --git a/lib/galaxy/web/proxy/__init__.py b/lib/galaxy/web/proxy/__init__.py index 85711dd75d2d..bd8f3a062f4e 100644 --- a/lib/galaxy/web/proxy/__init__.py +++ b/lib/galaxy/web/proxy/__init__.py @@ -254,6 +254,6 @@ def handle_requests(self, authentication, proxy_requests, route_name, container_ excp = "Could not contact proxy after %s seconds" % sum(range(sleep + 1)) raise Exception(excp) time.sleep(sleep) - self.handle_requests(authentication, proxy_requests, route_name, container_ids, container_interface, sleep=sleep + 1) + self.handle_requests(authentication, proxy_requests, route_name, container_ids, sleep=sleep + 1) # TODO: MQ diven proxy? diff --git a/lib/tool_shed/capsule/capsule_manager.py b/lib/tool_shed/capsule/capsule_manager.py index 1e92efa84f03..c150005f4da0 100644 --- a/lib/tool_shed/capsule/capsule_manager.py +++ b/lib/tool_shed/capsule/capsule_manager.py @@ -5,7 +5,6 @@ import tarfile import tempfile import threading -import urllib from time import gmtime from time import strftime From ef268027191b526b8c8dfe69c3d8352c0412ce1d Mon Sep 17 00:00:00 2001 From: E Rasche Date: Wed, 13 Sep 2017 06:03:55 +0000 Subject: [PATCH 06/12] Hardening backport differences from dev present in 16.10 but not 16.07 --- .../webhooks/demo/phdcomics/helper/__init__.py | 6 +++--- lib/galaxy/tools/deps/conda_compat.py | 2 +- .../tools/deps/container_resolvers/mulled.py | 8 ++++---- lib/galaxy/tools/deps/mulled/mulled_build.py | 17 ++++++----------- .../tools/deps/mulled/mulled_build_channel.py | 13 +++++++++---- lib/galaxy/webhooks/__init__.py | 2 +- 6 files changed, 24 insertions(+), 24 deletions(-) diff --git a/config/plugins/webhooks/demo/phdcomics/helper/__init__.py b/config/plugins/webhooks/demo/phdcomics/helper/__init__.py index 94073b9c6eec..2887f463f88d 100644 --- a/config/plugins/webhooks/demo/phdcomics/helper/__init__.py +++ b/config/plugins/webhooks/demo/phdcomics/helper/__init__.py @@ -1,7 +1,7 @@ -import urllib -import re -import random import logging +import random +import re +import urllib log = logging.getLogger(__name__) diff --git a/lib/galaxy/tools/deps/conda_compat.py b/lib/galaxy/tools/deps/conda_compat.py index c45573ef49d6..10527bf890dd 100644 --- a/lib/galaxy/tools/deps/conda_compat.py +++ b/lib/galaxy/tools/deps/conda_compat.py @@ -61,7 +61,7 @@ def _render_jinja2(recipe_dir): @_Memoized def yamlize(data): - res = yaml.load(data) + res = yaml.safe_load(data) # ensure the result is a dict if res is None: res = {} diff --git a/lib/galaxy/tools/deps/container_resolvers/mulled.py b/lib/galaxy/tools/deps/container_resolvers/mulled.py index dcf15a020d8f..37ced02ab431 100644 --- a/lib/galaxy/tools/deps/container_resolvers/mulled.py +++ 
b/lib/galaxy/tools/deps/container_resolvers/mulled.py @@ -2,13 +2,13 @@ import collections import logging +import subprocess from ..container_resolvers import ( ContainerResolver, ) from ..docker_util import build_docker_images_command from ..mulled.mulled_build import ( - check_output, DEFAULT_CHANNELS, ensure_installed, InvolucroContext, @@ -33,9 +33,9 @@ def list_cached_mulled_images(namespace=None): - command = build_docker_images_command(truncate=True, sudo_docker=False) - command = "%s | tail -n +2 | tr -s ' ' | cut -d' ' -f1,2" % command - images_and_versions = check_output(command) + command = build_docker_images_command(truncate=True, sudo=False) + images_and_versions = subprocess.check_output(command).strip().split('\n') + images_and_versions = [line.split()[0:2] for line in images_and_versions[1:]] name_filter = get_filter(namespace) def output_line_to_image(line): diff --git a/lib/galaxy/tools/deps/mulled/mulled_build.py b/lib/galaxy/tools/deps/mulled/mulled_build.py index 3a4e16995056..342a363a08f2 100644 --- a/lib/galaxy/tools/deps/mulled/mulled_build.py +++ b/lib/galaxy/tools/deps/mulled/mulled_build.py @@ -87,17 +87,12 @@ def get_affected_packages(args): """ recipes_dir = args.recipes_dir hours = args.diff_hours - cmd = """cd '%s' && git log --diff-filter=ACMRTUXB --name-only --pretty="" --since="%s hours ago" | grep -E '^recipes/.*/meta.yaml' | sort | uniq""" % (recipes_dir, hours) - pkg_list = check_output(cmd, shell=True) - ret = list() - for pkg in pkg_list.strip().split('\n'): - if pkg and os.path.exists(os.path.join( recipes_dir, pkg )): - ret.append( (get_pkg_name(args, pkg), get_tests(args, pkg)) ) - return ret - - -def check_output(cmd, shell=True): - return subprocess.check_output(cmd, shell=shell) + cmd = ['git', 'log', '--diff-filter=ACMRTUXB', '--name-only', '--pretty=""', '--since="%s hours ago"' % hours] + changed_files = subprocess.check_output(cmd, cwd=recipes_dir).strip().split('\n') + pkg_list = set([x for x in changed_files if x.startswith('recipes/') and x.endswith('meta.yaml')]) + for pkg in pkg_list: + if pkg and os.path.exists(os.path.join(recipes_dir, pkg)): + yield (get_pkg_name(args, pkg), get_tests(args, pkg)) def conda_versions(pkg_name, file_name): diff --git a/lib/galaxy/tools/deps/mulled/mulled_build_channel.py b/lib/galaxy/tools/deps/mulled/mulled_build_channel.py index 6463d3ea58d2..99c976b39faa 100644 --- a/lib/galaxy/tools/deps/mulled/mulled_build_channel.py +++ b/lib/galaxy/tools/deps/mulled/mulled_build_channel.py @@ -19,6 +19,7 @@ from __future__ import print_function import os +import subprocess import sys import time @@ -27,7 +28,6 @@ add_build_arguments, args_to_mull_targets_kwds, build_target, - check_output, conda_versions, get_affected_packages, mull_targets, @@ -42,7 +42,13 @@ def _fetch_repo_data(args): repo_data = "%s-repodata.json" % channel if not os.path.exists(repo_data): platform_tag = 'osx-64' if sys.platform == 'darwin' else 'linux-64' - check_output("wget --quiet https://conda.anaconda.org/%s/%s/repodata.json.bz2 -O '%s.bz2' && bzip2 -d '%s.bz2'" % (channel, platform_tag, repo_data, repo_data)) + subprocess.check_call([ + 'wget', '--quiet', 'https://conda.anaconda.org/%s/%s/repodata.json.bz2' % (channel, platform_tag), + '-O', '%s.bz2' % repo_data + ]) + subprocess.check_call([ + 'bzip2', '-d', '%s.bz2' % repo_data + ]) return repo_data @@ -55,8 +61,7 @@ def _new_versions(quay, conda): def run_channel(args, build_last_n_versions=1): """Build list of involucro commands (as shell snippet) to run.""" - pkgs = 
get_affected_packages(args) - for pkg_name, pkg_tests in pkgs: + for pkg_name, pkg_tests in get_affected_packages(args): repo_data = _fetch_repo_data(args) c = conda_versions(pkg_name, repo_data) # only package the most recent N versions diff --git a/lib/galaxy/webhooks/__init__.py b/lib/galaxy/webhooks/__init__.py index 9aaa2b80a1a5..b7ce4eb0ef2b 100644 --- a/lib/galaxy/webhooks/__init__.py +++ b/lib/galaxy/webhooks/__init__.py @@ -67,7 +67,7 @@ def load_webhooks(self): def load_webhook_from_config(self, config_dir, config_file): try: with open(os.path.join(config_dir, config_file)) as file: - config = yaml.load(file) + config = yaml.safe_load(file) path = os.path.normpath(os.path.join(config_dir, '..')) webhook = Webhook( config['name'], From d70713d07a3c4d98b71df9247bbd8644ab6b2b40 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Tue, 24 Jan 2017 17:00:11 +0000 Subject: [PATCH 07/12] Pin flake8 and plugins in tox.ini For all tox environments used in .travis.yml except the ones allowed to fail. --- tox.ini | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tox.ini b/tox.ini index d3158ba073d7..3e2b84370af9 100644 --- a/tox.ini +++ b/tox.ini @@ -7,7 +7,7 @@ commands = bash .ci/flake8_wrapper.sh whitelist_externals = bash deps = flake8==3.2.1 - flake8-docstrings==1.0.2 + flake8-docstrings==1.0.3 [testenv:py33-lint] commands = bash .ci/flake8_py3_wrapper.sh @@ -43,14 +43,14 @@ whitelist_externals = bash skip_install = True deps = flake8 - flake8-import-order==0.9 + flake8-import-order>=0.9 [testenv:py27-lint-imports-include-list] commands = bash .ci/flake8_wrapper_imports.sh whitelist_externals = bash skip_install = True deps = - flake8==3.0.4 + flake8==3.2.1 flake8-import-order==0.11 [testenv:qunit] @@ -79,7 +79,7 @@ whitelist_externals = bash skip_install = True deps = flake8 - flake8-docstrings==1.0.2 + flake8-docstrings [testenv:py27-lint-docstring-include-list] commands = bash .ci/flake8_wrapper_docstrings.sh --include @@ -87,4 +87,4 @@ whitelist_externals = bash skip_install = True deps = flake8 - flake8-docstrings==1.0.2 + flake8-docstrings==1.0.3 From fe9839bf7ca720061548ff3bcebdc79062e7adf5 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Fri, 11 Aug 2017 11:53:48 +0100 Subject: [PATCH 08/12] Update pin for flake8-docstrings Similar to commit 2586df45637d794c63252838573b2d1e6d692243 . --- setup.cfg | 2 +- tox.ini | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/setup.cfg b/setup.cfg index b73b6ec5ade1..62ef47ece00d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -7,7 +7,7 @@ # 501 is line length # W503 is line breaks before binary operators, which has been reversed in PEP 8. # D** are docstring linting - which we mostly ignore except D302. (Hopefully we will solve more over time). 
-ignore = E128,E201,E202,E203,E501,E402,W503,D100,D101,D102,D103,D104,D105,D200,D201,D202,D204,D205,D206,D207,D208,D209,D210,D211,D300,D301,D400,D401,D402,D403 +ignore = E128,E201,E202,E203,E501,E402,W503,D100,D101,D102,D103,D104,D105,D200,D201,D202,D204,D205,D206,D207,D208,D209,D210,D211,D300,D301,D400,D401,D402,D403,D412,D413 exclude = lib/galaxy/util/jstree.py # For flake8-import-order # https://github.com/PyCQA/flake8-import-order/blob/master/tests/test_cases/complete_smarkets.py diff --git a/tox.ini b/tox.ini index 3e2b84370af9..7a6bb49d9b13 100644 --- a/tox.ini +++ b/tox.ini @@ -7,7 +7,7 @@ commands = bash .ci/flake8_wrapper.sh whitelist_externals = bash deps = flake8==3.2.1 - flake8-docstrings==1.0.3 + flake8-docstrings==1.1.0 [testenv:py33-lint] commands = bash .ci/flake8_py3_wrapper.sh @@ -86,5 +86,5 @@ commands = bash .ci/flake8_wrapper_docstrings.sh --include whitelist_externals = bash skip_install = True deps = - flake8 - flake8-docstrings==1.0.3 + flake8==3.2.1 + flake8-docstrings==1.1.0 From 36b974c90514332c6cb732f9c376a66767100944 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Mon, 9 Oct 2017 15:28:39 -0400 Subject: [PATCH 09/12] Pin pydocstyle... --- tox.ini | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 7a6bb49d9b13..4123db280aa4 100644 --- a/tox.ini +++ b/tox.ini @@ -8,6 +8,7 @@ whitelist_externals = bash deps = flake8==3.2.1 flake8-docstrings==1.1.0 + pydocstyle==2.0.0 [testenv:py33-lint] commands = bash .ci/flake8_py3_wrapper.sh @@ -78,8 +79,10 @@ commands = bash .ci/flake8_wrapper_docstrings.sh --exclude whitelist_externals = bash skip_install = True deps = - flake8 - flake8-docstrings + flake8==3.2.1 + flake8-docstrings==1.1.0 + pydocstyle==2.0.0 + [testenv:py27-lint-docstring-include-list] commands = bash .ci/flake8_wrapper_docstrings.sh --include @@ -88,3 +91,4 @@ skip_install = True deps = flake8==3.2.1 flake8-docstrings==1.1.0 + pydocstyle==2.0.0 From 3bb5d825c13ae2dbd366f24dc316bcdaa7171eb9 Mon Sep 17 00:00:00 2001 From: Nate Coraor Date: Fri, 13 Oct 2017 19:50:42 -0400 Subject: [PATCH 10/12] Ignore I201 errors in 16.10 --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 62ef47ece00d..dddedde167e4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -7,7 +7,7 @@ # 501 is line length # W503 is line breaks before binary operators, which has been reversed in PEP 8. # D** are docstring linting - which we mostly ignore except D302. (Hopefully we will solve more over time). 
-ignore = E128,E201,E202,E203,E501,E402,W503,D100,D101,D102,D103,D104,D105,D200,D201,D202,D204,D205,D206,D207,D208,D209,D210,D211,D300,D301,D400,D401,D402,D403,D412,D413 +ignore = E128,E201,E202,E203,E501,E402,W503,D100,D101,D102,D103,D104,D105,D200,D201,D202,D204,D205,D206,D207,D208,D209,D210,D211,D300,D301,D400,D401,D402,D403,D412,D413,I201 exclude = lib/galaxy/util/jstree.py # For flake8-import-order # https://github.com/PyCQA/flake8-import-order/blob/master/tests/test_cases/complete_smarkets.py From 8d573db729a92344ad9115b7657ace07a4718a21 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Wed, 26 Apr 2017 12:46:21 +0100 Subject: [PATCH 11/12] Use container-based infrastructure (sudo: false) in TravisCI --- .travis.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 889911f520df..810deb579d6b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,4 @@ +sudo: false language: python python: 2.7 os: @@ -24,9 +25,13 @@ matrix: allow_failures: - env: TOX_ENV=py27-lint-imports +addons: + apt: + packages: + - libxml2-utils + install: - pip install tox - - if [ "$TOX_ENV" == "validate-test-tools" ]; then sudo apt-get install libxml2-utils; fi - if [ "$TOX_ENV" == "qunit" ]; then bash -c 'cd test/qunit && npm install'; fi - if [ "$TOX_ENV" == "first_startup" ]; then bash -c "bash scripts/common_startup.sh && wget -q https://github.com/jmchilton/galaxy-downloads/raw/master/db_gx_rev_0127.sqlite && mv db_gx_rev_0127.sqlite database/universe.sqlite && bash manage_db.sh -c ./config/galaxy.ini.sample upgrade"; fi From 6d8f8ecb466ae75ca7d586496512cb70206a08f6 Mon Sep 17 00:00:00 2001 From: Nate Coraor Date: Fri, 13 Oct 2017 22:45:45 -0400 Subject: [PATCH 12/12] Add missing backport of hardening to scripts/grt.py --- scripts/grt.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/grt.py b/scripts/grt.py index a87ea5eb1de5..7b4b4ce04789 100644 --- a/scripts/grt.py +++ b/scripts/grt.py @@ -111,10 +111,10 @@ def main(argv): print('Loading GRT ini...') try: with open(args.config) as f: - config_dict = yaml.load(f) + config_dict = yaml.safe_load(f) except Exception: with open(sample_config) as f: - config_dict = yaml.load(f) + config_dict = yaml.safe_load(f) # set to 0 by default if 'last_job_id_sent' not in config_dict:
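
The two hardening idioms repeated throughout this series are loading YAML with yaml.safe_load() instead of yaml.load(), and invoking external programs with explicit argument lists rather than shell=True command strings. A minimal sketch of both patterns follows; the file path and docker command are purely illustrative and are not taken from any of the patches above.

    # Illustrative only -- mirrors the hardening idioms used in this series,
    # with hypothetical inputs rather than code copied from the patches.
    import subprocess

    import yaml


    def load_config(path):
        # yaml.safe_load() only builds plain Python types (dicts, lists, strings,
        # numbers), so untrusted config files cannot instantiate arbitrary objects.
        with open(path) as handle:
            return yaml.safe_load(handle) or {}


    def list_docker_images():
        # Passing an argument list avoids spawning a shell, so no argument can be
        # reinterpreted by shell metacharacters or word splitting.
        output = subprocess.check_output(
            ['docker', 'images', '--format', '{{.Repository}}:{{.Tag}}'])
        return output.decode('utf-8').splitlines()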