Merge pull request #653 from nsoranzo/dev
Finish flake8 of test/.
bgruening committed Aug 27, 2015
2 parents c2fca17 + 93716ec commit 949b3de
Showing 14 changed files with 229 additions and 229 deletions.
3 changes: 0 additions & 3 deletions .ci/flake8_blacklist.txt
@@ -19,7 +19,4 @@ scripts/scramble/
 scripts/tool_shed/
 scripts/tools/
 scripts/transfer.py
-test/base/
-test/casperjs/
-test/install_and_test_tool_shed_repositories/
 tools/
2 changes: 1 addition & 1 deletion .ci/flake8_wrapper.sh
@@ -5,4 +5,4 @@ set -e
 flake8 --exclude `paste -sd, .ci/flake8_blacklist.txt` .
 
 # Look for obviously broken stuff lots more places.
-flake8 --select=E901,E902,F821,F822,F823,F831 --exclude lib/galaxy/util/pastescript/serve.py,lib/pkg_resources.py lib/ test/{api,unit}
+flake8 --select=E901,E902,F821,F822,F823,F831 --exclude lib/pkg_resources.py contrib/ lib/
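The second pass selects only error classes that signal outright breakage: E901/E902 (syntax and I/O errors), F821 (undefined name), F822 (undefined name listed in __all__), F823 (local variable used before assignment) and F831 (duplicate function argument). A minimal sketch of the same pass driven from Python rather than the shell wrapper, assuming flake8 is on PATH and mirroring the new command line:

import subprocess

# Mirrors the new wrapper line: restrict flake8 to fatal problem classes
# and skip the bundled pkg_resources copy.
returncode = subprocess.call([
    "flake8",
    "--select=E901,E902,F821,F822,F823,F831",
    "--exclude", "lib/pkg_resources.py",
    "contrib/", "lib/",
])
if returncode != 0:
    raise SystemExit("flake8 found fatal problems")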
7 changes: 4 additions & 3 deletions test/base/asserts/__init__.py
@@ -1,6 +1,7 @@
 import inspect
 import logging
 import sys
+
 log = logging.getLogger( __name__ )
 
 assertion_module_names = ['text', 'tabular', 'xml']
@@ -14,7 +15,7 @@
     full_assertion_module_name = 'base.asserts.' + assertion_module_name
     log.debug(full_assertion_module_name)
     try:
-        #Dynamically import module
+        # Dynamically import module
         __import__(full_assertion_module_name)
         assertion_module = sys.modules[full_assertion_module_name]
         assertion_modules.append(assertion_module)
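The loop above is the loader that pulls each assertion module in by dotted name. A standalone sketch of the same __import__-plus-sys.modules idiom, with stdlib module names standing in for the real base.asserts.* modules:

import sys

# Stand-ins for the real dotted names ('base.asserts.text', etc.).
module_names = ['json', 'csv']

modules = []
for name in module_names:
    try:
        # __import__ returns the top-level package for dotted names,
        # so the actual module is looked up in sys.modules afterwards.
        __import__(name)
        modules.append(sys.modules[name])
    except ImportError as exc:
        sys.stderr.write("Failed to import %s: %s\n" % (name, exc))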
@@ -40,7 +41,7 @@ def verify_assertion(data, assertion_description):
     if assert_function is None:
         errmsg = "Unable to find test function associated with XML tag '%s'. Check your tool file syntax." % tag
         raise AssertionError(errmsg)
-
+
     assert_function_args = inspect.getargspec(assert_function).args
     args = {}
     for attribute, value in assertion_description["attributes"].iteritems():
@@ -72,6 +73,6 @@ def verify_assertion(data, assertion_description):
 
     if "children" in assert_function_args:
         args["children"] = assertion_description["children"]
-
+
     # TODO: Verify all needed function arguments are specified.
     assert_function(**args)
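The idiom running through verify_assertion is to introspect the assert function and pass it only the keyword arguments it actually declares. A sketch of that getargspec-driven call, with a hypothetical assert function and attribute dict (getargspec is the Python 2-era API used here; inspect.signature replaces it on modern Python 3):

import inspect

def assert_has_text(output, text):
    # Hypothetical assert function in the style of base.asserts.text.
    assert text in output, "Expected %r in output" % text

attributes = {"text": "hello", "delta": "ignored", "min": "ignored"}
# Keep only the attributes the target function declares as arguments.
wanted = inspect.getargspec(assert_has_text).args
kwargs = dict((k, v) for k, v in attributes.items() if k in wanted)
kwargs["output"] = "hello world"
assert_has_text(**kwargs)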
31 changes: 16 additions & 15 deletions test/base/interactor.py
@@ -1,17 +1,18 @@
 import os
 import re
-from galaxy.tools.parser.interface import TestCollectionDef
+from json import dumps
+from logging import getLogger
+from StringIO import StringIO
 
 from galaxy import eggs
 eggs.require( "requests" )
+from requests import get, post
+
 from galaxy import util
-from galaxy.util.odict import odict
+from galaxy.tools.parser.interface import TestCollectionDef
 from galaxy.util.bunch import Bunch
-from requests import get
-from requests import post
-from json import dumps
+from galaxy.util.odict import odict
 
-from logging import getLogger
 log = getLogger( __name__ )
 
 # Off by default because it can pound the database pretty heavily
@@ -57,7 +58,7 @@ def verify_output( self, history_id, jobs, output_data, output_testdef, shed_too
         name = output_testdef.name
         self.wait_for_jobs( history_id, jobs, maxseconds )
         hid = self.__output_id( output_data )
-        ## TODO: Twill version verifys dataset is 'ok' in here.
+        # TODO: Twill version verifys dataset is 'ok' in here.
         self.verify_output_dataset( history_id=history_id, hda_id=hid, outfile=outfile, attributes=attributes, shed_tool_id=shed_tool_id )
 
         primary_datasets = attributes.get( 'primary_datasets', {} )
@@ -155,10 +156,10 @@ def stage_data_async( self, test_data, history_id, shed_tool_id, async=True ):
                 file_name = self.twill_test_case.get_filename( composite_file.get( 'value' ), shed_tool_id=shed_tool_id )
                 files["files_%s|file_data" % i] = open( file_name, 'rb' )
                 tool_input.update({
-                    #"files_%d|NAME" % i: name,
+                    # "files_%d|NAME" % i: name,
                     "files_%d|type" % i: "upload_dataset",
-                    ## TODO:
-                    #"files_%d|space_to_tab" % i: composite_file.get( 'space_to_tab', False )
+                    # TODO:
+                    # "files_%d|space_to_tab" % i: composite_file.get( 'space_to_tab', False )
                 })
             name = test_data[ 'name' ]
         else:
@@ -180,7 +181,7 @@ def stage_data_async( self, test_data, history_id, shed_tool_id, async=True ):
             dataset = submit_response["outputs"][0]
         except KeyError:
             raise Exception(submit_response)
-        #raise Exception(str(dataset))
+        # raise Exception(str(dataset))
         hid = dataset['id']
         self.uploads[ os.path.basename(fname) ] = self.uploads[ fname ] = self.uploads[ name ] = {"src": "hda", "id": hid}
         return self.__wait_for_history( history_id )
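The pattern this hunk preserves, indexing into submit_response["outputs"] and re-raising the whole response when the expected key is missing, keeps test failures self-describing. A sketch with hypothetical payloads in place of real API responses:

good_response = {"outputs": [{"id": "abc123", "name": "out1"}]}
bad_response = {"err_msg": "tool failed to start"}

def first_output(submit_response):
    try:
        return submit_response["outputs"][0]
    except KeyError:
        # Re-raise the full response: far more useful in a test log
        # than a bare KeyError('outputs').
        raise Exception(submit_response)

print(first_output(good_response)["id"])  # abc123
try:
    first_output(bad_response)
except Exception as exc:
    print(exc)  # the whole response dict, err_msg included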
@@ -203,7 +204,7 @@ def run_tool( self, testdef, history_id ):
                 new_values.append( value )
             inputs_tree[ key ] = new_values
 
-        # # HACK: Flatten single-value lists. Required when using expand_grouping
+        # HACK: Flatten single-value lists. Required when using expand_grouping
         for key, value in inputs_tree.iteritems():
             if isinstance(value, list) and len(value) == 1:
                 inputs_tree[key] = value[0]
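The HACK comment covers rewriting single-element lists to bare values so the tree produced by expand_grouping matches what the tool API expects. The same flattening as a standalone sketch, using items() plus list() so it also runs where iteritems does not exist (Python 3):

inputs_tree = {
    "input1": ["hda1"],           # single-value list: flatten
    "queries": ["hda2", "hda3"],  # genuine repeat: keep the list
    "threshold": 5,               # scalar: untouched
}

# list(...) snapshots the items so the dict can be mutated mid-loop.
for key, value in list(inputs_tree.items()):
    if isinstance(value, list) and len(value) == 1:
        inputs_tree[key] = value[0]

assert inputs_tree["input1"] == "hda1"
assert inputs_tree["queries"] == ["hda2", "hda3"]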
@@ -255,9 +256,9 @@ def __dictify_output_collections( self, submit_response ):
         return output_collections_dict
 
     def __dictify_outputs( self, datasets_object ):
-        ## Convert outputs list to a dictionary that can be accessed by
-        ## output_name so can be more flexiable about ordering of outputs
-        ## but also allows fallback to legacy access as list mode.
+        # Convert outputs list to a dictionary that can be accessed by
+        # output_name so can be more flexiable about ordering of outputs
+        # but also allows fallback to legacy access as list mode.
         outputs_dict = odict()
         index = 0
         for output in datasets_object[ 'outputs' ]:
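__dictify_outputs keys each output twice, by name and by running index, so callers can address outputs flexibly by name yet still fall back to legacy list-style positions. A sketch of that dual keying, with the stdlib OrderedDict standing in for galaxy.util.odict and a hypothetical outputs payload:

from collections import OrderedDict  # stand-in for galaxy.util.odict

datasets_object = {"outputs": [
    {"name": "log", "id": "abc"},
    {"name": "table", "id": "def"},
]}

outputs_dict = OrderedDict()
for index, output in enumerate(datasets_object["outputs"]):
    # Same object under both keys: name for flexible lookup,
    # index for legacy positional access.
    outputs_dict[index] = outputs_dict[output["name"]] = output

assert outputs_dict["table"] is outputs_dict[1]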
