diff --git a/.travis.yml b/.travis.yml
index cd82a6f8f..bab5584df 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -4,6 +4,9 @@ python:
   - "2.7"
   - "3.4"
 
+env:
+  secure: "KTU56Bhft39FhFnV80Ek+Ht8nwAAJWlLAN104bALBzQWVraoD/znx0gQnoS+YQDjzxgpj30UKBua/o8q1IrvkjxJb8yUBzpS0P1jcGwqmpVRoNdb3pQPk8R7fB9pTFiaJUQbdQJ2/xTrB/T9Kda0J1zq81LC1zSOxAxUL47UI50="
+
 before_install:
   - sudo apt-get install -q libatlas3gf-base libatlas-dev liblapack-dev gfortran
diff --git a/openml/apiconnector.py b/openml/apiconnector.py
index a5d2308ea..2f7385040 100644
--- a/openml/apiconnector.py
+++ b/openml/apiconnector.py
@@ -947,7 +947,6 @@ def _create_run_from_xml(self, xml):
             url = file_[u"oml:url"]
             files[name] = url
 
-        print dic.keys()
         evaluations = dict()
         for evaluation in dic[u"oml:output_data"][u"oml:evaluation"]:
             name = evaluation[u"oml:name"]
@@ -994,7 +993,7 @@ def _read_url(self, url, add_authentication=False, data=None, filePath=None):
     def _read_url(self, url, data=None, file_path=None):
         if data is None:
             data = {}
-        data['session_hash'] = self.config.get('FAKE_SECTION', 'apikey')
+        data['api_key'] = self.config.get('FAKE_SECTION', 'apikey')
 
         if file_path is not None:
             if os.path.isabs(file_path):
@@ -1009,8 +1008,8 @@ def _read_url(self, url, data=None, file_path=None):
 
             try:
                 response = requests.post(url, data=data, files=fileElement)
-            except URLError, error:
-                print error
+            except URLError as error:
+                print(error)
                 return response.status_code, response
         else:
diff --git a/openml/entities/dataset.py b/openml/entities/dataset.py
index 459bb346a..977129423 100644
--- a/openml/entities/dataset.py
+++ b/openml/entities/dataset.py
@@ -81,7 +81,7 @@ def __init__(self, id, name, version, description, format, creator,
             else:
                 raise Exception()
 
-            with open(self.data_pickle_file, "w") as fh:
+            with open(self.data_pickle_file, "wb") as fh:
                 pickle.dump((X, categorical, attribute_names), fh, -1)
             logger.debug("Saved dataset %d: %s to file %s" %
                          (self.id, self.name, self.data_pickle_file))
diff --git a/setup.py b/setup.py
index a45438b30..4c1213f2c 100644
--- a/setup.py
+++ b/setup.py
@@ -16,7 +16,8 @@
                         "scipy>=0.13.3",
                         "xmltodict",
                         "nose",
-                        "numpydoc"],
+                        "numpydoc",
+                        "requests"],
      test_suite="nose.collector",
      classifiers=['Intended Audience :: Science/Research',
                   'Intended Audience :: Developers',
diff --git a/tests/test_apiconnector.py b/tests/test_apiconnector.py
index 33956c81b..2ab976bf8 100644
--- a/tests/test_apiconnector.py
+++ b/tests/test_apiconnector.py
@@ -27,11 +27,6 @@ class TestAPIConnector(unittest.TestCase):
     """
 
     def setUp(self):
-        config_file = os.path.expanduser('~/.openml/config')
-        if not os.path.exists(config_file):
-            raise Exception("OpenML config file required to run unit tests. "
-                            "See https://github.com/openml/OpenML/wiki/Client-API")
-
         self.cwd = os.getcwd()
         workdir = os.path.dirname(os.path.abspath(__file__))
         self.workdir = os.path.join(workdir, "tmp")
@@ -44,8 +39,13 @@ def setUp(self):
             os.chdir(self.workdir)
 
             self.cached = True
+
+        try:
+            apikey = os.environ['OPENMLAPIKEY']
+        except:
+            apikey = None
         self.connector = APIConnector(cache_directory=self.workdir,
-                                      apikey='test')
+                                      apikey=apikey)
 
     def tearDown(self):
         os.chdir(self.cwd)
@@ -66,7 +66,7 @@ def test_get_cached_datasets(self):
         datasets = connector.get_cached_datasets()
         self.assertIsInstance(datasets, dict)
         self.assertEqual(len(datasets), 2)
-        self.assertIsInstance(datasets.values()[0], OpenMLDataset)
+        self.assertIsInstance(list(datasets.values())[0], OpenMLDataset)
 
     def test_get_cached_dataset(self):
         workdir = os.path.dirname(os.path.abspath(__file__))