From 277f573eaa1a85e332e3c9f696cc592a5c7c312f Mon Sep 17 00:00:00 2001
From: Danilo Horta
Date: Mon, 24 Apr 2017 16:26:37 +0100
Subject: [PATCH] Save new bench

---
 .asv/results/benchmarks.json                  | 28 --------
 ...mpy-numpy-sugar-pip-pip+optimix-scipy.json | 65 +++++++++++++++++++
 benchmarks/benchmarks.py                      | 25 +------
 3 files changed, 68 insertions(+), 50 deletions(-)
 create mode 100644 .asv/results/reborn/4ea83161-conda-py3.6-numba-numpy-numpy-sugar-pip-pip+optimix-scipy.json

diff --git a/.asv/results/benchmarks.json b/.asv/results/benchmarks.json
index 4462d1dc..d992ff65 100644
--- a/.asv/results/benchmarks.json
+++ b/.asv/results/benchmarks.json
@@ -13,33 +13,5 @@
         "unit": "seconds",
         "warmup_time": -1
     },
-    "benchmarks.TimeSuite.time_qep_binomial_lml_learn": {
-        "code": "def time_qep_binomial_lml_learn(self):\n glmm = GLMM((self._nsuc, self._ntri), 'binomial', self._X, self._QS)\n\n assert_allclose(glmm.value(), -272.1213895386019)\n glmm.fix('beta')\n glmm.fix('scale')\n\n glmm.feed().maximize(progress=False)\n assert_allclose(glmm.value(), -271.367864630782)\n\n glmm.unfix('beta')\n glmm.unfix('scale')\n\n glmm.feed().maximize(progress=False)\n assert_allclose(glmm.value(), -266.9517518211878)\n",
-        "goal_time": 0.1,
-        "name": "benchmarks.TimeSuite.time_qep_binomial_lml_learn",
-        "number": 0,
-        "param_names": [],
-        "params": [],
-        "pretty_name": "benchmarks.TimeSuite.time_qep_binomial_lml_learn",
-        "repeat": 0,
-        "timeout": 60.0,
-        "type": "time",
-        "unit": "seconds",
-        "warmup_time": -1
-    },
-    "benchmarks.TimeSuite.time_qep_binomial_lml_no_learn": {
-        "code": "def time_qep_binomial_lml_no_learn(self):\n glmm = GLMM((self._nsuc, self._ntri), 'binomial', self._X, self._QS)\n assert_allclose(glmm.value(), -272.1213895386019)\n",
-        "goal_time": 0.1,
-        "name": "benchmarks.TimeSuite.time_qep_binomial_lml_no_learn",
-        "number": 0,
-        "param_names": [],
-        "params": [],
-        "pretty_name": "benchmarks.TimeSuite.time_qep_binomial_lml_no_learn",
-        "repeat": 0,
-        "timeout": 60.0,
-        "type": "time",
-        "unit": "seconds",
-        "warmup_time": -1
-    },
     "version": 1
 }
\ No newline at end of file
diff --git a/.asv/results/reborn/4ea83161-conda-py3.6-numba-numpy-numpy-sugar-pip-pip+optimix-scipy.json b/.asv/results/reborn/4ea83161-conda-py3.6-numba-numpy-numpy-sugar-pip-pip+optimix-scipy.json
new file mode 100644
index 00000000..bf7630e3
--- /dev/null
+++ b/.asv/results/reborn/4ea83161-conda-py3.6-numba-numpy-numpy-sugar-pip-pip+optimix-scipy.json
@@ -0,0 +1,65 @@
+{
+    "commit_hash": "4ea831612c70c8b5b04bef8ab4945558f903dfea",
+    "date": 1493047105000,
+    "ended_at": {
+        "benchmarks.TimeSuite.time_qep_binomial_1k_learn": 1493047579000,
+        "benchmarks.TimeSuite.time_qep_binomial_lml_learn": 1493047300000,
+        "benchmarks.TimeSuite.time_qep_binomial_lml_no_learn": 1493047456000
+    },
+    "env_name": "conda-py3.6-numba-numpy-numpy-sugar-pip-pip+optimix-scipy",
+    "params": {
+        "arch": "x86_64",
+        "cpu": "Intel(R) Core(TM) i7-5557U CPU @ 3.10GHz",
+        "machine": "reborn",
+        "numba": "",
+        "numpy": "",
+        "numpy-sugar": "",
+        "os": "Darwin 16.5.0",
+        "pip": "",
+        "pip+optimix": "",
+        "python": "3.6",
+        "ram": "17179869184",
+        "scipy": ""
+    },
+    "profiles": {},
+    "python": "3.6",
+    "requirements": {
+        "numba": "",
+        "numpy": "",
+        "numpy-sugar": "",
+        "pip": "",
+        "pip+optimix": "",
+        "scipy": ""
+    },
+    "results": {
+        "benchmarks.TimeSuite.time_qep_binomial_1k_learn": null,
+        "benchmarks.TimeSuite.time_qep_binomial_lml_learn": null,
+        "benchmarks.TimeSuite.time_qep_binomial_lml_no_learn": {
+            "result": [
+                0.23217700000000008
+            ],
+            "stats": [
+                {
+                    "ci_99": [
+                        0.23150999999999966,
+                        0.23872900000000108
+                    ],
+                    "max": 0.23872900000000108,
+                    "mean": 0.23413866666666694,
+                    "min": 0.23150999999999966,
+                    "n": 3,
+                    "q_25": 0.23184349999999987,
+                    "q_75": 0.23545300000000058,
+                    "std": 0.0032572577764472402,
+                    "systematic": 0.0
+                }
+            ]
+        }
+    },
+    "started_at": {
+        "benchmarks.TimeSuite.time_qep_binomial_1k_learn": 1493047519000,
+        "benchmarks.TimeSuite.time_qep_binomial_lml_learn": 1493047294000,
+        "benchmarks.TimeSuite.time_qep_binomial_lml_no_learn": 1493047452000
+    },
+    "version": 1
+}
\ No newline at end of file
diff --git a/benchmarks/benchmarks.py b/benchmarks/benchmarks.py
index bc7e91a3..d8fb748a 100644
--- a/benchmarks/benchmarks.py
+++ b/benchmarks/benchmarks.py
@@ -1,7 +1,7 @@
 from numpy.random import RandomState
 from numpy.testing import assert_allclose
 from numpy import ascontiguousarray, sqrt, ones
-from numpy_sugar.linalg import economic_qs, economic_qs_linear
+from numpy_sugar.linalg import economic_qs, economic_qs_linear, sum2diag
 
 from glimix_core.example import linear_eye_cov
 from glimix_core.glmm import GLMM
@@ -20,32 +20,13 @@ def setup(self):
         self._nsuc = [random.randint(0, i) for i in self._ntri]
 
         self._X1k = random.randn(n1k, 5)
-        self._K1k = linear_eye_cov().feed().value()
+        self._K1k = self._X1k.dot(self._X1k.T)
+        sum2diag(self._K1k, 1e-3, out=self._K1k)
         self._QS1k = economic_qs(self._K1k)
 
         self._ntri1k = random.randint(1, 30, n1k)
         self._nsuc1k = [random.randint(0, i) for i in self._ntri1k]
 
-    def time_qep_binomial_lml_no_learn(self):
-        glmm = GLMM((self._nsuc, self._ntri), 'binomial', self._X, self._QS)
-        assert_allclose(glmm.value(), -272.1213895386019)
-
-    def time_qep_binomial_lml_learn(self):
-        glmm = GLMM((self._nsuc, self._ntri), 'binomial', self._X, self._QS)
-
-        assert_allclose(glmm.value(), -272.1213895386019)
-        glmm.fix('beta')
-        glmm.fix('scale')
-
-        glmm.feed().maximize(progress=False)
-        assert_allclose(glmm.value(), -271.367864630782)
-
-        glmm.unfix('beta')
-        glmm.unfix('scale')
-
-        glmm.feed().maximize(progress=False)
-        assert_allclose(glmm.value(), -266.9517518211878)
-
     def time_qep_binomial_1k_learn(self):
         glmm = GLMM((self._nsuc1k, self._ntri1k), 'binomial', self._X1k,
                     self._QS1k)