Testing notebooks + fixes #67

Merged: 6 commits, Nov 11, 2020
2 changes: 1 addition & 1 deletion popmon/hist/histogram.py
@@ -211,7 +211,7 @@ def __repr__(self):
         return f"HistogramContainer(dtype={self.npdtype}, n_dims={self.n_dim})"

     def __str__(self):
-        return str(self)
+        return repr(self)

     def _edit_name(self, axis_name, xname, yname, convert_time_index, short_keys):
         if convert_time_index and self.is_ts:
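Note on the fix above: the old `__str__` returned `str(self)`, which dispatches straight back to `__str__` and recurses until `RecursionError`, so printing a `HistogramContainer` crashed. Delegating to `repr(self)` terminates the cycle. A minimal self-contained sketch of the pattern (the attributes here are stand-ins, not the real class):

```python
class HistogramContainer:
    def __init__(self, npdtype="float64", n_dim=1):
        self.npdtype = npdtype
        self.n_dim = n_dim

    def __repr__(self):
        return f"HistogramContainer(dtype={self.npdtype}, n_dims={self.n_dim})"

    def __str__(self):
        # Old code did `return str(self)`: str() calls __str__ again,
        # recursing forever. Delegating to repr() is safe.
        return repr(self)


print(HistogramContainer())  # HistogramContainer(dtype=float64, n_dims=1)
```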
62 changes: 34 additions & 28 deletions popmon/notebooks/popmon_tutorial_advanced.ipynb
@@ -4,7 +4,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
},
@@ -26,10 +25,11 @@
"metadata": {},
"outputs": [],
"source": [
"%%capture\n",
"# install popmon (if not installed yet)\n",
"import sys\n",
"\n",
"!{sys.executable} -m pip install popmon"
"!\"{sys.executable}\" -m pip install popmon"
]
},
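The quotes around `{sys.executable}` matter when the interpreter lives under a path containing spaces (e.g. `C:\Program Files\...` on Windows), and the new `%%capture` suppresses pip's output when the notebook is executed. A sketch of the same install done from plain Python, which sidesteps shell quoting entirely:

```python
import subprocess
import sys

# Pass argv as a list so a space in sys.executable cannot split the command.
subprocess.run([sys.executable, "-m", "pip", "install", "popmon"], check=True)
```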
{
@@ -145,11 +145,13 @@
"outputs": [],
"source": [
"# download histogrammar jar files if not already installed, used for histogramming of spark dataframe\n",
"from pyspark.sql import SparkSession\n",
"try:\n",
" from pyspark.sql import SparkSession\n",
"\n",
"spark = SparkSession.builder.config(\n",
" \"spark.jars.packages\", \"org.diana-hep:histogrammar-sparksql_2.11:1.0.4\"\n",
").getOrCreate()"
" pyspark_installed = True\n",
"except ImportError:\n",
" print(\"pyspark needs to be installed for this example\")\n",
" pyspark_installed = False"
]
},
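The try/except guard lets the notebook run end to end even when pyspark is absent. An equivalent probe (a sketch, not what the notebook uses) that avoids importing the heavy package at all:

```python
from importlib.util import find_spec

# find_spec returns None when the package is not importable.
pyspark_installed = find_spec("pyspark") is not None
if not pyspark_installed:
    print("pyspark needs to be installed for this example")
```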
{
@@ -158,18 +160,19 @@
"metadata": {},
"outputs": [],
"source": [
"sdf = spark.createDataFrame(df)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"sdf.pm_stability_report(\n",
" time_axis=\"DATE\", time_width=\"1w\", time_offset=\"2015-07-02\", extended_report=False\n",
")"
"if pyspark_installed:\n",
" spark = SparkSession.builder.config(\n",
" \"spark.jars.packages\", \"org.diana-hep:histogrammar-sparksql_2.11:1.0.4\"\n",
" ).getOrCreate()\n",
"\n",
" sdf = spark.createDataFrame(df)\n",
"\n",
" sdf.pm_stability_report(\n",
" time_axis=\"DATE\",\n",
" time_width=\"1w\",\n",
" time_offset=\"2015-07-02\",\n",
" extended_report=False,\n",
" )"
]
},
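With the two cells merged, the Spark path only runs when the import succeeded: the report bins rows into one-week buckets (`time_width="1w"`) along the `DATE` column, anchored at `time_offset`. The same accessor is registered on pandas DataFrames once popmon is imported, so as a sketch, the no-Spark equivalent on the original pandas frame would be:

```python
# Assumes `import popmon` has registered the pandas accessor and that
# `df` is the pandas DataFrame used to build `sdf` above.
report = df.pm_stability_report(
    time_axis="DATE",
    time_width="1w",
    time_offset="2015-07-02",
    extended_report=False,
)
```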
{
@@ -287,7 +290,7 @@
"outputs": [],
"source": [
"split_hist = split_hists.query(\"date == '2015-07-05 12:00:00'\")\n",
"split_hist.histogram[0].hist.plot.matplotlib();"
"split_hist.histogram[0].hist.plot.matplotlib()"
]
},
{
@@ -303,7 +306,7 @@
"metadata": {},
"outputs": [],
"source": [
"split_hist.histogram_ref[0].hist.plot.matplotlib();"
"split_hist.histogram_ref[0].hist.plot.matplotlib()"
]
},
{
@@ -320,11 +323,14 @@
"metadata": {},
"outputs": [],
"source": [
"import pickle\n",
"# As HTML report\n",
"report.to_file(\"report.html\")\n",
"\n",
"# Alternatively, as serialized Python object\n",
"# import pickle\n",
"\n",
"with open(\"report.pkl\", \"wb\") as f:\n",
" pickle.dump(report, f)\n",
"report.to_file(\"report.html\")"
"# with open(\"report.pkl\", \"wb\") as f:\n",
"# pickle.dump(report, f)"
]
},
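The commented-out branch keeps the full report object around as a pickle rather than flat HTML. Reading it back is the mirror image; a sketch assuming `report.pkl` was written as above (the output filename here is made up):

```python
import pickle

with open("report.pkl", "rb") as f:
    report = pickle.load(f)

# The restored object still supports the regular API.
report.to_file("report_from_pickle.html")
```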
{
@@ -350,7 +356,7 @@
" report_filepath=None,\n",
" store_key=\"html_report\",\n",
" sections_key=\"report_sections\",\n",
")\n",
")"
]
},
{
@@ -473,18 +479,18 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.7"
"version": "3.8.6"
},
"nteract": {
"version": "0.15.0"
},
"pycharm": {
"stem_cell": {
"cell_type": "raw",
"source": [],
"metadata": {
"collapsed": false
-}
+},
+"source": []
}
}
},
4 changes: 2 additions & 2 deletions popmon/notebooks/popmon_tutorial_basic.ipynb
@@ -4,7 +4,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
},
@@ -36,10 +35,11 @@
"metadata": {},
"outputs": [],
"source": [
"%%capture\n",
"# install popmon (if not installed yet)\n",
"import sys\n",
"\n",
"!{sys.executable} -m pip install popmon"
"!\"{sys.executable}\" -m pip install popmon"
]
},
{
3 changes: 2 additions & 1 deletion popmon/notebooks/popmon_tutorial_incremental_data.ipynb
@@ -28,10 +28,11 @@
"metadata": {},
"outputs": [],
"source": [
"%%capture\n",
"# install popmon (if not installed yet)\n",
"import sys\n",
"\n",
"!{sys.executable} -m pip install popmon"
"!\"{sys.executable}\" -m pip install popmon"
]
},
{
5 changes: 3 additions & 2 deletions requirements-test.txt
@@ -1,7 +1,8 @@
flake8>=3.7.8
pytest>=4.0.2
-nbconvert>=5.3.1
+pytest-notebook>=0.6.1
+nbconvert~=5.6.1
jupyter_client>=5.2.3
ipykernel>=5.1.3
black>=19.10b0
-isort>=5.0.7
\ No newline at end of file
+isort>=5.0.7
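`pytest-notebook` is added for the rewritten notebook tests, and nbconvert moves from a floor (`>=5.3.1`) to a compatible-release pin: `~=5.6.1` allows 5.6.x patch releases but excludes 5.7+, presumably to stay on the nbconvert 5.x API that pytest-notebook 0.6.x was built against. A sketch of what the specifier accepts:

```python
from packaging.specifiers import SpecifierSet

# "~=5.6.1" is shorthand for ">=5.6.1, ==5.6.*": patch releases only.
spec = SpecifierSet("~=5.6.1")
print("5.6.4" in spec)  # True
print("5.7.0" in spec)  # False
```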
65 changes: 20 additions & 45 deletions tests/popmon/notebooks/test_notebooks.py
@@ -1,55 +1,30 @@
-import os
-import unittest
-
-import nbformat
 import pytest
-from jupyter_client.kernelspec import KernelSpecManager, NoSuchKernel
-from nbconvert.preprocessors import ExecutePreprocessor
-from nbconvert.preprocessors.execute import CellExecutionError
-
-from popmon import resources
-
-kernel_name = "python3"
-
-# check if jupyter python3 kernel can be opened. if kernel not found, skip unit tests below.
-try:
-    km = KernelSpecManager()
-    km.get_kernel_spec(kernel_name)
-    kernel_found = True
-except NoSuchKernel:
-    kernel_found = False
+from pytest_notebook.nb_regression import NBRegressionFixture

+from popmon.resources import notebook

-class NotebookTest(unittest.TestCase):
-    """Unit test notebook"""

-    def run_notebook(self, notebook):
-        """ Test notebook """
+@pytest.fixture(scope="module")
+def nb_tester():
+    """Test notebooks using pytest-notebook"""
+    nb_regression = NBRegressionFixture(
+        diff_ignore=(
+            "/metadata/language_info",
+            "/cells/*/execution_count",
+            "/cells/*/outputs/*",
+        ),
+        exec_timeout=1800,
+    )
+    return nb_regression

-        # load notebook
-        with open(notebook) as f:
-            nb = nbformat.read(f, as_version=4)
-
-        # execute notebook
-        ep = ExecutePreprocessor(timeout=600, kernel_name=kernel_name)
-        try:
-            ep.preprocess(nb, {})
-            status = True
-        except CellExecutionError:
-            # store if failed
-            status = False
-            executed_notebook = os.getcwd() + "/" + notebook.split("/")[-1]
-            with open(executed_notebook, mode="wt") as f:
-                nbformat.write(nb, f)
+def test_notebook_basic(nb_tester):
+    nb_tester.check(notebook("popmon_tutorial_basic.ipynb"))

-        # check status
-        self.assertTrue(status, "Notebook execution failed (%s)" % notebook)

+def test_notebook_advanced(nb_tester):
+    nb_tester.check(notebook("popmon_tutorial_advanced.ipynb"))

-@pytest.mark.filterwarnings("ignore:Session._key_changed is deprecated")
-@pytest.mark.skipif(not kernel_found, reason=f"{kernel_name} kernel not found.")
-class PipelineNotebookTest(NotebookTest):
-    """Unit test notebook"""
-
-    def test_basic_tutorial(self):
-        self.run_notebook(resources.notebook("popmon_tutorial_basic.ipynb"))
+def test_notebook_incremental_data(nb_tester):
+    nb_tester.check(notebook("popmon_tutorial_incremental_data.ipynb"))