
Commit aecf356

Merge 9487fad into bd83681
federicotdn committed Nov 25, 2019
2 parents bd83681 + 9487fad commit aecf356
Showing 7 changed files with 73 additions and 64 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
@@ -42,7 +42,7 @@ jobs:
name: "Test 3.6"
python: "3.6"
script:
- make test -j 4
- make test
- <<: *run-tests
name: "Test 3.7"
python: '3.7'
10 changes: 4 additions & 6 deletions Makefile
@@ -3,6 +3,7 @@
JOBS ?= 1

help:
@echo "make"
@echo " clean"
@echo " Remove Python/build artifacts."
@echo " formatter"
@@ -19,6 +20,7 @@ help:
@echo " Download all additional project files needed to run tests."
@echo " test"
@echo " Run pytest on tests/."
@echo " Use the JOBS environment variable to configure number of workers (default: 1)."
@echo " check-readme"
@echo " Check if the README can be converted from .md to .rst for PyPI."
@echo " doctest"
@@ -46,7 +48,7 @@ types:
pytype --keep-going rasa

prepare-tests-macos: prepare-tests-files
brew install graphviz wget
brew install graphviz wget || true

prepare-tests-ubuntu: prepare-tests-files
sudo apt-get -y install graphviz graphviz-dev python3-tk
@@ -58,14 +60,10 @@ prepare-tests-files:
python -m spacy link de_core_news_sm de --force
wget --progress=dot:giga -N -P data/ https://s3-eu-west-1.amazonaws.com/mitie/total_word_feature_extractor.dat

test: clean get-num-jobs
test: clean
# OMP_NUM_THREADS can improve overall performance by limiting TensorFlow to one thread per process, avoiding overload
OMP_NUM_THREADS=1 pytest tests -n $(JOBS) --cov rasa

get-num-jobs:
$(eval JOBS := $(if $(findstring -j, $(MAKEFLAGS)), $(shell echo $(MAKEFLAGS) | sed -E "s@.*-j([0-9]+).*@\1@"), $(JOBS)))
$(eval JOBS := $(if $(findstring -j, $(JOBS)), auto, $(JOBS)))

doctest: clean
cd docs && make doctest

31 changes: 0 additions & 31 deletions tests/cli/test_rasa_train.py
@@ -263,31 +263,6 @@ def test_train_core_no_domain_exists(run_in_default_project: Callable[..., RunResult]
    assert not os.path.isfile("train_rasa_models_no_domain/rasa-model.tar.gz")


def count_rasa_temp_files() -> int:
    count = 0
    for entry in os.scandir(tempfile.gettempdir()):
        if not entry.is_dir():
            continue

        try:
            for f in os.listdir(entry.path):
                if f.endswith("_nlu.md") or f.endswith("_stories.md"):
                    count += 1
        except PermissionError:
            # Ignore permission errors
            pass

    return count


def test_train_core_temp_files(
    run_in_default_project: Callable[..., RunResult]
) -> None:
    count = count_rasa_temp_files()
    run_in_default_project("train", "core")
    assert count == count_rasa_temp_files()


def test_train_nlu(run_in_default_project: Callable[..., RunResult]):
    run_in_default_project(
        "train",
@@ -341,12 +316,6 @@ def test_train_nlu_persist_nlu_data(
    )


def test_train_nlu_temp_files(run_in_default_project: Callable[..., RunResult]):
    count = count_rasa_temp_files()
    run_in_default_project("train", "nlu")
    assert count == count_rasa_temp_files()


def test_train_help(run):
    output = run("train", "--help")

1 change: 0 additions & 1 deletion tests/nlu/base/test_interpreter.py
@@ -15,7 +15,6 @@
from tests.nlu import utilities


@utilities.slowtest
@pytest.mark.parametrize(
    "pipeline_template", list(registry.registered_pipeline_templates.keys())
)
4 changes: 0 additions & 4 deletions tests/nlu/training/test_train.py
@@ -74,7 +74,6 @@ def test_all_components_are_in_at_least_one_test_pipeline():
    ), "`all_components` template is missing component."


@utilities.slowtest
@pytest.mark.parametrize(
    "pipeline_template", list(registry.registered_pipeline_templates.keys())
)
@@ -93,7 +92,6 @@ async def test_train_model(pipeline_template, component_builder, tmpdir):
    assert loaded.parse("Hello today is Monday, again!") is not None


@utilities.slowtest
async def test_random_seed(component_builder, tmpdir):
    """test if train result is the same for two runs of tf embedding"""

@@ -121,7 +119,6 @@ async def test_random_seed(component_builder, tmpdir):
    assert result_a == result_b


@utilities.slowtest
@pytest.mark.parametrize("language, pipeline", pipelines_for_tests())
async def test_train_model_on_test_pipelines(
    language, pipeline, component_builder, tmpdir
@@ -140,7 +137,6 @@ async def test_train_model_on_test_pipelines(
    assert loaded.parse("Hello today is Monday, again!") is not None


@utilities.slowtest
@pytest.mark.parametrize("language, pipeline", pipelines_for_tests())
async def test_train_model_no_events(language, pipeline, component_builder, tmpdir):
    _config = RasaNLUModelConfig({"pipeline": pipeline, "language": language})
2 changes: 0 additions & 2 deletions tests/nlu/utilities.py
@@ -7,8 +7,6 @@
from rasa.nlu.model import Interpreter
from rasa.nlu.train import train

slowtest = pytest.mark.slowtest


def base_test_conf(pipeline_template):
    # 'response_log': temp_log_file_dir(),
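For context on the `slowtest` removals across these test modules: the `slowtest = pytest.mark.slowtest` line deleted above was a module-level alias for a custom pytest marker, which the NLU tests applied via `@utilities.slowtest`. A minimal sketch of that pattern (the test function and assertion here are illustrative, not from the diff):

# Sketch of the removed marker pattern; `slowtest` mirrors the deleted alias.
import pytest

slowtest = pytest.mark.slowtest  # module-level alias for a custom marker


@slowtest
def test_something_slow():
    # Such tests could be selected with `pytest -m slowtest`
    # or skipped with `pytest -m "not slowtest"`.
    assert True

With the alias and the `@utilities.slowtest` decorators removed, these tests can no longer be filtered by marker and always run.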
87 changes: 68 additions & 19 deletions tests/test_train.py
@@ -1,16 +1,17 @@
import tempfile
import os
import shutil
from typing import Text

import pytest
from _pytest.monkeypatch import MonkeyPatch
from _pytest.tmpdir import TempdirFactory

import rasa.model

from rasa.train import train
from rasa.train import train_core, train_nlu, train
from tests.core.test_model import _fingerprint

TEST_TEMP = "test_tmp"


@pytest.mark.parametrize(
    "parameters",
@@ -45,32 +46,43 @@ def test_package_model(trained_rasa_model, parameters):
    assert file_name.endswith(".tar.gz")


@pytest.fixture
def move_tempdir():
    # Create a new *empty* tmp directory
    shutil.rmtree(TEST_TEMP, ignore_errors=True)
    os.mkdir(TEST_TEMP)
    tempfile.tempdir = TEST_TEMP
    yield
    tempfile.tempdir = None
    shutil.rmtree(TEST_TEMP)
def count_temp_rasa_files(directory: Text) -> int:
    return len(
        [
            entry
            for entry in os.listdir(directory)
            if not any(
                [
                    # Ignore the following files/directories:
                    entry == "__pycache__",  # Python bytecode
                    entry.endswith(".py")  # Temp .py files created by TF
                    # Anything else is considered to be created by Rasa
                ]
            )
        ]
    )


def test_train_temp_files(
    move_tempdir,
    default_domain_path,
    default_stories_file,
    default_stack_config,
    default_nlu_data,
    tmp_path: Text,
    monkeypatch: MonkeyPatch,
    default_domain_path: Text,
    default_stories_file: Text,
    default_stack_config: Text,
    default_nlu_data: Text,
):
    monkeypatch.setattr(tempfile, "tempdir", tmp_path)
    output = "test_train_temp_files_models"

    train(
        default_domain_path,
        default_stack_config,
        [default_stories_file, default_nlu_data],
        output=output,
        force_training=True,
    )

    assert len(os.listdir(TEST_TEMP)) == 0
    assert count_temp_rasa_files(tempfile.tempdir) == 0

    # After training the model, try to do it again. This shouldn't try to train
    # a new model because nothing has been changed. It also shouldn't create
@@ -79,6 +91,43 @@ def test_train_temp_files(
        default_domain_path,
        default_stack_config,
        [default_stories_file, default_nlu_data],
        output=output,
    )

    assert count_temp_rasa_files(tempfile.tempdir) == 0
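The rewritten tests above all share one mechanism: pytest's `monkeypatch` fixture points the standard `tempfile` module at the per-test `tmp_path`, so whatever training leaves behind is isolated and easy to count. A self-contained sketch of just that mechanism (the test name and assertion are illustrative, not part of the diff):

# Minimal illustration of redirecting tempfile into pytest's tmp_path fixture.
import os
import tempfile

from _pytest.monkeypatch import MonkeyPatch


def test_tempdir_is_redirected(tmp_path, monkeypatch: MonkeyPatch):
    # After this, tempfile.gettempdir() returns the per-test directory.
    monkeypatch.setattr(tempfile, "tempdir", str(tmp_path))

    created = tempfile.mkdtemp()  # lands inside tmp_path, not the system temp dir
    assert os.path.dirname(created) == str(tmp_path)
    # monkeypatch undoes the setattr automatically when the test finishes.

Because `count_temp_rasa_files` then scans `tempfile.tempdir`, a count of zero after training shows that the training code cleaned up its temporary files.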


def test_train_core_temp_files(
    tmp_path: Text,
    monkeypatch: MonkeyPatch,
    default_domain_path: Text,
    default_stories_file: Text,
    default_stack_config: Text,
):
    monkeypatch.setattr(tempfile, "tempdir", tmp_path)

    train_core(
        default_domain_path,
        default_stack_config,
        default_stories_file,
        output="test_train_core_temp_files_models",
    )

    assert count_temp_rasa_files(tempfile.tempdir) == 0


def test_train_nlu_temp_files(
    tmp_path: Text,
    monkeypatch: MonkeyPatch,
    default_stack_config: Text,
    default_nlu_data: Text,
):
    monkeypatch.setattr(tempfile, "tempdir", tmp_path)

    train_nlu(
        default_stack_config,
        default_nlu_data,
        output="test_train_nlu_temp_files_models",
    )

    assert len(os.listdir(TEST_TEMP)) == 0
    assert count_temp_rasa_files(tempfile.tempdir) == 0
