Enable JSON response from the validator #502

Merged (3 commits, Sep 17, 2020)
1 change: 1 addition & 0 deletions mkdocs.yml
@@ -63,6 +63,7 @@ plugins:
- "!__json_encoder__$"
- "!__all__$"
- "!__config__$"
- "!ValidatorResults$"
members: true
inherited_members: false
docstring_style: google
7 changes: 7 additions & 0 deletions optimade/validator/__init__.py
@@ -45,6 +45,12 @@ def validate():
default=0,
help="""Increase the verbosity of the output.""",
)
parser.add_argument(
"-j",
"--json",
action="store_true",
help="""Only a JSON summary of the validator results will be printed to stdout.""",
)
parser.add_argument(
"-t",
"--as-type",
@@ -97,6 +103,7 @@ def validate():
validator = ImplementationValidator(
base_url=args["base_url"],
verbosity=args["verbosity"],
respond_json=args["json"],
as_type=args["as_type"],
index=args["index"],
run_optional_tests=not args["skip_optional"],
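As a point of reference, here is a stripped-down sketch of how a `store_true` flag such as `--json` ends up as the `respond_json` keyword argument. This is not the full validator CLI (which defines many more options); the base URL is only a placeholder, and it assumes `ImplementationValidator` is importable from `optimade.validator` as in the module above.

```python
# Simplified sketch of the argparse wiring above, not the full validator CLI.
import argparse

from optimade.validator import ImplementationValidator

parser = argparse.ArgumentParser(description="Validate an OPTIMADE implementation.")
parser.add_argument("base_url", help="Base URL of the implementation to validate.")
parser.add_argument(
    "-j",
    "--json",
    action="store_true",
    help="Only a JSON summary of the validator results will be printed to stdout.",
)

# With `store_true`, args["json"] is False unless the flag is passed on the command line.
args = vars(parser.parse_args(["https://example.org/optimade", "--json"]))

validator = ImplementationValidator(
    base_url=args["base_url"],
    respond_json=args["json"],
)
```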
32 changes: 18 additions & 14 deletions optimade/validator/utils.py
@@ -132,7 +132,6 @@ def get(self, request: str):
if status_code != 429:
break

print("Hit rate limit, sleeping for 1 s...")
time.sleep(1)

else:
@@ -242,19 +241,20 @@ def wrapper(
if not isinstance(result, Exception):
if not multistage:
if not optional:
validator.success_count += 1
validator.results.success_count += 1
Member
OooOooohh ✨
else:
validator.optional_success_count += 1
validator.results.optional_success_count += 1
message = f"✔: {request} - {msg}"
if validator.verbosity > 0:
if optional:
print(message)
else:
print_success(message)
elif optional:
print(".", end="", flush=True)
else:
print_success(".", end="", flush=True)
elif validator.verbosity == 0:
if optional:
print(".", end="", flush=True)
else:
print_success(".", end="", flush=True)
else:
internal_error = False
request = request.replace("\n", "")
@@ -267,17 +267,21 @@ def wrapper(

if isinstance(result, InternalError):
internal_error = True
validator.internal_failure_count += 1
validator.results.internal_failure_count += 1
summary = f"!: {request} - {test_fn.__name__} - failed with internal error"
validator.internal_failure_messages.append((summary, message))
validator.results.internal_failure_messages.append(
(summary, message)
)
else:
summary = f"✖: {request} - {test_fn.__name__} - failed with error"
if not optional:
validator.failure_count += 1
validator.failure_messages.append((summary, message))
validator.results.failure_count += 1
validator.results.failure_messages.append((summary, message))
else:
validator.optional_failure_count += 1
validator.optional_failure_messages.append((summary, message))
validator.results.optional_failure_count += 1
validator.results.optional_failure_messages.append(
(summary, message)
)

if validator.verbosity > 0:
if internal_error:
@@ -292,7 +296,7 @@ def wrapper(
print_failure(summary)
for line in message:
print_warning(f"\t{line}")
else:
elif validator.verbosity == 0:
if internal_error:
print_notify("!", end="", flush=True)
elif optional:
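The changes above only reroute the bookkeeping: counts and `(summary, message)` tuples now live on `validator.results` rather than as attributes of the validator itself. Below is a heavily simplified sketch of that decorator pattern, not the actual `test_case` wrapper (which also handles retries, multistage tests, optional tests, and verbosity); the `Results` class and names here are illustrative only.

```python
# Minimal illustration of routing test outcomes into a results container.
import dataclasses
import functools
from typing import Callable, List, Tuple


@dataclasses.dataclass
class Results:
    success_count: int = 0
    failure_count: int = 0
    failure_messages: List[Tuple[str, str]] = dataclasses.field(default_factory=list)


def test_case(test_fn: Callable) -> Callable:
    """Record the outcome of `test_fn` on the validator's results object."""

    @functools.wraps(test_fn)
    def wrapper(validator, *args, **kwargs):
        try:
            result = test_fn(validator, *args, **kwargs)
        except Exception as exc:
            # Failures are tallied and stored as (summary, message) pairs.
            validator.results.failure_count += 1
            validator.results.failure_messages.append(
                (f"✖: {test_fn.__name__} - failed with error", str(exc))
            )
            return None
        validator.results.success_count += 1
        return result

    return wrapper
```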
72 changes: 50 additions & 22 deletions optimade/validator/validator.py
@@ -11,6 +11,7 @@ class that can be pointed at an OPTIMADE implementation and validated
import logging
import random
import urllib.parse
import dataclasses
from typing import Union, Tuple, Any, List, Dict, Optional

try:
@@ -37,6 +38,24 @@ class that can be pointed at an OPTIMADE implementation and validated

VERSIONS_REGEXP = r"/v[0-9]+(\.[0-9]+){,2}"

__all__ = ("ImplementationValidator",)


@dataclasses.dataclass
Member
Just a question: Is there a reason you're not using pydantic here instead?

Member Author
Uhh, guess this is slightly more lightweight but you could use either

Member
Was just wondering if this would solve the documentation issue as well...
class ValidatorResults:
success_count: int = 0
failure_count: int = 0
internal_failure_count: int = 0
optional_success_count: int = 0
optional_failure_count: int = 0
failure_messages: List[Tuple[str, str]] = dataclasses.field(default_factory=list)
internal_failure_messages: List[Tuple[str, str]] = dataclasses.field(
default_factory=list
)
optional_failure_messages: List[Tuple[str, str]] = dataclasses.field(
default_factory=list
)


class ImplementationValidator:
"""Class used to make a series of checks against a particular
@@ -63,6 +82,7 @@ def __init__( # pylint: disable=too-many-arguments
client: Any = None,
base_url: str = None,
verbosity: int = 0,
respond_json: bool = False,
page_limit: int = 5,
max_retries: int = 5,
run_optional_tests: bool = True,
@@ -82,6 +102,8 @@ def __init__( # pylint: disable=too-many-arguments
base of the OPTIMADE implementation.
verbosity: The verbosity of the output and logging as an integer
(`0`: critical, `1`: warning, `2`: info, `3`: debug).
respond_json: If `True`, print only a JSON representation of the
results of validation to stdout.
page_limit: The default page limit to apply to filters.
max_retries: Argument is passed to the client for how many
attempts to make for a request before failing.
@@ -101,6 +123,7 @@ def __init__( # pylint: disable=too-many-arguments
self.index = index
self.run_optional_tests = run_optional_tests
self.fail_fast = fail_fast
self.respond_json = respond_json

if as_type is None:
self.as_type_cls = None
@@ -147,14 +170,7 @@ def __init__( # pylint: disable=too-many-arguments
self._test_id_by_type = {}
self._entry_info_by_type = {}

self.success_count = 0
self.failure_count = 0
self.internal_failure_count = 0
self.optional_success_count = 0
self.optional_failure_count = 0
self.failure_messages = []
self.internal_failure_messages = []
self.optional_failure_messages = []
self.results = ValidatorResults()

def _setup_log(self):
""" Define stdout log based on given verbosity. """
@@ -164,7 +180,12 @@ def _setup_log(self):
stdout_handler.setFormatter(
logging.Formatter("%(asctime)s - %(name)s | %(levelname)8s: %(message)s")
)
self._log.addHandler(stdout_handler)

if not self.respond_json:
self._log.addHandler(stdout_handler)
else:
self.verbosity = -1

if self.verbosity == 0:
self._log.setLevel(logging.CRITICAL)
elif self.verbosity == 1:
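A self-contained sketch of the handler logic above, using a standalone logger name: in JSON mode the stdout handler is never attached (and `verbosity` is forced to `-1`), so only the final JSON summary reaches stdout regardless of the level chosen. The `levels` mapping mirrors the documented scheme (`0`: critical, `1`: warning, `2`: info, `3`: debug); treating all other values as debug is a choice made for this sketch.

```python
import logging
import sys


def setup_log(verbosity: int, respond_json: bool) -> logging.Logger:
    # Standalone logger name for the sketch; the validator uses its own logger.
    log = logging.getLogger("optimade-validator-sketch")

    if not respond_json:
        handler = logging.StreamHandler(sys.stdout)
        handler.setFormatter(
            logging.Formatter("%(asctime)s - %(name)s | %(levelname)8s: %(message)s")
        )
        log.addHandler(handler)
    else:
        verbosity = -1  # mirrors `self.verbosity = -1` above; no handler attached

    # 0: critical, 1: warning, 2: info; anything else (including -1) falls back to debug here.
    levels = {0: logging.CRITICAL, 1: logging.WARNING, 2: logging.INFO}
    log.setLevel(levels.get(verbosity, logging.DEBUG))
    return log
```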
@@ -176,23 +197,27 @@ def _setup_log(self):

def print_summary(self):
""" Print a summary of the results of validation. """
if self.failure_messages:
if self.respond_json:
print(json.dumps(dataclasses.asdict(self.results), indent=2))
return

if self.results.failure_messages:
print("\n\nFAILURES")
print("========\n")
for message in self.failure_messages:
for message in self.results.failure_messages:
print_failure(message[0])
for line in message[1]:
print_warning("\t" + line)

if self.optional_failure_messages:
if self.results.optional_failure_messages:
print("\n\nOPTIONAL TEST FAILURES")
print("======================\n")
for message in self.optional_failure_messages:
for message in self.results.optional_failure_messages:
print_notify(message[0])
for line in message[1]:
print_warning("\t" + line)

if self.internal_failure_messages:
if self.results.internal_failure_messages:
print("\n\nINTERNAL FAILURES")
print("=================\n")
print(
@@ -201,22 +226,22 @@ def print_summary(self):
"https://github.com/Materials-Consortia/optimade-python-tools/issues/new.\n"
)

for message in self.internal_failure_messages:
for message in self.results.internal_failure_messages:
print_warning(message[0])
for line in message[1]:
print_warning("\t" + line)

if self.valid or (not self.valid and not self.fail_fast):
final_message = f"\n\nPassed {self.success_count} out of {self.success_count + self.failure_count + self.internal_failure_count} tests."
final_message = f"\n\nPassed {self.results.success_count} out of {self.results.success_count + self.results.failure_count + self.results.internal_failure_count} tests."
if not self.valid:
print_failure(final_message)
else:
print_success(final_message)

if self.run_optional_tests and not self.fail_fast:
print(
f"Additionally passed {self.optional_success_count} out of "
f"{self.optional_success_count + self.optional_failure_count} optional tests."
f"Additionally passed {self.results.optional_success_count} out of "
f"{self.results.optional_success_count + self.results.optional_failure_count} optional tests."
)

def validate_implementation(self):
@@ -238,11 +263,12 @@ def validate_implementation(self):
self.as_type_cls,
)
self._test_as_type()
self.valid = not bool(self.failure_count)
self.valid = not bool(self.results.failure_count)
return

# Test entire implementation
print(f"Testing entire implementation at {self.base_url}...")
if self.verbosity >= 0:
print(f"Testing entire implementation at {self.base_url}")
info_endp = CONF.info_endpoint
self._log.debug("Testing base info endpoint of %s", info_endp)

@@ -297,7 +323,9 @@ def validate_implementation(self):
self._log.debug("Testing %s endpoint", CONF.links_endpoint)
self._test_info_or_links_endpoint(CONF.links_endpoint)

self.valid = not (self.failure_count or self.internal_failure_count)
self.valid = not (
self.results.failure_count or self.results.internal_failure_count
)

self.print_summary()

@@ -390,7 +418,7 @@ def _test_must_properties(
f"Some 'MUST' properties were missing from info/{endp}: {missing}"
)

return True, "Found all required properties in entry info for endpoint {endp}"
return True, f"Found all required properties in entry info for endpoint {endp}"

@test_case
def _get_archetypal_entry(self, endp: str) -> Tuple[Dict[str, Any], str]:
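Putting the pieces of this file together, a minimal programmatic usage sketch: with `respond_json=True`, `validate_implementation()` (which calls `print_summary()` at the end) emits only the JSON summary on stdout, as the test below also verifies. The base URL here is just a placeholder.

```python
import io
import json
from contextlib import redirect_stdout

from optimade.validator import ImplementationValidator

validator = ImplementationValidator(
    base_url="https://example.org/optimade",  # placeholder URL
    respond_json=True,
)

# Capture stdout so the JSON summary can be inspected programmatically.
buffer = io.StringIO()
with redirect_stdout(buffer):
    validator.validate_implementation()

summary = json.loads(buffer.getvalue())
print(summary["failure_count"], summary["internal_failure_count"])
```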
32 changes: 28 additions & 4 deletions tests/server/test_server_validation.py
@@ -1,4 +1,6 @@
import os
import json
import dataclasses
from traceback import print_exc

import pytest
@@ -14,10 +16,32 @@ def test_with_validator(both_fake_remote_clients):
index=both_fake_remote_clients.app == app,
verbosity=5,
)
try:
validator.validate_implementation()
except Exception:
print_exc()

validator.validate_implementation()
assert validator.valid


def test_with_validator_json_response(both_fake_remote_clients, capsys):
""" Test that the validator writes compliant JSON when requested. """
from optimade.server.main_index import app

validator = ImplementationValidator(
client=both_fake_remote_clients,
index=both_fake_remote_clients.app == app,
respond_json=True,
)
validator.validate_implementation()

captured = capsys.readouterr()
json_response = json.loads(captured.out)
assert json_response["failure_count"] == 0
assert json_response["internal_failure_count"] == 0
assert json_response["optional_failure_count"] == 0
assert validator.results.failure_count == 0
assert validator.results.internal_failure_count == 0
assert validator.results.optional_failure_count == 0
assert dataclasses.asdict(validator.results) == json_response

assert validator.valid

