diff --git a/conf/settings.py.example b/conf/settings.py.example index 27fc184..f31eccc 100644 --- a/conf/settings.py.example +++ b/conf/settings.py.example @@ -46,16 +46,6 @@ LOGGING = { }, } -# Specify which fields are required (in addition to those already required) -# when creating result/group/testcase. -# If you want to set some result's extra-data as required, you can do so by -# prepending 'data.' to the name (e.g. 'data.arch'). -REQUIRED_DATA = { - 'create_result': [], - 'create_group': [], - 'create_testcase': [], - } - # Extend the list of allowed outcomes. ADDITIONAL_RESULT_OUTCOMES = [] diff --git a/poetry.lock b/poetry.lock index 6616f97..839f2a4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -15,17 +15,6 @@ SQLAlchemy = ">=1.3.0" [package.extras] tz = ["python-dateutil"] -[[package]] -name = "aniso8601" -version = "9.0.1" -description = "A library for parsing ISO 8601 strings." -category = "main" -optional = false -python-versions = "*" - -[package.extras] -dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] - [[package]] name = "atomicwrites" version = "1.4.1" @@ -146,7 +135,7 @@ python-versions = "*" [[package]] name = "coverage" -version = "6.4.4" +version = "6.5.0" description = "Code coverage measurement for Python" category = "main" optional = true @@ -302,21 +291,16 @@ oauth2client = "*" six = "*" [[package]] -name = "flask-restful" -version = "0.3.9" -description = "Simple framework for creating REST APIs" +name = "flask-pydantic" +version = "0.11.0" +description = "Flask extension for integration with Pydantic library" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [package.dependencies] -aniso8601 = ">=0.82" -Flask = ">=0.8" -pytz = "*" -six = ">=1.3.0" - -[package.extras] -docs = ["sphinx"] +Flask = "*" +pydantic = ">=1.7" [[package]] name = "flask-sqlalchemy" @@ -637,6 +621,21 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 
+[[package]] +name = "pydantic" +version = "1.10.2" +description = "Data validation and settings management using python type hints" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + [[package]] name = "pyflakes" version = "2.5.0" @@ -1041,16 +1040,15 @@ test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [extras] -test = ["flake8", "pytest", "pytest-cov", "tox-docker"] +test = ["flake8", "pytest", "pytest-cov", "tox", "tox-docker"] [metadata] lock-version = "1.1" python-versions = ">=3.8,<3.10" -content-hash = "9db56f80f1cd9b0c9a8f02b56cafe42ee14451a525de3177dd7ef91da58a8c43" +content-hash = "f735710b01ca79820c122156e81b385a4dfdebe5eb73b3259231d10238f55f3e" [metadata.files] alembic = [] -aniso8601 = [] atomicwrites = [] attrs = [] automat = [ @@ -1091,7 +1089,7 @@ filelock = [] flake8 = [] flask = [] flask-oidc = [] -flask-restful = [] +flask-pydantic = [] flask-sqlalchemy = [] greenlet = [] gunicorn = [ @@ -1280,6 +1278,7 @@ pycparser = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] +pydantic = [] pyflakes = [] pyopenssl = [ {file = "pyOpenSSL-22.0.0-py2.py3-none-any.whl", hash = "sha256:ea252b38c87425b64116f808355e8da644ef9b07e429398bfece610f893ee2e0"}, diff --git a/pyproject.toml b/pyproject.toml index 0e8d1b4..04af8d9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,12 +36,12 @@ tox = {version = "4.0.0b2", optional = true} tox-docker = {version = "4.0.0a2", optional = true} flask-oidc = "^1.4.0" -Flask-RESTful = "^0.3.9" Flask-SQLAlchemy = "^2.5.1" SQLAlchemy = {version = "^1.4.39"} psycopg2-binary = {version = 
"^2.9.3"} alembic = "^1.8.1" iso8601 = "^1.0.2" +Flask-Pydantic = "^0.11.0" [tool.poetry.extras] test = [ diff --git a/resultsdb/config.py b/resultsdb/config.py index 06a50d7..c0ca1b0 100644 --- a/resultsdb/config.py +++ b/resultsdb/config.py @@ -67,16 +67,6 @@ class Config(object): }, } - # Specify which fields are required (in addition to those already required) - # when creating result/group/testcase. - # If you want to set some result's extra-data as required, you can do so by - # prepending 'data.' to the name (e.g. 'data.arch'). - REQUIRED_DATA = { - 'create_result': [], - 'create_group': [], - 'create_testcase': [], - } - # Extend the list of allowed outcomes. ADDITIONAL_RESULT_OUTCOMES = () diff --git a/resultsdb/controllers/api_v2.py b/resultsdb/controllers/api_v2.py index 485ed1a..bb15758 100644 --- a/resultsdb/controllers/api_v2.py +++ b/resultsdb/controllers/api_v2.py @@ -20,27 +20,26 @@ import re import uuid -import string -import random -from functools import partial from flask import Blueprint, jsonify, request, url_for -from flask_restful import reqparse +from flask_pydantic import validate from sqlalchemy.orm import exc as orm_exc -from werkzeug.exceptions import HTTPException -from werkzeug.exceptions import BadRequest as JSONBadRequest - -import iso8601 from resultsdb import app, db from resultsdb.serializers.api_v2 import Serializer +from resultsdb.parsers.api_v2 import ( + CreateGroupParams, + CreateResultParams, + CreateTestcaseParams, + GroupsParams, + ResultsParams, + TestcasesParams, + QUERY_LIMIT, +) from resultsdb.models.results import Group, Result, Testcase, ResultData from resultsdb.models.results import RESULT_OUTCOME from resultsdb.messaging import load_messaging_plugin, create_message, publish_taskotron_message -from resultsdb.lib.helpers import non_empty, dict_or_string, list_or_none, submit_time - -QUERY_LIMIT = 20 api = Blueprint('api_v2', __name__) @@ -54,6 +53,7 @@ except NameError: unicode = str + # TODO: find out why error 
handler works for 404 but not for 400 @app.errorhandler(400) def bad_request(error): @@ -72,77 +72,8 @@ def not_found(error): RE_CALLBACK = re.compile(r"([?&])callback=[^&]*&?") RE_CLEAN_AMPERSANDS = re.compile(r'&+') -# RP contains request parsers (reqparse.RequestParser). -# Parsers are added in each 'resource section' for better readability -RP = {} - SERIALIZE = Serializer().serialize - -def _validate_create_result_extra_data(required_fields, data, *args, **kwargs): - """Check whether data dict contains required_fields as keys.""" - if args or kwargs: - raise TypeError("Unexpected arguments") - if isinstance(data, dict) or data is None: - if required_fields: - if data is None: - raise ValueError("Expected dict, got None") - # check whether all required field are present in data - missing = set(required_fields) - set(data.keys()) - if missing: - raise ValueError("Missing required fields in data: %s" % list(missing)) - # check that the required fields have non-empty value - for field in required_fields: - try: - non_empty(type(data[field]), data[field]) - except ValueError: - raise ValueError("Required field %r missing value (got %r)" % (field, data[field])) - return data - raise ValueError("Expected dict or None, got %r" % type(data)) - - -def setup_request_parser_from_config(): - """ - This makes sure the configuration in REQUIRED_DATA is applied. - For values set in the config, either the request parser is changed, to make - the value required. Or if the value is not yet in the request-parser (which now - realistically only applies to the `data.` values in result) it is added. - """ - for key, values in app.config.get('REQUIRED_DATA', {}).items(): - if key not in RP: - app.logger.error("Error in config: REQUIRED_DATA contains unknown endpoint %r.", key) - continue - - arguments = dict([(arg.name, arg) for arg in RP[key].args]) - - # handle data. 
for create_result (effectively results extra-data) - if key == 'create_result': - extra_data = [v for v in values if v.startswith('data.')] - values = list(set(values) - set(extra_data)) - - if extra_data: - required_values = [v[len('data.'):] for v in extra_data] - arg = arguments['data'] - arg.type = partial(_validate_create_result_extra_data, required_values) - arg.required = True - app.logger.info("Seting %s in %r as required-non-empty" % (extra_data, key)) - - for value in values: - arg = arguments.get(value, None) - if arg is not None and not arg.required: - arg.required = True - arg.type = partial(non_empty, arg.type) - app.logger.info("Seting argument %r in %r as required-non-empty" % (value, key)) - else: - app.logger.error( - "Error in config: REQUIRED_DATA contains unknown value %r for endpoint %r.", - value, key - ) - - -@app.before_first_request -def do_before_first_request(): - setup_request_parser_from_config() # ============================================================================= # GLOBAL METHODS # ============================================================================= @@ -200,60 +131,25 @@ def prev_next_urls(data, limit=QUERY_LIMIT): return data, prev, next -def parse_since(since): - since_start = None - since_end = None - if since is not None: - s = since.split(',') - since_start = iso8601.parse_date(s[0]) - try: - since_start = since_start.replace(tzinfo=None) # we need to strip timezone info - since_end = iso8601.parse_date(s[1]) - since_end = since_end.replace(tzinfo=None) # we need to strip timezone info - # Yes, this library sucks in Exception handling.. 
- except IndexError: - pass - except (TypeError, ValueError, iso8601.iso8601.ParseError): - raise iso8601.iso8601.ParseError() - return since_start, since_end - - # ============================================================================= # GROUPS # ============================================================================= -RP['get_groups'] = reqparse.RequestParser() -RP['get_groups'].add_argument('page', default=0, type=int, location='args') -RP['get_groups'].add_argument('limit', default=QUERY_LIMIT, type=int, location='args') -RP['get_groups'].add_argument('uuid', default=None, location='args') -RP['get_groups'].add_argument('description', default=None, location='args') -RP['get_groups'].add_argument('description:like', default=None, location='args') -# These two are ignored. They're present so reqparse isn't confused by JSONP. -RP['get_groups'].add_argument('callback', location='args') -RP['get_groups'].add_argument('_', location='args') - - @api.route('/groups', methods=['GET']) -def get_groups(): - try: - args = RP['get_groups'].parse_args() - except JSONBadRequest as error: - return jsonify({"message": "Malformed Request: %s" % error}), error.code - except HTTPException as error: - return jsonify(error.data), error.code - +@validate() +def get_groups(query: GroupsParams): q = db.session.query(Group).order_by(db.desc(Group.id)) desc_filters = [] - if args['description']: - for description in args['description'].split(','): + if query.description: + for description in query.description.split(','): if not description.strip(): continue desc_filters.append(Group.description == description) -# desc_filters.append(Group.description.in_(args['description'].split(','))) - elif args['description:like']: - for description in args['description:like'].split(','): +# desc_filters.append(Group.description.in_(query.description.split(','))) + elif query.description_like_: + for description in query.description_like_.split(','): if not description.strip(): continue 
desc_filters.append(Group.description.like(description.replace("*", "%"))) @@ -261,11 +157,11 @@ def get_groups(): q = q.filter(db.or_(*desc_filters)) # Filter by uuid - if args['uuid']: - q = q.filter(Group.uuid.in_(args['uuid'].split(','))) + if query.uuid: + q = q.filter(Group.uuid.in_(query.uuid.split(','))) - q = pagination(q, args['page'], args['limit']) - data, prev, next = prev_next_urls(q.all(), args['limit']) + q = pagination(q, query.page, query.limit) + data, prev, next = prev_next_urls(q.all(), query.limit) return jsonify(dict( prev=prev, @@ -284,32 +180,20 @@ def get_group(group_id): return jsonify(SERIALIZE(group)) -RP['create_group'] = reqparse.RequestParser() -RP['create_group'].add_argument('uuid', default=None, location='json') -RP['create_group'].add_argument('ref_url', location='json') -RP['create_group'].add_argument('description', default=None, location='json') - - @api.route('/groups', methods=['POST']) -def create_group(): - try: - args = RP['create_group'].parse_args() - except JSONBadRequest as error: - return jsonify({"message": "Malformed Request: %s" % error}), error.code - except HTTPException as error: - return jsonify(error.data), error.code - - if args['uuid']: - group = Group.query.filter_by(uuid=args['uuid']).first() +@validate() +def create_group(body: CreateGroupParams): + if body.uuid: + group = Group.query.filter_by(uuid=body.uuid).first() if not group: - group = Group(uuid=args['uuid']) + group = Group(uuid=body.uuid) else: group = Group(uuid=str(uuid.uuid1())) - if args['ref_url']: - group.ref_url = args['ref_url'] - if args['description']: - group.description = args['description'] + if body.ref_url: + group.ref_url = body.ref_url + if body.description: + group.description = body.description db.session.add(group) db.session.commit() @@ -390,37 +274,18 @@ def select_results(since_start=None, since_end=None, outcomes=None, groups=None, return q -def __get_results_parse_args(): - retval = {"args": None, "error": None, 
"result_data": None} - try: - args = RP['get_results'].parse_args() - except JSONBadRequest as error: - retval["error"] = (jsonify({"message": "Malformed Request: %s" % error}), error.code) - return retval - except HTTPException as error: - retval["error"] = (jsonify(error.data), error.code) - return retval - - if args.get('outcome', None) is not None: - args['outcome'] = [outcome.strip().upper() for outcome in args['outcome'].split(',')] - for outcome in args['outcome']: - if outcome not in RESULT_OUTCOME: - retval["error"] = ( - jsonify({'message': "outcome %r not one of %r" % (outcome, RESULT_OUTCOME,)}), 400) - return retval - - try: - s, e = parse_since(args.get('since', None)) - except iso8601.iso8601.ParseError: - retval["error"] = (jsonify({"message": "'since' parameter not in ISO8601 format"}), 400) - return retval - - args['since'] = {'start': s, 'end': e} - args['testcases'] = [tc.strip() for tc in args['testcases'].split(',') if tc.strip()] - args['testcases:like'] = [tc.strip() for tc in args['testcases:like'].split(',') if tc.strip()] - args['groups'] = [group.strip() for group in args['groups'].split(',') if group.strip()] - args['_distinct_on'] = [_distinct_on.strip() for _distinct_on in args['_distinct_on'].split(',') if _distinct_on.strip()] - retval['args'] = args +def __get_results_parse_args(query: ResultsParams): + args = { + '_sort': query.sort_, + 'limit': query.limit, + 'page': query.page, + 'testcases': query.testcases, + 'testcases:like': query.testcases_like_, + 'groups': query.groups, + '_distinct_on': query.distinct_on_, + 'outcome': query.outcome, + 'since': query.since, + } # find results_data with the query parameters # these are the paramters other than those defined in RequestParser @@ -435,34 +300,14 @@ def __get_results_parse_args(): # flatten the list results_data[param] = [item for sublist in results_data[param] for item in sublist] - if results_data != {}: - retval['result_data'] = results_data - - return retval + return { + 
'result_data': results_data if results_data else None, + 'args': args, + } -RP['get_results'] = reqparse.RequestParser() -RP['get_results'].add_argument('page', default=0, type=int, location='args') -RP['get_results'].add_argument('limit', default=QUERY_LIMIT, type=int, location='args') -RP['get_results'].add_argument('since', location='args') -RP['get_results'].add_argument('outcome', location='args') -RP['get_results'].add_argument('groups', default="", location='args') -RP['get_results'].add_argument('_sort', default="", location='args') -RP['get_results'].add_argument('_distinct_on', default="", location='args') -# TODO - can this be done any better? -RP['get_results'].add_argument('testcases', default="", location='args') -RP['get_results'].add_argument('testcases:like', default="", location='args') -# These two are ignored. They're present so reqparse isn't confused by JSONP. -RP['get_results'].add_argument('callback', location='args') -RP['get_results'].add_argument('_', location='args') - - -@api.route('/results', methods=['GET']) -def get_results(group_ids=None, testcase_names=None): - - p = __get_results_parse_args() - if p['error'] is not None: - return p['error'] +def __get_results(query: ResultsParams, group_ids=None, testcase_names=None): + p = __get_results_parse_args(query) args = p['args'] groups = group_ids if group_ids is not None else args['groups'] @@ -489,12 +334,16 @@ def get_results(group_ids=None, testcase_names=None): )) -@api.route('/results/latest', methods=['GET']) -def get_results_latest(): - p = __get_results_parse_args() - if p['error'] is not None: - return p['error'] +@api.route('/results', methods=['GET']) +@validate() +def get_results(query: ResultsParams): + return __get_results(query) + +@api.route('/results/latest', methods=['GET']) +@validate() +def get_results_latest(query: ResultsParams): + p = __get_results_parse_args(query) args = p['args'] since_start = args['since'].get('start', None) since_end = 
args['since'].get('end', None) @@ -565,22 +414,21 @@ @api.route('/groups/<group_id>/results', methods=['GET']) -@api.route('/testcases/<testcase_name>/results', methods=['GET']) -def get_results_by_group_testcase(group_id=None, testcase_name=None): - # check whether the group/testcase exists. If not, throw 404 - if group_id is not None: - group = Group.query.filter_by(uuid=group_id).first() - if not group: - return jsonify({'message': "Group not found: %s" % (group_id,)}), 404 - group_id = [group.uuid] +@validate() +def get_results_by_group(group_id: str, query: ResultsParams): + group = Group.query.filter_by(uuid=group_id).first() + if not group: + return jsonify({'message': "Group not found: %s" % (group_id,)}), 404 + return __get_results(query, group_ids=[group.uuid]) - if testcase_name is not None: - testcase = Testcase.query.filter_by(name=testcase_name).first() - if not testcase: - return jsonify({'message': "Testcase not found"}), 404 - testcase_name = [testcase.name] - return get_results(group_id, testcase_name) +@api.route('/testcases/<testcase_name>/results', methods=['GET']) +@validate() +def get_results_by_testcase(testcase_name: str, query: ResultsParams): + testcase = Testcase.query.filter_by(name=testcase_name).first() + if not testcase: + return jsonify({'message': "Testcase not found"}), 404 + return __get_results(query, testcase_names=[testcase.name]) @api.route('/results/<result_id>', methods=['GET']) @@ -593,47 +441,16 @@ def get_result(result_id): return jsonify(SERIALIZE(result)) -RP['create_result'] = reqparse.RequestParser() -RP['create_result'].add_argument('outcome', type=partial(non_empty, basestring), required=True, location='json') -RP['create_result'].add_argument('testcase', type=dict_or_string, required=True, location='json') -RP['create_result'].add_argument('groups', type=list_or_none, location='json') -RP['create_result'].add_argument('note', location='json') -RP['create_result'].add_argument('data', type=dict, location='json') 
-RP['create_result'].add_argument('ref_url', location='json') -RP['create_result'].add_argument('submit_time', type=submit_time, location='json') - - @api.route('/results', methods=['POST']) -def create_result(): - try: - args = RP['create_result'].parse_args() - except JSONBadRequest as error: - return jsonify({"message": "Malformed Request: %s" % error}), error.code - except HTTPException as error: - return jsonify(error.data), error.code - - outcome = args['outcome'].strip().upper() - if outcome not in RESULT_OUTCOME: - app.logger.warning("Invalid result outcome submitted: %s", outcome) - return jsonify({'message': "outcome must be one of %r" % (RESULT_OUTCOME,)}), 400 - - if args['data']: - invalid_keys = [key for key in args['data'].keys() if ':' in key] +@validate() +def create_result(body: CreateResultParams): + if body.data: + invalid_keys = [key for key in body.data.keys() if ':' in key] if invalid_keys: app.logger.warning("Colon not allowed in key name: %s", invalid_keys) return jsonify({'message': "Colon not allowed in key name: %r" % invalid_keys}), 400 - # args[testcase] can be either string or object - # non-existing testcases are created automatically - tc = args['testcase'] - if isinstance(tc, basestring): - tc = dict(name=args['testcase']) - if not tc['name']: - app.logger.warning("Result submitted without valid testcase.name: %s", tc) - return jsonify({'message': "testcase name not set"}), 400 - elif isinstance(tc, dict) and 'name' not in tc: - app.logger.warning("Result submitted without testcase.name: %s", tc) - return jsonify({'message': "testcase.name not set"}), 400 + tc = body.testcase testcase = Testcase.query.filter_by(name=tc['name']).first() if not testcase: @@ -642,13 +459,13 @@ def create_result(): testcase.ref_url = tc.get('ref_url', testcase.ref_url) db.session.add(testcase) - # args[groups] is a list of strings(uuid) or dicts(group object) + # groups is a list of strings(uuid) or dicts(group object) # when a group defined by the 
string is not found, new is created # group defined by the object, is updated/created with the values from the object # non-existing groups are created automatically groups = [] - if args['groups']: - for grp in args['groups']: + if body.groups: + for grp in body.groups: if isinstance(grp, basestring): grp = dict(uuid=grp) elif isinstance(grp, dict): @@ -664,25 +481,17 @@ def create_result(): db.session.add(group) groups.append(group) - result = Result( - testcase, - outcome, - groups, - args['ref_url'], - args['note'], - args['submit_time'], - ) - + result = Result(testcase, body.outcome, groups, body.ref_url, body.note, body.submit_time) # Convert result_data - # for each key-value pair in args['data'] + # for each key-value pair in body.data # convert keys to unicode # if value is string: NOP # if value is list or tuple: convert values to unicode, create key-value pair for each value from the list # if value is something else: convert to unicode # Store all the key-value pairs - if isinstance(args['data'], dict): + if isinstance(body.data, dict): to_store = [] - for key, value in args['data'].items(): + for key, value in body.data.items(): if not (isinstance(key, str) or isinstance(key, unicode)): key = unicode(key) @@ -704,7 +513,8 @@ def create_result(): db.session.add(result) db.session.commit() - app.logger.debug("Created new result for testcase %s with outcome %s", testcase.name, outcome) + app.logger.debug( + "Created new result for testcase %s with outcome %s", testcase.name, body.outcome) if app.config['MESSAGE_BUS_PUBLISH']: app.logger.debug("Preparing to publish message for result id %d", result.id) @@ -741,29 +551,12 @@ def select_testcases(args_name=None, args_name_like=None): return q -RP['get_testcases'] = reqparse.RequestParser() -RP['get_testcases'].add_argument('page', default=0, type=int, location='args') -RP['get_testcases'].add_argument('limit', default=QUERY_LIMIT, type=int, location='args') -RP['get_testcases'].add_argument('name', 
location='args') -RP['get_testcases'].add_argument('name:like', location='args') -# These two are ignored. They're present so reqparse isn't confused by JSONP. -RP['get_testcases'].add_argument('callback', location='args') -RP['get_testcases'].add_argument('_', location='args') - - @api.route('/testcases', methods=['GET']) -def get_testcases(): # page = None, limit = QUERY_LIMIT): - - try: - args = RP['get_testcases'].parse_args() - except JSONBadRequest as error: - return jsonify({"message": "Malformed Request: %s" % error}), error.code - except HTTPException as error: - return jsonify(error.data), error.code - - q = select_testcases(args['name'], args['name:like']) - q = pagination(q, args['page'], args['limit']) - data, prev, next = prev_next_urls(q.all(), args['limit']) +@validate() +def get_testcases(query: TestcasesParams): + q = select_testcases(query.name, query.name_like_) + q = pagination(q, query.page, query.limit) + data, prev, next = prev_next_urls(q.all(), query.limit) return jsonify(dict( prev=prev, @@ -782,25 +575,14 @@ def get_testcase(testcase_name): return jsonify(SERIALIZE(testcase)) -RP['create_testcase'] = reqparse.RequestParser() -RP['create_testcase'].add_argument('name', type=partial(non_empty, basestring), required=True, location='json') -RP['create_testcase'].add_argument('ref_url', location='json') - - @api.route('/testcases', methods=['POST']) -def create_testcase(): - try: - args = RP['create_testcase'].parse_args() - except JSONBadRequest as error: - return jsonify({"message": "Malformed Request: %s" % error}), error.code - except HTTPException as error: - return jsonify(error.data), error.code - - testcase = Testcase.query.filter_by(name=args['name']).first() +@validate() +def create_testcase(body: CreateTestcaseParams): + testcase = Testcase.query.filter_by(name=body.name).first() if not testcase: - testcase = Testcase(name=args['name']) - if args['ref_url'] is not None: - testcase.ref_url = args['ref_url'] + testcase = 
Testcase(name=body.name) + if body.ref_url is not None: + testcase.ref_url = body.ref_url db.session.add(testcase) db.session.commit() diff --git a/resultsdb/lib/helpers.py b/resultsdb/lib/helpers.py deleted file mode 100644 index b66011f..0000000 --- a/resultsdb/lib/helpers.py +++ /dev/null @@ -1,77 +0,0 @@ -import numbers -from datetime import datetime, timezone - -try: - basestring -except NameError: - basestring = (str, bytes) - - - -def non_empty(typ, value, *args, **kwargs): - if args or kwargs: - raise TypeError("Unexpected arguments") - try: - if not isinstance(value, typ): - raise ValueError("Expected %s got %s" % (typ, type(value))) - # sometimes, type can be checked via a function - except TypeError: - value = typ(value) - # special case for numbers, where 0 is "false" but legal value - if isinstance(value, numbers.Number): - return value - if not value: - raise ValueError("Expected non-empty value, got %r" % value) - return value - - -def dict_or_string(value, *args, **kwargs): - if args or kwargs: - raise TypeError("Unexpected arguments") - if isinstance(value, dict): - return value - if isinstance(value, basestring): - return value - raise ValueError("Expected dict or string, got %r" % type(value)) - - -def list_or_none(value, *args, **kwargs): - if args or kwargs: - raise TypeError("Unexpected arguments") - if isinstance(value, list): - return value - if value is None: - return value - raise ValueError("Expected list or None, got %r" % type(value)) - - -def time_from_milliseconds(value): - seconds, milliseconds = divmod(value, 1000) - time = datetime.fromtimestamp(seconds, tz=timezone.utc) - return time.replace(microsecond=milliseconds * 1000) - - -def submit_time(value, *args, **kwargs): - if args or kwargs: - raise TypeError("Unexpected arguments") - if isinstance(value, datetime): - return value - if value is None: - return value - if isinstance(value, numbers.Number): - return time_from_milliseconds(value) - if isinstance(value, str): - try: - 
return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f') - except ValueError: - pass - - try: - return time_from_milliseconds(int(value)) - except ValueError: - pass - raise ValueError( - "Expected timestamp in milliseconds or datetime" - " (in format YYYY-MM-DDTHH:MM:SS.ffffff)," - " got %r" % type(value) - ) diff --git a/resultsdb/parsers/api_v2.py b/resultsdb/parsers/api_v2.py new file mode 100644 index 0000000..42429c7 --- /dev/null +++ b/resultsdb/parsers/api_v2.py @@ -0,0 +1,162 @@ +# SPDX-License-Identifier: LGPL-2.0-or-later +from datetime import datetime, timezone +from numbers import Number +from typing import Any, List, Optional + +import iso8601 +from pydantic import BaseModel, Field, validator +from pydantic.types import constr + +from resultsdb.models.results import RESULT_OUTCOME + +QUERY_LIMIT = 20 + + +def parse_since(since): + since_start = None + since_end = None + s = since.split(',') + since_start = iso8601.parse_date(s[0]) + try: + since_start = since_start.replace(tzinfo=None) # we need to strip timezone info + since_end = iso8601.parse_date(s[1]) + since_end = since_end.replace(tzinfo=None) # we need to strip timezone info + # Yes, this library sucks in Exception handling.. + except IndexError: + pass + except (TypeError, ValueError, iso8601.iso8601.ParseError): + raise iso8601.iso8601.ParseError() + return since_start, since_end + + +def time_from_milliseconds(value): + seconds, milliseconds = divmod(value, 1000) + time = datetime.fromtimestamp(seconds, tz=timezone.utc) + return time.replace(microsecond=milliseconds * 1000) + + +class BaseListParams(BaseModel): + page: int = 0 + limit: int = QUERY_LIMIT + # These two are ignored. They're present so reqparse isn't confused by JSONP. 
+ callback: Optional[str] + _: Optional[str] + + +class GroupsParams(BaseListParams): + uuid: Optional[str] + description: Optional[str] + description_like_: Optional[str] = Field(alias='description:like') + + +class CreateGroupParams(BaseModel): + uuid: Optional[str] + ref_url: Optional[str] + description: Optional[str] + + +class QueryList(List[str]): + @classmethod + def __get_validators__(cls): + yield cls.validate + + @classmethod + def validate(cls, v): + if isinstance(v, str): + return cls([ + x for x in (x.strip() for x in v.split(',')) if x + ]) + if isinstance(v, list) and len(v) == 1 and isinstance(v[0], str): + return cls([ + x for x in (x.strip() for x in v[0].split(',')) if x + ]) + return cls(v) + + +class ResultsParams(BaseListParams): + sort_: str = Field(alias='_sort', default='') + since: dict = {'start': None, 'end': None} + outcome: Optional[QueryList] + groups: Optional[QueryList] + testcases: Optional[QueryList] + testcases_like_: Optional[QueryList] = Field(alias='testcases:like') + distinct_on_: Optional[QueryList] = Field(alias='_distinct_on') + + @validator('since', pre=True) + def parse_since(cls, v): + try: + s, e = parse_since(v[0]) + except iso8601.iso8601.ParseError: + raise ValueError('must be in ISO8601 format') + return {'start': s, 'end': e} + + @validator('outcome') + def outcome_must_be_valid(cls, v): + outcomes = [x.upper() for x in v] + if any(x not in RESULT_OUTCOME for x in outcomes): + raise ValueError(f'must be one of: {", ".join(RESULT_OUTCOME)}') + return outcomes + + +class CreateResultParams(BaseModel): + outcome: constr(min_length=1, strip_whitespace=True, to_upper=True) + testcase: dict + groups: Optional[list] + note: Optional[str] + data: Optional[dict] + ref_url: Optional[str] + submit_time: Any + + @validator('testcase', pre=True) + def parse_testcase(cls, v): + if not v or (isinstance(v, dict) and not v.get('name')): + raise ValueError('testcase name must be non-empty') + if isinstance(v, str): + return 
{'name': v} + return v + + @validator('submit_time', pre=True) + def parse_submit_time(cls, v): + if isinstance(v, datetime): + return v + if v is None: + return v + if isinstance(v, Number): + return time_from_milliseconds(v) + if isinstance(v, str): + try: + return datetime.strptime(v, '%Y-%m-%dT%H:%M:%S.%f') + except ValueError: + pass + + try: + return time_from_milliseconds(int(v)) + except ValueError: + pass + raise ValueError( + "Expected timestamp in milliseconds or datetime" + " (in format YYYY-MM-DDTHH:MM:SS.ffffff)," + " got %r" % type(v) + ) + + @validator('testcase') + def testcase_must_be_valid(cls, v): + if isinstance(v, dict) and not v.get('name'): + raise ValueError('testcase name must be non-empty') + return v + + @validator('outcome') + def outcome_must_be_valid(cls, v): + if v not in RESULT_OUTCOME: + raise ValueError(f'must be one of: {", ".join(RESULT_OUTCOME)}') + return v + + class TestcasesParams(BaseListParams): + name: Optional[str] + name_like_: Optional[str] = Field(alias='name:like') + + class CreateTestcaseParams(BaseModel): + name: constr(min_length=1) + ref_url: Optional[str] diff --git a/testing/functest_api_v20.py b/testing/functest_api_v20.py index 52c8ddd..c297a27 100644 --- a/testing/functest_api_v20.py +++ b/testing/functest_api_v20.py @@ -27,7 +27,6 @@ import resultsdb import resultsdb.cli import resultsdb.messaging -import resultsdb.controllers.api_v2 as api_v2 try: basestring @@ -120,71 +119,6 @@ def setup_method(self, method): def teardown_method(self, method): # Reset this for each test. 
resultsdb.messaging.DummyPlugin.history = [] - # =============== CONFIG ================== - - def helper_setup_request_parser_from_config(self): - ref_data = dict( - outcome=self.ref_result_outcome, - testcase=self.ref_testcase, - ) - - r = self.app.post('/api/v2.0/results', data=json.dumps(ref_data), content_type='application/json') - assert r.status_code == 201 - - # Test setting optional value as required - resultsdb.app.config['REQUIRED_DATA'] = {"create_result": ["ref_url"]} - api_v2.setup_request_parser_from_config() - - r = self.app.post('/api/v2.0/results', data=json.dumps(ref_data), content_type='application/json') - data = json.loads(r.data) - assert r.status_code == 400 - assert data['message'].startswith('Malformed Request') - - ref_data['ref_url'] = '' - r = self.app.post('/api/v2.0/results', data=json.dumps(ref_data), content_type='application/json') - data = json.loads(r.data) - assert r.status_code == 400 - assert data['message'].startswith('Malformed Request') - - ref_data['ref_url'] = self.ref_result_ref_url - r = self.app.post('/api/v2.0/results', data=json.dumps(ref_data), content_type='application/json') - assert r.status_code == 201 - - # Test setting result.data requirement - resultsdb.app.config['REQUIRED_DATA'] = {"create_result": ["data.foobar"]} - api_v2.setup_request_parser_from_config() - - r = self.app.post('/api/v2.0/results', data=json.dumps(ref_data), content_type='application/json') - data = json.loads(r.data) - assert r.status_code == 400 - assert data['message'].startswith('Malformed Request') - - ref_data['data'] = ['foo', 'bar'] - r = self.app.post('/api/v2.0/results', data=json.dumps(ref_data), content_type='application/json') - data = json.loads(r.data) - assert r.status_code == 400 - assert data['message'].startswith('Malformed Request') - - ref_data['data'] = {'foo': 'bar'} - r = self.app.post('/api/v2.0/results', data=json.dumps(ref_data), content_type='application/json') - data = json.loads(r.data) - assert r.status_code 
== 400 - assert data['message'].startswith('Malformed Request') - - ref_data['data'] = {'foobar': 'bar'} - r = self.app.post('/api/v2.0/results', data=json.dumps(ref_data), content_type='application/json') - assert r.status_code == 201 - - def test_setup_request_parser_from_config(self): - bkp_config = copy.deepcopy(resultsdb.app.config) - bkp_request_parser = copy.deepcopy(api_v2.RP) - try: - self.helper_setup_request_parser_from_config() - except: - raise - finally: - api_v2.RP = bkp_request_parser - resultsdb.app.config = bkp_config # =============== TESTCASES ================== @@ -207,10 +141,32 @@ def test_create_testcase_missing_data(self): ref_data = json.dumps({'ref_url': self.ref_testcase_ref_url}) r = self.app.post('/api/v2.0/testcases', data=ref_data, content_type='application/json') - data = json.loads(r.data) + assert r.status_code == 400 + assert r.json == { + 'validation_error': { + 'body_params': [{ + 'loc': ['name'], + 'msg': 'field required', + 'type': 'value_error.missing' + }] + } + } + def test_create_testcase_empty_name(self): + ref_data = json.dumps({'name': ''}) + + r = self.app.post('/api/v2.0/testcases', data=ref_data, content_type='application/json') assert r.status_code == 400 - assert data['message'].startswith('Malformed Request') + assert r.json == { + 'validation_error': { + 'body_params': [{ + 'ctx': {'limit_value': 1}, + 'loc': ['name'], + 'msg': 'ensure this value has at least 1 characters', + 'type': 'value_error.any_str.min_length' + }] + } + } def test_update_testcase(self): self.test_create_testcase() @@ -453,20 +409,93 @@ def test_create_result_custom_outcome(self): assert r.status_code == 201 assert data == ref_result - def test_create_result_missing_data(self): - ref_data = json.dumps({'outcome': 'FAKEOUTCOME'}) - r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') + def test_create_result_with_testcase_name(self): + self.test_create_group() + self.test_create_testcase() + testcase_name = 
self.ref_result['testcase']['name'] + + r, data = self.helper_create_result(outcome='AMAZING', testcase=testcase_name) + + assert r.status_code == 201 + assert data['testcase']['name'] == testcase_name + + def test_create_result_empty_testcase(self): + r = self.app.post('/api/v2.0/results', json={'outcome': 'passed', 'testcase': ''}) + data = json.loads(r.data) + + assert r.status_code == 400 + assert data == { + 'validation_error': { + 'body_params': [{ + 'loc': ['testcase'], + 'msg': 'testcase name must be non-empty', + 'type': 'value_error' + }] + } + } + + def test_create_result_empty_testcase_name(self): + r = self.app.post( + '/api/v2.0/results', json={'outcome': 'passed', 'testcase': {'name': ''}}) + data = json.loads(r.data) + + assert r.status_code == 400 + assert data == { + 'validation_error': { + 'body_params': [{ + 'loc': ['testcase'], + 'msg': 'testcase name must be non-empty', + 'type': 'value_error' + }] + } + } + + def test_create_result_empty_testcase_dict(self): + r = self.app.post( + '/api/v2.0/results', json={'outcome': 'passed', 'testcase': {}}) + data = json.loads(r.data) + + assert r.status_code == 400 + assert data == { + 'validation_error': { + 'body_params': [{ + 'loc': ['testcase'], + 'msg': 'testcase name must be non-empty', + 'type': 'value_error' + }] + } + } + + def test_create_result_missing_testcase(self): + r = self.app.post('/api/v2.0/results', json={'outcome': 'passed'}) data = json.loads(r.data) assert r.status_code == 400 - assert data['message'].startswith('Malformed Request') + assert data == { + 'validation_error': { + 'body_params': [{ + 'loc': ['testcase'], + 'msg': 'field required', + 'type': 'value_error.missing' + }] + } + } + def test_create_result_missing_outcome(self): ref_data = json.dumps({'testcase': self.ref_testcase}) r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') data = json.loads(r.data) assert r.status_code == 400 - assert data['message'].startswith('Malformed 
Request') + assert data == { + 'validation_error': { + 'body_params': [{ + 'loc': ['outcome'], + 'msg': 'field required', + 'type': 'value_error.missing' + }] + } + } def test_create_result_multiple_groups(self): uuid2 = '1c26effb-7c07-4d90-9428-86aac053288c' @@ -545,7 +574,15 @@ def test_create_result_invalid_outcome(self): data = json.loads(r.data) assert r.status_code == 400 - assert data['message'].startswith("outcome must be one of") + assert data == { + 'validation_error': { + 'body_params': [{ + 'loc': ['outcome'], + 'msg': 'must be one of: PASSED, INFO, FAILED, NEEDS_INSPECTION, AMAZING', + 'type': 'value_error' + }] + } + } def test_create_result_invalid_data(self): ref_data = json.dumps({ @@ -570,7 +607,7 @@ def test_create_result_submit_time_as_number(self): r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') data = json.loads(r.data) - assert r.status_code == 201 + assert r.status_code == 201, data assert data['submit_time'] == '2022-08-24T06:54:57.123000' def test_create_result_submit_time_as_number_string(self): @@ -583,7 +620,7 @@ def test_create_result_submit_time_as_number_string(self): r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') data = json.loads(r.data) - assert r.status_code == 201 + assert r.status_code == 201, data assert data['submit_time'] == '2022-08-24T06:54:57.123000' def test_create_result_submit_time_as_datetime(self): @@ -596,7 +633,7 @@ def test_create_result_submit_time_as_datetime(self): r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') data = json.loads(r.data) - assert r.status_code == 201 + assert r.status_code == 201, data assert data['submit_time'] == '2022-08-24T06:54:57.123456' def test_create_result_submit_time_as_invalid(self): @@ -609,8 +646,19 @@ def test_create_result_submit_time_as_invalid(self): r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') data = json.loads(r.data) 
- assert r.status_code == 400 - assert data['message'].startswith('Malformed Request') + assert r.status_code == 400, data + assert data == { + "validation_error": { + "body_params": [{ + "loc": ["submit_time"], + "msg": ( + "Expected timestamp in milliseconds or datetime" + " (in format YYYY-MM-DDTHH:MM:SS.ffffff), got " + ), + "type": "value_error" + }] + } + } def test_get_result(self): self.test_create_result() @@ -670,7 +718,8 @@ def test_get_results_by_group(self): data1 = json.loads(r1.data) data2 = json.loads(r2.data) - assert r1.status_code == r2.status_code == 200 + assert r1.status_code == 200, r1.text + assert r2.status_code == 200, r2.text assert len(data1['data']) == len(data2['data']) == 1 assert data1 == data2 assert data1['data'][0] == self.ref_result @@ -694,9 +743,10 @@ def test_get_results_by_testcase(self): data1 = json.loads(r1.data) data2 = json.loads(r2.data) - assert r1.status_code == r2.status_code == 200 - assert data1 == data2 + assert r1.status_code == 200, r1.text + assert r2.status_code == 200, r2.text assert data1['data'][0] == self.ref_result + assert data2['data'][0] == self.ref_result r = self.app.get('/api/v2.0/results?testcases=%s,%s' % (self.ref_testcase_name, name2)) data = json.loads(r.data) @@ -717,9 +767,10 @@ def test_get_results_by_testcase_like(self): data1 = json.loads(r1.data) data2 = json.loads(r2.data) - assert r1.status_code == r2.status_code == 200 - assert data1 == data2 + assert r1.status_code == 200, r1.text + assert r2.status_code == 200, r2.text assert data1['data'][0] == self.ref_result + assert data2['data'][0] == self.ref_result r1 = self.app.get('/api/v2.0/results?testcases:like=%s*' % (self.ref_testcase_name,)) r2 = self.app.get('/api/v2.0/results?testcases:like=%s,%s*' % diff --git a/testing/test_general.py b/testing/test_general.py index a320263..efd99ca 100644 --- a/testing/test_general.py +++ b/testing/test_general.py @@ -1,15 +1,9 @@ import datetime -import pytest -import functools import 
resultsdb.controllers.api_v2 as apiv2 -import resultsdb.lib.helpers as helpers import resultsdb.messaging as messaging +from resultsdb.parsers.api_v2 import parse_since -try: - basestring -except NameError: - basestring = (str, bytes) class MyRequest(object): @@ -17,69 +11,6 @@ def __init__(self, url): self.url = url -class TestTypeHelpers(): - - def test_dict_or_string(self): - assert helpers.dict_or_string('') == '' - assert helpers.dict_or_string(u'') == u'' - assert helpers.dict_or_string({}) == {} - assert helpers.dict_or_string({"foo": "bar"}) == {"foo": "bar"} - with pytest.raises(ValueError): - helpers.dict_or_string([]) - - def test_list_or_none(self): - assert helpers.list_or_none(None) is None - assert helpers.list_or_none([]) == [] - assert helpers.list_or_none(["foo", "bar"]) == ["foo", "bar"] - with pytest.raises(ValueError): - assert helpers.list_or_none("") - - def test_non_empty(self): - assert helpers.non_empty(basestring, "foobar") == "foobar" - assert helpers.non_empty(int, 0) == 0 - assert helpers.non_empty(int, 1) == 1 - assert helpers.non_empty(float, 0.0) == 0.0 - assert helpers.non_empty(float, 1.0) == 1.0 - assert helpers.non_empty(list, ["foo"]) == ["foo"] - assert helpers.non_empty(dict, {"foo": "bar"}) == {"foo": "bar"} - - with pytest.raises(ValueError): - helpers.non_empty(basestring, "") - with pytest.raises(ValueError): - helpers.non_empty(list, []) - with pytest.raises(ValueError): - helpers.non_empty(dict, {}) - - def test_non_empty_with_lambda(self): - assert helpers.non_empty(helpers.list_or_none, ['foo']) == ['foo'] - assert helpers.non_empty(functools.partial(helpers.non_empty, helpers.list_or_none), ['foo']) == ['foo'] - with pytest.raises(ValueError): - helpers.non_empty(helpers.list_or_none, []) - with pytest.raises(ValueError): - helpers.non_empty(helpers.list_or_none, None) - with pytest.raises(ValueError): - helpers.non_empty(functools.partial(helpers.non_empty, helpers.list_or_none), []) - - -class 
TestExtraDataValidation(): - def test__validate_create_result_extra_data(self): - data = {"foobar": 0, "moo": "1"} - assert apiv2._validate_create_result_extra_data(None, data) == data - assert apiv2._validate_create_result_extra_data([], data) == data - assert apiv2._validate_create_result_extra_data(['foobar'], data) == data - assert apiv2._validate_create_result_extra_data(['moo'], data) == data - with pytest.raises(ValueError): - apiv2._validate_create_result_extra_data(['foobar'], None) - with pytest.raises(ValueError): - apiv2._validate_create_result_extra_data(['foobar'], {}) - with pytest.raises(ValueError): - apiv2._validate_create_result_extra_data(['foobar'], {'foobar': None}) - with pytest.raises(ValueError): - apiv2._validate_create_result_extra_data(['foobar'], {'foobar': ''}) - with pytest.raises(ValueError): - apiv2._validate_create_result_extra_data(None, "") - - class TestPrevNextURL(): def setup_method(self, method): @@ -153,6 +84,7 @@ def test_data_page_and_limit_in_url(self, monkeypatch): assert prev == 'URL&limit=1&page=0' assert next == 'URL&limit=1&page=2' + class TestParseSince(): def setup_method(self, method): @@ -160,21 +92,21 @@ def setup_method(self, method): self.date_obj = datetime.datetime.strptime(self.date_str, "%Y-%m-%dT%H:%M:%S.%f") def test_parse_start(self): - start, end = apiv2.parse_since(self.date_str) + start, end = parse_since(self.date_str) assert start == self.date_obj assert end is None def test_parse_start_with_timezone_info(self): - start, end = apiv2.parse_since(self.date_str + 'Z') + start, end = parse_since(self.date_str + 'Z') assert start == self.date_obj assert end is None - start, end = apiv2.parse_since(self.date_str + '+01') + start, end = parse_since(self.date_str + '+01') assert start == self.date_obj assert end is None def test_parse_end(self): - start, end = apiv2.parse_since(self.date_str + ',' + self.date_str) + start, end = parse_since(self.date_str + ',' + self.date_str) assert start == self.date_obj 
assert end == self.date_obj