diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..8dee19b6
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,26 @@
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v2.2.3
+    hooks:
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: check-docstring-first
+      - id: check-json
+      - id: check-yaml
+      - id: debug-statements
+  - repo: https://github.com/asottile/seed-isort-config
+    rev: v1.9.1
+    hooks:
+      - id: seed-isort-config
+        language_version: python3.6
+        args: [--application-directories=./src, --settings-path=./src]
+  - repo: https://github.com/pre-commit/mirrors-isort
+    rev: v4.3.20
+    hooks:
+      - id: isort
+        language_version: python3.6
+  - repo: https://github.com/ambv/black
+    rev: stable
+    hooks:
+      - id: black
+        language_version: python3.6
diff --git a/.travis.yml b/.travis.yml
index 76147067..92fe3fc7 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -26,6 +26,6 @@ before_script:
   - pip install -r ./src/requirements-dev.txt
 
 script:
+  - black --check ./src
   - tox -c ./src/tox.ini -e coverage
-  - tox -c ./src/tox.ini -e lint
   - docker ps | grep api | grep -q healthy
diff --git a/DEVELOPING.md b/DEVELOPING.md
index c9b5c07e..bc7bb7cd 100644
--- a/DEVELOPING.md
+++ b/DEVELOPING.md
@@ -38,14 +38,46 @@ recommend you initialize a virtual development environment using a tool such as
 
 ```bash
 $ cd src
-$ python2 -m pip install -r requirements-dev.txt
+$ python3 -m pip install -r requirements-dev.txt
 $ pytest          # faster, but less thorough
 $ tox             # tests code in clean virtualenv
 $ tox --recreate  # if you change `requirements.txt`
-$ tox -e lint     # check that code meets widely accepted coding standards
 $ tox -e coverage # check where test coverage lacks
 ```
 
+Committing
+----------
+
+Besides running the test suite and ensuring that all tests pass, we also
+expect all Python code that's checked in to have been run through an
+auto-formatter.
+
+This project uses a Python auto-formatter called Black. You probably won't like
+every decision it makes, but our continuous integration test-runner will reject
+your commit if it's not properly formatted.
+
+Additionally, import statement sorting is handled by `isort`.
+
+The continuous integration test-runner verifies the code is auto-formatted by
+checking that neither `isort` nor `black` would recommend any changes to the
+code. Occasionally, this can fail if these two autoformatters disagree. The
+only time I've seen this happen is with a commented-out import statement, which
+`isort` parses and `black` treats as a comment. The solution: don't leave
+commented-out import statements in the code.
+
+There are several ways to autoformat your code before committing. First, IDE
+integration with on-save hooks is very useful. Second, there is a script,
+`scripts/autoformat_python.sh`, that will run both `isort` and `black` over the
+codebase. Lastly, if you've already pip-installed the dev requirements from the
+section above, you already have a utility called `pre-commit` installed that
+will automate setting up this project's git pre-commit hooks. Simply run the
+following _once_, and your code will be automatically formatted each time you
+commit.
+
+
+```bash
+$ pre-commit install
+```
 
 Running Production Server with Local Changes
 --------------------------------------------
@@ -100,7 +132,7 @@ $ ./manage.py runserver
 outside of it, you may need to allow access to system sitepackages.
 For example, if you're using a virtualenv called `scos-sensor`, you can
 remove the following text file: `rm -f
-~/.virtualenvs/scos-sensor/lib/python2.7/no-global-site-packages.txt`, and
+~/.virtualenvs/scos-sensor/lib/python3.6/no-global-site-packages.txt`, and
 thereafter use the `ignore-installed` flag to pip: `pip install -I -r
 requirements.txt`. This should let the devserver fall back to system
 sitepackages for the SDR driver only.
diff --git a/docker-compose.yml b/docker-compose.yml
index 78bcedd0..c51a869c 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -57,7 +57,7 @@ services:
     command: /entrypoints/api_entrypoint.sh
 
   nginx:
-    image: nginx:1.14-alpine
+    image: nginx:1.16-alpine
     restart: always
     ports:
       - '80:80'
@@ -82,14 +82,13 @@ services:
   # https://github.com/moby/moby/issues/28400
   # https://github.com/willfarrell/docker-autoheal
   autoheal:
-    image: alpine
+    image: smsntia/autoheal:${DOCKER_TAG} # DOCKER_TAG will always be 'latest' for GitHub source
     restart: always
    depends_on:
       - api
     build:
       context: .
       dockerfile: docker/Dockerfile-autoheal
-    image: smsntia/autoheal:${DOCKER_TAG} # DOCKER_TAG will always be 'latest' for GitHub source
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock
       - ./entrypoints/autoheal_entrypoint.sh:/entrypoints/autoheal_entrypoint.sh:ro
diff --git a/docker/Dockerfile-autoheal b/docker/Dockerfile-autoheal
index c4ec16ac..3ef47bda 100644
--- a/docker/Dockerfile-autoheal
+++ b/docker/Dockerfile-autoheal
@@ -1,3 +1,3 @@
-FROM alpine
+FROM alpine:3.9
 
 RUN apk add --no-cache jq curl
diff --git a/docs/openapi.json b/docs/openapi.json
index 74e6c9c7..884ce4c5 100644
--- a/docs/openapi.json
+++ b/docs/openapi.json
@@ -53,57 +53,14 @@
         },
         "parameters": []
     },
-    "/v1/acquisitions/": {
+    "/v1/capabilities/": {
         "get": {
-            "operationId": "v1_acquisitions_list",
-            "description": "Returns an overview of how many acquisitions are available per schedule\nentry.",
-            "parameters": [
-                {
-                    "name": "limit",
-                    "in": "query",
-                    "description": "Number of results to return per page.",
-                    "required": false,
-                    "type": "integer"
-                },
-                {
-                    "name": "offset",
-                    "in": "query",
-                    "description": "The initial index from which to return the results.",
-                    "required": false,
-                    "type": "integer"
-                }
-            ],
+            "operationId": "v1_capabilities_list",
+            "description": "The capabilities of the sensor.",
+            "parameters": [],
             "responses": {
                 "200": {
-                    "description": "",
-                    "schema": {
-                        "required": [
-                            "count",
-                            "results"
-                        ],
-                        "type": "object",
-                        "properties": {
-                            "count": {
-                                "type": "integer"
-                            },
-                            "next": {
-                                "type": "string",
-                                "format": "uri",
-                                "x-nullable": true
-                            },
-                            "previous": {
-                                "type": "string",
-                                "format": "uri",
-                                "x-nullable": true
-                            },
-                            "results": {
-                                "type": "array",
-                                "items": {
-                                    "$ref": "#/definitions/AcquisitionsOverview"
-                                }
-                            }
-                        }
-                    }
+                    "description": ""
                 }
             },
             "tags": [
                 "v1"
             ]
         },
         "parameters": []
     },
-    "/v1/acquisitions/{schedule_entry_name}/": {
+    "/v1/capabilities/{format}": {
+        "get": {
+            "operationId": "v1_capabilities_read",
+            "description": "The capabilities of the sensor.",
+            "parameters": [],
+            "responses": {
+                "200": {
+                    "description": ""
+                }
+            },
+            "tags": [
+                "v1"
+            ]
+        },
+        "parameters": [
+            {
+                "name": "format",
+                "in": "path",
+                "required": true,
+                "type": "string"
+            }
+        ]
+    },
+    "/v1/schedule/": {
         "get": {
-            "operationId": "v1_acquisitions_list",
-            "description": "Returns a list of all acquisitions created by the given schedule entry.",
+            "operationId": "v1_schedule_list",
+            "description": "Retrieves the current schedule.",
             "parameters": [
                 {
"name": "search", @@ -172,7 +152,7 @@ "results": { "type": "array", "items": { - "$ref": "#/definitions/Acquisition" + "$ref": "#/definitions/AdminScheduleEntry" } } } @@ -183,107 +163,68 @@ "v1" ] }, - "delete": { - "operationId": "v1_acquisitions_delete", - "description": "Deletes all acquisitions created by the given schedule entry.", + "post": { + "operationId": "v1_schedule_create", + "description": "Return NO CONTENT when input is valid but validate_only is True.", "parameters": [ { - "name": "search", - "in": "query", - "description": "A search term.", - "required": false, - "type": "string" - }, - { - "name": "ordering", - "in": "query", - "description": "Which field to use when ordering the results.", - "required": false, - "type": "string" + "name": "data", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/AdminScheduleEntry" + } } ], "responses": { - "204": { - "description": "" + "201": { + "description": "", + "schema": { + "$ref": "#/definitions/AdminScheduleEntry" + } } }, "tags": [ "v1" ] }, - "parameters": [ - { - "name": "schedule_entry_name", - "in": "path", - "required": true, - "type": "string" - } - ] + "parameters": [] }, - "/v1/acquisitions/{schedule_entry_name}/archive": { + "/v1/schedule/{name}/": { "get": { - "operationId": "v1_acquisitions_archive", - "description": "", + "operationId": "v1_schedule_read", + "description": "Returns the specified schedule entry.", + "parameters": [], + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/AdminScheduleEntry" + } + } + }, + "tags": [ + "v1" + ] + }, + "put": { + "operationId": "v1_schedule_update", + "description": "Updates the specified schedule entry.", "parameters": [ { - "name": "search", - "in": "query", - "description": "A search term.", - "required": false, - "type": "string" - }, - { - "name": "ordering", - "in": "query", - "description": "Which field to use when ordering the results.", - "required": false, - "type": "string" - }, - { - "name": "limit", - "in": "query", - "description": "Number of results to return per page.", - "required": false, - "type": "integer" - }, - { - "name": "offset", - "in": "query", - "description": "The initial index from which to return the results.", - "required": false, - "type": "integer" + "name": "data", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/AdminScheduleEntry" + } } ], "responses": { "200": { "description": "", "schema": { - "required": [ - "count", - "results" - ], - "type": "object", - "properties": { - "count": { - "type": "integer" - }, - "next": { - "type": "string", - "format": "uri", - "x-nullable": true - }, - "previous": { - "type": "string", - "format": "uri", - "x-nullable": true - }, - "results": { - "type": "array", - "items": { - "$ref": "#/definitions/Acquisition" - } - } - } + "$ref": "#/definitions/AdminScheduleEntry" } } }, @@ -291,25 +232,24 @@ "v1" ] }, - "parameters": [ - { - "name": "schedule_entry_name", - "in": "path", - "required": true, - "type": "string" - } - ] - }, - "/v1/acquisitions/{schedule_entry_name}/{task_id}/": { - "get": { - "operationId": "v1_acquisitions_read", - "description": "Returns all available metadata about an acquisition.", - "parameters": [], + "patch": { + "operationId": "v1_schedule_partial_update", + "description": "Partially updates the specified schedule entry.", + "parameters": [ + { + "name": "data", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/AdminScheduleEntry" + } + } + ], "responses": { 
"200": { "description": "", "schema": { - "$ref": "#/definitions/Acquisition" + "$ref": "#/definitions/AdminScheduleEntry" } } }, @@ -318,8 +258,8 @@ ] }, "delete": { - "operationId": "v1_acquisitions_delete", - "description": "Deletes the specified acquisition.", + "operationId": "v1_schedule_delete", + "description": "Deletes the specified schedule entry.", "parameters": [], "responses": { "204": { @@ -332,31 +272,40 @@ }, "parameters": [ { - "name": "schedule_entry_name", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "task_id", + "name": "name", "in": "path", - "description": "The id of the task relative to the acquisition", + "description": "[Required] The unique identifier used in URLs and filenames", "required": true, - "type": "integer" + "type": "string", + "format": "slug", + "pattern": "^[-a-zA-Z0-9_]+$" } ] }, - "/v1/acquisitions/{schedule_entry_name}/{task_id}/archive": { + "/v1/status": { "get": { - "operationId": "v1_acquisitions_archive", - "description": "Downloads the acquisition's SigMF archive.", + "operationId": "v1_status_list", + "description": "The status overview of the sensor.", "parameters": [], "responses": { "200": { - "description": "", - "schema": { - "$ref": "#/definitions/Acquisition" - } + "description": "" + } + }, + "tags": [ + "v1" + ] + }, + "parameters": [] + }, + "/v1/status{format}": { + "get": { + "operationId": "v1_read", + "description": "The status overview of the sensor.", + "parameters": [], + "responses": { + "200": { + "description": "" } }, "tags": [ @@ -365,24 +314,17 @@ }, "parameters": [ { - "name": "schedule_entry_name", + "name": "format", "in": "path", "required": true, "type": "string" - }, - { - "name": "task_id", - "in": "path", - "description": "The id of the task relative to the acquisition", - "required": true, - "type": "integer" } ] }, - "/v1/capabilities/": { + "/v1/tasks/": { "get": { - "operationId": "v1_capabilities_list", - "description": "The capabilites of the sensor.", + "operationId": "v1_tasks_list", + "description": "Provides links to upcoming and completed tasks", "parameters": [], "responses": { "200": { @@ -395,9 +337,9 @@ }, "parameters": [] }, - "/v1/results/": { + "/v1/tasks/completed/": { "get": { - "operationId": "v1_results_list", + "operationId": "v1_tasks_completed_list", "description": "Returns an overview of how many results are available per schedule\nentry.", "parameters": [ { @@ -454,9 +396,9 @@ }, "parameters": [] }, - "/v1/results/{schedule_entry_name}/": { + "/v1/tasks/completed/{schedule_entry_name}/": { "get": { - "operationId": "v1_results_list", + "operationId": "v1_tasks_completed_list", "description": "Returns a list of all results created by the given schedule entry.", "parameters": [ { @@ -525,26 +467,13 @@ "v1" ] }, - "parameters": [ - { - "name": "schedule_entry_name", - "in": "path", - "required": true, - "type": "string" - } - ] - }, - "/v1/results/{schedule_entry_name}/{task_id}/": { - "get": { - "operationId": "v1_results_read", - "description": "Returns a specific result.", + "delete": { + "operationId": "v1_tasks_completed_delete", + "description": "Deletes all results created by the given schedule entry.", "parameters": [], "responses": { - "200": { - "description": "", - "schema": { - "$ref": "#/definitions/TaskResult" - } + "204": { + "description": "" } }, "tags": [ @@ -557,20 +486,13 @@ "in": "path", "required": true, "type": "string" - }, - { - "name": "task_id", - "in": "path", - "description": "The id of the task relative to the result", - 
"required": true, - "type": "integer" } ] }, - "/v1/schedule/": { + "/v1/tasks/completed/{schedule_entry_name}/archive/": { "get": { - "operationId": "v1_schedule_list", - "description": "Retrieves the current schedule.", + "operationId": "v1_tasks_completed_archive", + "description": "Downloads the acquisition's SigMF archive.", "parameters": [ { "name": "search", @@ -627,150 +549,7 @@ "results": { "type": "array", "items": { - "required": [ - "name", - "action" - ], - "type": "object", - "properties": { - "url": { - "title": "Url", - "description": "The url of the entry", - "type": "string", - "format": "uri", - "readOnly": true - }, - "name": { - "title": "Name", - "description": "[Required] The unique identifier used in URLs and filenames", - "type": "string", - "format": "slug", - "pattern": "^[-a-zA-Z0-9_]+$", - "maxLength": 50, - "minLength": 1 - }, - "action": { - "title": "Action", - "description": "[Required] The name of the action to be scheduled", - "type": "string", - "enum": [ - "acquire_700_band_iq", - "acquire_700c_dl", - "logger", - "mock_acquire", - "admin_logger", - "monitor_usrp", - "sync_gps" - ] - }, - "priority": { - "title": "Priority", - "description": "Lower number is higher priority (default=10)", - "type": "integer", - "maximum": 19, - "minimum": -20, - "x-nullable": true - }, - "start": { - "title": "Start", - "description": "UTC time (ISO 8601) to start, or leave blank for 'now'", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "stop": { - "title": "Absolute stop", - "description": "UTC time (ISO 8601) to stop, or leave blank for 'never' (not valid with relative stop)", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "relative_stop": { - "title": "Relative stop", - "description": "Integer seconds after start to stop, or leave blank for 'never' (not valid with absolute stop)", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "interval": { - "title": "Interval", - "description": "Seconds between tasks, or leave blank to run once", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "is_active": { - "title": "Is active", - "description": "Indicates whether the entry should be removed from the scheduler without removing it from the system", - "type": "boolean", - "readOnly": true - }, - "is_private": { - "title": "Is private", - "description": "Indicates whether the entry, and resulting data, are only visible to admins", - "type": "boolean" - }, - "callback_url": { - "title": "Callback url", - "description": "If given, the scheduler will POST a `TaskResult` JSON object to this URL after each task completes", - "type": "string", - "format": "uri", - "maxLength": 200, - "x-nullable": true - }, - "next_task_time": { - "title": "Next task time", - "description": "UTC time (ISO 8601) the next task is scheduled for", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "next_task_id": { - "title": "Next task id", - "description": "The id of the next task to be executed", - "type": "integer", - "readOnly": true - }, - "created": { - "title": "Created", - "description": "The date the entry was created", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "modified": { - "title": "Modified", - "description": "The date the entry was modified", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "owner": { - "title": "Owner", - "description": "The name of the user who owns the entry", - "type": "string", - "format": "uri", - "readOnly": true 
- }, - "acquisitions": { - "title": "Acquisitions", - "description": "The list of acquisitions related to the entry", - "type": "string", - "readOnly": true - }, - "results": { - "title": "Results", - "description": "The list of results related to the entry", - "type": "string", - "readOnly": true - }, - "validate_only": { - "title": "Validate only", - "description": "Only validate the input, do not modify the schedule", - "type": "boolean", - "default": false - } - } + "$ref": "#/definitions/TaskResult" } } } @@ -781,308 +560,77 @@ "v1" ] }, - "post": { - "operationId": "v1_schedule_create", - "description": "Return NO CONTENT when input is valid but validate_only is True.", + "parameters": [ + { + "name": "schedule_entry_name", + "in": "path", + "required": true, + "type": "string" + } + ] + }, + "/v1/tasks/completed/{schedule_entry_name}/archive{format}": { + "get": { + "operationId": "v1_tasks_archive", + "description": "Downloads the acquisition's SigMF archive.", "parameters": [ { - "name": "data", - "in": "body", - "required": true, - "schema": { - "required": [ - "name", - "action" - ], - "type": "object", - "properties": { - "url": { - "title": "Url", - "description": "The url of the entry", - "type": "string", - "format": "uri", - "readOnly": true - }, - "name": { - "title": "Name", - "description": "[Required] The unique identifier used in URLs and filenames", - "type": "string", - "format": "slug", - "pattern": "^[-a-zA-Z0-9_]+$", - "maxLength": 50, - "minLength": 1 - }, - "action": { - "title": "Action", - "description": "[Required] The name of the action to be scheduled", - "type": "string", - "enum": [ - "acquire_700_band_iq", - "acquire_700c_dl", - "logger", - "mock_acquire", - "admin_logger", - "monitor_usrp", - "sync_gps" - ] - }, - "priority": { - "title": "Priority", - "description": "Lower number is higher priority (default=10)", - "type": "integer", - "maximum": 19, - "minimum": -20, - "x-nullable": true - }, - "start": { - "title": "Start", - "description": "UTC time (ISO 8601) to start, or leave blank for 'now'", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "stop": { - "title": "Absolute stop", - "description": "UTC time (ISO 8601) to stop, or leave blank for 'never' (not valid with relative stop)", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "relative_stop": { - "title": "Relative stop", - "description": "Integer seconds after start to stop, or leave blank for 'never' (not valid with absolute stop)", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "interval": { - "title": "Interval", - "description": "Seconds between tasks, or leave blank to run once", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "is_active": { - "title": "Is active", - "description": "Indicates whether the entry should be removed from the scheduler without removing it from the system", - "type": "boolean", - "readOnly": true - }, - "is_private": { - "title": "Is private", - "description": "Indicates whether the entry, and resulting data, are only visible to admins", - "type": "boolean" - }, - "callback_url": { - "title": "Callback url", - "description": "If given, the scheduler will POST a `TaskResult` JSON object to this URL after each task completes", - "type": "string", - "format": "uri", - "maxLength": 200, - "x-nullable": true - }, - "next_task_time": { - "title": "Next task time", - "description": "UTC time (ISO 8601) the next task is scheduled for", - "type": "string", - "format": "date-time", - 
"readOnly": true - }, - "next_task_id": { - "title": "Next task id", - "description": "The id of the next task to be executed", - "type": "integer", - "readOnly": true - }, - "created": { - "title": "Created", - "description": "The date the entry was created", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "modified": { - "title": "Modified", - "description": "The date the entry was modified", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "owner": { - "title": "Owner", - "description": "The name of the user who owns the entry", - "type": "string", - "format": "uri", - "readOnly": true - }, - "acquisitions": { - "title": "Acquisitions", - "description": "The list of acquisitions related to the entry", - "type": "string", - "readOnly": true - }, - "results": { - "title": "Results", - "description": "The list of results related to the entry", - "type": "string", - "readOnly": true - }, - "validate_only": { - "title": "Validate only", - "description": "Only validate the input, do not modify the schedule", - "type": "boolean", - "default": false - } - } - } + "name": "search", + "in": "query", + "description": "A search term.", + "required": false, + "type": "string" + }, + { + "name": "ordering", + "in": "query", + "description": "Which field to use when ordering the results.", + "required": false, + "type": "string" + }, + { + "name": "limit", + "in": "query", + "description": "Number of results to return per page.", + "required": false, + "type": "integer" + }, + { + "name": "offset", + "in": "query", + "description": "The initial index from which to return the results.", + "required": false, + "type": "integer" } ], "responses": { - "201": { + "200": { "description": "", "schema": { "required": [ - "name", - "action" + "count", + "results" ], "type": "object", "properties": { - "url": { - "title": "Url", - "description": "The url of the entry", - "type": "string", - "format": "uri", - "readOnly": true - }, - "name": { - "title": "Name", - "description": "[Required] The unique identifier used in URLs and filenames", - "type": "string", - "format": "slug", - "pattern": "^[-a-zA-Z0-9_]+$", - "maxLength": 50, - "minLength": 1 - }, - "action": { - "title": "Action", - "description": "[Required] The name of the action to be scheduled", - "type": "string", - "enum": [ - "acquire_700_band_iq", - "acquire_700c_dl", - "logger", - "mock_acquire", - "admin_logger", - "monitor_usrp", - "sync_gps" - ] - }, - "priority": { - "title": "Priority", - "description": "Lower number is higher priority (default=10)", - "type": "integer", - "maximum": 19, - "minimum": -20, - "x-nullable": true - }, - "start": { - "title": "Start", - "description": "UTC time (ISO 8601) to start, or leave blank for 'now'", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "stop": { - "title": "Absolute stop", - "description": "UTC time (ISO 8601) to stop, or leave blank for 'never' (not valid with relative stop)", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "relative_stop": { - "title": "Relative stop", - "description": "Integer seconds after start to stop, or leave blank for 'never' (not valid with absolute stop)", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "interval": { - "title": "Interval", - "description": "Seconds between tasks, or leave blank to run once", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "is_active": { - "title": "Is active", - "description": "Indicates whether the entry 
should be removed from the scheduler without removing it from the system", - "type": "boolean", - "readOnly": true - }, - "is_private": { - "title": "Is private", - "description": "Indicates whether the entry, and resulting data, are only visible to admins", - "type": "boolean" + "count": { + "type": "integer" }, - "callback_url": { - "title": "Callback url", - "description": "If given, the scheduler will POST a `TaskResult` JSON object to this URL after each task completes", + "next": { "type": "string", "format": "uri", - "maxLength": 200, "x-nullable": true }, - "next_task_time": { - "title": "Next task time", - "description": "UTC time (ISO 8601) the next task is scheduled for", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "next_task_id": { - "title": "Next task id", - "description": "The id of the next task to be executed", - "type": "integer", - "readOnly": true - }, - "created": { - "title": "Created", - "description": "The date the entry was created", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "modified": { - "title": "Modified", - "description": "The date the entry was modified", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "owner": { - "title": "Owner", - "description": "The name of the user who owns the entry", + "previous": { "type": "string", "format": "uri", - "readOnly": true - }, - "acquisitions": { - "title": "Acquisitions", - "description": "The list of acquisitions related to the entry", - "type": "string", - "readOnly": true + "x-nullable": true }, "results": { - "title": "Results", - "description": "The list of results related to the entry", - "type": "string", - "readOnly": true - }, - "validate_only": { - "title": "Validate only", - "description": "Only validate the input, do not modify the schedule", - "type": "boolean", - "default": false + "type": "array", + "items": { + "$ref": "#/definitions/TaskResult" + } } } } @@ -1092,161 +640,31 @@ "v1" ] }, - "parameters": [] + "parameters": [ + { + "name": "format", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "schedule_entry_name", + "in": "path", + "required": true, + "type": "string" + } + ] }, - "/v1/schedule/{name}/": { + "/v1/tasks/completed/{schedule_entry_name}/{task_id}/": { "get": { - "operationId": "v1_schedule_read", - "description": "Returns the specified schedule entry.", + "operationId": "v1_tasks_completed_read", + "description": "Returns a specific result.", "parameters": [], "responses": { "200": { "description": "", "schema": { - "required": [ - "name", - "action" - ], - "type": "object", - "properties": { - "url": { - "title": "Url", - "description": "The url of the entry", - "type": "string", - "format": "uri", - "readOnly": true - }, - "name": { - "title": "Name", - "description": "[Required] The unique identifier used in URLs and filenames", - "type": "string", - "format": "slug", - "pattern": "^[-a-zA-Z0-9_]+$", - "maxLength": 50, - "minLength": 1 - }, - "action": { - "title": "Action", - "description": "[Required] The name of the action to be scheduled", - "type": "string", - "enum": [ - "acquire_700_band_iq", - "acquire_700c_dl", - "logger", - "mock_acquire", - "admin_logger", - "monitor_usrp", - "sync_gps" - ] - }, - "priority": { - "title": "Priority", - "description": "Lower number is higher priority (default=10)", - "type": "integer", - "maximum": 19, - "minimum": -20, - "x-nullable": true - }, - "start": { - "title": "Start", - "description": "UTC time (ISO 8601) to start, or leave 
blank for 'now'", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "stop": { - "title": "Absolute stop", - "description": "UTC time (ISO 8601) to stop, or leave blank for 'never' (not valid with relative stop)", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "relative_stop": { - "title": "Relative stop", - "description": "Integer seconds after start to stop, or leave blank for 'never' (not valid with absolute stop)", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "interval": { - "title": "Interval", - "description": "Seconds between tasks, or leave blank to run once", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "is_active": { - "title": "Is active", - "description": "Indicates whether the entry should be removed from the scheduler without removing it from the system", - "type": "boolean", - "readOnly": true - }, - "is_private": { - "title": "Is private", - "description": "Indicates whether the entry, and resulting data, are only visible to admins", - "type": "boolean" - }, - "callback_url": { - "title": "Callback url", - "description": "If given, the scheduler will POST a `TaskResult` JSON object to this URL after each task completes", - "type": "string", - "format": "uri", - "maxLength": 200, - "x-nullable": true - }, - "next_task_time": { - "title": "Next task time", - "description": "UTC time (ISO 8601) the next task is scheduled for", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "next_task_id": { - "title": "Next task id", - "description": "The id of the next task to be executed", - "type": "integer", - "readOnly": true - }, - "created": { - "title": "Created", - "description": "The date the entry was created", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "modified": { - "title": "Modified", - "description": "The date the entry was modified", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "owner": { - "title": "Owner", - "description": "The name of the user who owns the entry", - "type": "string", - "format": "uri", - "readOnly": true - }, - "acquisitions": { - "title": "Acquisitions", - "description": "The list of acquisitions related to the entry", - "type": "string", - "readOnly": true - }, - "results": { - "title": "Results", - "description": "The list of results related to the entry", - "type": "string", - "readOnly": true - }, - "validate_only": { - "title": "Validate only", - "description": "Only validate the input, do not modify the schedule", - "type": "boolean", - "default": false - } - } + "$ref": "#/definitions/TaskResult" } } }, @@ -1254,302 +672,78 @@ "v1" ] }, - "put": { - "operationId": "v1_schedule_update", - "description": "Updates the specified schedule entry.", - "parameters": [ - { - "name": "data", - "in": "body", - "required": true, - "schema": { - "type": "object", - "properties": { - "url": { - "title": "Url", - "description": "The url of the entry", - "type": "string", - "format": "uri", - "readOnly": true - }, - "name": { - "title": "Name", - "description": "[Required] The unique identifier used in URLs and filenames", - "type": "string", - "format": "slug", - "pattern": "^[-a-zA-Z0-9_]+$", - "readOnly": true, - "minLength": 1 - }, - "action": { - "title": "Action", - "description": "[Required] The name of the action to be scheduled", - "type": "string", - "enum": [ - "acquire_700_band_iq", - "acquire_700c_dl", - "logger", - "mock_acquire", - "admin_logger", - "monitor_usrp", - "sync_gps" - ], - 
"readOnly": true - }, - "priority": { - "title": "Priority", - "description": "Lower number is higher priority (default=10)", - "type": "integer", - "maximum": 19, - "minimum": -20, - "x-nullable": true - }, - "start": { - "title": "Start", - "description": "UTC time (ISO 8601) to start, or leave blank for 'now'", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "stop": { - "title": "Absolute stop", - "description": "UTC time (ISO 8601) to stop, or leave blank for 'never' (not valid with relative stop)", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "relative_stop": { - "title": "Relative stop", - "description": "Integer seconds after start to stop, or leave blank for 'never' (not valid with absolute stop)", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "interval": { - "title": "Interval", - "description": "Seconds between tasks, or leave blank to run once", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "is_active": { - "title": "Is active", - "description": "Indicates whether the entry should be removed from the scheduler without removing it from the system", - "type": "boolean" - }, - "is_private": { - "title": "Is private", - "description": "Indicates whether the entry, and resulting data, are only visible to admins", - "type": "boolean" - }, - "callback_url": { - "title": "Callback url", - "description": "If given, the scheduler will POST a `TaskResult` JSON object to this URL after each task completes", - "type": "string", - "format": "uri", - "maxLength": 200, - "x-nullable": true - }, - "next_task_time": { - "title": "Next task time", - "description": "UTC time (ISO 8601) the next task is scheduled for", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "next_task_id": { - "title": "Next task id", - "description": "The id of the next task to be executed", - "type": "integer", - "readOnly": true - }, - "created": { - "title": "Created", - "description": "The date the entry was created", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "modified": { - "title": "Modified", - "description": "The date the entry was modified", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "owner": { - "title": "Owner", - "description": "The name of the user who owns the entry", - "type": "string", - "format": "uri", - "readOnly": true - }, - "acquisitions": { - "title": "Acquisitions", - "description": "The list of acquisitions related to the entry", - "type": "string", - "readOnly": true - }, - "results": { - "title": "Results", - "description": "The list of results related to the entry", - "type": "string", - "readOnly": true - }, - "validate_only": { - "title": "Validate only", - "description": "Only validate the input, do not modify the schedule", - "type": "boolean", - "default": false - } - } + "delete": { + "operationId": "v1_tasks_completed_delete", + "description": "Deletes the specified acquisition.", + "parameters": [], + "responses": { + "204": { + "description": "" + } + }, + "tags": [ + "v1" + ] + }, + "parameters": [ + { + "name": "schedule_entry_name", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "task_id", + "in": "path", + "description": "The id of the task relative to the result", + "required": true, + "type": "integer" + } + ] + }, + "/v1/tasks/completed/{schedule_entry_name}/{task_id}/archive": { + "get": { + "operationId": "v1_tasks_completed_archive", + "description": "Downloads the acquisition's 
SigMF archive.", + "parameters": [], + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/TaskResult" } } - ], + }, + "tags": [ + "v1" + ] + }, + "parameters": [ + { + "name": "schedule_entry_name", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "task_id", + "in": "path", + "description": "The id of the task relative to the result", + "required": true, + "type": "integer" + } + ] + }, + "/v1/tasks/completed/{schedule_entry_name}/{task_id}/archive{format}": { + "get": { + "operationId": "v1_tasks_archive", + "description": "Downloads the acquisition's SigMF archive.", + "parameters": [], "responses": { "200": { "description": "", "schema": { - "type": "object", - "properties": { - "url": { - "title": "Url", - "description": "The url of the entry", - "type": "string", - "format": "uri", - "readOnly": true - }, - "name": { - "title": "Name", - "description": "[Required] The unique identifier used in URLs and filenames", - "type": "string", - "format": "slug", - "pattern": "^[-a-zA-Z0-9_]+$", - "readOnly": true, - "minLength": 1 - }, - "action": { - "title": "Action", - "description": "[Required] The name of the action to be scheduled", - "type": "string", - "enum": [ - "acquire_700_band_iq", - "acquire_700c_dl", - "logger", - "mock_acquire", - "admin_logger", - "monitor_usrp", - "sync_gps" - ], - "readOnly": true - }, - "priority": { - "title": "Priority", - "description": "Lower number is higher priority (default=10)", - "type": "integer", - "maximum": 19, - "minimum": -20, - "x-nullable": true - }, - "start": { - "title": "Start", - "description": "UTC time (ISO 8601) to start, or leave blank for 'now'", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "stop": { - "title": "Absolute stop", - "description": "UTC time (ISO 8601) to stop, or leave blank for 'never' (not valid with relative stop)", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "relative_stop": { - "title": "Relative stop", - "description": "Integer seconds after start to stop, or leave blank for 'never' (not valid with absolute stop)", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "interval": { - "title": "Interval", - "description": "Seconds between tasks, or leave blank to run once", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "is_active": { - "title": "Is active", - "description": "Indicates whether the entry should be removed from the scheduler without removing it from the system", - "type": "boolean" - }, - "is_private": { - "title": "Is private", - "description": "Indicates whether the entry, and resulting data, are only visible to admins", - "type": "boolean" - }, - "callback_url": { - "title": "Callback url", - "description": "If given, the scheduler will POST a `TaskResult` JSON object to this URL after each task completes", - "type": "string", - "format": "uri", - "maxLength": 200, - "x-nullable": true - }, - "next_task_time": { - "title": "Next task time", - "description": "UTC time (ISO 8601) the next task is scheduled for", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "next_task_id": { - "title": "Next task id", - "description": "The id of the next task to be executed", - "type": "integer", - "readOnly": true - }, - "created": { - "title": "Created", - "description": "The date the entry was created", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "modified": { - "title": "Modified", - "description": "The date the 
entry was modified", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "owner": { - "title": "Owner", - "description": "The name of the user who owns the entry", - "type": "string", - "format": "uri", - "readOnly": true - }, - "acquisitions": { - "title": "Acquisitions", - "description": "The list of acquisitions related to the entry", - "type": "string", - "readOnly": true - }, - "results": { - "title": "Results", - "description": "The list of results related to the entry", - "type": "string", - "readOnly": true - }, - "validate_only": { - "title": "Validate only", - "description": "Only validate the input, do not modify the schedule", - "type": "boolean", - "default": false - } - } + "$ref": "#/definitions/TaskResult" } } }, @@ -1557,300 +751,227 @@ "v1" ] }, - "patch": { - "operationId": "v1_schedule_partial_update", - "description": "Partially updates the specified schedule entry.", + "parameters": [ + { + "name": "format", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "schedule_entry_name", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "task_id", + "in": "path", + "description": "The id of the task relative to the result", + "required": true, + "type": "integer" + } + ] + }, + "/v1/tasks/completed/{schedule_entry_name}/{task_id}{format}": { + "get": { + "operationId": "v1_tasks_completed_read", + "description": "Returns a specific result.", + "parameters": [], + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/TaskResult" + } + } + }, + "tags": [ + "v1" + ] + }, + "delete": { + "operationId": "v1_tasks_completed_delete", + "description": "Deletes the specified acquisition.", + "parameters": [], + "responses": { + "204": { + "description": "" + } + }, + "tags": [ + "v1" + ] + }, + "parameters": [ + { + "name": "format", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "schedule_entry_name", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "task_id", + "in": "path", + "description": "The id of the task relative to the result", + "required": true, + "type": "integer" + } + ] + }, + "/v1/tasks/completed/{schedule_entry_name}{format}": { + "get": { + "operationId": "v1_tasks_completed_list", + "description": "Returns a list of all results created by the given schedule entry.", "parameters": [ { - "name": "data", - "in": "body", - "required": true, + "name": "search", + "in": "query", + "description": "A search term.", + "required": false, + "type": "string" + }, + { + "name": "ordering", + "in": "query", + "description": "Which field to use when ordering the results.", + "required": false, + "type": "string" + }, + { + "name": "limit", + "in": "query", + "description": "Number of results to return per page.", + "required": false, + "type": "integer" + }, + { + "name": "offset", + "in": "query", + "description": "The initial index from which to return the results.", + "required": false, + "type": "integer" + } + ], + "responses": { + "200": { + "description": "", "schema": { + "required": [ + "count", + "results" + ], "type": "object", "properties": { - "url": { - "title": "Url", - "description": "The url of the entry", - "type": "string", - "format": "uri", - "readOnly": true - }, - "name": { - "title": "Name", - "description": "[Required] The unique identifier used in URLs and filenames", - "type": "string", - "format": "slug", - "pattern": "^[-a-zA-Z0-9_]+$", - "readOnly": true, - "minLength": 1 - }, - "action": { - "title": 
"Action", - "description": "[Required] The name of the action to be scheduled", - "type": "string", - "enum": [ - "acquire_700_band_iq", - "acquire_700c_dl", - "logger", - "mock_acquire", - "admin_logger", - "monitor_usrp", - "sync_gps" - ], - "readOnly": true - }, - "priority": { - "title": "Priority", - "description": "Lower number is higher priority (default=10)", - "type": "integer", - "maximum": 19, - "minimum": -20, - "x-nullable": true - }, - "start": { - "title": "Start", - "description": "UTC time (ISO 8601) to start, or leave blank for 'now'", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "stop": { - "title": "Absolute stop", - "description": "UTC time (ISO 8601) to stop, or leave blank for 'never' (not valid with relative stop)", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "relative_stop": { - "title": "Relative stop", - "description": "Integer seconds after start to stop, or leave blank for 'never' (not valid with absolute stop)", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "interval": { - "title": "Interval", - "description": "Seconds between tasks, or leave blank to run once", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "is_active": { - "title": "Is active", - "description": "Indicates whether the entry should be removed from the scheduler without removing it from the system", - "type": "boolean" - }, - "is_private": { - "title": "Is private", - "description": "Indicates whether the entry, and resulting data, are only visible to admins", - "type": "boolean" + "count": { + "type": "integer" }, - "callback_url": { - "title": "Callback url", - "description": "If given, the scheduler will POST a `TaskResult` JSON object to this URL after each task completes", + "next": { "type": "string", "format": "uri", - "maxLength": 200, "x-nullable": true }, - "next_task_time": { - "title": "Next task time", - "description": "UTC time (ISO 8601) the next task is scheduled for", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "next_task_id": { - "title": "Next task id", - "description": "The id of the next task to be executed", - "type": "integer", - "readOnly": true - }, - "created": { - "title": "Created", - "description": "The date the entry was created", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "modified": { - "title": "Modified", - "description": "The date the entry was modified", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "owner": { - "title": "Owner", - "description": "The name of the user who owns the entry", + "previous": { "type": "string", "format": "uri", - "readOnly": true - }, - "acquisitions": { - "title": "Acquisitions", - "description": "The list of acquisitions related to the entry", - "type": "string", - "readOnly": true + "x-nullable": true }, "results": { - "title": "Results", - "description": "The list of results related to the entry", - "type": "string", - "readOnly": true - }, - "validate_only": { - "title": "Validate only", - "description": "Only validate the input, do not modify the schedule", - "type": "boolean", - "default": false + "type": "array", + "items": { + "$ref": "#/definitions/TaskResult" + } } } } } + }, + "tags": [ + "v1" + ] + }, + "delete": { + "operationId": "v1_tasks_completed_delete", + "description": "Deletes all results created by the given schedule entry.", + "parameters": [], + "responses": { + "204": { + "description": "" + } + }, + "tags": [ + "v1" + ] + }, + 
"parameters": [ + { + "name": "format", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "schedule_entry_name", + "in": "path", + "required": true, + "type": "string" + } + ] + }, + "/v1/tasks/completed{format}": { + "get": { + "operationId": "v1_tasks_list", + "description": "Returns an overview of how many results are available per schedule\nentry.", + "parameters": [ + { + "name": "limit", + "in": "query", + "description": "Number of results to return per page.", + "required": false, + "type": "integer" + }, + { + "name": "offset", + "in": "query", + "description": "The initial index from which to return the results.", + "required": false, + "type": "integer" + } ], "responses": { "200": { "description": "", "schema": { + "required": [ + "count", + "results" + ], "type": "object", "properties": { - "url": { - "title": "Url", - "description": "The url of the entry", - "type": "string", - "format": "uri", - "readOnly": true - }, - "name": { - "title": "Name", - "description": "[Required] The unique identifier used in URLs and filenames", - "type": "string", - "format": "slug", - "pattern": "^[-a-zA-Z0-9_]+$", - "readOnly": true, - "minLength": 1 - }, - "action": { - "title": "Action", - "description": "[Required] The name of the action to be scheduled", - "type": "string", - "enum": [ - "acquire_700_band_iq", - "acquire_700c_dl", - "logger", - "mock_acquire", - "admin_logger", - "monitor_usrp", - "sync_gps" - ], - "readOnly": true - }, - "priority": { - "title": "Priority", - "description": "Lower number is higher priority (default=10)", - "type": "integer", - "maximum": 19, - "minimum": -20, - "x-nullable": true - }, - "start": { - "title": "Start", - "description": "UTC time (ISO 8601) to start, or leave blank for 'now'", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "stop": { - "title": "Absolute stop", - "description": "UTC time (ISO 8601) to stop, or leave blank for 'never' (not valid with relative stop)", - "type": "string", - "format": "date-time", - "x-nullable": true - }, - "relative_stop": { - "title": "Relative stop", - "description": "Integer seconds after start to stop, or leave blank for 'never' (not valid with absolute stop)", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "interval": { - "title": "Interval", - "description": "Seconds between tasks, or leave blank to run once", - "type": "integer", - "minimum": 1, - "x-nullable": true - }, - "is_active": { - "title": "Is active", - "description": "Indicates whether the entry should be removed from the scheduler without removing it from the system", - "type": "boolean" - }, - "is_private": { - "title": "Is private", - "description": "Indicates whether the entry, and resulting data, are only visible to admins", - "type": "boolean" + "count": { + "type": "integer" }, - "callback_url": { - "title": "Callback url", - "description": "If given, the scheduler will POST a `TaskResult` JSON object to this URL after each task completes", + "next": { "type": "string", "format": "uri", - "maxLength": 200, "x-nullable": true }, - "next_task_time": { - "title": "Next task time", - "description": "UTC time (ISO 8601) the next task is scheduled for", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "next_task_id": { - "title": "Next task id", - "description": "The id of the next task to be executed", - "type": "integer", - "readOnly": true - }, - "created": { - "title": "Created", - "description": "The date the entry was created", - "type": "string", 
- "format": "date-time", - "readOnly": true - }, - "modified": { - "title": "Modified", - "description": "The date the entry was modified", - "type": "string", - "format": "date-time", - "readOnly": true - }, - "owner": { - "title": "Owner", - "description": "The name of the user who owns the entry", + "previous": { "type": "string", "format": "uri", - "readOnly": true - }, - "acquisitions": { - "title": "Acquisitions", - "description": "The list of acquisitions related to the entry", - "type": "string", - "readOnly": true + "x-nullable": true }, "results": { - "title": "Results", - "description": "The list of results related to the entry", - "type": "string", - "readOnly": true - }, - "validate_only": { - "title": "Validate only", - "description": "Only validate the input, do not modify the schedule", - "type": "boolean", - "default": false + "type": "array", + "items": { + "$ref": "#/definitions/TaskResultsOverview" + } } } } @@ -1860,12 +981,38 @@ "v1" ] }, - "delete": { - "operationId": "v1_schedule_delete", - "description": "Deletes the specified schedule entry.", + "parameters": [ + { + "name": "format", + "in": "path", + "required": true, + "type": "string" + } + ] + }, + "/v1/tasks/upcoming/": { + "get": { + "operationId": "v1_tasks_upcoming_list", + "description": "Returns a snapshot of upcoming tasks.", "parameters": [], "responses": { - "204": { + "200": { + "description": "" + } + }, + "tags": [ + "v1" + ] + }, + "parameters": [] + }, + "/v1/tasks/upcoming{format}": { + "get": { + "operationId": "v1_tasks_read", + "description": "Returns a snapshot of upcoming tasks.", + "parameters": [], + "responses": { + "200": { "description": "" } }, @@ -1875,20 +1022,17 @@ }, "parameters": [ { - "name": "name", + "name": "format", "in": "path", - "description": "[Required] The unique identifier used in URLs and filenames", "required": true, - "type": "string", - "format": "slug", - "pattern": "^[-a-zA-Z0-9_]+$" + "type": "string" } ] }, - "/v1/status": { + "/v1/tasks/{format}": { "get": { - "operationId": "v1_status_list", - "description": "The status overview of the sensor.", + "operationId": "v1_tasks_read", + "description": "Provides links to upcoming and completed tasks", "parameters": [], "responses": { "200": { @@ -1899,95 +1043,198 @@ "v1" ] }, - "parameters": [] + "parameters": [ + { + "name": "format", + "in": "path", + "required": true, + "type": "string" + } + ] + }, + "/v1/{format}": { + "get": { + "operationId": "v1_read", + "description": "SCOS sensor API root.", + "parameters": [], + "responses": { + "200": { + "description": "" + } + }, + "tags": [ + "v1" + ] + }, + "parameters": [ + { + "name": "format", + "in": "path", + "required": true, + "type": "string" + } + ] } }, "definitions": { - "AcquisitionsOverview": { + "AdminScheduleEntry": { + "required": [ + "name", + "action" + ], "type": "object", "properties": { - "url": { - "title": "Url", - "description": "The url of the list of acquisitions", + "self": { + "title": "Self", + "description": "The url of the entry", "type": "string", "format": "uri", "readOnly": true }, - "acquisitions_available": { - "title": "Acquisitions available", - "description": "The number of available acquisitions", + "name": { + "title": "Name", + "description": "[Required] The unique identifier used in URLs and filenames", + "type": "string", + "format": "slug", + "pattern": "^[-a-zA-Z0-9_]+$", + "maxLength": 50, + "minLength": 1 + }, + "action": { + "title": "Action", + "description": "[Required] The name of the action to be scheduled", 
"type": "string", - "readOnly": true + "enum": [ + "acquire_700c_dl", + "logger", + "survey_700_band_iq", + "mock_acquire", + "mock_multirec_acquire", + "admin_logger", + "monitor_usrp", + "sync_gps" + ] }, - "archive": { - "title": "Archive", - "description": "The url to download a SigMF archive of all acquisitions", + "priority": { + "title": "Priority", + "description": "Lower number is higher priority (default=10)", + "type": "integer", + "maximum": 19, + "minimum": -20, + "x-nullable": true + }, + "start": { + "title": "Start", + "description": "UTC time (ISO 8601) to start, or leave blank for 'now'", "type": "string", - "readOnly": true + "format": "date-time", + "x-nullable": true }, - "schedule_entry": { - "title": "Schedule entry", - "description": "The related schedule entry for the acquisition", + "stop": { + "title": "Absolute stop", + "description": "UTC time (ISO 8601) to stop, or leave blank for 'never' (not valid with relative stop)", "type": "string", + "format": "date-time", + "x-nullable": true + }, + "relative_stop": { + "title": "Relative stop", + "description": "Integer seconds after start to stop, or leave blank for 'never' (not valid with absolute stop)", + "type": "integer", + "minimum": 1, + "x-nullable": true + }, + "interval": { + "title": "Interval", + "description": "Seconds between tasks, or leave blank to run once", + "type": "integer", + "minimum": 1, + "x-nullable": true + }, + "is_active": { + "title": "Is active", + "description": "Indicates whether the entry should be removed from the scheduler without removing it from the system", + "type": "boolean", "readOnly": true - } - } - }, - "Acquisition": { - "required": [ - "task_id", - "sigmf_metadata" - ], - "type": "object", - "properties": { - "url": { - "title": "Url", - "description": "The url of the acquisition", + }, + "is_private": { + "title": "Is private", + "description": "Indicates whether the entry, and resulting data, are only visible to admins", + "type": "boolean" + }, + "callback_url": { + "title": "Callback url", + "description": "If given, the scheduler will POST a `TaskResult` JSON object to this URL after each task completes", "type": "string", "format": "uri", + "maxLength": 200, + "x-nullable": true + }, + "next_task_time": { + "title": "Next task time", + "description": "UTC time (ISO 8601) the next task is scheduled for", + "type": "string", + "format": "date-time", "readOnly": true }, - "task_id": { - "title": "Task id", - "description": "The id of the task relative to the acquisition", - "type": "integer" + "next_task_id": { + "title": "Next task id", + "description": "The id of the next task to be executed", + "type": "integer", + "readOnly": true }, "created": { "title": "Created", - "description": "The time the acquisition was created", + "description": "The date the entry was created", "type": "string", "format": "date-time", "readOnly": true }, - "archive": { - "title": "Archive", - "description": "The url to download a SigMF archive of this acquisition", + "modified": { + "title": "Modified", + "description": "The date the entry was modified", + "type": "string", + "format": "date-time", + "readOnly": true + }, + "owner": { + "title": "Owner", + "description": "The name of the user who owns the entry", "type": "string", "format": "uri", "readOnly": true }, - "sigmf_metadata": { - "title": "Sigmf metadata", - "description": "The sigmf meta data for the acquisition", - "type": "object", - "additionalProperties": { - "type": "string" - } + "task_results": { + "title": "Task 
results", + "description": "The list of results related to the entry", + "type": "string", + "readOnly": true + }, + "validate_only": { + "title": "Validate only", + "description": "Only validate the input, do not modify the schedule", + "type": "boolean", + "default": false } } }, "TaskResultsOverview": { "type": "object", "properties": { - "url": { - "title": "Url", - "description": "The url of the list of results", + "archive": { + "title": "Archive", + "description": "The link to a multi-recording archive of all available acquisitions", "type": "string", - "format": "uri", "readOnly": true }, - "results_available": { - "title": "Results available", + "task_results": { + "title": "Task results", + "description": "The link to the task results", + "type": "string", + "readOnly": true + }, + "task_results_available": { + "title": "Task results available", "description": "The number of available results", "type": "string", "readOnly": true @@ -2000,28 +1247,75 @@ } } }, + "Acquisition": { + "required": [ + "metadata" + ], + "type": "object", + "properties": { + "recording_id": { + "title": "Recording id", + "description": "The id of the recording relative to the task", + "type": "integer" + }, + "archive": { + "title": "Archive", + "description": "The url to download a SigMF archive of this acquisition", + "type": "string", + "format": "uri", + "readOnly": true + }, + "metadata": { + "title": "Metadata", + "description": "The SigMF metadata for the acquisition", + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + }, "TaskResult": { "required": [ "task_id", - "started", - "finished", - "duration", - "result" + "data" ], "type": "object", "properties": { - "url": { - "title": "Url", + "self": { + "title": "Self", "description": "The url of the result", "type": "string", "format": "uri", "readOnly": true }, + "schedule_entry": { + "title": "Schedule entry", + "description": "The url of the parent schedule entry", + "type": "string", + "readOnly": true + }, "task_id": { "title": "Task id", "description": "The id of the task relative to the result", "type": "integer" }, + "status": { + "title": "Status", + "description": "\"success\" or \"failure\"", + "type": "string", + "enum": [ + 1, + 2, + 3 + ] + }, + "detail": { + "title": "Detail", + "description": "Arbitrary detail string", + "type": "string", + "maxLength": 512 + }, "started": { "title": "Started", "description": "The time the task started", @@ -2039,28 +1333,13 @@ "description": "Task duration in seconds", "type": "number" }, - "result": { - "title": "Result", - "description": "\"success\" or \"failure\"", - "type": "string", - "enum": [ - 1, - 2 - ] - }, - "detail": { - "title": "Detail", - "description": "Arbitrary detail string", - "type": "string", - "maxLength": 512 - }, - "schedule_entry": { - "title": "Schedule entry", - "description": "The url of the parent schedule entry", - "type": "string", - "readOnly": true + "data": { + "type": "array", + "items": { + "$ref": "#/definitions/Acquisition" + } } } } } -} \ No newline at end of file +} diff --git a/env.template b/env.template index cc78b857..2da3f685 100644 --- a/env.template +++ b/env.template @@ -15,7 +15,7 @@ SECRET_KEY='!j1&*$wnrkrtc-74cc7_^#n6r3om$6s#!fy=zkd_xp(gkikl+8' DEBUG=false # A space-separated list of domain names and IPs -DOMAINS="$(hostname -d) $(hostname -s).local localhost" +DOMAINS="localhost $(hostname -d) $(hostname -s).local" IPS="$(hostname -I) 127.0.0.1" FQDN="$(hostname -f)" diff --git a/nginx/conf.template 
b/nginx/conf.template index 20d86433..cbd6c555 100644 --- a/nginx/conf.template +++ b/nginx/conf.template @@ -7,7 +7,6 @@ upstream wsgi-server { server api:8000 fail_timeout=0; } - server { listen 80 default_server; listen [::]:80 default_server; @@ -18,7 +17,6 @@ server { return 307 https://$host$request_uri; } - server { # SSL configuration listen 443 ssl; @@ -29,7 +27,6 @@ server { proxy_buffers 16 16k; proxy_buffer_size 16k; - ssl on; ssl_certificate /etc/ssl/certs/ssl-cert.pem; ssl_certificate_key /etc/ssl/private/ssl-cert.key; ssl_protocols TLSv1.2; diff --git a/scripts/autoformat_python.sh b/scripts/autoformat_python.sh new file mode 100755 index 00000000..7b4557dc --- /dev/null +++ b/scripts/autoformat_python.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +# Autoformat python - sort imports and then "blacken" code + +REPO_ROOT=${REPO_ROOT:=$(git rev-parse --show-toplevel)} +SRC_ROOT=${REPO_ROOT}/src + +echo "Sorting imports with isort... " +seed-isort-config --application-directories=${SRC_ROOT} --settings-path=${SRC_ROOT} +isort --recursive ${SRC_ROOT} +echo +echo "Formatting code with black... " +black ${SRC_ROOT} diff --git a/src/.isort.cfg b/src/.isort.cfg new file mode 100644 index 00000000..5dc108fc --- /dev/null +++ b/src/.isort.cfg @@ -0,0 +1,7 @@ +[settings] +multi_line_output=3 +include_trailing_comma=True +force_grid_wrap=0 +use_parentheses=True +line_length=88 +known_third_party=django,drf_yasg,jsonfield,numpy,pytest,requests_futures,requests_mock,rest_framework,ruamel,sigmf diff --git a/src/acquisitions/apps.py b/src/acquisitions/apps.py deleted file mode 100644 index 5c7a70d8..00000000 --- a/src/acquisitions/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.apps import AppConfig - - -class AcquisitionsConfig(AppConfig): - name = 'acquisitions' diff --git a/src/acquisitions/migrations/0001_initial.py b/src/acquisitions/migrations/0001_initial.py deleted file mode 100644 index f8d33e41..00000000 --- a/src/acquisitions/migrations/0001_initial.py +++ /dev/null @@ -1,30 +0,0 @@ -# Generated by Django 2.2.1 on 2019-05-15 20:55 - -from django.db import migrations, models -import jsonfield.fields - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ] - - operations = [ - migrations.CreateModel( - name='Acquisition', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('task_id', models.IntegerField(help_text='The id of the task relative to the acquisition')), - ('recording_id', models.IntegerField(default=0, help_text='The id of the recording relative to the task')), - ('sigmf_metadata', jsonfield.fields.JSONField(help_text='The sigmf meta data for the acquisition')), - ('data', models.BinaryField(null=True)), - ('created', models.DateTimeField(auto_now_add=True, help_text='The time the acquisition was created')), - ], - options={ - 'db_table': 'acquisitions', - 'ordering': ('created',), - }, - ), - ] diff --git a/src/acquisitions/migrations/0002_auto_20190515_2055.py b/src/acquisitions/migrations/0002_auto_20190515_2055.py deleted file mode 100644 index 81440f89..00000000 --- a/src/acquisitions/migrations/0002_auto_20190515_2055.py +++ /dev/null @@ -1,26 +0,0 @@ -# Generated by Django 2.2.1 on 2019-05-15 20:55 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ('acquisitions', '0001_initial'), - ('schedule', '0001_initial'), - ] - - operations = [ - migrations.AddField( - 
model_name='acquisition', - name='schedule_entry', - field=models.ForeignKey(help_text='The schedule entry relative to the acquisition', on_delete=django.db.models.deletion.PROTECT, related_name='acquisitions', to='schedule.ScheduleEntry'), - ), - migrations.AlterUniqueTogether( - name='acquisition', - unique_together={('schedule_entry', 'task_id', 'recording_id')}, - ), - ] diff --git a/src/acquisitions/models.py b/src/acquisitions/models.py deleted file mode 100644 index 64fa1083..00000000 --- a/src/acquisitions/models.py +++ /dev/null @@ -1,44 +0,0 @@ -from django.db import models -from jsonfield import JSONField - -from schedule.models import ScheduleEntry - - -class Acquisition(models.Model): - """Map between schedule entries and their task data and metadata. - - Schedule Entry and Task ID map the acquisition to a specific task on the - sensor, while recording ID allows for a single task to create more than one - SigMF recording. - - It is an error to create more than one Acquisition with the same schedule - entry, task id, and recording id. - - """ - schedule_entry = models.ForeignKey( - ScheduleEntry, - on_delete=models.PROTECT, - related_name='acquisitions', - help_text="The schedule entry relative to the acquisition") - task_id = models.IntegerField( - help_text="The id of the task relative to the acquisition") - recording_id = models.IntegerField( - default=0, - help_text="The id of the recording relative to the task") - sigmf_metadata = JSONField( - help_text="The sigmf meta data for the acquisition") - data = models.BinaryField(help_text="", null=True) - created = models.DateTimeField( - help_text="The time the acquisition was created", auto_now_add=True) - - class Meta: - db_table = 'acquisitions' - ordering = ('created', ) - unique_together = (('schedule_entry', 'task_id', 'recording_id'), ) - - def __str__(self): - return '{}/{}:{}'.format( - self.schedule_entry.name, - self.task_id, - self.recording_id - ) diff --git a/src/acquisitions/serializers.py b/src/acquisitions/serializers.py deleted file mode 100644 index 9fc95bbe..00000000 --- a/src/acquisitions/serializers.py +++ /dev/null @@ -1,87 +0,0 @@ -from rest_framework import serializers -from rest_framework.reverse import reverse - -from schedule.models import ScheduleEntry -from sensor import V1 -from .models import Acquisition - - -class AcquisitionsOverviewSerializer(serializers.HyperlinkedModelSerializer): - results = serializers.SerializerMethodField( - help_text="The link to the acquisitions") - schedule_entry = serializers.SerializerMethodField( - help_text="The related schedule entry for the acquisition") - acquisitions_available = serializers.SerializerMethodField( - help_text="The number of available acquisitions") - archive = serializers.SerializerMethodField( - help_text="The url to download a SigMF archive of all acquisitions" - ) - - class Meta: - model = ScheduleEntry - fields = ('results', 'acquisitions_available', 'archive', - 'schedule_entry') - - def get_results(self, obj): - request = self.context['request'] - route = 'acquisition-list' - kws = {'schedule_entry_name': obj.name} - kws.update(V1) - url = reverse(route, kwargs=kws, request=request) - return url - - def get_acquisitions_available(self, obj): - return obj.acquisitions.count() - - def get_schedule_entry(self, obj): - request = self.context['request'] - kwargs = {'pk': obj.name} - url = reverse('schedule-detail', kwargs=kwargs, request=request) - return url - - def get_archive(self, obj): - request = self.context['request'] - kwargs = 
{'schedule_entry_name': obj.name} - url = reverse('acquisition-list-archive', kwargs=kwargs, - request=request) - return url - - -class AcquisitionHyperlinkedRelatedField(serializers.HyperlinkedRelatedField): - # django-rest-framework.org/api-guide/relations/#custom-hyperlinked-fields - def get_url(self, obj, view_name, request, format): - kws = { - 'schedule_entry_name': obj.schedule_entry.name, - 'task_id': obj.task_id - } - kws.update(V1) - url = reverse(view_name, kwargs=kws, request=request, format=format) - return url - - -class AcquisitionSerializer(serializers.ModelSerializer): - # `self` here refers to the self url field - this seems to work - self = AcquisitionHyperlinkedRelatedField( - view_name='acquisition-detail', - read_only=True, - help_text="The url of the acquisition", - source='*' # pass whole object - ) - archive = AcquisitionHyperlinkedRelatedField( - view_name='acquisition-archive', - read_only=True, - help_text="The url to download a SigMF archive of this acquisition", - source='*' # pass whole object - ) - sigmf_metadata = serializers.DictField( - help_text="The sigmf meta data for the acquisition") - - class Meta: - model = Acquisition - fields = ('self', 'task_id', 'created', 'archive', 'sigmf_metadata') - extra_kwargs = { - 'schedule_entry': { - 'view_name': 'schedule-detail', - 'lookup_field': 'name' - } - } diff --git a/src/acquisitions/tests/test_admin_detail_view.py b/src/acquisitions/tests/test_admin_detail_view.py deleted file mode 100644 index ff592e81..00000000 --- a/src/acquisitions/tests/test_admin_detail_view.py +++ /dev/null @@ -1,128 +0,0 @@ -from rest_framework import status - -from acquisitions.tests.utils import (reverse_acquisition_detail, - update_acquisition_detail, - simulate_acquisitions, HTTPS_KWARG) -from sensor.tests.utils import validate_response - - -def test_admin_can_create_private_acquisition(admin_client, user_client, - test_scheduler): - private_entry_name = simulate_acquisitions(admin_client, is_private=True) - private_acq_url = reverse_acquisition_detail(private_entry_name, 1) - - user_response = user_client.get(private_acq_url, **HTTPS_KWARG) - - validate_response(user_response, status.HTTP_403_FORBIDDEN) - - -def test_admin_can_view_all_acquisitions(admin_client, alt_admin_client, - user_client, test_scheduler): - # alt admin schedule entry - alt_admin_entry_name = simulate_acquisitions( - alt_admin_client, name='alt_admin_single_acq') - alt_admin_acq_url = reverse_acquisition_detail(alt_admin_entry_name, 1) - - admin_view_alt_admin_response = admin_client.get(alt_admin_acq_url, - **HTTPS_KWARG) - - # user schedule entry - user_acq_name = simulate_acquisitions(user_client, name='admin_single_acq') - user_acq_url = reverse_acquisition_detail(user_acq_name, 1) - - admin_view_user_response = admin_client.get(user_acq_url, **HTTPS_KWARG) - - validate_response(admin_view_alt_admin_response, status.HTTP_200_OK) - validate_response(admin_view_user_response, status.HTTP_200_OK) - - -def test_admin_can_view_private_acquisitions(admin_client, alt_admin_client, - test_scheduler): - private_entry_name = simulate_acquisitions( - alt_admin_client, is_private=True) - private_acq_url = reverse_acquisition_detail(private_entry_name, 1) - - response = admin_client.get(private_acq_url, **HTTPS_KWARG) - - validate_response(response, status.HTTP_200_OK) - - -def test_admin_can_delete_their_acquisition(admin_client, test_scheduler): - entry_name = simulate_acquisitions(admin_client) - acq_url = reverse_acquisition_detail(entry_name, 1) - - 
first_response = admin_client.delete(acq_url, **HTTPS_KWARG) - second_response = admin_client.delete(acq_url, **HTTPS_KWARG) - - validate_response(first_response, status.HTTP_204_NO_CONTENT) - validate_response(second_response, status.HTTP_404_NOT_FOUND) - - -def test_admin_can_delete_other_acquisitions(admin_client, alt_admin_client, - user_client, test_scheduler): - # alt admin private schedule entry - alt_admin_entry_name = simulate_acquisitions( - alt_admin_client, name='alt_admin_single_acq', is_private=True) - alt_admin_acq_url = reverse_acquisition_detail(alt_admin_entry_name, 1) - - admin_delete_alt_admin_response = admin_client.delete( - alt_admin_acq_url, **HTTPS_KWARG) - - # user schedule entry - user_acq_name = simulate_acquisitions(user_client, name='admin_single_acq') - user_acq_url = reverse_acquisition_detail(user_acq_name, 1) - - admin_delete_user_response = admin_client.delete(user_acq_url, - **HTTPS_KWARG) - - validate_response(admin_delete_user_response, status.HTTP_204_NO_CONTENT) - validate_response(admin_delete_alt_admin_response, - status.HTTP_204_NO_CONTENT) - - -def test_admin_cant_modify_their_acquisition(admin_client, test_scheduler): - entry_name = simulate_acquisitions(admin_client) - acq_url = reverse_acquisition_detail(entry_name, 1) - - new_acquisition_detail = admin_client.get(acq_url, **HTTPS_KWARG).data - - new_acquisition_detail['task_id'] = 2 - - response = update_acquisition_detail(admin_client, entry_name, 1, - new_acquisition_detail) - - validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED) - - -def test_user_cant_modify_other_acquisitions(admin_client, alt_admin_client, - user_client, test_scheduler): - # alt admin schedule entry - alt_admin_entry_name = simulate_acquisitions( - alt_admin_client, name='alt_admin_single_acq') - alt_admin_acq_url = reverse_acquisition_detail(alt_admin_entry_name, 1) - - new_acquisition_detail = user_client.get(alt_admin_acq_url, **HTTPS_KWARG) - - new_acquisition_detail = new_acquisition_detail.data - - new_acquisition_detail['task_id'] = 2 - - admin_modify_alt_admin_response = update_acquisition_detail( - admin_client, alt_admin_entry_name, 1, new_acquisition_detail) - - # user schedule entry - user_entry_name = simulate_acquisitions( - user_client, name='admin_single_acq') - user_acq_url = reverse_acquisition_detail(user_entry_name, 1) - - new_acquisition_detail = admin_client.get(user_acq_url, **HTTPS_KWARG).data - - new_acquisition_detail['task_id'] = 2 - - admin_modify_user_response = update_acquisition_detail( - admin_client, user_entry_name, 1, new_acquisition_detail) - - validate_response(admin_modify_alt_admin_response, - status.HTTP_405_METHOD_NOT_ALLOWED) - validate_response(admin_modify_user_response, - status.HTTP_405_METHOD_NOT_ALLOWED) diff --git a/src/acquisitions/tests/test_archive_view.py b/src/acquisitions/tests/test_archive_view.py deleted file mode 100644 index 8ab23b65..00000000 --- a/src/acquisitions/tests/test_archive_view.py +++ /dev/null @@ -1,42 +0,0 @@ -import os -import tempfile - -import numpy as np -from rest_framework import status - -import sigmf.sigmffile - -import sensor.settings -from acquisitions.tests.utils import (reverse_acquisition_archive, - simulate_acquisitions, HTTPS_KWARG) - - -def test_archive_download(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=1) - task_id = 1 - url = reverse_acquisition_archive(entry_name, task_id) - disposition = 'attachment; filename="{}_test_acq_1.sigmf"' - disposition = 
disposition.format(sensor.settings.FQDN) - response = user_client.get(url, **HTTPS_KWARG) - - assert response.status_code == status.HTTP_200_OK - assert response['content-disposition'] == disposition - assert response['content-type'] == 'application/x-tar' - - with tempfile.NamedTemporaryFile() as tf: - for content in response.streaming_content: - tf.write(content) - - sigmf_archive_contents = sigmf.sigmffile.fromarchive(tf.name) - md = sigmf_archive_contents._metadata - datafile = sigmf_archive_contents.data_file - datafile_actual_size = os.stat(datafile).st_size - claimed_sha512 = md['global']['core:sha512'] - number_of_sample_arrays = len(md['annotations']) - samples_per_array = md['annotations'][0]['core:sample_count'] - sample_array_size = samples_per_array * np.float32(0.0).nbytes - datafile_expected_size = number_of_sample_arrays * sample_array_size - actual_sha512 = sigmf.sigmf_hash.calculate_sha512(datafile) - - assert datafile_actual_size == datafile_expected_size - assert claimed_sha512 == actual_sha512 diff --git a/src/acquisitions/tests/test_detail_view.py b/src/acquisitions/tests/test_detail_view.py deleted file mode 100644 index 726151c5..00000000 --- a/src/acquisitions/tests/test_detail_view.py +++ /dev/null @@ -1,70 +0,0 @@ -import pytest -from rest_framework import status -from rest_framework.reverse import reverse - -from acquisitions.tests.utils import (get_acquisition_detail, - reverse_acquisition_detail, - simulate_acquisitions, HTTPS_KWARG) -from sensor import V1 -from sensor.tests.utils import validate_response - - -def test_non_existent_entry(user_client): - with pytest.raises(AssertionError): - get_acquisition_detail(user_client, 'doesntexist', 1) - - -def test_non_existent_task_id(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=1) - with pytest.raises(AssertionError): - non_existent_task_id = 2 - get_acquisition_detail(user_client, entry_name, non_existent_task_id) - - -def test_get_detail_from_single(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=1) - task_id = 1 - acq = get_acquisition_detail(user_client, entry_name, task_id) - - assert acq['task_id'] == task_id - - -def test_get_detail_from_multiple(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=3) - task_id = 3 - acq = get_acquisition_detail(user_client, entry_name, task_id) - - assert acq['task_id'] == task_id - - -def test_delete_single(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=3) - task_id_to_delete = 2 - url = reverse_acquisition_detail(entry_name, task_id_to_delete) - - response = user_client.delete(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_204_NO_CONTENT) - - response = user_client.delete(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_404_NOT_FOUND) - - # other 2 acquisitions should be unaffected - get_acquisition_detail(user_client, entry_name, 1) - get_acquisition_detail(user_client, entry_name, 3) - - -def test_private_entries_have_private_acquisitons(admin_client, user_client, - test_scheduler): - entry_name = simulate_acquisitions(admin_client, is_private=True) - kws = {'pk': entry_name} - kws.update(V1) - entry_url = reverse('schedule-detail', kwargs=kws) - - admin_response = admin_client.get(entry_url, **HTTPS_KWARG) - admin_acquisition_url = admin_response.data['acquisitions'] - - user_respose = user_client.get(admin_acquisition_url, **HTTPS_KWARG) - admin_respose = admin_client.get(admin_acquisition_url, **HTTPS_KWARG) 
- - validate_response(user_respose, status.HTTP_403_FORBIDDEN) - validate_response(admin_respose, status.HTTP_200_OK) diff --git a/src/acquisitions/tests/test_list_view.py b/src/acquisitions/tests/test_list_view.py deleted file mode 100644 index 75958c79..00000000 --- a/src/acquisitions/tests/test_list_view.py +++ /dev/null @@ -1,56 +0,0 @@ -import pytest -from rest_framework import status - -from acquisitions.tests.utils import ( - get_acquisition_list, reverse_acquisition_detail, reverse_acquisition_list, - simulate_acquisitions) -from schedule.tests.utils import post_schedule, TEST_SCHEDULE_ENTRY -from sensor.tests.utils import validate_response, HTTPS_KWARG - - -def test_non_existent_entry(user_client, test_scheduler): - with pytest.raises(AssertionError): - get_acquisition_list(user_client, 'doesntexist') - - -@pytest.mark.django_db -def test_entry_with_no_acquisition_response(user_client, test_scheduler): - entry = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - - with pytest.raises(AssertionError): - assert get_acquisition_list(user_client, entry['name']) - - -@pytest.mark.django_db -def test_single_acquisition_response(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=1) - acquisition, = get_acquisition_list(user_client, entry_name) - task_id = 1 - expected_url = reverse_acquisition_detail(entry_name, task_id) - - assert acquisition['self'] == expected_url - assert acquisition['task_id'] == task_id - - -@pytest.mark.django_db -def test_multiple_acquisition_response(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=3) - acquisitions = get_acquisition_list(user_client, entry_name) - assert len(acquisitions) == 3 - - for i, acq in enumerate(acquisitions, start=1): - expected_url = reverse_acquisition_detail(entry_name, i) - assert acq['self'] == expected_url - assert acq['task_id'] == i - - -@pytest.mark.django_db -def test_delete_list(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=3) - url = reverse_acquisition_list(entry_name) - - response = user_client.delete(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_204_NO_CONTENT) - - response = user_client.delete(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_404_NOT_FOUND) diff --git a/src/acquisitions/tests/test_overview_view.py b/src/acquisitions/tests/test_overview_view.py deleted file mode 100644 index c9720f47..00000000 --- a/src/acquisitions/tests/test_overview_view.py +++ /dev/null @@ -1,56 +0,0 @@ -from rest_framework import status - -from acquisitions.tests.utils import ( - SINGLE_ACQUISITION, EMPTY_ACQUISITIONS_RESPONSE, - reverse_acquisitions_overview, reverse_acquisition_list, - simulate_acquisitions, get_acquisitions_overview) -from schedule.tests.utils import post_schedule -from sensor.tests.utils import validate_response, HTTPS_KWARG - - -def test_empty_overview_response(user_client): - response = get_acquisitions_overview(user_client) - assert response == EMPTY_ACQUISITIONS_RESPONSE - - -def test_overview_exists_when_entry_created(user_client, test_scheduler): - post_schedule(user_client, SINGLE_ACQUISITION) - overview, = get_acquisitions_overview(user_client) - assert overview['acquisitions_available'] == 0 - - -def test_get_overview(user_client, test_scheduler): - entry1_name = simulate_acquisitions(user_client) - overview, = get_acquisitions_overview(user_client) - - assert overview['results'] == reverse_acquisition_list(entry1_name) - assert overview['acquisitions_available'] == 1 - - 
entry2_name = simulate_acquisitions(user_client, n=3) - overview_list = get_acquisitions_overview(user_client) - - assert len(overview_list) == 2 - - (overview1, overview2) = overview_list - - assert overview1 == overview - assert overview2['results'] == reverse_acquisition_list(entry2_name) - assert overview2['acquisitions_available'] == 3 - - -def test_overview_for_private_entry_is_private(admin_client, user_client, - test_scheduler): - simulate_acquisitions(admin_client, is_private=True) - overview = get_acquisitions_overview(user_client) - assert overview == [] - - overview, = get_acquisitions_overview(admin_client) - assert overview['acquisitions_available'] == 1 - assert overview['results'] # is non-empty string - assert overview['schedule_entry'] # is non-empty string - - -def test_delete_overview_not_allowed(user_client, test_scheduler): - url = reverse_acquisitions_overview() - response = user_client.delete(url, **HTTPS_KWARG) - assert validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED) diff --git a/src/acquisitions/tests/test_user_detail_view.py b/src/acquisitions/tests/test_user_detail_view.py deleted file mode 100644 index 46c15a0a..00000000 --- a/src/acquisitions/tests/test_user_detail_view.py +++ /dev/null @@ -1,138 +0,0 @@ -from rest_framework import status - -from acquisitions.tests.utils import (reverse_acquisition_detail, - update_acquisition_detail, - simulate_acquisitions, HTTPS_KWARG) -from sensor.tests.utils import validate_response - - -def test_user_can_create_nonprivate_acquisition(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client) - acq_url = reverse_acquisition_detail(entry_name, 1) - response = user_client.get(acq_url, **HTTPS_KWARG) - - validate_response(response, status.HTTP_200_OK) - - -def test_user_cant_create_private_acquisition(user_client, alt_user_client, - test_scheduler): - # The alt user attempts to create a private acquisition. - entry_name = simulate_acquisitions(alt_user_client, is_private=True) - acq_url = reverse_acquisition_detail(entry_name, 1) - - # The user attempts to GET the acquisition that the alt user created. - response = user_client.get(acq_url, **HTTPS_KWARG) - - # The user successfully GETs the acquistion that the alt user - # created; meaning that the acquisition was not, in fact, private. 
- validate_response(response, status.HTTP_200_OK) - - -def test_user_can_view_other_nonprivate_acquisitions( - admin_client, user_client, alt_user_client, test_scheduler): - # alt user schedule entry - alt_user_entry_name = simulate_acquisitions( - alt_user_client, name='alt_user_single_acq') - alt_user_acq_url = reverse_acquisition_detail(alt_user_entry_name, 1) - - user_view_alt_user_response = user_client.get(alt_user_acq_url, - **HTTPS_KWARG) - - # admin user schedule entry - admin_acq_name = simulate_acquisitions( - admin_client, name='admin_single_acq') - admin_acq_url = reverse_acquisition_detail(admin_acq_name, 1) - - user_view_admin_response = user_client.get(admin_acq_url, **HTTPS_KWARG) - - validate_response(user_view_alt_user_response, status.HTTP_200_OK) - validate_response(user_view_admin_response, status.HTTP_200_OK) - - -def test_user_cant_view_private_acquisitions(admin_client, user_client, - test_scheduler): - private_entry_name = simulate_acquisitions(admin_client, is_private=True) - private_acq_url = reverse_acquisition_detail(private_entry_name, 1) - - response = user_client.get(private_acq_url, **HTTPS_KWARG) - - validate_response(response, status.HTTP_403_FORBIDDEN) - - -def test_user_can_delete_their_acquisition(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client) - acq_url = reverse_acquisition_detail(entry_name, 1) - - first_response = user_client.delete(acq_url, **HTTPS_KWARG) - second_response = user_client.delete(acq_url, **HTTPS_KWARG) - - validate_response(first_response, status.HTTP_204_NO_CONTENT) - validate_response(second_response, status.HTTP_404_NOT_FOUND) - - -def test_user_cant_delete_other_acquisitions(admin_client, user_client, - alt_user_client, test_scheduler): - # alt user schedule entry - alt_user_entry_name = simulate_acquisitions( - alt_user_client, name='alt_user_single_acq') - alt_user_acq_url = reverse_acquisition_detail(alt_user_entry_name, 1) - - user_delete_alt_user_response = user_client.delete(alt_user_acq_url, - **HTTPS_KWARG) - - # admin user schedule entry - admin_acq_name = simulate_acquisitions( - admin_client, name='admin_single_acq') - admin_acq_url = reverse_acquisition_detail(admin_acq_name, 1) - - user_delete_admin_response = user_client.delete(admin_acq_url, - **HTTPS_KWARG) - - validate_response(user_delete_admin_response, status.HTTP_403_FORBIDDEN) - validate_response(user_delete_alt_user_response, status.HTTP_403_FORBIDDEN) - - -def test_user_cant_modify_their_acquisition(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client) - acq_url = reverse_acquisition_detail(entry_name, 1) - - new_acquisition_detail = user_client.get(acq_url, **HTTPS_KWARG).data - - new_acquisition_detail['task_id'] = 2 - - response = update_acquisition_detail(user_client, entry_name, 1, - new_acquisition_detail) - - validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED) - - -def test_user_cant_modify_other_acquisitions(admin_client, user_client, - alt_user_client, test_scheduler): - # alt user schedule entry - alt_user_entry_name = simulate_acquisitions( - alt_user_client, name='alt_user_single_acq') - alt_user_acq_url = reverse_acquisition_detail(alt_user_entry_name, 1) - - new_acquisition_detail = user_client.get(alt_user_acq_url, **HTTPS_KWARG) - - new_acquisition_detail = new_acquisition_detail.data - - new_acquisition_detail['task_id'] = 2 - - user_modify_alt_user_response = update_acquisition_detail( - user_client, alt_user_entry_name, 1, new_acquisition_detail) - - # admin user 
schedule entry - admin_entry_name = simulate_acquisitions( - admin_client, name='admin_single_acq') - admin_acq_url = reverse_acquisition_detail(admin_entry_name, 1) - - new_acquisition_detail = user_client.get(admin_acq_url, **HTTPS_KWARG).data - - new_acquisition_detail['task_id'] = 2 - - user_modify_admin_response = update_acquisition_detail( - user_client, admin_entry_name, 1, new_acquisition_detail) - - validate_response(user_modify_alt_user_response, status.HTTP_403_FORBIDDEN) - validate_response(user_modify_admin_response, status.HTTP_403_FORBIDDEN) diff --git a/src/acquisitions/tests/utils.py b/src/acquisitions/tests/utils.py deleted file mode 100644 index 2abad9db..00000000 --- a/src/acquisitions/tests/utils.py +++ /dev/null @@ -1,113 +0,0 @@ -import json -from django.test import RequestFactory -from rest_framework.reverse import reverse -from rest_framework import status - -from schedule.tests.utils import post_schedule -from scheduler.tests.utils import simulate_scheduler_run -from sensor import V1 -from sensor.tests.utils import validate_response, HTTPS_KWARG - -EMPTY_ACQUISITIONS_RESPONSE = [] - -SINGLE_ACQUISITION = { - 'name': 'test_acq', - 'start': None, - 'stop': None, - 'interval': None, - 'action': 'mock_acquire' -} - -MULTIPLE_ACQUISITIONS = { - 'name': 'test_multiple_acq', - 'start': None, - 'relative_stop': 5, - 'interval': 1, - 'action': 'mock_acquire' -} - - -def simulate_acquisitions(client, n=1, is_private=False, name=None): - assert 0 < n <= 10 - - if n == 1: - schedule_entry = SINGLE_ACQUISITION.copy() - else: - schedule_entry = MULTIPLE_ACQUISITIONS.copy() - schedule_entry['relative_stop'] = n + 1 - - schedule_entry['is_private'] = is_private - - if name is not None: - schedule_entry['name'] = name - - entry = post_schedule(client, schedule_entry) - simulate_scheduler_run(n) - - return entry['name'] - - -def reverse_acquisitions_overview(): - rf = RequestFactory() - request = rf.get('/acquisitions/', **HTTPS_KWARG) - return reverse('acquisitions-overview', kwargs=V1, request=request) - - -def reverse_acquisition_list(schedule_entry_name): - rf = RequestFactory() - request = rf.get('/acquisitions/' + schedule_entry_name, **HTTPS_KWARG) - kws = {'schedule_entry_name': schedule_entry_name} - kws.update(V1) - return reverse('acquisition-list', kwargs=kws, request=request) - - -def reverse_acquisition_detail(schedule_entry_name, task_id): - rf = RequestFactory() - url = '/acquisitions/' + schedule_entry_name + '/' + str(task_id) - request = rf.get(url, **HTTPS_KWARG) - kws = {'schedule_entry_name': schedule_entry_name, 'task_id': task_id} - kws.update(V1) - return reverse('acquisition-detail', kwargs=kws, request=request) - - -def reverse_acquisition_archive(schedule_entry_name, task_id): - rf = RequestFactory() - entry_name = schedule_entry_name - url = '/'.join(['/acquisitions', entry_name, str(task_id), 'archive']) - request = rf.get(url, **HTTPS_KWARG) - kws = {'schedule_entry_name': entry_name, 'task_id': task_id} - kws.update(V1) - return reverse('acquisition-archive', kwargs=kws, request=request) - - -def get_acquisitions_overview(client): - url = reverse_acquisitions_overview() - response = client.get(url, **HTTPS_KWARG) - rjson = validate_response(response, status.HTTP_200_OK) - return rjson['results'] - - -def get_acquisition_list(client, schedule_entry_name): - url = reverse_acquisition_list(schedule_entry_name) - response = client.get(url, **HTTPS_KWARG) - rjson = validate_response(response, status.HTTP_200_OK) - return rjson['results'] - - -def 
get_acquisition_detail(client, schedule_entry_name, task_id): - url = reverse_acquisition_detail(schedule_entry_name, task_id) - response = client.get(url, **HTTPS_KWARG) - return validate_response(response, status.HTTP_200_OK) - - -def update_acquisition_detail(client, schedule_entry_name, task_id, - new_acquisition): - url = reverse_acquisition_detail(schedule_entry_name, task_id) - - kwargs = { - 'data': json.dumps(new_acquisition), - 'content_type': 'application/json', - 'wsgi.url_scheme': 'https' - } - - return client.put(url, **kwargs) diff --git a/src/acquisitions/urls.py b/src/acquisitions/urls.py deleted file mode 100644 index 5665efed..00000000 --- a/src/acquisitions/urls.py +++ /dev/null @@ -1,34 +0,0 @@ -from django.urls import path - -from .views import (AcquisitionsOverviewViewSet, AcquisitionListViewSet, - AcquisitionInstanceViewSet) - -urlpatterns = ( - path('', - view=AcquisitionsOverviewViewSet.as_view({ - 'get': 'list' - }), - name='acquisitions-overview'), - path('/', - view=AcquisitionListViewSet.as_view({ - 'get': 'list', - 'delete': 'destroy_all' - }), - name='acquisition-list'), - path('/archive/', - view=AcquisitionListViewSet.as_view({ - 'get': 'archive', - }), - name='acquisition-list-archive'), - path('//', - view=AcquisitionInstanceViewSet.as_view({ - 'get': 'retrieve', - 'delete': 'destroy' - }), - name='acquisition-detail'), - path('//archive', - view=AcquisitionInstanceViewSet.as_view({ - 'get': 'archive', - }), - name='acquisition-archive') -) diff --git a/src/acquisitions/views.py b/src/acquisitions/views.py deleted file mode 100644 index 40bf9b51..00000000 --- a/src/acquisitions/views.py +++ /dev/null @@ -1,177 +0,0 @@ -import logging -import tempfile - -from django.http import Http404, FileResponse -from rest_framework import status, filters -from rest_framework.decorators import action -from rest_framework.generics import get_object_or_404 -from rest_framework.mixins import (ListModelMixin, RetrieveModelMixin, - DestroyModelMixin) -from rest_framework.response import Response -from rest_framework.settings import api_settings -from rest_framework.viewsets import GenericViewSet - -import sigmf.archive -import sigmf.sigmffile - -import sensor.settings -from schedule.models import ScheduleEntry -from .models import Acquisition -from .permissions import IsAdminOrOwnerOrReadOnly -from .serializers import (AcquisitionsOverviewSerializer, - AcquisitionSerializer) - - -logger = logging.getLogger(__name__) - - -class AcquisitionsOverviewViewSet(ListModelMixin, GenericViewSet): - """ - list: - Returns an overview of how many acquisitions are available per schedule - entry. - """ - lookup_field = 'schedule_entry_name' - queryset = ScheduleEntry.objects.all() - serializer_class = AcquisitionsOverviewSerializer - - def get_queryset(self): - # .list() does not call .get_object(), which triggers permissions - # checks, so we need to filter our queryset based on `is_private` and - # request user. 
- base_queryset = self.filter_queryset(self.queryset) - if self.request.user.is_staff: - return base_queryset - else: - return base_queryset.filter(is_private=False) - - -class MultipleFieldLookupMixin(object): - """Get multiple field filtering based on a `lookup_fields` attribute.""" - - def get_queryset(self): - base_queryset = super(MultipleFieldLookupMixin, self).get_queryset() - base_queryset = self.filter_queryset(base_queryset) - - filter = {'schedule_entry__name': self.kwargs['schedule_entry_name']} - - queryset = base_queryset.filter(**filter) - - if not queryset.exists(): - raise Http404 - - return queryset - - def get_object(self): - queryset = self.get_queryset() - filter = {'task_id': self.kwargs['task_id']} - - return get_object_or_404(queryset, **filter) - - -class AcquisitionListViewSet(MultipleFieldLookupMixin, ListModelMixin, - GenericViewSet): - """ - list: - Returns a list of all acquisitions created by the given schedule entry. - - destroy_all: - Deletes all acquisitions created by the given schedule entry. - """ - queryset = Acquisition.objects.all() - serializer_class = AcquisitionSerializer - permission_classes = ( - api_settings.DEFAULT_PERMISSION_CLASSES + [IsAdminOrOwnerOrReadOnly]) - filter_backends = (filters.SearchFilter, filters.OrderingFilter) - lookup_fields = ('schedule_entry__name', 'task_id') - ordering_fields = ('task_id', 'created') - search_fields = ('sigmf_metadata', ) - - @action(detail=False, methods=('delete', )) - def destroy_all(self, request, version, schedule_entry_name): - queryset = self.get_queryset() - queryset = queryset.filter(schedule_entry__name=schedule_entry_name) - - if not queryset.exists(): - raise Http404 - - queryset.delete() - - return Response(status=status.HTTP_204_NO_CONTENT) - - @action(detail=False) - def archive(self, request, version, schedule_entry_name): - queryset = self.get_queryset() - queryset = queryset.filter(schedule_entry__name=schedule_entry_name) - fqdn = sensor.settings.FQDN - fname = fqdn + '_' + schedule_entry_name + '.sigmf' - - if not queryset.exists(): - raise Http404 - - # FileResponse handles closing the file - tmparchive = tempfile.TemporaryFile() - build_sigmf_archive(tmparchive, schedule_entry_name, queryset) - content_type = 'application/x-tar' - response = FileResponse(tmparchive, as_attachment=True, filename=fname, - content_type=content_type) - return response - - -class AcquisitionInstanceViewSet(MultipleFieldLookupMixin, RetrieveModelMixin, - DestroyModelMixin, GenericViewSet): - """ - destroy: - Deletes the specified acquisition. - - retrieve: - Returns all available metadata about an acquisition. - - archive: - Downloads the acquisition's SigMF archive. 
- """ - queryset = Acquisition.objects.all() - serializer_class = AcquisitionSerializer - permission_classes = ( - api_settings.DEFAULT_PERMISSION_CLASSES + [IsAdminOrOwnerOrReadOnly]) - lookup_fields = ('schedule_entry__name', 'task_id') - - @action(detail=True) - def archive(self, request, version, schedule_entry_name, task_id): - entry_name = schedule_entry_name - fqdn = sensor.settings.FQDN - fname = fqdn + '_' + entry_name + '_' + str(task_id) + '.sigmf' - acq = self.get_object() - - # FileResponse handles closing the file - tmparchive = tempfile.TemporaryFile() - build_sigmf_archive(tmparchive, schedule_entry_name, [acq]) - content_type = 'application/x-tar' - response = FileResponse(tmparchive, as_attachment=True, filename=fname, - content_type=content_type) - return response - - -def build_sigmf_archive(fileobj, schedule_entry_name, acquisitions): - """Build a SigMF archive containing `acquisitions` and save to fileobj. - - @param fileobj: a fileobj open for writing - @param schedule_entry_name: the name of the parent schedule entry - @param acquisitions: an iterable of Acquisition objects from the database - @return: None - - """ - logger.debug("building sigmf archive") - - for acq in acquisitions: - with tempfile.NamedTemporaryFile() as tmpdata: - tmpdata.write(acq.data) - tmpdata.seek(0) # move fd ptr to start of data for reading - name = schedule_entry_name + '_' + str(acq.task_id) - sigmf_file = sigmf.sigmffile.SigMFFile(metadata=acq.sigmf_metadata, - name=name) - sigmf_file.set_data_file(tmpdata.name) - - sigmf.archive.SigMFArchive(sigmf_file, path=name, fileobj=fileobj) - - logger.debug("sigmf archive built") diff --git a/src/actions/__init__.py b/src/actions/__init__.py index d30de9a9..0a9fe902 100644 --- a/src/actions/__init__.py +++ b/src/actions/__init__.py @@ -5,12 +5,11 @@ from sensor import settings -from . import acquire_single_freq_fft -from . import acquire_stepped_freq_tdomain_iq from . import logger as logger_action -from . import monitor_usrp -from . 
import sync_gps - +from .acquire_single_freq_fft import SingleFrequencyFftAcquisition +from .acquire_stepped_freq_tdomain_iq import SteppedFrequencyTimeDomainIqAcquisition +from .monitor_usrp import UsrpMonitor +from .sync_gps import SyncGps logger = logging.getLogger(__name__) @@ -18,10 +17,11 @@ # Actions initialized here are made available through the API registered_actions = { "logger": logger_action.Logger(), - "admin_logger": - logger_action.Logger(loglvl=logger_action.LOGLVL_ERROR, admin_only=True), - "monitor_usrp": monitor_usrp.UsrpMonitor(admin_only=True), - "sync_gps": sync_gps.SyncGps(admin_only=True) + "admin_logger": logger_action.Logger( + loglvl=logger_action.LOGLVL_ERROR, admin_only=True + ), + "monitor_usrp": UsrpMonitor(admin_only=True), + "sync_gps": SyncGps(admin_only=True), } by_name = registered_actions @@ -31,12 +31,10 @@ # The YAML loader can key an object with parameters on these class names action_classes = { "logger": logger_action.Logger, - "usrp_monitor": monitor_usrp.UsrpMonitor, - "sync_gps": sync_gps.SyncGps, - "single_frequency_fft": - acquire_single_freq_fft.SingleFrequencyFftAcquisition, - "stepped_frequency_time_domain_iq": - acquire_stepped_freq_tdomain_iq.SteppedFrequencyTimeDomainIqAcquisition + "usrp_monitor": UsrpMonitor, + "sync_gps": SyncGps, + "single_frequency_fft": SingleFrequencyFftAcquisition, + "stepped_frequency_time_domain_iq": SteppedFrequencyTimeDomainIqAcquisition, } @@ -63,9 +61,9 @@ def get_summary(action_fn): def load_from_yaml(yaml_dir=settings.ACTION_DEFINITIONS_DIR): """Load any YAML files in yaml_dir.""" - yaml = YAML(typ='safe') + yaml = YAML(typ="safe") yaml_path = Path(yaml_dir) - for yaml_file in yaml_path.glob('*.yml'): + for yaml_file in yaml_path.glob("*.yml"): defn = yaml.load(yaml_file) for class_name, parameters in defn.items(): try: diff --git a/src/actions/acquire_single_freq_fft.py b/src/actions/acquire_single_freq_fft.py index 470d556f..82862aec 100644 --- a/src/actions/acquire_single_freq_fft.py +++ b/src/actions/acquire_single_freq_fft.py @@ -57,7 +57,7 @@ ## Frequency-domain processing After windowing, the data matrix is converted into the frequency domain using -an FFT'd, doing the equivalent of the DFT defined as +an FFT, doing the equivalent of the DFT defined as $$A_k = \sum_{{m=0}}^{{n-1}} a_m \exp\left\\{{-2\pi i{{mk \over n}}\right\\}} \qquad k = 0,\ldots,n-1$$ @@ -77,19 +77,15 @@ """ -from __future__ import absolute_import - import logging - -import numpy as np from enum import Enum -from rest_framework.reverse import reverse +import numpy as np from sigmf.sigmffile import SigMFFile from capabilities import capabilities -from hardware import usrp_iface -from sensor import V1, settings, utils +from hardware import sdr +from sensor import settings, utils from .base import Action @@ -97,7 +93,7 @@ GLOBAL_INFO = { "core:datatype": "f32_le", # 32-bit float, Little Endian - "core:version": "0.0.1" + "core:version": "0.0.1", } @@ -110,7 +106,7 @@ class M4sDetector(Enum): # The sigmf-ns-scos version targeted by this action -SCOS_TRANSFER_SPEC_VER = '0.1' +SCOS_TRANSFER_SPEC_VER = "0.2" def m4s_detector(array): @@ -119,8 +115,7 @@ def m4s_detector(array): Detector is applied along each column. 
:param array: an (m x n) array of real frequency-domain linear power values - :returns: a (5 x n) in the order min, max, mean, median, sample in the case - that `detector` is `m4s`, otherwise a (1 x n) array + :returns: a (5 x n) array in the order min, max, mean, median, sample """ amin = np.min(array, axis=0) @@ -146,7 +141,7 @@ class SingleFrequencyFftAcquisition(Action): """ def __init__(self, name, frequency, gain, sample_rate, fft_size, nffts): - super(SingleFrequencyFftAcquisition, self).__init__() + super().__init__() self.name = name self.frequency = frequency @@ -154,67 +149,65 @@ def __init__(self, name, frequency, gain, sample_rate, fft_size, nffts): self.sample_rate = sample_rate self.fft_size = fft_size self.nffts = nffts - self.usrp = usrp_iface # make instance variable to allow mocking + self.sdr = sdr # make instance variable to allow mocking self.enbw = None def __call__(self, schedule_entry_name, task_id): """This is the entrypoint function called by the scheduler.""" - from schedule.models import ScheduleEntry + from tasks.models import TaskResult - # raises ScheduleEntry.DoesNotExist if no matching schedule entry - parent_entry = ScheduleEntry.objects.get(name=schedule_entry_name) + # Raises TaskResult.DoesNotExist if no matching task result + task_result = TaskResult.objects.get( + schedule_entry__name=schedule_entry_name, task_id=task_id + ) self.test_required_components() - self.configure_usrp() - data = self.acquire_data(parent_entry, task_id) + self.configure_sdr() + data = self.acquire_data() m4s_data = self.apply_detector(data) sigmf_md = self.build_sigmf_md() - self.archive(m4s_data, sigmf_md, parent_entry, task_id) - - kws = {'schedule_entry_name': schedule_entry_name, 'task_id': task_id} - kws.update(V1) - detail = reverse( - 'acquisition-detail', kwargs=kws, request=parent_entry.request) - - return detail + self.archive(task_result, m4s_data, sigmf_md) def test_required_components(self): """Fail acquisition if a required component is not available.""" - self.usrp.connect() - if not self.usrp.is_available: - msg = "acquisition failed: USRP required but not available" + self.sdr.connect() + if not self.sdr.is_available: + msg = "acquisition failed: SDR required but not available" raise RuntimeError(msg) - def configure_usrp(self): - self.set_usrp_clock_rate() - self.set_usrp_sample_rate() - self.set_usrp_frequency() - self.set_usrp_gain() + def configure_sdr(self): + self.set_sdr_clock_rate() + self.set_sdr_sample_rate() + self.set_sdr_frequency() + self.set_sdr_gain() - def set_usrp_gain(self): - self.usrp.radio.gain = self.gain + def set_sdr_gain(self): + self.sdr.radio.gain = self.gain - def set_usrp_sample_rate(self): - self.usrp.radio.sample_rate = self.sample_rate - self.sample_rate = self.usrp.radio.sample_rate + def set_sdr_sample_rate(self): + self.sdr.radio.sample_rate = self.sample_rate + self.sample_rate = self.sdr.radio.sample_rate - def set_usrp_clock_rate(self): + def set_sdr_clock_rate(self): clock_rate = self.sample_rate while clock_rate < 10e6: clock_rate *= 4 - self.usrp.radio.clock_rate = clock_rate + self.sdr.radio.clock_rate = clock_rate - def set_usrp_frequency(self): + def set_sdr_frequency(self): requested_frequency = self.frequency - self.usrp.radio.frequency = requested_frequency - self.frequency = self.usrp.radio.frequency + self.sdr.radio.frequency = requested_frequency + self.frequency = self.sdr.radio.frequency - def acquire_data(self, parent_entry, task_id): + def acquire_data(self): msg = "Acquiring {} FFTs at {} MHz" 
logger.debug(msg.format(self.nffts, self.frequency / 1e6)) - data = self.usrp.radio.acquire_samples(self.nffts * self.fft_size) + # Drop ~10 ms of samples + nskip = int(0.01 * self.sample_rate) + + data = self.sdr.radio.acquire_samples(self.nffts * self.fft_size, nskip=nskip) data.resize((self.nffts, self.fft_size)) return data @@ -227,14 +220,14 @@ def build_sigmf_md(self): sigmf_md.set_global_field("core:sample_rate", self.sample_rate) sigmf_md.set_global_field("core:description", self.description) - sensor_def = capabilities['sensor_definition'] + sensor_def = capabilities["sensor_definition"] sigmf_md.set_global_field("ntia:sensor_definition", sensor_def) sigmf_md.set_global_field("ntia:sensor_id", settings.FQDN) sigmf_md.set_global_field("scos:version", SCOS_TRANSFER_SPEC_VER) capture_md = { "core:frequency": self.frequency, - "core:time": utils.get_datetime_str_now() + "core:time": utils.get_datetime_str_now(), } sigmf_md.add_capture(start_index=0, metadata=capture_md) @@ -247,19 +240,20 @@ def build_sigmf_md(self): "detector": detector.name + "_power", "number_of_ffts": self.nffts, "units": "dBm", - "reference": "not referenced" + "reference": "not referenced", } annotation_md = { "scos:measurement_type": { - "single_frequency_fft_detection": single_frequency_fft_md, + "single_frequency_fft_detection": single_frequency_fft_md } } sigmf_md.add_annotation( start_index=(i * self.fft_size), length=self.fft_size, - metadata=annotation_md) + metadata=annotation_md, + ) return sigmf_md @@ -268,10 +262,10 @@ def apply_detector(self, data): logger.debug("Applying detector") window = np.blackman(self.fft_size) - window_power = sum(window**2) + window_power = sum(window ** 2) impedance = 50.0 # ohms - self.enbw = self.fft_size * window_power / sum(window)**2 + self.enbw = self.fft_size * window_power / sum(window) ** 2 Vsq2W_dB = -10.0 * np.log10(self.fft_size * window_power * impedance) @@ -286,30 +280,37 @@ def apply_detector(self, data): # Apply detector while we're linear # The m4s detector returns a (5 x fft_size) ndarray fdata_watts_m4s = m4s_detector(fdata_watts) + + # If testing, don't flood output with divide-by-zero warnings + if settings.RUNNING_TESTS: + np_error_settings_savepoint = np.seterr(divide="ignore") + fdata_dbm_m4s = 10 * np.log10(fdata_watts_m4s) + 30 + Vsq2W_dB + if settings.RUNNING_TESTS: + # Restore numpy error settings + np.seterr(**np_error_settings_savepoint) + return fdata_dbm_m4s - def archive(self, m4s_data, sigmf_md, parent_entry, task_id): - from acquisitions.models import Acquisition + def archive(self, task_result, m4s_data, sigmf_md): + from tasks.models import Acquisition logger.debug("Storing acquisition in database") Acquisition( - schedule_entry=parent_entry, - task_id=task_id, - sigmf_metadata=sigmf_md._metadata, - data=m4s_data).save() + task_result=task_result, metadata=sigmf_md._metadata, data=m4s_data + ).save() @property def description(self): defs = { - 'name': self.name, - 'frequency': self.frequency / 1e6, - 'sample_rate': self.sample_rate / 1e6, - 'fft_size': self.fft_size, - 'nffts': self.nffts, - 'gain': self.gain + "name": self.name, + "frequency": self.frequency / 1e6, + "sample_rate": self.sample_rate / 1e6, + "fft_size": self.fft_size, + "nffts": self.nffts, + "gain": self.gain, } # __doc__ refers to the module docstring at the top of the file diff --git a/src/actions/acquire_stepped_freq_tdomain_iq.py b/src/actions/acquire_stepped_freq_tdomain_iq.py index b18bff81..f58233f9 100644 --- a/src/actions/acquire_stepped_freq_tdomain_iq.py +++ 
b/src/actions/acquire_stepped_freq_tdomain_iq.py @@ -46,13 +46,11 @@ from itertools import zip_longest import numpy as np - -from rest_framework.reverse import reverse from sigmf.sigmffile import SigMFFile from capabilities import capabilities -from hardware import usrp_iface -from sensor import V1, settings, utils +from hardware import sdr +from sensor import settings, utils from .base import Action @@ -60,12 +58,12 @@ GLOBAL_INFO = { "core:datatype": "cf32_le", # 2x 32-bit float, Little Endian - "core:version": "0.0.2" + "core:version": "0.0.2", } # The sigmf-ns-scos version targeted by this action -SCOS_TRANSFER_SPEC_VER = '0.2' +SCOS_TRANSFER_SPEC_VER = "0.2" class SteppedFrequencyTimeDomainIqAcquisition(Action): @@ -84,7 +82,7 @@ def __init__(self, name, fcs, gains, sample_rates, durations_ms): nfcs = len(fcs) - parameter_names = ('gain', 'sample_rate', 'duration_ms') + parameter_names = ("gain", "sample_rate", "duration_ms") tuning_parameters = {} for fc, *params in zip_longest(fcs, gains, sample_rates, durations_ms): @@ -99,41 +97,36 @@ def __init__(self, name, fcs, gains, sample_rates, durations_ms): self.nfcs = nfcs self.fcs = fcs self.tuning_parameters = tuning_parameters - self.usrp = usrp_iface # make instance variable to allow mocking + self.sdr = sdr # make instance variable to allow mocking def __call__(self, schedule_entry_name, task_id): """This is the entrypoint function called by the scheduler.""" - from schedule.models import ScheduleEntry + from tasks.models import TaskResult - # raises ScheduleEntry.DoesNotExist if no matching schedule entry - parent_entry = ScheduleEntry.objects.get(name=schedule_entry_name) + # Raises TaskResult.DoesNotExist if no matching task result + task_result = TaskResult.objects.get( + schedule_entry__name=schedule_entry_name, task_id=task_id + ) self.test_required_components() for recording_id, fc in enumerate(self.fcs, start=1): - data, sigmf_md = self.acquire_data(fc, parent_entry, task_id) - self.archive(data, sigmf_md, parent_entry, task_id, recording_id) - - kws = {'schedule_entry_name': schedule_entry_name, 'task_id': task_id} - kws.update(V1) - detail = reverse( - 'acquisition-detail', kwargs=kws, request=parent_entry.request) - - return detail + data, sigmf_md = self.acquire_data(fc) + self.archive(task_result, recording_id, data, sigmf_md) def test_required_components(self): """Fail acquisition if a required component is not available.""" - self.usrp.connect() - if not self.usrp.is_available: - msg = "acquisition failed: USRP required but not available" + self.sdr.connect() + if not self.sdr.is_available: + msg = "acquisition failed: SDR required but not available" raise RuntimeError(msg) - def acquire_data(self, fc, parent_entry, task_id): + def acquire_data(self, fc): tuning_parameters = self.tuning_parameters[fc] - self.configure_usrp(fc, **tuning_parameters) + self.configure_sdr(fc, **tuning_parameters) # Use the radio's actual reported sample rate instead of requested rate - sample_rate = self.usrp.radio.sample_rate + sample_rate = self.sdr.radio.sample_rate # Build global metadata sigmf_md = SigMFFile() @@ -141,7 +134,7 @@ def acquire_data(self, fc, parent_entry, task_id): sigmf_md.set_global_field("core:sample_rate", sample_rate) sigmf_md.set_global_field("core:description", self.description) - sensor_def = capabilities['sensor_definition'] + sensor_def = capabilities["sensor_definition"] sigmf_md.set_global_field("ntia:sensor_definition", sensor_def) sigmf_md.set_global_field("ntia:sensor_id", settings.FQDN) 
sigmf_md.set_global_field("scos:version", SCOS_TRANSFER_SPEC_VER) @@ -149,46 +142,47 @@ def acquire_data(self, fc, parent_entry, task_id): # Acquire data and build per-capture metadata data = np.array([], dtype=np.complex64) - nsamps = int(sample_rate * tuning_parameters['duration_ms'] * 1e-3) + nsamps = int(sample_rate * tuning_parameters["duration_ms"] * 1e-3) dt = utils.get_datetime_str_now() - acq = self.usrp.radio.acquire_samples(nsamps).astype(np.complex64) + # Drop ~10 ms of samples + nskip = int(0.01 * sample_rate) + acq = self.sdr.radio.acquire_samples(nsamps, nskip=nskip).astype(np.complex64) data = np.append(data, acq) capture_md = {"core:frequency": fc, "core:datetime": dt} sigmf_md.add_capture(start_index=0, metadata=capture_md) - annotation_md = {"applied_scale_factor": self.usrp.radio.scale_factor} - sigmf_md.add_annotation(start_index=0, length=nsamps, - metadata=annotation_md) + annotation_md = {"applied_scale_factor": self.sdr.radio.scale_factor} + sigmf_md.add_annotation(start_index=0, length=nsamps, metadata=annotation_md) return data, sigmf_md - def configure_usrp(self, fc, gain, sample_rate, duration_ms): - self.set_usrp_clock_rate(sample_rate) - self.set_usrp_sample_rate(sample_rate) - self.usrp.radio.tune_frequency(fc) - self.usrp.radio.gain = gain + def configure_sdr(self, fc, gain, sample_rate, duration_ms): + self.set_sdr_clock_rate(sample_rate) + self.set_sdr_sample_rate(sample_rate) + self.sdr.radio.tune_frequency(fc) + self.sdr.radio.gain = gain - def set_usrp_clock_rate(self, sample_rate): + def set_sdr_clock_rate(self, sample_rate): clock_rate = sample_rate while clock_rate < 10e6: clock_rate *= 4 - self.usrp.radio.clock_rate = clock_rate + self.sdr.radio.clock_rate = clock_rate - def set_usrp_sample_rate(self, sample_rate): - self.usrp.radio.sample_rate = sample_rate + def set_sdr_sample_rate(self, sample_rate): + self.sdr.radio.sample_rate = sample_rate - def archive(self, m4s_data, sigmf_md, parent_entry, task_id, recording_id): - from acquisitions.models import Acquisition + def archive(self, task_result, recording_id, m4s_data, sigmf_md): + from tasks.models import Acquisition logger.debug("Storing acquisition in database") Acquisition( - schedule_entry=parent_entry, - task_id=task_id, + task_result=task_result, recording_id=recording_id, - sigmf_metadata=sigmf_md._metadata, - data=m4s_data).save() + metadata=sigmf_md._metadata, + data=m4s_data, + ).save() @property def description(self): @@ -203,34 +197,34 @@ def description(self): total_samples = 0 for fc in self.fcs: tuning_params = self.tuning_parameters[fc].copy() - tuning_params['fc_MHz'] = fc / 1e6 - srate = tuning_params['sample_rate'] - tuning_params['sample_rate_Msps'] = srate / 1e6 + tuning_params["fc_MHz"] = fc / 1e6 + srate = tuning_params["sample_rate"] + tuning_params["sample_rate_Msps"] = srate / 1e6 acquisition_plan += acq_plan_template.format(**tuning_params) - total_samples += int(tuning_params['duration_ms'] / 1e6 * srate) + total_samples += int(tuning_params["duration_ms"] / 1e6 * srate) f_low = self.fcs[0] - f_low_srate = self.tuning_parameters[f_low]['sample_rate'] + f_low_srate = self.tuning_parameters[f_low]["sample_rate"] f_low_edge = (f_low - f_low_srate / 2.0) / 1e6 f_high = self.fcs[-1] - f_high_srate = self.tuning_parameters[f_high]['sample_rate'] + f_high_srate = self.tuning_parameters[f_high]["sample_rate"] f_high_edge = (f_high - f_high_srate / 2.0) / 1e6 - durations = [v['duration_ms'] for v in self.tuning_parameters.values()] + durations = [v["duration_ms"] for v in 
self.tuning_parameters.values()] min_duration_ms = np.sum(durations) filesize_mb = total_samples * 8 / 1e6 # 8 bytes per complex64 sample defs = { - 'name': self.name, - 'nfcs': self.nfcs, - 'f_low_edge': f_low_edge, - 'f_high_edge': f_high_edge, - 'acquisition_plan': acquisition_plan, - 'min_duration_ms': min_duration_ms, - 'total_samples': total_samples, - 'filesize_mb': filesize_mb + "name": self.name, + "nfcs": self.nfcs, + "f_low_edge": f_low_edge, + "f_high_edge": f_high_edge, + "acquisition_plan": acquisition_plan, + "min_duration_ms": min_duration_ms, + "total_samples": total_samples, + "filesize_mb": filesize_mb, } # __doc__ refers to the module docstring at the top of the file diff --git a/src/actions/logger.py b/src/actions/logger.py index c0abfe09..78008eb5 100644 --- a/src/actions/logger.py +++ b/src/actions/logger.py @@ -1,7 +1,5 @@ """A simple example action that logs a message.""" -from __future__ import absolute_import - import logging from .base import Action diff --git a/src/actions/monitor_usrp.py b/src/actions/monitor_usrp.py index c4bf45ab..b901fa17 100644 --- a/src/actions/monitor_usrp.py +++ b/src/actions/monitor_usrp.py @@ -1,12 +1,11 @@ """Monitor the on-board USRP and touch or remove an indicator file.""" -from __future__ import absolute_import - import logging from pathlib import Path from hardware import usrp_iface from sensor import settings + from .base import Action logger = logging.getLogger(__name__) diff --git a/src/actions/sync_gps.py b/src/actions/sync_gps.py index 14bad32e..60de0266 100644 --- a/src/actions/sync_gps.py +++ b/src/actions/sync_gps.py @@ -1,11 +1,9 @@ """Monitor the on-board USRP and touch or remove an indicator file.""" -from __future__ import absolute_import - import logging from hardware import gps_iface -from status.models import Location, GPS_LOCATION_DESCRIPTION +from status.models import GPS_LOCATION_DESCRIPTION, Location from .base import Action @@ -39,4 +37,5 @@ def __call__(self, name, tid): gps=True, description=GPS_LOCATION_DESCRIPTION, latitude=latitude, - longitude=longitude) + longitude=longitude, + ) diff --git a/src/actions/tests/test_acquire_single_freq_fft.py b/src/actions/tests/test_acquire_single_freq_fft.py index e61289b3..06dbbe09 100644 --- a/src/actions/tests/test_acquire_single_freq_fft.py +++ b/src/actions/tests/test_acquire_single_freq_fft.py @@ -1,12 +1,11 @@ -from actions import by_name -from acquisitions.models import Acquisition +import json +from os import path + from django.conf import settings -# from jsonschema import validate as schema_validate -from schedule.tests.utils import post_schedule, TEST_SCHEDULE_ENTRY from sigmf.validate import validate as sigmf_validate -import json -from os import path +from tasks.models import Acquisition, TaskResult +from tasks.tests.utils import simulate_acquisitions SCHEMA_DIR = path.join(settings.REPO_ROOT, "schemas") SCHEMA_FNAME = "scos_transfer_spec_schema.json" @@ -16,16 +15,10 @@ schema = json.load(f) -def test_detector(user_client, rf): - # Put an entry in the schedule that we can refer to - rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - entry_name = rjson['name'] - task_id = rjson['next_task_id'] - - # use mock_acquire set up in conftest.py - by_name['mock_acquire'](entry_name, task_id) - acquistion = Acquisition.objects.get(task_id=task_id) - sigmf_metadata = acquistion.sigmf_metadata - assert sigmf_validate(sigmf_metadata) +def test_detector(user_client, test_scheduler): + entry_name = simulate_acquisitions(user_client) + tr = 
diff --git a/src/actions/tests/test_init.py b/src/actions/tests/test_init.py
index acf03fbd..ef1193d6 100644
--- a/src/actions/tests/test_init.py
+++ b/src/actions/tests/test_init.py
@@ -5,7 +5,6 @@

 import actions

-
 # Indentation makes this invalid
 INVALID_YAML = b"""\
 single_frequency_fft:
@@ -43,7 +42,7 @@ def _test_load_from_yaml_check_error(yaml_to_write, expected_error):
     # load_from_yaml loads all `.yml` files in the passed directory, so do a
     # bit of setup to create an invalid yaml tempfile in a temporary directory
     with tempfile.TemporaryDirectory() as tmpdir:
-        with tempfile.NamedTemporaryFile(suffix='.yml', dir=tmpdir) as tmpfile:
+        with tempfile.NamedTemporaryFile(suffix=".yml", dir=tmpdir) as tmpfile:
             tmpfile.write(yaml_to_write)
             tmpfile.seek(0)
             # Now try to load the invalid yaml file, expecting an error
diff --git a/src/authentication/admin.py b/src/authentication/admin.py
index 949a5a35..98fd0d22 100644
--- a/src/authentication/admin.py
+++ b/src/authentication/admin.py
@@ -5,5 +5,4 @@

 from .models import User

-
 admin.site.register(User, UserAdmin)
diff --git a/src/authentication/apps.py b/src/authentication/apps.py
index 9635c9df..372ba813 100644
--- a/src/authentication/apps.py
+++ b/src/authentication/apps.py
@@ -2,4 +2,4 @@


 class AuthenticationConfig(AppConfig):
-    name = 'authentication'
+    name = "authentication"
diff --git a/src/authentication/migrations/0001_initial.py b/src/authentication/migrations/0001_initial.py
index 68ff35dc..c3a3ff4e 100644
--- a/src/authentication/migrations/0001_initial.py
+++ b/src/authentication/migrations/0001_initial.py
@@ -1,44 +1,123 @@
-# Generated by Django 2.2.1 on 2019-05-15 20:55
+# Generated by Django 2.2.1 on 2019-05-17 20:43

 import django.contrib.auth.models
 import django.contrib.auth.validators
-from django.db import migrations, models
 import django.utils.timezone
+from django.db import migrations, models


 class Migration(migrations.Migration):

     initial = True

-    dependencies = [
-        ('auth', '0011_update_proxy_permissions'),
-    ]
+    dependencies = [("auth", "0011_update_proxy_permissions")]

     operations = [
         migrations.CreateModel(
-            name='User',
+            name="User",
             fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('password', models.CharField(max_length=128, verbose_name='password')),
-                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
-                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
-                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. 
Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')), - ('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')), - ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')), - ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')), - ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')), - ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')), - ('email', models.EmailField(max_length=254, null=True)), - ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')), - ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("password", models.CharField(max_length=128, verbose_name="password")), + ( + "last_login", + models.DateTimeField( + blank=True, null=True, verbose_name="last login" + ), + ), + ( + "is_superuser", + models.BooleanField( + default=False, + help_text="Designates that this user has all permissions without explicitly assigning them.", + verbose_name="superuser status", + ), + ), + ( + "username", + models.CharField( + error_messages={ + "unique": "A user with that username already exists." + }, + help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.", + max_length=150, + unique=True, + validators=[ + django.contrib.auth.validators.UnicodeUsernameValidator() + ], + verbose_name="username", + ), + ), + ( + "first_name", + models.CharField( + blank=True, max_length=30, verbose_name="first name" + ), + ), + ( + "last_name", + models.CharField( + blank=True, max_length=150, verbose_name="last name" + ), + ), + ( + "is_staff", + models.BooleanField( + default=False, + help_text="Designates whether the user can log into this admin site.", + verbose_name="staff status", + ), + ), + ( + "is_active", + models.BooleanField( + default=True, + help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.", + verbose_name="active", + ), + ), + ( + "date_joined", + models.DateTimeField( + default=django.utils.timezone.now, verbose_name="date joined" + ), + ), + ("email", models.EmailField(max_length=254, null=True)), + ( + "groups", + models.ManyToManyField( + blank=True, + help_text="The groups this user belongs to. 
A user will get all permissions granted to each of their groups.", + related_name="user_set", + related_query_name="user", + to="auth.Group", + verbose_name="groups", + ), + ), + ( + "user_permissions", + models.ManyToManyField( + blank=True, + help_text="Specific permissions for this user.", + related_name="user_set", + related_query_name="user", + to="auth.Permission", + verbose_name="user permissions", + ), + ), ], options={ - 'verbose_name': 'user', - 'verbose_name_plural': 'users', - 'abstract': False, + "verbose_name": "user", + "verbose_name_plural": "users", + "abstract": False, }, - managers=[ - ('objects', django.contrib.auth.models.UserManager()), - ], - ), + managers=[("objects", django.contrib.auth.models.UserManager())], + ) ] diff --git a/src/authentication/models.py b/src/authentication/models.py index 85b6e9f9..e0f7d559 100644 --- a/src/authentication/models.py +++ b/src/authentication/models.py @@ -7,6 +7,7 @@ class User(AbstractUser): """A user of the sensor.""" + email = models.EmailField(null=True) diff --git a/src/authentication/serializers.py b/src/authentication/serializers.py index 517a23c8..1bc7d7a8 100644 --- a/src/authentication/serializers.py +++ b/src/authentication/serializers.py @@ -2,42 +2,45 @@ from rest_framework.reverse import reverse from sensor import V1 + from .models import User class UserProfileSerializer(serializers.HyperlinkedModelSerializer): """Public user account view.""" + schedule_entries = serializers.SerializerMethodField( - help_text="The list of schedule entries owned by the user") + help_text="The list of schedule entries owned by the user" + ) class Meta: model = User - fields = ('self', 'username', 'is_active', 'date_joined', 'last_login', - 'schedule_entries') + fields = ( + "self", + "username", + "is_active", + "date_joined", + "last_login", + "schedule_entries", + ) extra_kwargs = { - 'self': { - 'view_name': 'user-detail' - }, - 'is_active': { - 'initial': True - }, - 'schedule_entries': { - 'view_name': 'schedule-detail' - }, + "self": {"view_name": "user-detail"}, + "is_active": {"initial": True}, + "schedule_entries": {"view_name": "schedule-detail"}, } - read_only_fields = ('schedule_entries', 'date_joined', 'last_login') + read_only_fields = ("schedule_entries", "date_joined", "last_login") def get_schedule_entries(self, obj): """Filter private schedule entries if requester is not an admin.""" - request = self.context['request'] + request = self.context["request"] entries = obj.schedule_entries.get_queryset() if not request.user.is_staff: entries = entries.filter(is_private=False) urls = [] for entry in entries: - route = 'schedule-detail' - kws = {'pk': entry.name} + route = "schedule-detail" + kws = {"pk": entry.name} kws.update(V1) urls.append(reverse(route, kwargs=kws, request=request)) @@ -46,6 +49,7 @@ def get_schedule_entries(self, obj): class UserDetailsSerializer(UserProfileSerializer): """Private user account view.""" + auth_token = serializers.SerializerMethodField() has_usable_password = serializers.SerializerMethodField() is_admin = serializers.SerializerMethodField() @@ -55,9 +59,12 @@ def get_is_admin(self, obj): class Meta(UserProfileSerializer.Meta): fields = UserProfileSerializer.Meta.fields + ( - 'email', 'auth_token', 'has_usable_password', 'is_admin') - read_only_fields = UserProfileSerializer.Meta.read_only_fields + ( - 'auth_token', ) + "email", + "auth_token", + "has_usable_password", + "is_admin", + ) + read_only_fields = UserProfileSerializer.Meta.read_only_fields + ("auth_token",) def 
get_auth_token(self, obj): return obj.auth_token.key diff --git a/src/authentication/tests/test_list_view.py b/src/authentication/tests/test_list_view.py index 08723823..ed1d7f59 100644 --- a/src/authentication/tests/test_list_view.py +++ b/src/authentication/tests/test_list_view.py @@ -3,15 +3,15 @@ from schedule.tests.utils import TEST_PRIVATE_SCHEDULE_ENTRY, post_schedule from sensor import V1 -from sensor.tests.utils import validate_response, HTTPS_KWARG +from sensor.tests.utils import HTTPS_KWARG, validate_response def test_user_cannot_view_private_entry_in_list(admin_client, user_client): """An unprivileged user should not be able to see private entries.""" post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - url = reverse('user-list', kwargs=V1) + url = reverse("user-list", kwargs=V1) response = user_client.get(url, **HTTPS_KWARG) rjson = validate_response(response, status.HTTP_200_OK) - results = rjson['results'] - admin_rjson = [user for user in results if user['username'] == 'admin'][0] - assert admin_rjson['schedule_entries'] == [] + results = rjson["results"] + admin_rjson = [user for user in results if user["username"] == "admin"][0] + assert admin_rjson["schedule_entries"] == [] diff --git a/src/authentication/urls.py b/src/authentication/urls.py index 89c95202..780e9c41 100644 --- a/src/authentication/urls.py +++ b/src/authentication/urls.py @@ -1,9 +1,9 @@ from django.urls import path -from .views import UserListView, UserInstanceView +from .views import UserInstanceView, UserListView urlpatterns = ( - path('', UserListView.as_view(), name='user-list'), - path('me/', UserInstanceView.as_view(), name='user-detail'), - path('/', UserInstanceView.as_view(), name='user-detail'), + path("", UserListView.as_view(), name="user-list"), + path("me/", UserInstanceView.as_view(), name="user-detail"), + path("/", UserInstanceView.as_view(), name="user-detail"), ) diff --git a/src/authentication/views.py b/src/authentication/views.py index ae7e685b..a16b30bc 100644 --- a/src/authentication/views.py +++ b/src/authentication/views.py @@ -1,15 +1,16 @@ -from __future__ import absolute_import - -from rest_framework.generics import get_object_or_404 from rest_framework.generics import ( - ListAPIView, ListCreateAPIView, RetrieveAPIView, - RetrieveUpdateDestroyAPIView) + ListAPIView, + ListCreateAPIView, + RetrieveAPIView, + RetrieveUpdateDestroyAPIView, + get_object_or_404, +) from rest_framework.permissions import IsAdminUser from rest_framework.settings import api_settings from rest_framework.views import APIView from .models import User -from .serializers import UserProfileSerializer, UserDetailsSerializer +from .serializers import UserDetailsSerializer, UserProfileSerializer class UserListView(APIView): @@ -31,24 +32,24 @@ def dispatch(self, request, *args, **kwargs): class UserDetailsListView(ListCreateAPIView): """View user details and create users.""" - queryset = User.objects.all().order_by('-date_joined') + + queryset = User.objects.all().order_by("-date_joined") serializer_class = UserDetailsSerializer - permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [ - IsAdminUser, - ] + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [IsAdminUser] class UserProfilesListView(ListAPIView): """View public profiles of all registered users.""" - queryset = User.objects.all().order_by('-date_joined') + + queryset = User.objects.all().order_by("-date_joined") serializer_class = UserProfileSerializer class UserInstanceView(APIView): def dispatch(self, request, *args, 
**kwargs): - kwargs.pop('version', None) + kwargs.pop("version", None) if not kwargs: # /users/me - kwargs = {'pk': request.user.pk} + kwargs = {"pk": request.user.pk} requested_user = get_object_or_404(User.objects.all(), **kwargs) if request.user.is_staff or request.user == requested_user: diff --git a/src/capabilities/__init__.py b/src/capabilities/__init__.py index 7af489f6..4743876b 100644 --- a/src/capabilities/__init__.py +++ b/src/capabilities/__init__.py @@ -16,4 +16,4 @@ def load_from_json(fname): logger.exception("Unable to load JSON file {}".format(fname)) -capabilities['sensor_definition'] = load_from_json(SENSOR_DEFINITION_FILE) +capabilities["sensor_definition"] = load_from_json(SENSOR_DEFINITION_FILE) diff --git a/src/capabilities/apps.py b/src/capabilities/apps.py index 75492209..44c5bf57 100644 --- a/src/capabilities/apps.py +++ b/src/capabilities/apps.py @@ -1,8 +1,7 @@ # -*- coding: utf-8 -*- -from __future__ import unicode_literals from django.apps import AppConfig class CapabilitiesConfig(AppConfig): - name = 'capabilities' + name = "capabilities" diff --git a/src/capabilities/urls.py b/src/capabilities/urls.py index 2073a01a..8bfd737f 100644 --- a/src/capabilities/urls.py +++ b/src/capabilities/urls.py @@ -2,6 +2,4 @@ from .views import capabilities_view -urlpatterns = ( - path('', capabilities_view, name='capabilities'), -) +urlpatterns = (path("", capabilities_view, name="capabilities"),) diff --git a/src/capabilities/views.py b/src/capabilities/views.py index ea0aa104..4127b598 100644 --- a/src/capabilities/views.py +++ b/src/capabilities/views.py @@ -8,7 +8,6 @@ import actions from capabilities import capabilities - logger = logging.getLogger(__name__) @@ -18,14 +17,13 @@ def get_actions(include_admin_actions=False): if actions.by_name[action].admin_only and not include_admin_actions: continue - serialized_actions.append({ - 'name': - action, - 'summary': - actions.get_summary(actions.by_name[action]), - 'description': - actions.by_name[action].description - }) + serialized_actions.append( + { + "name": action, + "summary": actions.get_summary(actions.by_name[action]), + "description": actions.by_name[action].description, + } + ) return serialized_actions @@ -34,5 +32,5 @@ def get_actions(include_admin_actions=False): def capabilities_view(request, version, format=None): """The capabilites of the sensor.""" filtered_actions = get_actions(include_admin_actions=request.user.is_staff) - capabilities['actions'] = filtered_actions + capabilities["actions"] = filtered_actions return Response(capabilities) diff --git a/src/conftest.py b/src/conftest.py index 4eea93b0..b07cdfdf 100644 --- a/src/conftest.py +++ b/src/conftest.py @@ -6,25 +6,6 @@ from authentication.models import User -def pytest_addoption(parser): - parser.addoption( - '--update-api-docs', - action='store_true', - default=False, - help="Ensure API docs match code") - - -def pytest_collection_modifyitems(config, items): - """Skips `test_api_docs_up_to_date` if CLI option not passed.""" - if config.getoption('--update-api-docs'): - # --update-api-docs given on cli: do not skip api doc generation - return - skip_api_gen = pytest.mark.skip(reason="didn't pass --update-api-docs") - for item in items: - if 'update_api_docs' in item.keywords: - item.add_marker(skip_api_gen) - - @pytest.yield_fixture def testclock(): """Replace scheduler's timefn with manually steppable test timefn.""" @@ -43,15 +24,15 @@ def testclock(): def test_scheduler(rf, testclock): """Instantiate test scheduler with fake request context 
and testclock.""" s = scheduler.scheduler.Scheduler() - s.request = rf.post('mock://cburl/schedule') + s.request = rf.post("mock://cburl/schedule") return s @pytest.fixture def user(db): """A normal user.""" - username = 'test' - password = 'password' + username = "test" + password = "password" user, created = User.objects.get_or_create(username=username) @@ -76,8 +57,8 @@ def user_client(db, user): @pytest.fixture def alt_user(db): """A normal user.""" - username = 'alt_test' - password = 'password' + username = "alt_test" + password = "password" user, created = User.objects.get_or_create(username=username) @@ -111,15 +92,16 @@ def alt_admin_user(db, django_user_model, django_username_field): username_field = django_username_field try: - user = UserModel._default_manager.get(**{username_field: 'alt_admin'}) + user = UserModel._default_manager.get(**{username_field: "alt_admin"}) except UserModel.DoesNotExist: extra_fields = {} - if username_field != 'username': - extra_fields[username_field] = 'alt_admin' + if username_field != "username": + extra_fields[username_field] = "alt_admin" user = UserModel._default_manager.create_superuser( - 'alt_admin', 'alt_admin@example.com', 'password', **extra_fields) + "alt_admin", "alt_admin@example.com", "password", **extra_fields + ) return user @@ -130,18 +112,31 @@ def alt_admin_client(db, alt_admin_user): from django.test.client import Client client = Client() - client.login(username=alt_admin_user.username, password='password') + client.login(username=alt_admin_user.username, password="password") return client # Add mock acquisitions for tests mock_acquire = actions.acquire_single_freq_fft.SingleFrequencyFftAcquisition( - name='mock_acquire', + name="mock_acquire", frequency=1e9, # 1 GHz gain=40, sample_rate=1e6, # 1 MSa/s fft_size=16, - nffts=11) -actions.by_name['mock_acquire'] = mock_acquire + nffts=11, +) +actions.by_name["mock_acquire"] = mock_acquire + +# Add mock multi-recording acquisition for tests +stepped_freq_action = actions.acquire_stepped_freq_tdomain_iq +mock_multirec_acquire = stepped_freq_action.SteppedFrequencyTimeDomainIqAcquisition( + name="mock_multi_acquire", + fcs=[1.1e9, 1.2e9, 1.3e9], # 1400, 1500, 1600 MHz + gains=[40, 40, 60], + sample_rates=[1e6, 1e6, 1e6], # 1 MSa/s + durations_ms=[1, 2, 1], +) +actions.by_name["mock_multirec_acquire"] = mock_multirec_acquire + actions.init() diff --git a/src/hardware/__init__.py b/src/hardware/__init__.py index e69de29b..8e1bd8cf 100644 --- a/src/hardware/__init__.py +++ b/src/hardware/__init__.py @@ -0,0 +1,3 @@ +from . import usrp_iface + +sdr = usrp_iface diff --git a/src/hardware/apps.py b/src/hardware/apps.py index 3413a19a..f183653a 100644 --- a/src/hardware/apps.py +++ b/src/hardware/apps.py @@ -1,8 +1,7 @@ # -*- coding: utf-8 -*- -from __future__ import unicode_literals from django.apps import AppConfig class HardwareConfig(AppConfig): - name = 'hardware' + name = "hardware" diff --git a/src/hardware/gps_iface.py b/src/hardware/gps_iface.py index a1218f1d..aca34963 100644 --- a/src/hardware/gps_iface.py +++ b/src/hardware/gps_iface.py @@ -3,7 +3,7 @@ import logging import subprocess from datetime import datetime -from time import time, sleep +from time import sleep, time from hardware import usrp_iface @@ -22,8 +22,8 @@ def get_lat_long(timeout_s=1): logger.debug("Waiting for GPS lock... 
") start = time() gps_locked = False - while (time() - start < timeout_s and not gps_locked): - gps_locked = usrp.get_mboard_sensor('gps_locked').to_bool() + while time() - start < timeout_s and not gps_locked: + gps_locked = usrp.get_mboard_sensor("gps_locked").to_bool() sleep(0.1) if not gps_locked: @@ -32,20 +32,20 @@ def get_lat_long(timeout_s=1): logger.debug("GPS locked.") - if 'gpsdo' not in usrp.get_time_sources(0): + if "gpsdo" not in usrp.get_time_sources(0): logger.warning("No GPSDO time source detected") return None - usrp.set_time_source('gpsdo') + usrp.set_time_source("gpsdo") - if usrp.get_time_source(0) != 'gpsdo': + if usrp.get_time_source(0) != "gpsdo": logger.error("Failed to set GPSDO time source") return None # Poll get_time_last_pss() until change is seen last_t = int(usrp.get_time_last_pps().get_real_secs()) now_t = int(usrp.get_time_last_pps().get_real_secs()) - while (last_t != now_t): + while last_t != now_t: sleep(0.05) now_t = int(usrp.get_time_last_pps().get_real_secs()) @@ -53,27 +53,27 @@ def get_lat_long(timeout_s=1): sleep(0.1) # To use gr-uhd instead of UHD python driver, this line needs to change # gps_t = uhd.time_spec_t(usrp.get_mboard_sensor('gps_time').to_int() + 1) - gps_t = uhd.types.TimeSpec(usrp.get_mboard_sensor('gps_time').to_int() + 1) + gps_t = uhd.types.TimeSpec(usrp.get_mboard_sensor("gps_time").to_int() + 1) usrp.set_time_next_pps(gps_t) dt = datetime.fromtimestamp(gps_t.get_real_secs()) - date_cmd = ['date', '-s', '{:}'.format(dt.strftime('%Y/%m/%d %H:%M:%S'))] + date_cmd = ["date", "-s", "{:}".format(dt.strftime("%Y/%m/%d %H:%M:%S"))] subprocess.check_output(date_cmd, shell=True) logger.info("Set USRP and system time to GPS time {}".format(dt.ctime())) - if 'gpsdo' not in usrp.get_clock_sources(0): + if "gpsdo" not in usrp.get_clock_sources(0): logger.warning("No GPSDO clock source detected") return None - usrp.set_clock_source('gpsdo') + usrp.set_clock_source("gpsdo") - if usrp.get_clock_source(0) != 'gpsdo': + if usrp.get_clock_source(0) != "gpsdo": logger.error("Failed to set GPSDO clock source") return None start = time() ref_locked = False - while (time() - start < timeout_s and not ref_locked): - ref_locked = usrp.get_mboard_sensor('ref_locked').to_bool() + while time() - start < timeout_s and not ref_locked: + ref_locked = usrp.get_mboard_sensor("ref_locked").to_bool() if not ref_locked: msg = "Timed out waiting for clock to lock to GPSDO reference" @@ -83,12 +83,27 @@ def get_lat_long(timeout_s=1): logger.debug("Clock locked to GPSDO reference") try: - gpgga = usrp.get_mboard_sensor('gps_gpgga').value - (fmt, utc, lat, ns, lng, ew, qual, nsats, hdil, alt, altu, gdalsep, - gdalsepu, age, refid) = gpgga.split(',') + gpgga = usrp.get_mboard_sensor("gps_gpgga").value + ( + fmt, + utc, + lat, + ns, + lng, + ew, + qual, + nsats, + hdil, + alt, + altu, + gdalsep, + gdalsepu, + age, + refid, + ) = gpgga.split(",") latitude = float(lat) - if ns == 'S': + if ns == "S": latitude = -latitude latitude_degs = int(latitude / 100) @@ -96,7 +111,7 @@ def get_lat_long(timeout_s=1): latitude_dd = latitude_degs + (latitude_mins / 60) longitude = float(lng) - if ew == 'W': + if ew == "W": longitude = -longitude longitude_degs = int(longitude / 100) diff --git a/src/hardware/mocks/usrp_block.py b/src/hardware/mocks/usrp_block.py index 85826f14..ec03997c 100644 --- a/src/hardware/mocks/usrp_block.py +++ b/src/hardware/mocks/usrp_block.py @@ -4,8 +4,8 @@ import numpy as np -tune_result_params = ['actual_dsp_freq', 'actual_rf_freq'] -MockTuneResult = 
namedtuple('MockTuneResult', tune_result_params) +tune_result_params = ["actual_dsp_freq", "actual_rf_freq"] +MockTuneResult = namedtuple("MockTuneResult", tune_result_params) class MockUsrp(object): @@ -17,8 +17,10 @@ def __init__(self, randomize_values=False): self.clock_rate = 40e6 self.gain = 0 - self.total_fail_results = 0 - self.current_fail_results = 0 + # Simulate returning less than the requested number of samples from + # self.recv_num_samps + self.times_to_fail_recv = 0 + self.times_failed_recv = 0 self.randomize_values = randomize_values @@ -26,12 +28,10 @@ def set_auto_dc_offset(self, val): self.auto_dc_offset = val def recv_num_samps(self, n, fc, fs, channels, gain): - if self.current_fail_results < self.total_fail_results: - self.current_fail_results += 1 + if self.times_failed_recv < self.times_to_fail_recv: + self.times_failed_recv += 1 return np.ones((1, 0), dtype=np.complex64) - self.current_fail_results = 0 - self.total_fail_results += 1 if self.randomize_values: i = np.random.normal(0.5, 0.5, n) q = np.random.normal(0.5, 0.5, n) @@ -42,9 +42,9 @@ def recv_num_samps(self, n, fc, fs, channels, gain): else: return np.ones((1, n), dtype=np.complex64) - def reset_bad_acquisitions(self): - self.total_fail_results = 0 - self.current_fail_results = 0 + def set_times_to_fail_recv(self, n): + self.times_to_fail_recv = n + self.times_failed_recv = 0 def get_rx_freq(self): return self.f_lo + self.f_dsp diff --git a/src/hardware/scale_factors.py b/src/hardware/scale_factors.py index fefccff1..ee08f30a 100644 --- a/src/hardware/scale_factors.py +++ b/src/hardware/scale_factors.py @@ -1,7 +1,5 @@ -import logging - import json - +import logging logger = logging.getLogger(__name__) @@ -48,25 +46,25 @@ def get_power_scale_factor(self, lo_frequency, gain): f_div_min = self.frequencies[0] f_div_max = self.frequencies[-1] for div in self.divisions: - if f >= div['upper_bound']: - f_div_min = div['upper_bound'] + if f >= div["upper_bound"]: + f_div_min = div["upper_bound"] else: # Check if we are in the division - if f > div['lower_bound']: + if f > div["lower_bound"]: logger.warning("SDR tuned to within a division:") logger.warning(" LO frequency: {}".format(f)) msg = " Division: [{},{}]" - lb = div['lower_bound'] - ub = div['upper_bound'] + lb = div["lower_bound"] + ub = div["upper_bound"] msg = msg.format(lb, ub) logger.warning(msg) msg = "Assumed scale factor of lower boundary." 
logger.warning(msg)
-                    f_div_min = div['lower_bound']
-                    f_div_max = div['lower_bound']
+                    f_div_min = div["lower_bound"]
+                    f_div_max = div["lower_bound"]
                     bypass_freq_interpolation = True
                 else:
-                    f_div_max = div['lower_bound']
+                    f_div_max = div["lower_bound"]
                 break

         # Determine the index associated with the frequency/ies
@@ -85,18 +83,33 @@ def get_power_scale_factor(self, lo_frequency, gain):
             scale_factor = self.factors[f_i][g_i]
         elif bypass_freq_interpolation:
             scale_factor = self.interpolate_1d(
-                g, self.gains[g_i], self.gains[g_i + 1],
-                self.factors[f_i][g_i], self.factors[f_i][g_i + 1])
+                g,
+                self.gains[g_i],
+                self.gains[g_i + 1],
+                self.factors[f_i][g_i],
+                self.factors[f_i][g_i + 1],
+            )
         elif bypass_gain_interpolation:
             scale_factor = self.interpolate_1d(
-                f, self.frequencies[f_i], self.frequencies[f_i + 1],
-                self.factors[f_i][g_i], self.factors[f_i + 1][g_i])
+                f,
+                self.frequencies[f_i],
+                self.frequencies[f_i + 1],
+                self.factors[f_i][g_i],
+                self.factors[f_i + 1][g_i],
+            )
         else:
             scale_factor = self.interpolate_2d(
-                f, g, self.frequencies[f_i], self.frequencies[f_i + 1],
-                self.gains[g_i], self.gains[g_i + 1], self.factors[f_i][g_i],
-                self.factors[f_i + 1][g_i], self.factors[f_i][g_i + 1],
-                self.factors[f_i + 1][g_i + 1])
+                f,
+                g,
+                self.frequencies[f_i],
+                self.frequencies[f_i + 1],
+                self.gains[g_i],
+                self.gains[g_i + 1],
+                self.factors[f_i][g_i],
+                self.factors[f_i + 1][g_i],
+                self.factors[f_i][g_i + 1],
+                self.factors[f_i + 1][g_i + 1],
+            )

         logger.debug("Using power scale factor: {}".format(scale_factor))
         return scale_factor
@@ -104,7 +117,7 @@ def get_power_scale_factor(self, lo_frequency, gain):
     def get_scale_factor(self, lo_frequency, gain):
         """Get the linear scale factor for the current setup."""
         psf = self.get_power_scale_factor(lo_frequency, gain)
-        sf = 10**(psf / 20.0)
+        sf = 10 ** (psf / 20.0)
         logger.debug("Using linear scale factor: {}".format(sf))
         return sf
@@ -124,21 +137,21 @@ def load_from_json(fname):
         sf = json.load(f)

     # Dimensions of the factors array are not validated by the schema
-    factor_rows = len(sf['factors'])
-    nfrequencies = len(sf['frequencies'])
-    ngains = len(sf['gains'])
+    factor_rows = len(sf["factors"])
+    nfrequencies = len(sf["frequencies"])
+    ngains = len(sf["gains"])

     msg = "Number of rows in factors 2D array ({}) ".format(factor_rows)
     msg += "not equal to number of frequencies ({})".format(nfrequencies)
-    assert len(sf['factors']) == len(sf['frequencies']), msg
+    assert len(sf["factors"]) == len(sf["frequencies"]), msg

     msg = "factors row {!r} isn't the same length as the `gains` array ({})"
-    for row in sf['factors']:
+    for row in sf["factors"]:
         assert len(row) == ngains, msg.format(row, ngains)

     # Ensure frequencies and gains arrays are already sorted
-    assert sf['frequencies'] == sorted(sf['frequencies']), "freqs not sorted"
-    assert sf['gains'] == sorted(sf['gains']), "gains not sorted"
+    assert sf["frequencies"] == sorted(sf["frequencies"]), "freqs not sorted"
+    assert sf["gains"] == sorted(sf["gains"]), "gains not sorted"

     return ScaleFactors(**sf)
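The dB-to-linear conversion in `get_scale_factor` is the piece the tests below exercise; a quick worked check using the first row of the test table that follows (an illustration, not project code):

```python
# A power scale factor is stored in dB; the radio applies it as a linear
# multiplier on the IQ samples: sf = 10 ** (psf / 20).
psf = -7.47813046479      # dB, first entry in the table below
sf = 10 ** (psf / 20.0)   # ~0.4228 linear
assert round(sf, 4) == 0.4228
```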
path.join(REPO_ROOT, "./src/hardware/tests/resources") +TEST_SCALE_FACTORS_FILE = path.join(RESOURCES_DIR, "test_scale_factors.json") sfs = scale_factors.load_from_json(TEST_SCALE_FACTORS_FILE) @pytest.mark.parametrize( - 'sf,f,g', + "sf,f,g", [ # (scale_factor, lo_frequency, gain) - # Outer boundary (-7.47813046479, 70e6, 0), (7.50256094609, 6e9, 0), (-76.2557869767, 70e6, 76), (-65.3006507223, 6e9, 76), - # Beyond limits (-7.47813046479, 50e6, 0), (7.50256094609, 7e9, 0), (-7.47813046479, 70e6, -10), (-76.2557869767, 70e6, 100), - # At division (-5.40071178476, 1299974999, 0), (-5.41274003389, 1300974999, 0), @@ -37,12 +34,10 @@ (-76.3832149678, 2200468999, 100), (5.81812380813, 3999124997, -10), (-69.7131434755, 4000123997, 100), - # In division (-22.8093940482, 1300000000, 20), (-38.0043597179, 2200000000, 40), (-47.2748864466, 4000000000, 60), - # Interpolated (-11.5030015054, 100e6, 5), (-30.0076949404, 600e6, 25), @@ -51,7 +46,8 @@ (-32.2959584348, 3000e6, 37), (-51.2041078009, 4100e6, 58), (-11.4556252931, 5000e6, 19), - ]) + ], +) def test_scale_factor_calculation(sf, f, g): """Test that the default scale factor is used if not file was loaded.""" diff --git a/src/hardware/tests/test_usrp.py b/src/hardware/tests/test_usrp.py index 547b8b04..04611e05 100644 --- a/src/hardware/tests/test_usrp.py +++ b/src/hardware/tests/test_usrp.py @@ -1,6 +1,7 @@ """Test aspects of RadioInterface with mocked USRP.""" import pytest + from hardware import usrp_iface # Create the RadioInterface with the mock usrp_block @@ -12,34 +13,37 @@ # Ensure the usrp can recover from acquisition errors -def test_acquisition_errors(): - """Test USRP bad acquisitions handled gracefully up to max_retries. +def test_acquire_samples_with_retries(): + """Acquire samples should retry without error up to `max_retries`.""" + max_retries = 5 + times_to_fail = 3 + rx.usrp.set_times_to_fail_recv(times_to_fail) + + try: + rx.acquire_samples(1000, retries=max_retries) + except RuntimeError: + msg = "Acquisition failing {} times sequentially with {}\n" + msg += "retries requested should NOT have raised an error." + msg = msg.format(times_to_fail, max_retries) + pytest.fail(msg) - The mock usrp_block will return "bad" data equal to the number of times - aquire_samples() has been called until the reset_bad_acquisitions() has - been called. + rx.usrp.set_times_to_fail_recv(0) - """ - rx.usrp.reset_bad_acquisitions() + +def test_acquire_samples_fails_when_over_max_retries(): + """After `max_retries`, an error should be thrown.""" max_retries = 5 - for i in range(max_retries + 2): - if i <= max_retries: - try: - rx.acquire_samples(1000, 1000, max_retries) - except RuntimeError: - msg = "Acquisition failing {} sequentially with {}\n" - msg += "retries requested should NOT have raised an error." - msg = msg.format(i, max_retries) - pytest.fail(msg) - else: - msg = "Acquisition failing {} sequentially with {}\n" - msg += "retries requested SHOULD have raised an error." - msg = msg.format(i, max_retries) - with pytest.raises(RuntimeError): - rx.acquire_samples(1000, 1000, max_retries) - pytest.fail(msg) - - rx.usrp.reset_bad_acquisitions() + times_to_fail = 7 + rx.usrp.set_times_to_fail_recv(times_to_fail) + + msg = "Acquisition failing {} times sequentially with {}\n" + msg += "retries requested SHOULD have raised an error." 
diff --git a/src/hardware/usrp_iface.py b/src/hardware/usrp_iface.py
index a5b1a678..e8c311d1 100644
--- a/src/hardware/usrp_iface.py
+++ b/src/hardware/usrp_iface.py
@@ -28,7 +28,8 @@
 radio = None
 is_available = False

-# Testing determined these gain values provide
+# Testing determined these gain values provide a good mix of sensitivity and
+# dynamic range performance
 VALID_GAINS = (0, 20, 40, 60)


@@ -37,13 +38,13 @@ def connect(sf_file=settings.SCALE_FACTORS_FILE):  # -> bool:
     global is_available
     global radio

-    if settings.RUNNING_DEMO or settings.RUNNING_TESTS or settings.MOCK_RADIO:
+    if settings.MOCK_RADIO:
         logger.warning("Using mock USRP.")
         random = settings.MOCK_RADIO_RANDOM
         usrp = MockUsrp(randomize_values=random)
         is_available = True
-        RESOURCES_DIR = path.join(REPO_ROOT, './src/hardware/tests/resources')
-        sf_file = path.join(RESOURCES_DIR, 'test_scale_factors.json')
+        RESOURCES_DIR = path.join(REPO_ROOT, "./src/hardware/tests/resources")
+        sf_file = path.join(RESOURCES_DIR, "test_scale_factors.json")
     else:
         if is_available and radio is not None:
             return True
@@ -54,7 +55,7 @@ def connect(sf_file=settings.SCALE_FACTORS_FILE):  # -> bool:
             logger.warning("uhd not available - disabling radio")
             return False

-    usrp_args = 'type=b200'  # find any b-series device
+    usrp_args = "type=b200"  # find any b-series device

     try:
         usrp = uhd.usrp.MultiUSRP(usrp_args)
@@ -158,26 +159,38 @@ def recompute_scale_factor(self):
             return

         self.scale_factor = self.scale_factors.get_scale_factor(
-            lo_frequency=self.frequency, gain=self.gain)
+            lo_frequency=self.frequency, gain=self.gain
+        )

-    def acquire_samples(self, n, nskip=200000, retries=5):  # -> np.ndarray:
+    def acquire_samples(self, n, nskip=0, retries=5):  # -> np.ndarray:
         """Acquire nskip+n samples and return the last n"""
-        o_retries = retries
+        max_retries = retries
+
         while True:
+
+            # No need to skip initial samples when simulating the radio
+            if settings.MOCK_RADIO:
+                nsamps = n
+            else:
+                nsamps = n + nskip
+
             samples = self.usrp.recv_num_samps(
-                n + nskip,  # number of samples
-                self.frequency,  # center frequency in Hz
-                self.sample_rate,  # sample rate in samples per second
-                [0],  # channel list
-                self.gain  # gain in dB
+                nsamps,  # number of samples
+                self.frequency,  # center frequency in Hz
+                self.sample_rate,  # sample rate in samples per second
+                [0],  # channel list
+                self.gain,  # gain in dB
             )
             # usrp.recv_num_samps returns a numpy array of shape
             # (n_channels, n_samples) and dtype complex64
             assert samples.dtype == np.complex64
             assert len(samples.shape) == 2 and samples.shape[0] == 1
-            data = samples[0] # isolate data for channel 0
+            data = samples[0]  # isolate data for channel 0
             data_len = len(data)
-            data = data[nskip:]
+
+            if not settings.MOCK_RADIO:
+                
data = data[nskip:] + data = data * self.scale_factor if not len(data) == n: if retries > 0: @@ -187,7 +200,7 @@ def acquire_samples(self, n, nskip=200000, retries=5): # -> np.ndarray: retries = retries - 1 else: err = "Failed to acquire correct number of samples " - err += "{} times in a row.".format(o_retries) + err += "{} times in a row.".format(max_retries) raise RuntimeError(err) else: logger.debug("Successfully acquired {} samples.".format(n)) diff --git a/src/manage.py b/src/manage.py index 17b4bd0e..e25962a9 100755 --- a/src/manage.py +++ b/src/manage.py @@ -17,10 +17,11 @@ raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " - "forget to activate a virtual environment?") + "forget to activate a virtual environment?" + ) raise execute_from_command_line(sys.argv) - if os.getenv('INSIDE_EMACS'): + if os.getenv("INSIDE_EMACS"): # runserver turns on term echo, which echoes twice in emacs shell - os.system('stty -echo') + os.system("stty -echo") diff --git a/src/requirements-dev.txt b/src/requirements-dev.txt index 1dd69e7c..04aaab3e 100644 --- a/src/requirements-dev.txt +++ b/src/requirements-dev.txt @@ -1,11 +1,15 @@ -rrequirements.txt +black==18.9b0 flake8==3.7.7 +flake8-bugbear==19.3.0 +isort==4.3.20 jedi==0.13.3 jsonschema==3.0.1 mkdocs==1.0.4 +pre-commit==1.16.1 pytest-cov==2.7.1 -pytest-django==3.4.8 +pytest-django==3.5.0 pytest-flake8==1.0.4 -tox==3.10.0 -yapf==0.27.0 +seed-isort-config==1.9.1 +tox==3.12.1 diff --git a/src/requirements.txt b/src/requirements.txt index d3996355..2ac38ffa 100644 --- a/src/requirements.txt +++ b/src/requirements.txt @@ -1,21 +1,21 @@ Django==2.2.1 -Pygments==2.4.0 -Markdown==3.1 +Pygments==2.4.2 +Markdown==3.1.1 -e git+https://github.com/NTIA/SigMF.git@multi-recording-archive#egg=SigMF coreapi==2.3.3 django-debug-toolbar==1.11 -django-extensions==2.1.6 +django-extensions==2.1.7 django-filter==2.1.0 djangorestframework==3.9.4 docker-compose==1.24.0 drf-yasg==1.15.0 gunicorn==19.9.0 jsonfield==2.0.2 -numpy>=1.16.3 +numpy>=1.16.4 psycopg2-binary==2.8.2 raven==6.10.0 requests-futures==0.9.9 requests-mock==1.6.0 -ruamel.yaml==0.15.94 +ruamel.yaml==0.15.96 six==1.12.0 typing==3.6.6 diff --git a/src/results/__init__.py b/src/results/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/results/apps.py b/src/results/apps.py deleted file mode 100644 index 60bd5c1a..00000000 --- a/src/results/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.apps import AppConfig - - -class ResultsConfig(AppConfig): - name = 'results' diff --git a/src/results/migrations/0001_initial.py b/src/results/migrations/0001_initial.py deleted file mode 100644 index f47f7de0..00000000 --- a/src/results/migrations/0001_initial.py +++ /dev/null @@ -1,33 +0,0 @@ -# Generated by Django 2.2.1 on 2019-05-15 20:55 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ('schedule', '0001_initial'), - ] - - operations = [ - migrations.CreateModel( - name='TaskResult', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('task_id', models.IntegerField(help_text='The id of the task relative to the result')), - ('started', models.DateTimeField(help_text='The time the task started')), - ('finished', models.DateTimeField(help_text='The time the task finished')), - ('duration', models.DurationField(help_text='Task 
duration in seconds')), - ('result', models.CharField(choices=[(1, 'success'), (2, 'failure')], help_text='"success" or "failure"', max_length=7)), - ('detail', models.CharField(blank=True, help_text='Arbitrary detail string', max_length=512)), - ('schedule_entry', models.ForeignKey(help_text='The schedule entry relative to the result', on_delete=django.db.models.deletion.CASCADE, related_name='results', to='schedule.ScheduleEntry')), - ], - options={ - 'ordering': ('task_id',), - 'unique_together': {('schedule_entry', 'task_id')}, - }, - ), - ] diff --git a/src/results/migrations/__init__.py b/src/results/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/results/models.py b/src/results/models.py deleted file mode 100644 index b9739598..00000000 --- a/src/results/models.py +++ /dev/null @@ -1,55 +0,0 @@ -from django.db import models - -from schedule.models import ScheduleEntry -from sensor.settings import MAX_TASK_RESULTS -from .consts import MAX_DETAIL_LEN - - -class TaskResult(models.Model): - """Map between schedule entries and their task results.""" - SUCCESS = 1 - FAILURE = 2 - RESULT_CHOICES = ((SUCCESS, 'success'), (FAILURE, 'failure')) - - schedule_entry = models.ForeignKey( - ScheduleEntry, - on_delete=models.CASCADE, - related_name='results', - help_text="The schedule entry relative to the result") - task_id = models.IntegerField( - help_text="The id of the task relative to the result") - started = models.DateTimeField(help_text="The time the task started") - finished = models.DateTimeField(help_text="The time the task finished") - duration = models.DurationField(help_text="Task duration in seconds") - result = models.CharField( - max_length=7, - help_text='"success" or "failure"', - choices=RESULT_CHOICES) - detail = models.CharField( - max_length=MAX_DETAIL_LEN, - blank=True, - help_text="Arbitrary detail string") - - class Meta: - ordering = ('task_id', ) - unique_together = (('schedule_entry', 'task_id'), ) - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - # Allow Swapping max_results for testing - self.max_results = MAX_TASK_RESULTS - - def save(self): - """Limit number of results to MAX_TASK_RESULTS by removing oldest.""" - all_results = TaskResult.objects.all() - filter = {'schedule_entry__name': self.schedule_entry.name} - same_entry_results = all_results.filter(**filter) - if same_entry_results.count() >= self.max_results: - same_entry_results[0].delete() - - super(TaskResult, self).save() - - def __str__(self): - s = "{}/{}" - return s.format(self.schedule_entry.name, self.task_id) diff --git a/src/results/serializers.py b/src/results/serializers.py deleted file mode 100644 index 5436b248..00000000 --- a/src/results/serializers.py +++ /dev/null @@ -1,83 +0,0 @@ -from rest_framework import serializers -from rest_framework.reverse import reverse - -from schedule.models import ScheduleEntry -from sensor import V1 -from .models import TaskResult - - -class TaskResultsOverviewSerializer(serializers.HyperlinkedModelSerializer): - results = serializers.SerializerMethodField( - help_text="The link to the task results") - schedule_entry = serializers.SerializerMethodField( - help_text="The related schedule entry for the result") - results_available = serializers.SerializerMethodField( - help_text="The number of available results") - - class Meta: - model = ScheduleEntry - fields = ('results', 'results_available', 'schedule_entry') - - def get_results(self, obj): - request = self.context['request'] - route = 
'result-list' - kws = {'schedule_entry_name': obj.name} - kws.update(V1) - url = reverse(route, kwargs=kws, request=request) - return url - - def get_results_available(self, obj): - return obj.results.count() - - def get_schedule_entry(self, obj): - request = self.context['request'] - route = 'schedule-detail' - kws = {'pk': obj.name} - kws.update(V1) - url = reverse(route, kwargs=kws, request=request) - return url - - -# FIXME: this is identical to AcquisitionHyperlinkedRelatedField -class TaskResultHyperlinkedRelatedField(serializers.HyperlinkedRelatedField): - # django-rest-framework.org/api-guide/relations/#custom-hyperlinked-fields - def get_url(self, obj, view_name, request, format): - kws = { - 'schedule_entry_name': obj.schedule_entry.name, - 'task_id': obj.task_id - } - kws.update(V1) - url = reverse(view_name, kwargs=kws, request=request, format=format) - return url - - -class TaskResultSerializer(serializers.HyperlinkedModelSerializer): - self = TaskResultHyperlinkedRelatedField( - view_name='result-detail', - read_only=True, - help_text="The url of the result", - source='*' # pass whole object - ) - schedule_entry = serializers.SerializerMethodField( - help_text="The url of the parent schedule entry") - - class Meta: - model = TaskResult - fields = ( - 'self', - 'task_id', - 'started', - 'finished', - 'duration', - 'result', - 'detail', - 'schedule_entry', - ) - - def get_schedule_entry(self, obj): - request = self.context['request'] - route = 'schedule-detail' - kws = {'pk': obj.schedule_entry.name} - kws.update(V1) - url = reverse(route, kwargs=kws, request=request) - return url diff --git a/src/results/tests/__init__.py b/src/results/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/results/tests/test_detail_view.py b/src/results/tests/test_detail_view.py deleted file mode 100644 index 143da522..00000000 --- a/src/results/tests/test_detail_view.py +++ /dev/null @@ -1,39 +0,0 @@ -from rest_framework import status - -from acquisitions.tests.utils import simulate_acquisitions -from results.tests.utils import create_task_results, reverse_result_detail -from sensor.tests.utils import validate_response, HTTPS_KWARG - - -def test_can_view_own_result_details(user_client): - """A user should be able to view results they create.""" - entry_name = create_task_results(1, user_client) - url = reverse_result_detail(entry_name, 1) - response = user_client.get(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_200_OK) - - -def test_can_view_others_result_details(user_client, alt_user_client): - """A user should be able to view results created by others.""" - entry_name = create_task_results(1, user_client) - url = reverse_result_detail(entry_name, 1) - response = alt_user_client.get(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_200_OK) - - -def test_cannot_view_private_result_details(user_client, admin_client, - test_scheduler): - """A user should not be able to view the result of a private task.""" - entry_name = simulate_acquisitions(admin_client, is_private=True) - url = reverse_result_detail(entry_name, 1) - response = user_client.get(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_404_NOT_FOUND) - - -def test_cannot_delete_result_details(user_client): - """Results are read-only.""" - entry_name = create_task_results(1, user_client) - url = reverse_result_detail(entry_name, 1) - response = user_client.delete(url, **HTTPS_KWARG) - - validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED) diff --git 
a/src/results/tests/utils.py b/src/results/tests/utils.py deleted file mode 100644 index 6e8b4d8f..00000000 --- a/src/results/tests/utils.py +++ /dev/null @@ -1,89 +0,0 @@ -import datetime - -from django.test import RequestFactory -from django.utils import timezone -from rest_framework.reverse import reverse -from rest_framework import status - -from results.models import TaskResult -from schedule.models import ScheduleEntry -from schedule.tests.utils import post_schedule, TEST_SCHEDULE_ENTRY -from sensor import V1 -from sensor.tests.utils import validate_response, HTTPS_KWARG - -TEST_MAX_TASK_RESULTS = 100 # Reduce from default of settings.MAX_TASK_RESULTS -ONE_MICROSECOND = datetime.timedelta(0, 0, 1) - -EMPTY_RESULTS_RESPONSE = [] - - -def create_task_results(n, user_client, entry_name=None): - # We need an entry in the schedule to create TRs for - try: - entry = ScheduleEntry.objects.get(name=entry_name) - except Exception: - test_entry = TEST_SCHEDULE_ENTRY - if entry_name is not None: - test_entry['name'] = entry_name - - rjson = post_schedule(user_client, test_entry) - entry_name = rjson['name'] - entry = ScheduleEntry.objects.get(name=entry_name) - - for i in range(n): - started = timezone.now() - tr = TaskResult( - schedule_entry=entry, - task_id=i + 1, - started=started, - finished=started + ONE_MICROSECOND, - duration=ONE_MICROSECOND, - result='success', - detail='') - tr.max_results = TEST_MAX_TASK_RESULTS - tr.save() - - return entry_name - - -def reverse_results_overview(): - rf = RequestFactory() - request = rf.get('/results/', **HTTPS_KWARG) - return reverse('results-overview', kwargs=V1, request=request) - - -def reverse_result_list(schedule_entry_name): - rf = RequestFactory() - request = rf.get('/results/' + schedule_entry_name, **HTTPS_KWARG) - kws = {'schedule_entry_name': schedule_entry_name} - kws.update(V1) - return reverse('result-list', kwargs=kws, request=request) - - -def reverse_result_detail(schedule_entry_name, task_id): - rf = RequestFactory() - url = '/results/' + schedule_entry_name + '/' + str(task_id) - request = rf.get(url, **HTTPS_KWARG) - kws = {'schedule_entry_name': schedule_entry_name, 'task_id': task_id} - kws.update(V1) - return reverse('result-detail', kwargs=kws, request=request) - - -def get_results_overview(client): - url = reverse_results_overview() - response = client.get(url, **HTTPS_KWARG) - rjson = validate_response(response, status.HTTP_200_OK) - return rjson['results'] - - -def get_result_list(client, schedule_entry_name): - url = reverse_result_list(schedule_entry_name) - response = client.get(url, **HTTPS_KWARG) - rjson = validate_response(response, status.HTTP_200_OK) - return rjson['results'] - - -def get_result_detail(client, schedule_entry_name, task_id): - url = reverse_result_detail(schedule_entry_name, task_id) - response = client.get(url, **HTTPS_KWARG) - return validate_response(response, status.HTTP_200_OK) diff --git a/src/results/urls.py b/src/results/urls.py deleted file mode 100644 index db12f641..00000000 --- a/src/results/urls.py +++ /dev/null @@ -1,20 +0,0 @@ -from django.urls import path - -from .views import (ResultsOverviewViewSet, ResultListViewSet, - ResultInstanceViewSet) - -urlpatterns = ( - path('', - view=ResultsOverviewViewSet.as_view({'get': 'list'}), - name='results-overview'), - path('/', - view=ResultListViewSet.as_view({ - 'get': 'list', - }), - name='result-list'), - path('//', - view=ResultInstanceViewSet.as_view({ - 'get': 'retrieve', - }), - name='result-detail') -) diff --git 
a/src/results/views.py b/src/results/views.py deleted file mode 100644 index 4082de74..00000000 --- a/src/results/views.py +++ /dev/null @@ -1,97 +0,0 @@ -from django.http import Http404 -from rest_framework import filters -from rest_framework.generics import get_object_or_404 -from rest_framework.mixins import ListModelMixin, RetrieveModelMixin -from rest_framework.viewsets import GenericViewSet - -from schedule.models import ScheduleEntry -from .models import TaskResult -from .serializers import TaskResultsOverviewSerializer, TaskResultSerializer - - -class ResultsOverviewViewSet(ListModelMixin, GenericViewSet): - """ - list: - Returns an overview of how many results are available per schedule - entry. - """ - lookup_field = 'schedule_entry_name' - queryset = ScheduleEntry.objects.all() - serializer_class = TaskResultsOverviewSerializer - - def get_queryset(self): - # .list() does not call .get_object(), which triggers permissions - # checks, so we need to filter our queryset based on `is_private` and - # request user. - base_queryset = self.filter_queryset(self.queryset) - if self.request.user.is_staff: - return base_queryset.all() - else: - return base_queryset.filter(is_private=False) - - -class MultipleFieldLookupMixin(object): - """Get multiple field filtering based on a `lookup_fields` attribute.""" - - def get_queryset(self): - base_queryset = super(MultipleFieldLookupMixin, self).get_queryset() - base_queryset = self.filter_queryset(base_queryset) - - filter = {'schedule_entry__name': self.kwargs['schedule_entry_name']} - if not self.request.user.is_staff: - filter.update({'schedule_entry__is_private': False}) - - queryset = base_queryset.filter(**filter) - - if not queryset.exists(): - raise Http404 - - return queryset - - def get_object(self): - queryset = self.get_queryset() - filter = {'task_id': self.kwargs['task_id']} - - return get_object_or_404(queryset, **filter) - - -class ResultListViewSet(ListModelMixin, GenericViewSet): - """ - list: - Returns a list of all results created by the given schedule entry. - """ - queryset = TaskResult.objects.all() - serializer_class = TaskResultSerializer - filter_backends = (filters.SearchFilter, filters.OrderingFilter) - lookup_fields = ('schedule_entry__name', ) - lookup_fields = ('schedule_entry__name', 'task_id') - ordering_fields = ('task_id', 'started', 'finished', 'duration', 'result') - search_fields = ('task_id', 'result', 'detail') - - def get_queryset(self): - # .list() does not call .get_object(), which triggers permissions - # checks, so we need to filter our queryset based on `is_private` and - # request user. - base_queryset = self.filter_queryset(self.queryset) - - filter = {'schedule_entry__name': self.kwargs['schedule_entry_name']} - if not self.request.user.is_staff: - filter.update({'schedule_entry__is_private': False}) - - queryset = base_queryset.filter(**filter) - - if not queryset.exists(): - raise Http404 - - return queryset.all() - - -class ResultInstanceViewSet(MultipleFieldLookupMixin, RetrieveModelMixin, - GenericViewSet): - """ - retrieve: - Returns a specific result. 
- """ - queryset = TaskResult.objects.all() - serializer_class = TaskResultSerializer - lookup_fields = ('schedule_entry__name', 'task_id') diff --git a/src/schedule/apps.py b/src/schedule/apps.py index 4bb70791..a887cfa2 100644 --- a/src/schedule/apps.py +++ b/src/schedule/apps.py @@ -2,4 +2,4 @@ class ScheduleConfig(AppConfig): - name = 'schedule' + name = "schedule" diff --git a/src/schedule/migrations/0001_initial.py b/src/schedule/migrations/0001_initial.py index 90353ffa..24a788c7 100644 --- a/src/schedule/migrations/0001_initial.py +++ b/src/schedule/migrations/0001_initial.py @@ -1,9 +1,10 @@ -# Generated by Django 2.2.1 on 2019-05-15 20:55 +# Generated by Django 2.2.1 on 2019-05-17 20:43 -from django.conf import settings import django.core.validators -from django.db import migrations, models import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + import schedule.models.schedule_entry @@ -11,42 +12,165 @@ class Migration(migrations.Migration): initial = True - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] + dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)] operations = [ migrations.CreateModel( - name='Request', + name="Request", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('scheme', models.CharField(blank=True, max_length=16, null=True)), - ('version', models.CharField(blank=True, max_length=16, null=True)), - ('host', models.CharField(blank=True, max_length=255, null=True)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("scheme", models.CharField(blank=True, max_length=16, null=True)), + ("version", models.CharField(blank=True, max_length=16, null=True)), + ("host", models.CharField(blank=True, max_length=255, null=True)), ], ), migrations.CreateModel( - name='ScheduleEntry', + name="ScheduleEntry", fields=[ - ('name', models.SlugField(help_text='[Required] The unique identifier used in URLs and filenames', primary_key=True, serialize=False)), - ('action', models.CharField(choices=[('acquire_700c_dl', 'acquire_700c_dl - Apply m4s detector over 300 1024-pt FFTs at 751.00 MHz.'), ('logger', 'logger - Log the message "running test {name}/{tid}".'), ('survey_700_band_iq', 'survey_700_band_iq - Capture time-domain IQ samples at 10 frequencies between')], help_text='[Required] The name of the action to be scheduled', max_length=50)), - ('priority', models.SmallIntegerField(default=10, help_text='Lower number is higher priority (default=10)', validators=[django.core.validators.MinValueValidator(-20), django.core.validators.MaxValueValidator(19)])), - ('start', models.BigIntegerField(blank=True, default=schedule.models.schedule_entry.next_schedulable_timefn, help_text="Absolute time (epoch) to start, or leave blank for 'now'")), - ('stop', models.BigIntegerField(blank=True, help_text="Absolute time (epoch) to stop, or leave blank for 'never'", null=True)), - ('interval', models.PositiveIntegerField(blank=True, help_text='Seconds between tasks, or leave blank to run once', null=True, validators=[django.core.validators.MinValueValidator(1)])), - ('is_active', models.BooleanField(default=True, help_text='Indicates whether the entry should be removed from the scheduler without removing it from the system')), - ('is_private', models.BooleanField(default=False, help_text='Indicates whether the entry, and resulting data, are only 
visible to admins')), - ('callback_url', models.URLField(blank=True, help_text='If given, the scheduler will POST a `TaskResult` JSON object to this URL after each task completes', null=True)), - ('next_task_time', models.BigIntegerField(editable=False, help_text='The time the next task is scheduled to be executed', null=True)), - ('next_task_id', models.IntegerField(default=1, editable=False, help_text='The id of the next task to be executed')), - ('created', models.DateTimeField(auto_now_add=True, help_text='The date the entry was created')), - ('modified', models.DateTimeField(auto_now=True, help_text='The date the entry was modified')), - ('owner', models.ForeignKey(editable=False, help_text='The name of the user who owns the entry', on_delete=django.db.models.deletion.CASCADE, related_name='schedule_entries', to=settings.AUTH_USER_MODEL)), - ('request', models.ForeignKey(editable=False, help_text='The request that created the entry', null=True, on_delete=django.db.models.deletion.CASCADE, to='schedule.Request')), + ( + "name", + models.SlugField( + help_text="[Required] The unique identifier used in URLs and filenames", + primary_key=True, + serialize=False, + ), + ), + ( + "action", + models.CharField( + choices=[ + ( + "acquire_700c_dl", + "acquire_700c_dl - Apply m4s detector over 300 1024-pt FFTs at 751.00 MHz.", + ), + ( + "logger", + 'logger - Log the message "running test {name}/{tid}".', + ), + ( + "survey_700_band_iq", + "survey_700_band_iq - Capture time-domain IQ samples at 10 frequencies between", + ), + ], + help_text="[Required] The name of the action to be scheduled", + max_length=50, + ), + ), + ( + "priority", + models.SmallIntegerField( + default=10, + help_text="Lower number is higher priority (default=10)", + validators=[ + django.core.validators.MinValueValidator(-20), + django.core.validators.MaxValueValidator(19), + ], + ), + ), + ( + "start", + models.BigIntegerField( + blank=True, + default=schedule.models.schedule_entry.next_schedulable_timefn, + help_text="Absolute time (epoch) to start, or leave blank for 'now'", + ), + ), + ( + "stop", + models.BigIntegerField( + blank=True, + help_text="Absolute time (epoch) to stop, or leave blank for 'never'", + null=True, + ), + ), + ( + "interval", + models.PositiveIntegerField( + blank=True, + help_text="Seconds between tasks, or leave blank to run once", + null=True, + validators=[django.core.validators.MinValueValidator(1)], + ), + ), + ( + "is_active", + models.BooleanField( + default=True, + help_text="Indicates whether the entry should be removed from the scheduler without removing it from the system", + ), + ), + ( + "is_private", + models.BooleanField( + default=False, + help_text="Indicates whether the entry, and resulting data, are only visible to admins", + ), + ), + ( + "callback_url", + models.URLField( + blank=True, + help_text="If given, the scheduler will POST a `TaskResult` JSON object to this URL after each task completes", + null=True, + ), + ), + ( + "next_task_time", + models.BigIntegerField( + editable=False, + help_text="The time the next task is scheduled to be executed", + null=True, + ), + ), + ( + "next_task_id", + models.IntegerField( + default=1, + editable=False, + help_text="The id of the next task to be executed", + ), + ), + ( + "created", + models.DateTimeField( + auto_now_add=True, help_text="The date the entry was created" + ), + ), + ( + "modified", + models.DateTimeField( + auto_now=True, help_text="The date the entry was modified" + ), + ), + ( + "owner", + models.ForeignKey( + 
editable=False, + help_text="The name of the user who owns the entry", + on_delete=django.db.models.deletion.CASCADE, + related_name="schedule_entries", + to=settings.AUTH_USER_MODEL, + ), + ), + ( + "request", + models.ForeignKey( + editable=False, + help_text="The request that created the entry", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="schedule.Request", + ), + ), ], - options={ - 'db_table': 'schedule', - 'ordering': ('created',), - }, + options={"db_table": "schedule", "ordering": ("created",)}, ), ] diff --git a/src/schedule/models/__init__.py b/src/schedule/models/__init__.py index 7eb52c55..c5ac34a6 100644 --- a/src/schedule/models/__init__.py +++ b/src/schedule/models/__init__.py @@ -1,4 +1,2 @@ -from __future__ import absolute_import - -from .schedule_entry import ScheduleEntry, DEFAULT_PRIORITY # noqa from .request import Request # noqa +from .schedule_entry import DEFAULT_PRIORITY, ScheduleEntry # noqa diff --git a/src/schedule/models/request.py b/src/schedule/models/request.py index 28fbd917..96cfdca8 100644 --- a/src/schedule/models/request.py +++ b/src/schedule/models/request.py @@ -1,7 +1,5 @@ """Request model to save enough of a request to be passed to reverse().""" -from __future__ import absolute_import - from django.db import models from django.utils.encoding import iri_to_uri from django.utils.functional import cached_property @@ -10,6 +8,7 @@ class Request(models.Model): """Save enough of a request to be passed to reverse().""" + scheme = models.CharField(max_length=16, blank=True, null=True) version = models.CharField(max_length=16, blank=True, null=True) host = models.CharField(max_length=255, blank=True, null=True) diff --git a/src/schedule/models/schedule_entry.py b/src/schedule/models/schedule_entry.py index 4439cfe9..b7c127ac 100644 --- a/src/schedule/models/schedule_entry.py +++ b/src/schedule/models/schedule_entry.py @@ -1,7 +1,7 @@ import sys from itertools import count -from django.core.validators import MinValueValidator, MaxValueValidator +from django.core.validators import MaxValueValidator, MinValueValidator from django.db import models import actions @@ -66,78 +66,97 @@ class ScheduleEntry(models.Model): name = models.SlugField( primary_key=True, - help_text="[Required] The unique identifier used in URLs and filenames" + help_text="[Required] The unique identifier used in URLs and filenames", ) action = models.CharField( choices=actions.CHOICES, max_length=actions.MAX_LENGTH, - help_text="[Required] The name of the action to be scheduled") + help_text="[Required] The name of the action to be scheduled", + ) priority = models.SmallIntegerField( default=DEFAULT_PRIORITY, validators=(MinValueValidator(-20), MaxValueValidator(19)), - help_text=("Lower number is higher priority (default={})" - ).format(DEFAULT_PRIORITY)) + help_text=("Lower number is higher priority (default={})").format( + DEFAULT_PRIORITY + ), + ) start = models.BigIntegerField( blank=True, default=next_schedulable_timefn, - help_text="Absolute time (epoch) to start, or leave blank for 'now'") + help_text="Absolute time (epoch) to start, or leave blank for 'now'", + ) stop = models.BigIntegerField( null=True, blank=True, - help_text="Absolute time (epoch) to stop, or leave blank for 'never'") + help_text="Absolute time (epoch) to stop, or leave blank for 'never'", + ) interval = models.PositiveIntegerField( null=True, blank=True, - validators=(MinValueValidator(1), ), - help_text="Seconds between tasks, or leave blank to run once") + 
validators=(MinValueValidator(1),), + help_text="Seconds between tasks, or leave blank to run once", + ) is_active = models.BooleanField( default=True, editable=True, - help_text=("Indicates whether the entry should be removed from the " - "scheduler without removing it from the system")) + help_text=( + "Indicates whether the entry should be removed from the " + "scheduler without removing it from the system" + ), + ) is_private = models.BooleanField( default=False, editable=True, - help_text=("Indicates whether the entry, and resulting data, are only " - "visible to admins")) + help_text=( + "Indicates whether the entry, and resulting data, are only " + "visible to admins" + ), + ) callback_url = models.URLField( null=True, blank=True, - help_text=("If given, the scheduler will POST a `TaskResult` JSON " - "object to this URL after each task completes")) + help_text=( + "If given, the scheduler will POST a `TaskResult` JSON " + "object to this URL after each task completes" + ), + ) # read-only fields next_task_time = models.BigIntegerField( null=True, editable=False, - help_text="The time the next task is scheduled to be executed") + help_text="The time the next task is scheduled to be executed", + ) next_task_id = models.IntegerField( - default=1, - editable=False, - help_text="The id of the next task to be executed") + default=1, editable=False, help_text="The id of the next task to be executed" + ) created = models.DateTimeField( - auto_now_add=True, help_text="The date the entry was created") + auto_now_add=True, help_text="The date the entry was created" + ) modified = models.DateTimeField( - auto_now=True, help_text="The date the entry was modified") + auto_now=True, help_text="The date the entry was modified" + ) owner = models.ForeignKey( - 'authentication.User', + "authentication.User", editable=False, - related_name='schedule_entries', + related_name="schedule_entries", on_delete=models.CASCADE, - help_text="The name of the user who owns the entry") + help_text="The name of the user who owns the entry", + ) request = models.ForeignKey( - 'schedule.Request', + "schedule.Request", null=True, # null allowable for unit testing only editable=False, on_delete=models.CASCADE, - help_text="The request that created the entry") + help_text="The request that created the entry", + ) class Meta: - db_table = 'schedule' - ordering = ('created', ) + db_table = "schedule" + ordering = ("created",) def __init__(self, *args, **kwargs): - relative_stop = kwargs.pop('relative_stop', None) + relative_stop = kwargs.pop("relative_stop", None) super(ScheduleEntry, self).__init__(*args, **kwargs) @@ -215,6 +234,7 @@ def get_next_task_id(self): return next_task_id def __str__(self): - fmtstr = 'name={}, pri={}, start={}, stop={}, ival={}, action={}' - return fmtstr.format(self.name, self.priority, self.start, self.stop, - self.interval, self.action) + fmtstr = "name={}, pri={}, start={}, stop={}, ival={}, action={}" + return fmtstr.format( + self.name, self.priority, self.start, self.stop, self.interval, self.action + ) diff --git a/src/schedule/serializers.py b/src/schedule/serializers.py index 0ca2b072..c731cb90 100644 --- a/src/schedule/serializers.py +++ b/src/schedule/serializers.py @@ -5,14 +5,12 @@ import actions from sensor import V1 -from sensor.utils import (get_datetime_from_timestamp, - get_timestamp_from_datetime) -from .models import DEFAULT_PRIORITY, ScheduleEntry +from sensor.utils import get_datetime_from_timestamp, get_timestamp_from_datetime +from .models import DEFAULT_PRIORITY, 
ScheduleEntry action_help = "[Required] The name of the action to be scheduled" -priority_help = "Lower number is higher priority (default={})".format( - DEFAULT_PRIORITY) +priority_help = "Lower number is higher priority (default={})".format(DEFAULT_PRIORITY) def datetimes_to_timestamps(validated_data): @@ -48,15 +46,16 @@ def to_internal_value(self, dt_str): class ScheduleEntrySerializer(serializers.HyperlinkedModelSerializer): """Convert ScheduleEntry to and from JSON.""" - acquisitions = serializers.SerializerMethodField( - help_text="The list of acquisitions related to the entry") - results = serializers.SerializerMethodField( - help_text="The list of results related to the entry") + + task_results = serializers.SerializerMethodField( + help_text="The list of results related to the entry" + ) start = DateTimeFromTimestampField( required=False, allow_null=True, default=None, - help_text="UTC time (ISO 8601) to start, or leave blank for 'now'") + help_text="UTC time (ISO 8601) to start, or leave blank for 'now'", + ) stop = DateTimeFromTimestampField( required=False, allow_null=True, @@ -64,7 +63,9 @@ class ScheduleEntrySerializer(serializers.HyperlinkedModelSerializer): label="Absolute stop", help_text=( "UTC time (ISO 8601) to stop, " - "or leave blank for 'never' (not valid with relative stop)")) + "or leave blank for 'never' (not valid with relative stop)" + ), + ) relative_stop = serializers.IntegerField( required=False, write_only=True, @@ -73,51 +74,74 @@ class ScheduleEntrySerializer(serializers.HyperlinkedModelSerializer): min_value=1, help_text=( "Integer seconds after start to stop, " - "or leave blank for 'never' (not valid with absolute stop)")) + "or leave blank for 'never' (not valid with absolute stop)" + ), + ) next_task_time = DateTimeFromTimestampField( - read_only=True, - help_text="UTC time (ISO 8601) the next task is scheduled for") + read_only=True, help_text="UTC time (ISO 8601) the next task is scheduled for" + ) # action choices is modified in schedule/views.py based on user action = serializers.ChoiceField( choices=actions.CHOICES, - help_text="[Required] The name of the action to be scheduled") + help_text="[Required] The name of the action to be scheduled", + ) # priority min_value is modified in schedule/views.py based on user priority = serializers.IntegerField( required=False, allow_null=True, min_value=0, max_value=19, - help_text=priority_help) + help_text=priority_help, + ) # validate_only is a serializer-only field validate_only = serializers.BooleanField( required=False, default=False, - help_text="Only validate the input, do not modify the schedule") + help_text="Only validate the input, do not modify the schedule", + ) class Meta: model = ScheduleEntry - fields = ('self', 'name', 'action', 'priority', 'start', 'stop', - 'relative_stop', 'interval', 'is_active', 'is_private', - 'callback_url', 'next_task_time', 'next_task_id', 'created', - 'modified', 'owner', 'acquisitions', 'results', - 'validate_only') + fields = ( + "self", + "name", + "action", + "priority", + "start", + "stop", + "relative_stop", + "interval", + "is_active", + "is_private", + "callback_url", + "next_task_time", + "next_task_id", + "created", + "modified", + "owner", + "task_results", + "validate_only", + ) extra_kwargs = { - 'self': { - 'view_name': 'schedule-detail', - 'help_text': "The url of the entry" + "self": { + "view_name": "schedule-detail", + "help_text": "The url of the entry", + }, + "owner": { + "view_name": "user-detail", + "help_text": "The name of the user who
owns the entry", }, - 'owner': { - 'view_name': 'user-detail', - 'help_text': "The name of the user who owns the entry" - } } - read_only_fields = ('next_task_time', 'is_private') - write_only_fields = ('relative_stop', 'validate_only') + read_only_fields = ("next_task_time", "is_private") + write_only_fields = ("relative_stop", "validate_only") + # FIXME: This is required by drf_yasg, but may not be required for + # built-in DRF 3.10+ OpenAPI generation + ref_name = "ScheduleEntry" def save(self, *args, **kwargs): """Don't save if validate_only is True.""" - if self.validated_data.get('validate_only'): + if self.validated_data.get("validate_only"): return super(ScheduleEntrySerializer, self).save(*args, **kwargs) @@ -129,16 +153,16 @@ def validate(self, data): got_absolute_stop = False got_relative_stop = False - if 'start' in data: - if data['start'] is None: - data.pop('start') + if "start" in data: + if data["start"] is None: + data.pop("start") else: got_start = True - if 'stop' in data and data['stop'] is not None: + if "stop" in data and data["stop"] is not None: got_absolute_stop = True - if 'relative_stop' in data and data['relative_stop'] is not None: + if "relative_stop" in data and data["relative_stop"] is not None: got_relative_stop = True if got_absolute_stop and got_relative_stop: @@ -147,54 +171,52 @@ def validate(self, data): if got_start and got_absolute_stop: # We should have timestamps at this point - assert type(data['start']) is int - assert type(data['stop']) is int - if data['stop'] <= data['start']: + assert type(data["start"]) is int + assert type(data["stop"]) is int + if data["stop"] <= data["start"]: err = "stop time is not after start" raise serializers.ValidationError(err) - if 'priority' in data and data['priority'] is None: - data.pop('priority') + if "priority" in data and data["priority"] is None: + data.pop("priority") - if 'validate_only' in data and data['validate_only'] is not True: - data.pop('validate_only') + if "validate_only" in data and data["validate_only"] is not True: + data.pop("validate_only") return data - def get_acquisitions(self, obj): - request = self.context['request'] - kws = {'schedule_entry_name': obj.name} + def get_task_results(self, obj): + request = self.context["request"] + kws = {"schedule_entry_name": obj.name} kws.update(V1) - url = reverse('acquisition-list', kwargs=kws, request=request) - return url - - def get_results(self, obj): - request = self.context['request'] - kws = {'schedule_entry_name': obj.name} - kws.update(V1) - url = reverse('result-list', kwargs=kws, request=request) + url = reverse("task-result-list", kwargs=kws, request=request) return url def to_internal_value(self, data): """Clean up input before starting validation.""" # Allow 'absolute_stop' to be a synonym for 'stop' - if 'absolute_stop' in data: - data['stop'] = data.pop('absolute_stop') + if "absolute_stop" in data: + data["stop"] = data.pop("absolute_stop") return super().to_internal_value(data) class AdminScheduleEntrySerializer(ScheduleEntrySerializer): """ScheduleEntrySerializer class for superusers.""" + action = serializers.ChoiceField( - choices=actions.CHOICES + actions.ADMIN_CHOICES, - help_text=action_help) + choices=actions.CHOICES + actions.ADMIN_CHOICES, help_text=action_help + ) priority = serializers.IntegerField( required=False, allow_null=True, min_value=-20, max_value=19, - help_text=priority_help) + help_text=priority_help, + ) class Meta(ScheduleEntrySerializer.Meta): - read_only_fields = ('next_task_time', ) + read_only_fields 
= ("next_task_time",) + # FIXME: This is required by drf_yasg, but may not be required for + # built-in DRF 3.10+ OpenAPI generation + ref_name = "AdminScheduleEntry" diff --git a/src/schedule/tests/test_admin_views.py b/src/schedule/tests/test_admin_views.py index e8894d7e..5d7549bb 100644 --- a/src/schedule/tests/test_admin_views.py +++ b/src/schedule/tests/test_admin_views.py @@ -1,53 +1,55 @@ from rest_framework import status from rest_framework.reverse import reverse -from schedule.tests.utils import (EMPTY_SCHEDULE_RESPONSE, TEST_SCHEDULE_ENTRY, - TEST_PRIVATE_SCHEDULE_ENTRY, post_schedule, - update_schedule) +from schedule.tests.utils import ( + EMPTY_SCHEDULE_RESPONSE, + TEST_PRIVATE_SCHEDULE_ENTRY, + TEST_SCHEDULE_ENTRY, + post_schedule, + update_schedule, +) from sensor import V1 -from sensor.tests.utils import validate_response, HTTPS_KWARG +from sensor.tests.utils import HTTPS_KWARG, validate_response def test_post_admin_private_schedule(admin_client): rjson = post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - entry_name = rjson['name'] - kws = {'pk': entry_name} + entry_name = rjson["name"] + kws = {"pk": entry_name} kws.update(V1) - entry_url = reverse('schedule-detail', kwargs=kws) + entry_url = reverse("schedule-detail", kwargs=kws) admin_user_respose = admin_client.get(entry_url, **HTTPS_KWARG) for k, v in TEST_PRIVATE_SCHEDULE_ENTRY.items(): assert rjson[k] == v - assert rjson['is_private'] + assert rjson["is_private"] validate_response(admin_user_respose, status.HTTP_200_OK) - assert admin_user_respose.data['is_private'] + assert admin_user_respose.data["is_private"] def test_admin_can_view_private_entry_in_list(admin_client): post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - url = reverse('schedule-list', kwargs=V1) + url = reverse("schedule-list", kwargs=V1) response = admin_client.get(url, **HTTPS_KWARG) rjson = validate_response(response, status.HTTP_200_OK) assert rjson != EMPTY_SCHEDULE_RESPONSE -def test_admin_can_view_all_entries(admin_client, user_client, - alt_admin_client): +def test_admin_can_view_all_entries(admin_client, user_client, alt_admin_client): # user schedule entry user_rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - user_entry_name = user_rjson['name'] - kws = {'pk': user_entry_name} + user_entry_name = user_rjson["name"] + kws = {"pk": user_entry_name} kws.update(V1) - user_url = reverse('schedule-detail', kwargs=kws) + user_url = reverse("schedule-detail", kwargs=kws) # alt admin user schedule entry - alt_admin_rjson = post_schedule(alt_admin_client, - TEST_PRIVATE_SCHEDULE_ENTRY) - alt_admin_entry_name = alt_admin_rjson['name'] - kws = {'pk': alt_admin_entry_name} + alt_admin_rjson = post_schedule(alt_admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) + alt_admin_entry_name = alt_admin_rjson["name"] + kws = {"pk": alt_admin_entry_name} kws.update(V1) - alt_admin_url = reverse('schedule-detail', kwargs=kws) + alt_admin_url = reverse("schedule-detail", kwargs=kws) response = admin_client.get(user_url, **HTTPS_KWARG) validate_response(response, status.HTTP_200_OK) @@ -56,22 +58,20 @@ def test_admin_can_view_all_entries(admin_client, user_client, validate_response(response, status.HTTP_200_OK) -def test_admin_can_delete_all_entries(admin_client, user_client, - alt_admin_client): +def test_admin_can_delete_all_entries(admin_client, user_client, alt_admin_client): # user schedule entry user_rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - user_entry_name = user_rjson['name'] - kws = {'pk': user_entry_name} + 
user_entry_name = user_rjson["name"] + kws = {"pk": user_entry_name} kws.update(V1) - user_url = reverse('schedule-detail', kwargs=kws) + user_url = reverse("schedule-detail", kwargs=kws) # admin user schedule entry - alt_admin_rjson = post_schedule(alt_admin_client, - TEST_PRIVATE_SCHEDULE_ENTRY) - alt_admin_entry_name = alt_admin_rjson['name'] - kws = {'pk': alt_admin_entry_name} + alt_admin_rjson = post_schedule(alt_admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) + alt_admin_entry_name = alt_admin_rjson["name"] + kws = {"pk": alt_admin_entry_name} kws.update(V1) - alt_admin_url = reverse('schedule-detail', kwargs=kws) + alt_admin_url = reverse("schedule-detail", kwargs=kws) response = admin_client.delete(user_url, **HTTPS_KWARG) validate_response(response, status.HTTP_204_NO_CONTENT) @@ -84,39 +84,39 @@ def test_admin_can_delete_all_entries(admin_client, user_client, validate_response(response, status.HTTP_404_NOT_FOUND) -def test_admin_can_modify_all_entries(admin_client, user_client, - alt_admin_client): +def test_admin_can_modify_all_entries(admin_client, user_client, alt_admin_client): # user schedule entry user_rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - user_entry_name = user_rjson['name'] + user_entry_name = user_rjson["name"] - admin_adjust_user_response = update_schedule(admin_client, user_entry_name, - TEST_PRIVATE_SCHEDULE_ENTRY) + admin_adjust_user_response = update_schedule( + admin_client, user_entry_name, TEST_PRIVATE_SCHEDULE_ENTRY + ) # admin user schedule entry - alt_admin_rjson = post_schedule(alt_admin_client, - TEST_PRIVATE_SCHEDULE_ENTRY) - alt_admin_entry_name = alt_admin_rjson['name'] + alt_admin_rjson = post_schedule(alt_admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) + alt_admin_entry_name = alt_admin_rjson["name"] admin_adjust_alt_admin_response = update_schedule( - admin_client, alt_admin_entry_name, TEST_SCHEDULE_ENTRY) + admin_client, alt_admin_entry_name, TEST_SCHEDULE_ENTRY + ) validate_response(admin_adjust_user_response, status.HTTP_200_OK) - assert admin_adjust_user_response.data['is_private'] + assert admin_adjust_user_response.data["is_private"] validate_response(admin_adjust_alt_admin_response, status.HTTP_200_OK) - assert not admin_adjust_alt_admin_response.data['is_private'] + assert not admin_adjust_alt_admin_response.data["is_private"] def test_admin_can_use_negative_priority(admin_client): hipri = TEST_PRIVATE_SCHEDULE_ENTRY.copy() - hipri['priority'] = -20 + hipri["priority"] = -20 rjson = post_schedule(admin_client, hipri) - entry_name = rjson['name'] - kws = {'pk': entry_name} + entry_name = rjson["name"] + kws = {"pk": entry_name} kws.update(V1) - entry_url = reverse('schedule-detail', kwargs=kws) + entry_url = reverse("schedule-detail", kwargs=kws) admin_user_respose = admin_client.get(entry_url, **HTTPS_KWARG) - assert rjson['priority'] == -20 + assert rjson["priority"] == -20 validate_response(admin_user_respose, status.HTTP_200_OK) - assert admin_user_respose.data['is_private'] + assert admin_user_respose.data["is_private"] diff --git a/src/schedule/tests/test_models.py b/src/schedule/tests/test_models.py index dc066f1b..4a8ed861 100644 --- a/src/schedule/tests/test_models.py +++ b/src/schedule/tests/test_models.py @@ -4,18 +4,21 @@ import pytest from django.core.exceptions import ValidationError -from .utils import flatten +from schedule.models import DEFAULT_PRIORITY, ScheduleEntry from scheduler import utils -from schedule.models import ScheduleEntry, DEFAULT_PRIORITY + +from .utils import flatten 
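+# Illustrative sketch (not part of the original change): the parametrized
+# cases below encode (start, stop, interval), and `take_until(t)` is expected
+# to hand back only the task times that fall strictly before the clock value
+# t. Assuming the ScheduleEntry API exercised by these tests:
+#
+#   entry = ScheduleEntry(name="t", start=0, stop=5, interval=1, action="logger")
+#   list(entry.get_remaining_times())  # -> [0, 1, 2, 3, 4]
+#   entry.take_until(2)                # -> [0, 1]
+#   entry.take_until(4)                # -> [2, 3]
+#   entry.take_until(6)                # -> [4]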
-@pytest.mark.parametrize('test_input,future_t,expected', - [((0, 5, 1), 2, [[0, 1], [2, 3], [4]]), - ((1, 5, 2), 8, [[1, 3]])]) +@pytest.mark.parametrize( + "test_input,future_t,expected", + [((0, 5, 1), 2, [[0, 1], [2, 3], [4]]), ((1, 5, 2), 8, [[1, 3]])], +) def test_take_until(test_input, future_t, expected): start, stop, interval = test_input entry = ScheduleEntry( - name='t', start=start, stop=stop, interval=interval, action='logger') + name="t", start=start, stop=stop, interval=interval, action="logger" + ) initial_times = list(entry.get_remaining_times()) r = [] for t in count(future_t, future_t): @@ -29,33 +32,32 @@ def test_take_until(test_input, future_t, expected): def test_undefined_start_is_now(): - entry = ScheduleEntry(name='t', action='logger') + entry = ScheduleEntry(name="t", action="logger") now = utils.timefn() assert entry.start in (now - 1, now, now + 1) def test_undefined_stop_is_never(): - entry = ScheduleEntry(name='t', action='logger', interval=1) + entry = ScheduleEntry(name="t", action="logger", interval=1) assert entry.stop is None assert type(entry.get_remaining_times()) is itertools.count def test_relative_stop_becomes_absolute(): - e = ScheduleEntry( - name='t', start=20, relative_stop=10, interval=1, action='logger') + e = ScheduleEntry(name="t", start=20, relative_stop=10, interval=1, action="logger") assert e.start == 20 assert e.stop == 30 assert list(e.get_remaining_times()) == list(range(20, 30, 1)) def test_stop_before_start(): - e = ScheduleEntry(name='t', start=20, stop=10, interval=1, action='logger') + e = ScheduleEntry(name="t", start=20, stop=10, interval=1, action="logger") assert list(e.get_remaining_times()) == list(range(0)) def test_no_interval_is_one_shot(): """Leaving `interval` blank should indicate "one-shot" entry.""" - e = ScheduleEntry(name='t', action='logger') + e = ScheduleEntry(name="t", action="logger") remaining_times = list(e.get_remaining_times()) assert len(remaining_times) == 1 @@ -70,7 +72,7 @@ def test_no_interval_is_one_shot(): def test_no_interval_with_start_is_one_shot(): """Specifying start should not affect number of times.""" - e = ScheduleEntry(name='t', action='logger', start=1) + e = ScheduleEntry(name="t", action="logger", start=1) remaining_times = list(e.get_remaining_times()) assert len(remaining_times) == 1 @@ -86,42 +88,42 @@ def test_no_interval_with_start_is_one_shot(): def test_no_interval_future_start(testclock): """One-shot entry should wait for start.""" # recall current t=0 so start=1 is 1 second in the future - e = ScheduleEntry(name='t', action='logger', start=1) + e = ScheduleEntry(name="t", action="logger", start=1) assert not e.take_pending() def test_bad_interval_raises(): with pytest.raises(ValidationError): - ScheduleEntry(name='t', interval=-1, action='logger').clean_fields() + ScheduleEntry(name="t", interval=-1, action="logger").clean_fields() with pytest.raises(ValidationError): - ScheduleEntry(name='t', interval=0, action='logger').clean_fields() + ScheduleEntry(name="t", interval=0, action="logger").clean_fields() with pytest.raises(ValidationError): - ScheduleEntry(name='t', interval=0.1, action='logger').clean_fields() + ScheduleEntry(name="t", interval=0.1, action="logger").clean_fields() def test_bad_action_raises(): with pytest.raises(ValidationError): - ScheduleEntry(name='t', action='this_doesnt_exist').clean_fields() + ScheduleEntry(name="t", action="this_doesnt_exist").clean_fields() def test_bad_name_raises(): with pytest.raises(ValidationError): # whitespace - 
ScheduleEntry(name='test 1', action='logger').clean_fields() + ScheduleEntry(name="test 1", action="logger").clean_fields() with pytest.raises(ValidationError): # punctuation other than "_-" - ScheduleEntry(name='test1!', action='logger').clean_fields() + ScheduleEntry(name="test1!", action="logger").clean_fields() # ok - ScheduleEntry(name='_test-Stuff123', action='logger').clean_fields() + ScheduleEntry(name="_test-Stuff123", action="logger").clean_fields() def test_non_unique_name_raises(user): - ScheduleEntry(name='t', action='logger', owner=user).save() + ScheduleEntry(name="t", action="logger", owner=user).save() with pytest.raises(ValidationError): - ScheduleEntry(name='t', action='logger', owner=user).full_clean() + ScheduleEntry(name="t", action="logger", owner=user).full_clean() def test_defaults(): - entry = ScheduleEntry(name='t', action='logger') + entry = ScheduleEntry(name="t", action="logger") assert entry.priority == DEFAULT_PRIORITY assert entry.start is not None assert entry.stop is None @@ -130,4 +132,4 @@ def test_defaults(): def test_str(): - str(ScheduleEntry(name='t', action='logger')) + str(ScheduleEntry(name="t", action="logger")) diff --git a/src/schedule/tests/test_serializers.py b/src/schedule/tests/test_serializers.py index 52c066cb..85b564d0 100644 --- a/src/schedule/tests/test_serializers.py +++ b/src/schedule/tests/test_serializers.py @@ -1,7 +1,6 @@ import pytest -from schedule.serializers import ( - AdminScheduleEntrySerializer, ScheduleEntrySerializer) +from schedule.serializers import AdminScheduleEntrySerializer, ScheduleEntrySerializer from sensor.utils import parse_datetime_str from .utils import post_schedule @@ -14,90 +13,56 @@ # Test that valid user input is valid @pytest.mark.django_db @pytest.mark.parametrize( - 'entry_json', + "entry_json", [ # A name and action should be the minimum acceptable entry # i.e., (one-shot, ASAP) - { - 'name': 'test', - 'action': 'logger' - }, + {"name": "test", "action": "logger"}, # Stop 10 seconds after starting, start ASAP - { - 'name': 'test', - 'action': 'logger', - 'relative_stop': 10 - }, + {"name": "test", "action": "logger", "relative_stop": 10}, # Min integer interval ok - { - 'name': 'test', - 'action': 'logger', - 'interval': 10 - }, + {"name": "test", "action": "logger", "interval": 10}, # Max priority ok - { - 'name': 'test', - 'action': 'logger', - 'priority': 19 - }, + {"name": "test", "action": "logger", "priority": 19}, # Min user priority ok - { - 'name': 'test', - 'action': 'logger', - 'priority': 0 - }, + {"name": "test", "action": "logger", "priority": 0}, # Stop 10 seconds after starting; start at absolute time { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:25Z', - 'relative_stop': 10, + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:25Z", + "relative_stop": 10, }, # Start and stop at absolute time; equivalent to above { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:25Z', - 'absolute_stop': '2018-03-16T17:12:35Z', + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:25Z", + "absolute_stop": "2018-03-16T17:12:35Z", }, # 'stop' and 'absolute_stop' are synonyms - { - 'name': 'test', - 'action': 'logger', - 'stop': '2018-03-16T17:12:35.0Z' - }, + {"name": "test", "action": "logger", "stop": "2018-03-16T17:12:35.0Z"}, # Subseconds are optional - { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:35Z' - }, + {"name": "test", "action": "logger", "start": "2018-03-16T17:12:35Z"}, # Sensor is 
timezone-aware - { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-22T13:53:25-06:00' - }, + {"name": "test", "action": "logger", "start": "2018-03-22T13:53:25-06:00"}, # All non-boolean, non-required fields accept null to mean not defined { - 'name': 'test', - 'action': 'logger', - 'start': None, - 'absolute_stop': None, - 'relative_stop': None, - 'priority': None, - 'start': None, - 'start': None, - 'interval': None, - 'callback_url': None, + "name": "test", + "action": "logger", + "start": None, + "absolute_stop": None, + "relative_stop": None, + "priority": None, + "start": None, + "start": None, + "interval": None, + "callback_url": None, }, # Explicit validate_only is valid - { - 'name': 'test', - 'action': 'logger', - 'validate_only': False - }, - ]) + {"name": "test", "action": "logger", "validate_only": False}, + ], +) def test_valid_user_entries(entry_json, user): serializer = ScheduleEntrySerializer(data=entry_json) assert serializer.is_valid() @@ -107,96 +72,58 @@ # Test that valid admin input is valid @pytest.mark.django_db @pytest.mark.parametrize( - 'entry_json', + "entry_json", [ # A name and action should be the minimum acceptable entry # i.e., (one-shot, ASAP) - { - 'name': 'test', - 'action': 'logger' - }, + {"name": "test", "action": "logger"}, # Stop 10 seconds after starting, start ASAP - { - 'name': 'test', - 'action': 'logger', - 'relative_stop': 10 - }, + {"name": "test", "action": "logger", "relative_stop": 10}, # Min integer interval ok - { - 'name': 'test', - 'action': 'logger', - 'interval': 10 - }, + {"name": "test", "action": "logger", "interval": 10}, # Max priority ok - { - 'name': 'test', - 'action': 'logger', - 'priority': 19 - }, + {"name": "test", "action": "logger", "priority": 19}, # Min admin priority ok - { - 'name': 'test', - 'action': 'logger', - 'priority': -20 - }, + {"name": "test", "action": "logger", "priority": -20}, # Stop 10 seconds after starting; start at absolute time { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:25Z', - 'relative_stop': 10, + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:25Z", + "relative_stop": 10, }, # Start and stop at absolute time; equivalent to above { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:25Z', - 'absolute_stop': '2018-03-16T17:12:35Z', + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:25Z", + "absolute_stop": "2018-03-16T17:12:35Z", }, # 'stop' and 'absolute_stop' are synonyms - { - 'name': 'test', - 'action': 'logger', - 'stop': '2018-03-16T17:12:35.0Z' - }, + {"name": "test", "action": "logger", "stop": "2018-03-16T17:12:35.0Z"}, # Subseconds are optional - { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:35Z' - }, + {"name": "test", "action": "logger", "start": "2018-03-16T17:12:35Z"}, # Sensor is timezone-aware - { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-22T13:53:25-06:00' - }, + {"name": "test", "action": "logger", "start": "2018-03-22T13:53:25-06:00"}, # All non-boolean, non-required fields accept null to mean not defined { - 'name': 'test', - 'action': 'logger', - 'start': None, - 'absolute_stop': None, - 'relative_stop': None, - 'priority': None, - 'start': None, - 'start': None, - 'interval': None, - 'callback_url': None, + "name": "test", + "action": "logger", + "start": None, + "absolute_stop": None, + "relative_stop": None, + "priority": None, + "start": None, + "start": None, + "interval": None, +
"callback_url": None, }, # Explicit validate_only is valid - { - 'name': 'test', - 'action': 'logger', - 'validate_only': False - }, + {"name": "test", "action": "logger", "validate_only": False}, # Admin can create private entries - { - 'name': 'test', - 'action': 'logger', - 'is_private': True - } - ]) + {"name": "test", "action": "logger", "is_private": True}, + ], +) def test_valid_admin_entries(entry_json, user): serializer = AdminScheduleEntrySerializer(data=entry_json) assert serializer.is_valid() @@ -206,93 +133,54 @@ def test_valid_admin_entries(entry_json, user): # Test that invalid user input is invalid @pytest.mark.django_db @pytest.mark.parametrize( - 'entry_json', + "entry_json", [ # name is a required field - { - 'action': 'logger' - }, + {"action": "logger"}, # action is a required field - { - 'name': 'test' - }, + {"name": "test"}, # non-integer priority - { - 'name': 'test', - 'action': 'logger', - 'priority': 3.14 - }, + {"name": "test", "action": "logger", "priority": 3.14}, # priority less than min (for normal user) - { - 'name': 'test', - 'action': 'logger', - 'priority': -1 - }, + {"name": "test", "action": "logger", "priority": -1}, # priority greater than max (19) - { - 'name': 'test', - 'action': 'logger', - 'priority': 20 - }, + {"name": "test", "action": "logger", "priority": 20}, # non-integer interval - { - 'name': 'test', - 'action': 'logger', - 'interval': 3.14 - }, + {"name": "test", "action": "logger", "interval": 3.14}, # zero interval - { - 'name': 'test', - 'action': 'logger', - 'interval': 0 - }, + {"name": "test", "action": "logger", "interval": 0}, # negative interval - { - 'name': 'test', - 'action': 'logger', - 'interval': -1 - }, + {"name": "test", "action": "logger", "interval": -1}, # can't interpret both absolute and relative stop { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:25.0Z', - 'absolute_stop': '2018-03-16T17:12:35.0Z', - 'relative_stop': 10, + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:25.0Z", + "absolute_stop": "2018-03-16T17:12:35.0Z", + "relative_stop": 10, }, # 0 relative_stop - { - 'name': 'test', - 'action': 'logger', - 'relative_stop': 0 - }, + {"name": "test", "action": "logger", "relative_stop": 0}, # negative relative_stop - { - 'name': 'test', - 'action': 'logger', - 'relative_stop': -10 - }, + {"name": "test", "action": "logger", "relative_stop": -10}, # non-integer relative_stop - { - 'name': 'test', - 'action': 'logger', - 'relative_stop': 3.14 - }, + {"name": "test", "action": "logger", "relative_stop": 3.14}, # stop is before start { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:35Z', - 'stop': '2018-03-16T17:12:30Z' + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:35Z", + "stop": "2018-03-16T17:12:30Z", }, # stop is same as start { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:35Z', - 'stop': '2018-03-16T17:12:35Z', - } - ]) + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:35Z", + "stop": "2018-03-16T17:12:35Z", + }, + ], +) def test_invalid_user_entries(entry_json): serializer = ScheduleEntrySerializer(data=entry_json) assert not serializer.is_valid() @@ -301,93 +189,54 @@ def test_invalid_user_entries(entry_json): # Test that invalid admin input is invalid @pytest.mark.django_db @pytest.mark.parametrize( - 'entry_json', + "entry_json", [ # name is a required field - { - 'action': 'logger' - }, + {"action": "logger"}, # action is a required field - { - 'name': 'test' - }, + {"name": 
"test"}, # non-integer priority - { - 'name': 'test', - 'action': 'logger', - 'priority': 3.14 - }, + {"name": "test", "action": "logger", "priority": 3.14}, # priority less than min (for admin) - { - 'name': 'test', - 'action': 'logger', - 'priority': -21 - }, + {"name": "test", "action": "logger", "priority": -21}, # priority greater than max (19) - { - 'name': 'test', - 'action': 'logger', - 'priority': 20 - }, + {"name": "test", "action": "logger", "priority": 20}, # non-integer interval - { - 'name': 'test', - 'action': 'logger', - 'interval': 3.14 - }, + {"name": "test", "action": "logger", "interval": 3.14}, # zero interval - { - 'name': 'test', - 'action': 'logger', - 'interval': 0 - }, + {"name": "test", "action": "logger", "interval": 0}, # negative interval - { - 'name': 'test', - 'action': 'logger', - 'interval': -1 - }, + {"name": "test", "action": "logger", "interval": -1}, # can't interpret both absolute and relative stop { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:25.0Z', - 'absolute_stop': '2018-03-16T17:12:35.0Z', - 'relative_stop': 10, + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:25.0Z", + "absolute_stop": "2018-03-16T17:12:35.0Z", + "relative_stop": 10, }, # 0 relative_stop - { - 'name': 'test', - 'action': 'logger', - 'relative_stop': 0 - }, + {"name": "test", "action": "logger", "relative_stop": 0}, # negative relative_stop - { - 'name': 'test', - 'action': 'logger', - 'relative_stop': -10 - }, + {"name": "test", "action": "logger", "relative_stop": -10}, # non-integer relative_stop - { - 'name': 'test', - 'action': 'logger', - 'relative_stop': 3.14 - }, + {"name": "test", "action": "logger", "relative_stop": 3.14}, # stop is before start { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:35Z', - 'stop': '2018-03-16T17:12:30Z' + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:35Z", + "stop": "2018-03-16T17:12:30Z", }, # stop is same as start { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:35Z', - 'stop': '2018-03-16T17:12:35Z', + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:35Z", + "stop": "2018-03-16T17:12:35Z", }, - ]) + ], +) def test_invalid_admin_entries(entry_json): serializer = AdminScheduleEntrySerializer(data=entry_json) assert not serializer.is_valid() @@ -400,35 +249,34 @@ def test_invalid_admin_entries(entry_json): def test_serialized_fields(user_client): """Certain fields on the schedule entry model should be serialized.""" - rjson = post_schedule(user_client, {'name': 'test', 'action': 'logger'}) + rjson = post_schedule(user_client, {"name": "test", "action": "logger"}) # nullable fields - assert 'interval' in rjson - assert 'callback_url' in rjson + assert "interval" in rjson + assert "callback_url" in rjson # non-nullable fields - assert rjson['name'] - assert rjson['action'] - assert rjson['priority'] is not None # potentially 0 - assert rjson['next_task_id'] + assert rjson["name"] + assert rjson["action"] + assert rjson["priority"] is not None # potentially 0 + assert rjson["next_task_id"] # nullable datetimes - assert rjson['start'] is None or parse_datetime_str(rjson['start']) - assert rjson['stop'] is None or parse_datetime_str(rjson['stop']) + assert rjson["start"] is None or parse_datetime_str(rjson["start"]) + assert rjson["stop"] is None or parse_datetime_str(rjson["stop"]) # datetimes - assert parse_datetime_str(rjson['created']) - assert parse_datetime_str(rjson['modified']) - assert 
parse_datetime_str(rjson['next_task_time']) + assert parse_datetime_str(rjson["created"]) + assert parse_datetime_str(rjson["modified"]) + assert parse_datetime_str(rjson["next_task_time"]) # booleans - assert rjson['is_active'] in {True, False} - assert rjson['is_private'] in {True, False} + assert rjson["is_active"] in {True, False} + assert rjson["is_private"] in {True, False} # links - assert rjson['self'] - assert rjson['owner'] - assert rjson['results'] - assert rjson['acquisitions'] + assert rjson["self"] + assert rjson["owner"] + assert rjson["task_results"] def test_non_serialized_fields(user_client): """Certain fields on the schedule entry model should not be serialized.""" - rjson = post_schedule(user_client, {'name': 'test', 'action': 'logger'}) + rjson = post_schedule(user_client, {"name": "test", "action": "logger"}) - assert 'relative_stop' not in rjson + assert "relative_stop" not in rjson diff --git a/src/schedule/tests/test_user_views.py b/src/schedule/tests/test_user_views.py index f9a3cf90..81bdf815 100644 --- a/src/schedule/tests/test_user_views.py +++ b/src/schedule/tests/test_user_views.py @@ -1,31 +1,35 @@ import pytest - from rest_framework import status from rest_framework.reverse import reverse -from schedule.tests.utils import (EMPTY_SCHEDULE_RESPONSE, TEST_SCHEDULE_ENTRY, - TEST_PRIVATE_SCHEDULE_ENTRY, - TEST_ALTERNATE_SCHEDULE_ENTRY, post_schedule, - update_schedule, reverse_detail_url) +from schedule.tests.utils import ( + EMPTY_SCHEDULE_RESPONSE, + TEST_ALTERNATE_SCHEDULE_ENTRY, + TEST_PRIVATE_SCHEDULE_ENTRY, + TEST_SCHEDULE_ENTRY, + post_schedule, + reverse_detail_url, + update_schedule, +) from sensor import V1 -from sensor.tests.utils import validate_response, HTTPS_KWARG +from sensor.tests.utils import HTTPS_KWARG, validate_response def test_user_cannot_post_private_schedule(user_client): """Unprivileged users should not be able to create private entries.""" rjson = post_schedule(user_client, TEST_PRIVATE_SCHEDULE_ENTRY) - entry_name = rjson['name'] + entry_name = rjson["name"] url = reverse_detail_url(entry_name) response = user_client.get(url, **HTTPS_KWARG) - assert not rjson['is_private'] + assert not rjson["is_private"] validate_response(response, status.HTTP_200_OK) - assert not response.data['is_private'] + assert not response.data["is_private"] def test_user_can_view_non_private_user_entries(user_client, alt_user_client): # alt user schedule entry alt_user_rjson = post_schedule(alt_user_client, TEST_SCHEDULE_ENTRY) - alt_user_entry_name = alt_user_rjson['name'] + alt_user_entry_name = alt_user_rjson["name"] alt_user_entry_url = reverse_detail_url(alt_user_entry_name) response = user_client.get(alt_user_entry_url, **HTTPS_KWARG) validate_response(response, status.HTTP_200_OK) @@ -34,7 +38,7 @@ def test_user_can_view_non_private_user_entries(user_client, alt_user_client): def test_user_can_view_non_private_admin_entries(admin_client, user_client): # admin user schedule entry admin_rjson = post_schedule(admin_client, TEST_ALTERNATE_SCHEDULE_ENTRY) - admin_entry_name = admin_rjson['name'] + admin_entry_name = admin_rjson["name"] admin_entry_url = reverse_detail_url(admin_entry_name) response = user_client.get(admin_entry_url, **HTTPS_KWARG) validate_response(response, status.HTTP_200_OK) @@ -42,10 +46,10 @@ def test_user_cannot_view_private_entry_in_list(admin_client, user_client): post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - url = reverse('schedule-list',
kwargs=V1) + url = reverse("schedule-list", kwargs=V1) response = user_client.get(url, **HTTPS_KWARG) rjson = validate_response(response, status.HTTP_200_OK) - assert rjson['results'] == EMPTY_SCHEDULE_RESPONSE + assert rjson["results"] == EMPTY_SCHEDULE_RESPONSE def test_user_cannot_view_private_entry_details(admin_client, user_client): @@ -53,7 +57,7 @@ def test_user_cannot_view_private_entry_details(admin_client, user_client): # Private indicates admin wants users to be unaware that the entry exists # on the system, hence 404 vs 403 (FORBIDDEN). rjson = post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - entry_name = rjson['name'] + entry_name = rjson["name"] url = reverse_detail_url(entry_name) response = user_client.get(url, **HTTPS_KWARG) validate_response(response, status.HTTP_404_NOT_FOUND) @@ -61,7 +65,7 @@ def test_user_cannot_view_private_entry_details(admin_client, user_client): def test_user_can_delete_their_entry(user_client): rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - entry_name = rjson['name'] + entry_name = rjson["name"] entry_url = reverse_detail_url(entry_name) # First attempt to delete should return 204 @@ -73,23 +77,22 @@ def test_user_can_delete_their_entry(user_client): validate_response(response, status.HTTP_404_NOT_FOUND) -def test_user_cannot_delete_any_other_entry(admin_client, user_client, - alt_user_client): +def test_user_cannot_delete_any_other_entry(admin_client, user_client, alt_user_client): # alt user schedule entry alt_user_rjson = post_schedule(alt_user_client, TEST_SCHEDULE_ENTRY) - alt_user_entry_name = alt_user_rjson['name'] + alt_user_entry_name = alt_user_rjson["name"] alt_user_entry_url = reverse_detail_url(alt_user_entry_name) - user_delete_alt_user_response = user_client.delete(alt_user_entry_url, - **HTTPS_KWARG) + user_delete_alt_user_response = user_client.delete( + alt_user_entry_url, **HTTPS_KWARG + ) # admin user schedule entry admin_rjson = post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - admin_entry_name = admin_rjson['name'] + admin_entry_name = admin_rjson["name"] admin_entry_url = reverse_detail_url(admin_entry_name) - user_delete_admin_response = user_client.delete(admin_entry_url, - **HTTPS_KWARG) + user_delete_admin_response = user_client.delete(admin_entry_url, **HTTPS_KWARG) validate_response(user_delete_alt_user_response, status.HTTP_403_FORBIDDEN) # Admin's entry is private, hence 404 instead of 403 @@ -98,31 +101,33 @@ def test_user_cannot_delete_any_other_entry(admin_client, user_client, def test_user_can_modify_their_entry(user_client): rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - entry_name = rjson['name'] + entry_name = rjson["name"] - user_adjust_response = update_schedule(user_client, entry_name, - TEST_ALTERNATE_SCHEDULE_ENTRY) + user_adjust_response = update_schedule( + user_client, entry_name, TEST_ALTERNATE_SCHEDULE_ENTRY + ) validate_response(user_adjust_response, status.HTTP_200_OK) - assert rjson['priority'] == 10 - assert user_adjust_response.data['priority'] == 5 + assert rjson["priority"] == 10 + assert user_adjust_response.data["priority"] == 5 -def test_user_cannot_modify_any_other_entry(admin_client, user_client, - alt_user_client): +def test_user_cannot_modify_any_other_entry(admin_client, user_client, alt_user_client): # alt user schedule entry alt_user_rjson = post_schedule(alt_user_client, TEST_SCHEDULE_ENTRY) - alt_user_entry_name = alt_user_rjson['name'] + alt_user_entry_name = alt_user_rjson["name"] user_adjust_alt_user_response = update_schedule( - 
user_client, alt_user_entry_name, TEST_PRIVATE_SCHEDULE_ENTRY) + user_client, alt_user_entry_name, TEST_PRIVATE_SCHEDULE_ENTRY + ) # admin user schedule entry admin_rjson = post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - admin_entry_name = admin_rjson['name'] + admin_entry_name = admin_rjson["name"] - user_adjust_admin_response = update_schedule(user_client, admin_entry_name, - TEST_SCHEDULE_ENTRY) + user_adjust_admin_response = update_schedule( + user_client, admin_entry_name, TEST_SCHEDULE_ENTRY + ) validate_response(user_adjust_alt_user_response, status.HTTP_403_FORBIDDEN) # Admin's entry is private, hence 404 instead of 403 @@ -132,7 +137,7 @@ def test_user_cannot_use_negative_priority(user_client): """Unprivileged users should not be able to use "high" priority.""" hipri = TEST_SCHEDULE_ENTRY.copy() - hipri['priority'] = -20 + hipri["priority"] = -20 with pytest.raises(AssertionError): post_schedule(user_client, hipri) @@ -141,12 +146,12 @@ def test_validate_only_does_not_modify_schedule_with_good_entry(user_client): """A good entry with validate_only should return 204 only.""" # Ensure that a 204 "NO CONTENT" is returned from the validator entry = TEST_SCHEDULE_ENTRY.copy() - entry['validate_only'] = True + entry["validate_only"] = True expected_status = status.HTTP_204_NO_CONTENT post_schedule(user_client, entry, expected_status=expected_status) # Ensure that the entry didn't make it into the schedule - entry_name = entry['name'] + entry_name = entry["name"] url = reverse_detail_url(entry_name) response = user_client.get(url, **HTTPS_KWARG) validate_response(response, status.HTTP_404_NOT_FOUND) @@ -156,12 +161,12 @@ def test_validate_only_does_not_modify_schedule_with_bad_entry(user_client): """A bad entry with validate_only should return 400 only.""" # Ensure that a 400 "BAD REQUEST" is returned from the validator entry = TEST_SCHEDULE_ENTRY.copy() - entry['interval'] = 1.5 # non-integer interval is invalid - entry['validate_only'] = True + entry["interval"] = 1.5 # non-integer interval is invalid + entry["validate_only"] = True expected_status = status.HTTP_400_BAD_REQUEST post_schedule(user_client, entry, expected_status=expected_status) # Ensure that the entry didn't make it into the schedule - url = reverse_detail_url(entry['name']) + url = reverse_detail_url(entry["name"]) response = user_client.get(url, **HTTPS_KWARG) validate_response(response, status.HTTP_404_NOT_FOUND) diff --git a/src/schedule/tests/test_views.py b/src/schedule/tests/test_views.py index 8b6a8615..e1bd9598 100644 --- a/src/schedule/tests/test_views.py +++ b/src/schedule/tests/test_views.py @@ -1,17 +1,21 @@ from rest_framework import status from rest_framework.reverse import reverse -from acquisitions.tests.utils import simulate_acquisitions -from schedule.tests.utils import (EMPTY_SCHEDULE_RESPONSE, TEST_SCHEDULE_ENTRY, - TEST_PRIVATE_SCHEDULE_ENTRY, post_schedule, - reverse_detail_url) +from schedule.tests.utils import ( + EMPTY_SCHEDULE_RESPONSE, + TEST_PRIVATE_SCHEDULE_ENTRY, + TEST_SCHEDULE_ENTRY, + post_schedule, + reverse_detail_url, +) from sensor import V1 -from sensor.tests.utils import validate_response, HTTPS_KWARG +from sensor.tests.utils import HTTPS_KWARG, validate_response +from tasks.tests.utils import simulate_acquisitions def test_entry_posted_to_schedule_is_immediately_available(user_client): rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - entry_name = rjson['name'] + entry_name = rjson["name"] entry_url
= reverse_detail_url(entry_name) user_response = user_client.get(entry_url, **HTTPS_KWARG) @@ -24,9 +28,9 @@ def test_entry_posted_to_schedule_is_immediately_available(user_client): def test_post_unknown_field_to_schedule(user_client): """Unknown fields in a schedule entry should be ignored.""" entry_json = TEST_SCHEDULE_ENTRY.copy() - entry_json['nonsense'] = True + entry_json["nonsense"] = True rjson = post_schedule(user_client, entry_json) - entry_name = rjson['name'] + entry_name = rjson["name"] entry_url = reverse_detail_url(entry_name) response = user_client.get(entry_url, **HTTPS_KWARG) validate_response(response, status.HTTP_200_OK) @@ -34,13 +38,13 @@ def test_post_unknown_field_to_schedule(user_client): for k, v in TEST_SCHEDULE_ENTRY.items(): assert rjson[k] == v - assert 'nonsense' not in rjson - assert 'nonsense' not in response.data + assert "nonsense" not in rjson + assert "nonsense" not in response.data def test_private_schedule_entry_is_private(admin_client, user_client): rjson = post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - entry_name = rjson['name'] + entry_name = rjson["name"] entry_url = reverse_detail_url(entry_name) user_response = user_client.get(entry_url, **HTTPS_KWARG) admin_user_response = admin_client.get(entry_url, **HTTPS_KWARG) @@ -50,22 +54,22 @@ def test_private_schedule_entry_is_private(admin_client, user_client): def test_get_schedule(user_client): - url = reverse('schedule-list', kwargs=V1) + url = reverse("schedule-list", kwargs=V1) rjson = validate_response(user_client.get(url, **HTTPS_KWARG)) - assert rjson['results'] == EMPTY_SCHEDULE_RESPONSE + assert rjson["results"] == EMPTY_SCHEDULE_RESPONSE post_schedule(user_client, TEST_SCHEDULE_ENTRY) rjson = validate_response(user_client.get(url, **HTTPS_KWARG)) - assert rjson['count'] == 1 + assert rjson["count"] == 1 - expected_name = TEST_SCHEDULE_ENTRY['name'] - actual_name = rjson['results'][0]['name'] + expected_name = TEST_SCHEDULE_ENTRY["name"] + actual_name = rjson["results"][0]["name"] assert expected_name == actual_name def test_get_nonexistent_entry_details_returns_404(user_client): """Requesting details of non-existent entry should return 404.""" - url = reverse_detail_url('doesntexist') + url = reverse_detail_url("doesntexist") response = user_client.get(url, **HTTPS_KWARG) validate_response(response, status.HTTP_404_NOT_FOUND) @@ -73,7 +77,7 @@ def test_get_nonexistent_entry_details_returns_404(user_client): def test_get_existing_entry_details_returns_200(user_client): """Requesting details of existing entry should return 200.""" rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - entry_name = rjson['name'] + entry_name = rjson["name"] url = reverse_detail_url(entry_name) response = user_client.get(url, **HTTPS_KWARG) validate_response(response, status.HTTP_200_OK) @@ -87,7 +91,7 @@ def test_delete_entry_with_acquisitions_fails(user_client, test_scheduler): rjson = validate_response(response, status.HTTP_400_BAD_REQUEST) expected_status = status.HTTP_204_NO_CONTENT - for acq_url in rjson['protected_objects']: + for acq_url in rjson["protected_objects"]: response = user_client.delete(acq_url, **HTTPS_KWARG) validate_response(response, expected_status) diff --git a/src/schedule/tests/utils.py b/src/schedule/tests/utils.py index d033ba9e..fc6834e2 100644 --- a/src/schedule/tests/utils.py +++ b/src/schedule/tests/utils.py @@ -8,31 +8,31 @@ EMPTY_SCHEDULE_RESPONSE = [] -TEST_SCHEDULE_ENTRY = {'name': 'test', 'action': 'logger', 'is_private': False} +TEST_SCHEDULE_ENTRY = 
{"name": "test", "action": "logger", "is_private": False} TEST_ALTERNATE_SCHEDULE_ENTRY = { - 'name': 'test_alternate', - 'action': 'logger', - 'is_private': False, - 'priority': 5 + "name": "test_alternate", + "action": "logger", + "is_private": False, + "priority": 5, } TEST_PRIVATE_SCHEDULE_ENTRY = { - 'name': 'test_private', - 'action': 'logger', - 'is_private': True + "name": "test_private", + "action": "logger", + "is_private": True, } def post_schedule(client, entry, expected_status=status.HTTP_201_CREATED): kwargs = { - 'data': json.dumps(entry), - 'content_type': 'application/json', - 'secure': True, - 'wsgi.url_scheme': 'https' + "data": json.dumps(entry), + "content_type": "application/json", + "secure": True, + "wsgi.url_scheme": "https", } - url = reverse('schedule-list', kwargs=V1) + url = reverse("schedule-list", kwargs=V1) r = client.post(url, **kwargs) err = "Got status {}, expected {}".format(r.status_code, expected_status) @@ -48,19 +48,19 @@ def update_schedule(client, entry_name, new_entry): url = reverse_detail_url(entry_name) kwargs = { - 'data': json.dumps(new_entry), - 'content_type': 'application/json', - 'secure': True, - 'wsgi.url_scheme': 'https' + "data": json.dumps(new_entry), + "content_type": "application/json", + "secure": True, + "wsgi.url_scheme": "https", } return client.put(url, **kwargs) def reverse_detail_url(entry_name): - kws = {'pk': entry_name} + kws = {"pk": entry_name} kws.update(V1) - url = reverse('schedule-detail', kwargs=kws) + url = reverse("schedule-detail", kwargs=kws) return url diff --git a/src/schedule/urls.py b/src/schedule/urls.py index 5467cad1..d4153d14 100644 --- a/src/schedule/urls.py +++ b/src/schedule/urls.py @@ -3,6 +3,6 @@ from .views import ScheduleEntryViewSet router = SimpleRouter() -router.register('', ScheduleEntryViewSet, basename='schedule') +router.register("", ScheduleEntryViewSet, basename="schedule") urlpatterns = router.urls diff --git a/src/schedule/views.py b/src/schedule/views.py index 45de8fa4..447376f4 100644 --- a/src/schedule/views.py +++ b/src/schedule/views.py @@ -1,11 +1,11 @@ -from rest_framework import status, filters +from rest_framework import filters, status from rest_framework.response import Response from rest_framework.settings import api_settings from rest_framework.viewsets import ModelViewSet -from .models import ScheduleEntry, Request +from .models import Request, ScheduleEntry from .permissions import IsAdminOrOwnerOrReadOnly -from .serializers import ScheduleEntrySerializer, AdminScheduleEntrySerializer +from .serializers import AdminScheduleEntrySerializer, ScheduleEntrySerializer class ScheduleEntryViewSet(ModelViewSet): @@ -30,15 +30,15 @@ class ScheduleEntryViewSet(ModelViewSet): Deletes the specified schedule entry. 
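Note: if `validate_only` is true, the posted entry is validated but never saved; the `create` override below answers with 204 NO CONTENT in that case.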
""" + queryset = ScheduleEntry.objects.all() permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [ - IsAdminOrOwnerOrReadOnly, + IsAdminOrOwnerOrReadOnly ] filter_backends = (filters.SearchFilter, filters.OrderingFilter) - lookup_fields = ('schedule_entry__name', 'task_id') - ordering_fields = ('priority', 'start', 'next_task_time', 'created', - 'modified') - search_fields = ('name', 'action') + lookup_fields = ("schedule_entry__name", "task_id") + ordering_fields = ("priority", "start", "next_task_time", "created", "modified") + search_fields = ("name", "action") def create(self, request, *args, **kwargs): """Return NO CONTENT when input is valid but validate_only is True.""" @@ -47,7 +47,7 @@ def create(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) - if serializer.validated_data.get('validate_only'): + if serializer.validated_data.get("validate_only"): return Response(status=status.HTTP_204_NO_CONTENT) self.perform_create(serializer) @@ -73,7 +73,7 @@ def get_queryset(self): def get_serializer_class(self): """Modify the base serializer based on user and request.""" - updating = self.action in {'update', 'partial_update'} + updating = self.action in {"update", "partial_update"} if self.request.user.is_staff: SerializerBaseClass = AdminScheduleEntrySerializer @@ -83,9 +83,9 @@ def get_serializer_class(self): ro_fields = SerializerBaseClass.Meta.read_only_fields if updating: - ro_fields += ('name', 'action') + ro_fields += ("name", "action") else: - ro_fields += ('is_active', ) + ro_fields += ("is_active",) class SerializerClass(SerializerBaseClass): class Meta(SerializerBaseClass.Meta): diff --git a/src/scheduler/apps.py b/src/scheduler/apps.py index 60fb286a..d30957b6 100644 --- a/src/scheduler/apps.py +++ b/src/scheduler/apps.py @@ -2,4 +2,4 @@ class SchedulerConfig(AppConfig): - name = 'scheduler' + name = "scheduler" diff --git a/src/scheduler/scheduler.py b/src/scheduler/scheduler.py index d49995dd..f5dc49ba 100644 --- a/src/scheduler/scheduler.py +++ b/src/scheduler/scheduler.py @@ -8,13 +8,14 @@ from django.utils import timezone from requests_futures.sessions import FuturesSession -from results.consts import MAX_DETAIL_LEN -from results.models import TaskResult -from results.serializers import TaskResultSerializer from schedule.models import ScheduleEntry from sensor import settings +from tasks.consts import MAX_DETAIL_LEN +from tasks.models import TaskResult +from tasks.serializers import TaskResultSerializer +from tasks.task_queue import TaskQueue + from . 
import utils -from .tasks import TaskQueue logger = logging.getLogger(__name__) requests_futures_session = FuturesSession() @@ -34,10 +35,15 @@ def __init__(self): # scheduler looks ahead `interval_multiplier` times the shortest # interval in the schedule in order to keep memory-usage low self.interval_multiplier = 10 - self.name = 'Scheduler' + self.name = "Scheduler" self.running = False self.interrupt_flag = threading.Event() + # Cache the currently running task state + self.entry = None # ScheduleEntry that created the current task + self.task = None # Task object describing current task + self.task_result = None # TaskResult object for current task + @property def schedule(self): """An updated view of the current schedule""" @@ -52,7 +58,7 @@ def schedule_has_entries(self): def cancel(entry): """Remove an entry from the scheduler without deleting it.""" entry.is_active = False - entry.save(update_fields=('is_active', )) + entry.save(update_fields=("is_active",)) def stop(self): """Complete the current task, then return control.""" @@ -113,50 +119,53 @@ def _queue_tasks(self, schedule_snapshot): def _consume_task_queue(self, pending_task_queue): for task in pending_task_queue.to_list(): - result, started, finished, detail = self._call_task_action(task) - self._save_task_result(task, started, finished, result, detail) - - def _call_task_action(self, task): - entry_name = task.schedule_entry_name - task_id = task.task_id - started = timezone.now() + entry_name = task.schedule_entry_name + self.task = task + self.entry = ScheduleEntry.objects.get(name=entry_name) + self._initialize_task_result() + started = timezone.now() + status, detail = self._call_task_action() + finished = timezone.now() + self._finalize_task_result(started, finished, status, detail) + + def _initialize_task_result(self): + """Initalize an 'in-progress' result so it exists when action runs.""" + tid = self.task.task_id + self.task_result = TaskResult(schedule_entry=self.entry, task_id=tid) + self.task_result.save() + + def _call_task_action(self): + entry_name = self.task.schedule_entry_name + task_id = self.task.task_id try: logger.debug("running task {}/{}".format(entry_name, task_id)) - detail = task.action_fn(entry_name, task_id) + detail = self.task.action_fn(entry_name, task_id) self.delayfn(0) # let other threads run - result = 'success' + status = "success" if not isinstance(detail, str): detail = "" except Exception as err: detail = str(err) logger.exception("action failed: {}".format(detail)) - result = 'failure' - - finished = timezone.now() - - return result, started, finished, detail[:MAX_DETAIL_LEN] + status = "failure" - def _save_task_result(self, task, started, finished, result, detail): - entry_name = task.schedule_entry_name - entry = ScheduleEntry.objects.get(name=entry_name) - task_id = task.task_id + return status, detail[:MAX_DETAIL_LEN] - tr = TaskResult( - schedule_entry=entry, - task_id=task_id, - started=started, - finished=finished, - duration=(finished - started), - result=result, - detail=detail) + def _finalize_task_result(self, started, finished, status, detail): + tr = self.task_result + tr.started = started + tr.finished = finished + tr.duration = finished - started + tr.status = status + tr.detail = detail tr.save() - if entry.callback_url: - context = {'request': entry.request} + if self.entry.callback_url: + context = {"request": self.entry.request} result_json = TaskResultSerializer(tr, context=context).data requests_futures_session.post( - entry.callback_url, + 
self.entry.callback_url, json=result_json, background_callback=self._callback_response_handler, ) @@ -178,7 +187,7 @@ def _queue_pending_tasks(self, schedule_snapshot): continue task_id = entry.get_next_task_id() - entry.save(update_fields=('next_task_id', )) + entry.save(update_fields=("next_task_id",)) pri = entry.priority action = entry.action pending_queue.enter(task_time, pri, action, entry.name, task_id) @@ -187,7 +196,7 @@ def _queue_pending_tasks(self, schedule_snapshot): def _take_pending_task_time(self, entry): task_times = entry.take_pending() - entry.save(update_fields=('next_task_time', 'is_active')) + entry.save(update_fields=("next_task_time", "is_active")) if not task_times: return None @@ -240,12 +249,12 @@ def _cancel_if_completed(self, entry): @property def status(self): if self.is_alive(): - return 'running' if self.running else 'idle' - return 'dead' + return "running" if self.running else "idle" + return "dead" def __repr__(self): - s = 'running' if self.running else 'stopped' - return '<{} status={}>'.format(self.__class__.__name__, s) + s = "running" if self.running else "stopped" + return "<{} status={}>".format(self.__class__.__name__, s) @contextmanager diff --git a/src/scheduler/tasks/__init__.py b/src/scheduler/tasks/__init__.py deleted file mode 100644 index d3b5e56d..00000000 --- a/src/scheduler/tasks/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# flake8: noqa F401 - imported but unused - -from .task import Task -from .task_queue import TaskQueue diff --git a/src/scheduler/tests/test_scheduler.py b/src/scheduler/tests/test_scheduler.py index cc7ed792..98fcd183 100644 --- a/src/scheduler/tests/test_scheduler.py +++ b/src/scheduler/tests/test_scheduler.py @@ -1,18 +1,24 @@ -import time import threading +import time import pytest import requests_mock from scheduler.scheduler import Scheduler, minimum_duration -from .utils import (BAD_ACTION_STR, advance_testclock, create_action, - create_bad_action, create_entry) + +from .utils import ( + BAD_ACTION_STR, + advance_testclock, + create_action, + create_bad_action, + create_entry, +) @pytest.mark.django_db def test_populate_queue(test_scheduler): """An entry in the schedule should be added to a read-only task queue.""" - create_entry('test', 1, 0, 5, 1, 'logger') + create_entry("test", 1, 0, 5, 1, "logger") s = test_scheduler s.run(blocking=False) # now=0, so task with time 0 is run assert [e.time for e in s.task_queue] == [1, 2, 3, 4] @@ -23,8 +29,8 @@ def test_priority(test_scheduler): """A task with lower priority number should sort higher in task queue.""" lopri = 20 hipri = 10 - create_entry('lopri', lopri, 0, 5, 1, 'logger') - create_entry('hipri', hipri, 0, 5, 1, 'logger') + create_entry("lopri", lopri, 0, 5, 1, "logger") + create_entry("hipri", hipri, 0, 5, 1, "logger") s = test_scheduler s.run(blocking=False) q = s.task_queue.to_list() @@ -36,7 +42,7 @@ def test_priority(test_scheduler): @pytest.mark.django_db def test_future_start(test_scheduler): """An entry with start time in future should remain in schedule.""" - create_entry('t', 1, 50, 100, 1, 'logger') + create_entry("t", 1, 50, 100, 1, "logger") test_scheduler.run(blocking=False) s = test_scheduler assert len(s.task_queue) == 0 @@ -51,7 +57,7 @@ def test_calls_actions(test_scheduler): test_actions = dict(create_action() for _ in range(3)) for i, cb in enumerate(test_actions): - create_entry('test' + str(i), 1, 0, 3, 1, cb.__name__) + create_entry("test" + str(i), 1, 0, 3, 1, cb.__name__) s = test_scheduler s.run(blocking=False) @@ -66,11 +72,11 @@ 
def test_calls_actions(test_scheduler): @pytest.mark.django_db def test_add_entry(test_scheduler): """Creating a new entry instance adds it to the current schedule.""" - create_entry('t1', 10, 1, 100, 5, 'logger') + create_entry("t1", 10, 1, 100, 5, "logger") s = test_scheduler s.run(blocking=False) advance_testclock(s.timefn, 49) - create_entry('t2', 20, 50, 300, 5, 'logger') + create_entry("t2", 20, 50, 300, 5, "logger") s.run(blocking=False) assert len(s.task_queue) == 20 assert s.task_queue[0].priority == 20 @@ -79,8 +85,8 @@ def test_add_entry(test_scheduler): @pytest.mark.django_db def test_remove_entry_by_delete(test_scheduler): """An entry is removed from schedule if it's deleted.""" - e1 = create_entry('t1', 10, 1, 300, 5, 'logger') - e2 = create_entry('t2', 20, 50, 300, 5, 'logger') + e1 = create_entry("t1", 10, 1, 300, 5, "logger") + e2 = create_entry("t2", 20, 50, 300, 5, "logger") s = test_scheduler s.run(blocking=False) advance_testclock(s.timefn, 10) @@ -93,8 +99,8 @@ def test_remove_entry_by_delete(test_scheduler): @pytest.mark.django_db def test_remove_entry_by_cancel(test_scheduler): """scheduler.cancel removes an entry from schedule without deleting it.""" - e1 = create_entry('t1', 10, 1, 300, 5, 'logger') - e2 = create_entry('t2', 20, 50, 300, 5, 'logger') + e1 = create_entry("t1", 10, 1, 300, 5, "logger") + e2 = create_entry("t2", 20, 50, 300, 5, "logger") s = test_scheduler s.run(blocking=False) advance_testclock(s.timefn, 10) @@ -107,7 +113,7 @@ def test_remove_entry_by_cancel(test_scheduler): @pytest.mark.django_db def test_start_stop(test_scheduler): """Calling stop on started scheduler thread should cause thread exit.""" - create_entry('t', 1, 1, 100, 5, 'logger') + create_entry("t", 1, 1, 100, 5, "logger") s = test_scheduler s.start() time.sleep(0.02) # hit minimum_duration @@ -121,7 +127,7 @@ def test_start_stop(test_scheduler): @pytest.mark.django_db def test_run_completes(test_scheduler): """The scheduler should return to idle state after schedule completes.""" - create_entry('t', 1, None, None, None, 'logger') + create_entry("t", 1, None, None, None, "logger") s = test_scheduler s.start() time.sleep(0.1) # hit minimum_duration @@ -136,10 +142,10 @@ def test_run_completes(test_scheduler): def test_survives_failed_action(test_scheduler): """An action throwing an exception should be survivable.""" cb1 = create_bad_action() - create_entry('t1', 10, None, None, None, cb1.__name__) + create_entry("t1", 10, None, None, None, cb1.__name__) cb2, flag = create_action() # less priority to force run after bad_entry fails - create_entry('t2', 20, None, None, None, cb2.__name__) + create_entry("t2", 20, None, None, None, cb2.__name__) s = test_scheduler advance_testclock(s.timefn, 1) assert not flag.is_set() @@ -150,7 +156,7 @@ def test_survives_failed_action(test_scheduler): @pytest.mark.django_db def test_compress_past_times(test_scheduler): """Multiple task times in the past should be compressed to one.""" - create_entry('t', 1, -10, 5, 1, 'logger') + create_entry("t", 1, -10, 5, 1, "logger") s = test_scheduler s.run(blocking=False) # past times -10 through 0 are compressed and a single task is run, @@ -161,7 +167,7 @@ def test_compress_past_times(test_scheduler): @pytest.mark.django_db def test_compress_past_times_offset(test_scheduler): """Multiple task times in the past should be compressed to one.""" - create_entry('t', 1, -2, 14, 4, 'logger') + create_entry("t", 1, -2, 14, 4, "logger") s = test_scheduler s.run(blocking=False) # past time -2 is run, then 2, 6, and 
10 are queued @@ -173,7 +179,7 @@ def test_compress_past_times_offset(test_scheduler): @pytest.mark.django_db def test_next_task_time_value_when_start_changes(test_scheduler): """When an entry's start value changes, update `next_task_time`.""" - entry = create_entry('t', 1, 1, 10, 1, 'logger') + entry = create_entry("t", 1, 1, 10, 1, "logger") s = test_scheduler s.run(blocking=False) assert entry.next_task_time == 1 @@ -209,7 +215,7 @@ def test_next_task_time_value_when_start_changes(test_scheduler): @pytest.mark.django_db def test_next_task_time_value_when_interval_changes(test_scheduler): """When an entry's interval value changes, update `next_task_time`.""" - entry = create_entry('t', 1, 1, 100, 1, 'logger') + entry = create_entry("t", 1, 1, 100, 1, "logger") s = test_scheduler s.run(blocking=False) assert entry.next_task_time == 1 @@ -237,7 +243,7 @@ def test_next_task_time_value_when_interval_changes(test_scheduler): @pytest.mark.django_db def test_one_shot(test_scheduler): """If no start or interval given, entry should be run once and removed.""" - create_entry('t', 1, None, None, None, 'logger') + create_entry("t", 1, None, None, None, "logger") s = test_scheduler advance_testclock(s.timefn, 1) s.run(blocking=False) @@ -248,7 +254,7 @@ def test_one_shot(test_scheduler): @pytest.mark.django_db def test_task_queue(test_scheduler): """The scheduler should maintain a queue of upcoming tasks.""" - e = create_entry('t', 1, 1, 100, 5, 'logger') + e = create_entry("t", 1, 1, 100, 5, "logger") s = test_scheduler # upcoming tasks are queued @@ -273,7 +279,7 @@ def test_task_queue(test_scheduler): @pytest.mark.django_db def test_clearing_schedule_clears_task_queue(test_scheduler): """The scheduler should empty task_queue when schedule is deleted.""" - create_entry('t', 1, 1, 100, 5, 'logger') + create_entry("t", 1, 1, 100, 5, "logger") s = test_scheduler s.run(blocking=False) # queue first 10 tasks assert len(s.task_queue) == 10 @@ -314,7 +320,7 @@ def cb_request_handler(sess, resp): cb_flag.set() cb = create_bad_action() - create_entry('t', 10, None, None, None, cb.__name__, 'mock://cburl') + create_entry("t", 10, None, None, None, cb.__name__, "mock://cburl") s = test_scheduler advance_testclock(s.timefn, 1) s._callback_response_handler = cb_request_handler @@ -323,19 +329,19 @@ def cb_request_handler(sess, resp): request_json = None with requests_mock.Mocker() as m: - m.post('mock://cburl') # register url for posting + m.post("mock://cburl") # register url for posting s.run(blocking=False) time.sleep(0.1) # let requests thread run request_json = m.request_history[0].json() assert cb_flag.is_set() - assert request_json['result'] == 'failure' - assert request_json['task_id'] == 1 - assert request_json['self'] - assert request_json['detail'] == BAD_ACTION_STR - assert request_json['started'] - assert request_json['finished'] - assert request_json['duration'] + assert request_json["status"] == "failure" + assert request_json["task_id"] == 1 + assert request_json["self"] + assert request_json["detail"] == BAD_ACTION_STR + assert request_json["started"] + assert request_json["finished"] + assert request_json["duration"] @pytest.mark.django_db @@ -348,7 +354,7 @@ def cb_request_handler(sess, resp): cb, action_flag = create_action() # less priority to force run after bad_entry fails - create_entry('t', 20, None, None, None, cb.__name__, 'mock://cburl') + create_entry("t", 20, None, None, None, cb.__name__, "mock://cburl") s = test_scheduler advance_testclock(s.timefn, 1) 
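The assertions in the surrounding callback tests pin down the contract introduced by the src/scheduler/scheduler.py refactor above: a TaskResult is saved in an in-progress state before the action runs, finalized with started/finished/duration, and, when the entry has a callback_url, POSTed as JSON whose former "result" key is now named "status". A minimal stdlib-only sketch of a receiver for that payload; the host, port, and printed fields are illustrative assumptions, not project code:

```python
import json
from http.server import BaseHTTPRequestHandler, HTTPServer

class CallbackHandler(BaseHTTPRequestHandler):
    """Accepts the scheduler's task-result callback POST (sketch)."""

    def do_POST(self):
        length = int(self.headers.get("Content-Length", 0))
        payload = json.loads(self.rfile.read(length))
        # "status" replaces the old "result" key; the other keys mirror
        # the test assertions: task_id, self, detail, started, finished,
        # duration.
        print(payload["status"], payload["task_id"], payload.get("detail"))
        self.send_response(200)
        self.end_headers()

if __name__ == "__main__":
    # Port choice is arbitrary for this sketch.
    HTTPServer(("127.0.0.1", 8000), CallbackHandler).serve_forever()
```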
s._callback_response_handler = cb_request_handler @@ -357,19 +363,19 @@ def cb_request_handler(sess, resp): request_json = None with requests_mock.Mocker() as m: - m.post('mock://cburl') # register mock url for posting + m.post("mock://cburl") # register mock url for posting s.run(blocking=False) time.sleep(0.1) # let requests thread run request_json = m.request_history[0].json() assert cb_flag.is_set() assert action_flag.is_set() - assert request_json['result'] == 'success' - assert request_json['task_id'] == 1 - assert request_json['self'] - assert request_json['started'] - assert request_json['finished'] - assert request_json['duration'] + assert request_json["status"] == "success" + assert request_json["task_id"] == 1 + assert request_json["self"] + assert request_json["started"] + assert request_json["finished"] + assert request_json["duration"] @pytest.mark.django_db @@ -377,10 +383,10 @@ def test_starvation(test_scheduler): """A recurring high-pri task should not 'starve' a low-pri task.""" # higher-pri recurring task that takes 3 ticks to complete enters at t=0 cb0, flag0 = create_action() - create_entry('t0', 10, None, None, 3, cb0.__name__) + create_entry("t0", 10, None, None, 3, cb0.__name__) # lower-pri task enters at t=2 cb1, flag1 = create_action() - create_entry('t1', 20, 2, None, None, cb1.__name__) + create_entry("t1", 20, 2, None, None, cb1.__name__) s = test_scheduler s.run(blocking=False) assert not flag1.is_set() @@ -399,7 +405,7 @@ def test_task_pushed_past_stop_still_runs(test_scheduler): """A task pushed past `stop` by a long running task should still run.""" # create an entry that runs at time 1 and 2 cb0, flag0 = create_action() - create_entry('t0', 10, 1, 3, 1, cb0.__name__) + create_entry("t0", 10, 1, 3, 1, cb0.__name__) s = test_scheduler s.run(blocking=False) diff --git a/src/scheduler/tests/utils.py b/src/scheduler/tests/utils.py index 94ed5dcf..55045031 100644 --- a/src/scheduler/tests/utils.py +++ b/src/scheduler/tests/utils.py @@ -62,19 +62,19 @@ def simulate_scheduler_run(n=1): def create_entry(name, priority, start, stop, interval, action, cb_url=None): kwargs = { - 'name': name, - 'priority': priority, - 'stop': stop, - 'interval': interval, - 'action': action, - 'owner': User.objects.get_or_create(username='test')[0], + "name": name, + "priority": priority, + "stop": stop, + "interval": interval, + "action": action, + "owner": User.objects.get_or_create(username="test")[0], } if start is not None: - kwargs['start'] = start + kwargs["start"] = start if cb_url is not None: - kwargs['callback_url'] = cb_url + kwargs["callback_url"] = cb_url return ScheduleEntry.objects.create(**kwargs) @@ -94,9 +94,10 @@ def cb(entry, task_id): flag.set() return "set flag" - cb.__name__ = 'testcb' + str(create_action.counter) + cb.__name__ = "testcb" + str(create_action.counter) actions.by_name[cb.__name__] = cb create_action.counter += 1 + return cb, flag @@ -107,7 +108,7 @@ def create_bad_action(): def bad_action(entry, task_id): raise Exception(BAD_ACTION_STR) - actions.by_name['bad_action'] = bad_action + actions.by_name["bad_action"] = bad_action return bad_action diff --git a/src/sensor/__init__.py b/src/sensor/__init__.py index de3f9da8..2e233ca1 100644 --- a/src/sensor/__init__.py +++ b/src/sensor/__init__.py @@ -1,3 +1,3 @@ # API versions -V1 = {'version': 'v1'} +V1 = {"version": "v1"} diff --git a/src/sensor/apps.py b/src/sensor/apps.py index c9eb2ec7..37eff209 100644 --- a/src/sensor/apps.py +++ b/src/sensor/apps.py @@ -2,4 +2,4 @@ class 
SensorConfig(AppConfig): - name = 'sensor' + name = "sensor" diff --git a/src/sensor/exceptions.py b/src/sensor/exceptions.py index c05699d1..9e35e7c9 100644 --- a/src/sensor/exceptions.py +++ b/src/sensor/exceptions.py @@ -1,7 +1,5 @@ """Provides custom exception handing.""" -from __future__ import absolute_import - import logging from django import db @@ -24,10 +22,7 @@ def exception_handler(exc, context): if isinstance(exc, ProtectedError): response = handle_protected_error(exc, context) elif isinstance(exc, db.IntegrityError): - response = Response({ - 'detail': str(exc) - }, - status=status.HTTP_409_CONFLICT) + response = Response({"detail": str(exc)}, status=status.HTTP_409_CONFLICT) else: logger.exception("Caught unhandled exception", exc_info=exc) @@ -35,30 +30,31 @@ def exception_handler(exc, context): def handle_protected_error(exc, context): - if 'name' in context['kwargs']: - entry_name = context['kwargs']['name'] + if "name" in context["kwargs"]: + entry_name = context["kwargs"]["name"] else: - entry_name = context['kwargs']['pk'] + entry_name = context["kwargs"]["pk"] - request = context['request'] + request = context["request"] + view_name = "task-result-detail" protected_object_urls = [] for protected_object in exc.protected_objects: task_id = protected_object.task_id - url_kwargs = {'schedule_entry_name': entry_name, 'task_id': task_id} + url_kwargs = {"schedule_entry_name": entry_name, "task_id": task_id} url_kwargs.update(V1) - view_name = 'acquisition-detail' url = reverse(view_name, kwargs=url_kwargs, request=request) protected_object_urls.append(url) - response = Response({ - 'detail': - ("Cannot delete schedule entry {!r} because acquisitions on disk " - "reference it. Delete the protected acquisitions first." - ).format(entry_name), - 'protected_objects': - protected_object_urls - }, - status=status.HTTP_400_BAD_REQUEST) + response = Response( + { + "detail": ( + "Cannot delete schedule entry {!r} because results on disk " + "reference it. Delete the protected results first." 
+ ).format(entry_name), + "protected_objects": protected_object_urls, + }, + status=status.HTTP_400_BAD_REQUEST, + ) return response diff --git a/src/sensor/settings.py b/src/sensor/settings.py index 4a6f63a3..262f075f 100644 --- a/src/sensor/settings.py +++ b/src/sensor/settings.py @@ -20,47 +20,47 @@ BASE_DIR = path.dirname(path.dirname(path.abspath(__file__))) REPO_ROOT = path.dirname(BASE_DIR) -FQDN = environ.get('FQDN', 'fqdn.unset') +FQDN = environ.get("FQDN", "fqdn.unset") -DOCKER_TAG = environ.get('DOCKER_TAG') -GIT_BRANCH = environ.get('GIT_BRANCH') -if not DOCKER_TAG or DOCKER_TAG == 'latest': +DOCKER_TAG = environ.get("DOCKER_TAG") +GIT_BRANCH = environ.get("GIT_BRANCH") +if not DOCKER_TAG or DOCKER_TAG == "latest": VERSION_STRING = GIT_BRANCH else: VERSION_STRING = DOCKER_TAG - if VERSION_STRING.startswith('v'): + if VERSION_STRING.startswith("v"): VERSION_STRING = VERSION_STRING[1:] -STATIC_ROOT = path.join(BASE_DIR, 'static') -STATIC_URL = '/static/' +STATIC_ROOT = path.join(BASE_DIR, "static") +STATIC_URL = "/static/" STATICFILES_DIRS = ( - ('js', path.join(STATIC_ROOT, 'js')), - ('css', path.join(STATIC_ROOT, 'css')), - ('images', path.join(STATIC_ROOT, 'images')), - ('fonts', path.join(STATIC_ROOT, 'fonts')), + ("js", path.join(STATIC_ROOT, "js")), + ("css", path.join(STATIC_ROOT, "css")), + ("images", path.join(STATIC_ROOT, "images")), + ("fonts", path.join(STATIC_ROOT, "fonts")), ) __cmd = path.split(sys.argv[0])[-1] -IN_DOCKER = bool(environ.get('IN_DOCKER')) -RUNNING_TESTS = 'test' in __cmd -RUNNING_DEMO = bool(environ.get('DEMO')) -MOCK_RADIO = bool(environ.get('MOCK_RADIO')) -MOCK_RADIO_RANDOM = bool(environ.get('MOCK_RADIO_RANDOM')) +IN_DOCKER = bool(environ.get("IN_DOCKER")) +RUNNING_TESTS = "test" in __cmd +RUNNING_DEMO = bool(environ.get("DEMO")) +MOCK_RADIO = bool(environ.get("MOCK_RADIO")) or RUNNING_DEMO or RUNNING_TESTS +MOCK_RADIO_RANDOM = bool(environ.get("MOCK_RADIO_RANDOM")) # Healthchecks - the existance of any of these indicates an unhealthy state -SDR_HEALTHCHECK_FILE = path.join(REPO_ROOT, 'sdr_unhealthy') -SCHEDULER_HEALTHCHECK_FILE = path.join(REPO_ROOT, 'scheduler_dead') +SDR_HEALTHCHECK_FILE = path.join(REPO_ROOT, "sdr_unhealthy") +SCHEDULER_HEALTHCHECK_FILE = path.join(REPO_ROOT, "scheduler_dead") LICENSE_URL = "https://github.com/NTIA/scos-sensor/blob/master/LICENSE.md" -OPENAPI_FILE = path.join(REPO_ROOT, 'docs', 'openapi.json') +OPENAPI_FILE = path.join(REPO_ROOT, "docs", "openapi.json") -CONFIG_DIR = path.join(REPO_ROOT, 'configs') +CONFIG_DIR = path.join(REPO_ROOT, "configs") # JSON configs -SCALE_FACTORS_FILE = path.join(CONFIG_DIR, 'scale_factors.json') -SENSOR_DEFINITION_FILE = path.join(CONFIG_DIR, 'sensor_definition.json') -ACTION_DEFINITIONS_DIR = path.join(CONFIG_DIR, 'actions') +SCALE_FACTORS_FILE = path.join(CONFIG_DIR, "scale_factors.json") +SENSOR_DEFINITION_FILE = path.join(CONFIG_DIR, "sensor_definition.json") +ACTION_DEFINITIONS_DIR = path.join(CONFIG_DIR, "actions") # Cleanup any existing healtcheck files try: @@ -69,7 +69,7 @@ pass # As defined in SigMF -DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ' +DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ" # https://docs.djangoproject.com/en/2.2/ref/settings/#internal-ips If # IN_DOCKER, the IP address that needs to go here to enable the debugging @@ -77,19 +77,19 @@ # possible to extract the correct address from an incoming request, so if # IN_DOCKER and DEBUG=true, then the `api_v1_root` view will insert the correct # IP when the first request comes in. 
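The sentinel-file healthcheck convention declared earlier in this settings hunk (SDR_HEALTHCHECK_FILE, SCHEDULER_HEALTHCHECK_FILE) treats the mere existence of a marker file as "unhealthy", and the settings module cleans up leftovers at startup. A short sketch of how a supervisor or container HEALTHCHECK could consume those markers; the check_health helper and repo_root default are illustrative, not project API:

```python
import os
import sys

# Names mirror settings.SDR_HEALTHCHECK_FILE / SCHEDULER_HEALTHCHECK_FILE.
SENTINELS = ("sdr_unhealthy", "scheduler_dead")

def check_health(repo_root="."):
    """Return the sentinel files present; any hit means unhealthy."""
    return [s for s in SENTINELS if os.path.exists(os.path.join(repo_root, s))]

if __name__ == "__main__":
    # Exit non-zero when any unhealthy marker exists, suitable for a
    # container HEALTHCHECK command.
    sys.exit(1 if check_health() else 0)
```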
-INTERNAL_IPS = ['127.0.0.1'] +INTERNAL_IPS = ["127.0.0.1"] # See /env.template if not IN_DOCKER or RUNNING_TESTS: - SECRET_KEY = '!j1&*$wnrkrtc-74cc7_^#n6r3om$6s#!fy=zkd_xp(gkikl+8' + SECRET_KEY = "!j1&*$wnrkrtc-74cc7_^#n6r3om$6s#!fy=zkd_xp(gkikl+8" DEBUG = True ALLOWED_HOSTS = [] else: - SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') - SECRET_KEY = environ['SECRET_KEY'] - DEBUG = environ['DEBUG'] == "true" - ALLOWED_HOSTS = environ['DOMAINS'].split() + environ['IPS'].split() - POSTGRES_PASSWORD = environ['POSTGRES_PASSWORD'] + SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") + SECRET_KEY = environ["SECRET_KEY"] + DEBUG = environ["DEBUG"] == "true" + ALLOWED_HOSTS = environ["DOMAINS"].split() + environ["IPS"].split() + POSTGRES_PASSWORD = environ["POSTGRES_PASSWORD"] SESSION_COOKIE_SECURE = IN_DOCKER CSRF_COOKIE_SECURE = IN_DOCKER @@ -141,118 +141,113 @@ """ INSTALLED_APPS = [ - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'django_extensions', - 'django_filters', - 'rest_framework', - 'rest_framework.authtoken', - 'drf_yasg', # OpenAPI generator - 'raven.contrib.django.raven_compat', - 'debug_toolbar', + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "django_extensions", + "django_filters", + "rest_framework", + "rest_framework.authtoken", + "drf_yasg", # OpenAPI generator + "raven.contrib.django.raven_compat", + "debug_toolbar", # project-local apps - 'acquisitions.apps.AcquisitionsConfig', - 'authentication.apps.AuthenticationConfig', - 'capabilities.apps.CapabilitiesConfig', - 'hardware.apps.HardwareConfig', - 'results.apps.ResultsConfig', - 'schedule.apps.ScheduleConfig', - 'scheduler.apps.SchedulerConfig', - 'status.apps.StatusConfig', - 'sensor.apps.SensorConfig', # global settings/utils, etc + "authentication.apps.AuthenticationConfig", + "capabilities.apps.CapabilitiesConfig", + "hardware.apps.HardwareConfig", + "tasks.apps.TasksConfig", + "schedule.apps.ScheduleConfig", + "scheduler.apps.SchedulerConfig", + "status.apps.StatusConfig", + "sensor.apps.SensorConfig", # global settings/utils, etc ] MIDDLEWARE = [ - 'debug_toolbar.middleware.DebugToolbarMiddleware', - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', + "debug_toolbar.middleware.DebugToolbarMiddleware", + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", ] -ROOT_URLCONF = 'sensor.urls' +ROOT_URLCONF = "sensor.urls" TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [path.join(BASE_DIR, 'templates')], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.contrib.auth.context_processors.auth', - 
'django.contrib.messages.context_processors.messages', + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [path.join(BASE_DIR, "templates")], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", ], - 'builtins': [ - 'sensor.templatetags.sensor_tags', - ] + "builtins": ["sensor.templatetags.sensor_tags"], }, - }, + } ] -WSGI_APPLICATION = 'sensor.wsgi.application' +WSGI_APPLICATION = "sensor.wsgi.application" # Django Rest Framework # http://www.django-rest-framework.org/ REST_FRAMEWORK = { - 'EXCEPTION_HANDLER': - 'sensor.exceptions.exception_handler', - 'DEFAULT_AUTHENTICATION_CLASSES': ( - 'rest_framework.authentication.TokenAuthentication', - 'rest_framework.authentication.SessionAuthentication', + "EXCEPTION_HANDLER": "sensor.exceptions.exception_handler", + "DEFAULT_AUTHENTICATION_CLASSES": ( + "rest_framework.authentication.TokenAuthentication", + "rest_framework.authentication.SessionAuthentication", ), - 'DEFAULT_PERMISSION_CLASSES': - ('rest_framework.permissions.IsAuthenticated', ), - 'DEFAULT_RENDERER_CLASSES': ( - 'rest_framework.renderers.JSONRenderer', - 'rest_framework.renderers.BrowsableAPIRenderer', + "DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",), + "DEFAULT_RENDERER_CLASSES": ( + "rest_framework.renderers.JSONRenderer", + "rest_framework.renderers.BrowsableAPIRenderer", ), - 'DEFAULT_VERSIONING_CLASS': - 'rest_framework.versioning.URLPathVersioning', - 'DEFAULT_VERSION': 'v1', # this should always point to latest stable api - 'ALLOWED_VERSIONS': ('v1', ), - 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination', - 'PAGE_SIZE': 10, - 'DATETIME_FORMAT': DATETIME_FORMAT, - 'DATETIME_INPUT_FORMATS': ('iso-8601', ), - 'COERCE_DECIMAL_TO_STRING': False, # DecimalField should return floats - 'URL_FIELD_NAME': 'self' # RFC 42867 + "DEFAULT_VERSIONING_CLASS": "rest_framework.versioning.URLPathVersioning", + "DEFAULT_VERSION": "v1", # this should always point to latest stable api + "ALLOWED_VERSIONS": ("v1",), + "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.LimitOffsetPagination", + "PAGE_SIZE": 10, + "DATETIME_FORMAT": DATETIME_FORMAT, + "DATETIME_INPUT_FORMATS": ("iso-8601",), + "COERCE_DECIMAL_TO_STRING": False, # DecimalField should return floats + "URL_FIELD_NAME": "self", # RFC 42867 } # https://drf-yasg.readthedocs.io/en/stable/settings.html SWAGGER_SETTINGS = { - 'SECURITY_DEFINITIONS': { - 'token': { - 'type': 'apiKey', - 'description': - ("Tokens are automatically generated for all users. You can " - "view yours by going to your User Details view in the " - "browsable API at `/api/v1/users/me` and looking for the " - "`auth_token` key. Non-admin user accounts do not initially " - "have a password and so can not log in to the browsable API. " - "To set a password for a user (for testing purposes), an " - "admin can do that in the Sensor Configuration Portal, but " - "only the account's token should be stored and used for " - "general purpose API access. " - "Example cURL call: `curl -kLsS -H \"Authorization: Token" - " 529c30e6e04b3b546f2e073e879b75fdfa147c15\" " - "https://greyhound5.sms.internal/api/v1`"), - 'name': 'Token', - 'in': 'header' + "SECURITY_DEFINITIONS": { + "token": { + "type": "apiKey", + "description": ( + "Tokens are automatically generated for all users. 
You can " + "view yours by going to your User Details view in the " + "browsable API at `/api/v1/users/me` and looking for the " + "`auth_token` key. Non-admin user accounts do not initially " + "have a password and so can not log in to the browsable API. " + "To set a password for a user (for testing purposes), an " + "admin can do that in the Sensor Configuration Portal, but " + "only the account's token should be stored and used for " + "general purpose API access. " + 'Example cURL call: `curl -kLsS -H "Authorization: Token' + ' 529c30e6e04b3b546f2e073e879b75fdfa147c15" ' + "https://greyhound5.sms.internal/api/v1`" + ), + "name": "Token", + "in": "header", } }, - 'APIS_SORTER': 'alpha', - 'OPERATIONS_SORTER': 'method', - 'VALIDATOR_URL': None + "APIS_SORTER": "alpha", + "OPERATIONS_SORTER": "method", + "VALIDATOR_URL": None, } # Database @@ -260,126 +255,72 @@ if RUNNING_TESTS or RUNNING_DEMO: DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': ':memory:' - } + "default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"} } else: DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql', - 'NAME': 'postgres', - 'USER': 'postgres', - 'PASSWORD': environ['POSTGRES_PASSWORD'], - 'HOST': 'db', - 'PORT': '5432', + "default": { + "ENGINE": "django.db.backends.postgresql", + "NAME": "postgres", + "USER": "postgres", + "PASSWORD": environ["POSTGRES_PASSWORD"], + "HOST": "db", + "PORT": "5432", } } if not IN_DOCKER: - DATABASES['default']['HOST'] = 'localhost' + DATABASES["default"]["HOST"] = "localhost" # Ensure only the last MAX_TASK_RESULTS results are kept per schedule entry MAX_TASK_RESULTS = 100 -# Display at most MAX_TASK_QUEUE upcoming tasks in the status endpoint -MAX_TASK_QUEUE = 100 +# Display at most MAX_TASK_QUEUE upcoming tasks in /tasks/upcoming +MAX_TASK_QUEUE = 50 # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { - 'NAME': - 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', - }, - { - 'NAME': - 'django.contrib.auth.password_validation.MinimumLengthValidator', - }, - { - 'NAME': - 'django.contrib.auth.password_validation.CommonPasswordValidator', - }, - { - 'NAME': - 'django.contrib.auth.password_validation.NumericPasswordValidator', + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" }, + {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, + {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, + {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, ] -AUTH_USER_MODEL = 'authentication.User' +AUTH_USER_MODEL = "authentication.User" # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ -LANGUAGE_CODE = 'en-us' -TIME_ZONE = 'UTC' +LANGUAGE_CODE = "en-us" +TIME_ZONE = "UTC" USE_I18N = True USE_L10N = True USE_TZ = True -LOGLEVEL = 'DEBUG' if DEBUG else 'INFO' +LOGLEVEL = "DEBUG" if DEBUG else "INFO" LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'simple': { - 'format': '[%(asctime)s] [%(levelname)s] %(message)s' - }, - }, - 'filters': { - 'require_debug_true': { - '()': 'django.utils.log.RequireDebugTrue', - }, + "version": 1, + "disable_existing_loggers": False, + "formatters": {"simple": {"format": "[%(asctime)s] [%(levelname)s] %(message)s"}}, + "filters": {"require_debug_true": {"()": "django.utils.log.RequireDebugTrue"}}, + "handlers": {"console": 
{"class": "logging.StreamHandler", "formatter": "simple"}}, + "loggers": { + "actions": {"handlers": ["console"], "level": LOGLEVEL}, + "capabilities": {"handlers": ["console"], "level": LOGLEVEL}, + "hardware": {"handlers": ["console"], "level": LOGLEVEL}, + "schedule": {"handlers": ["console"], "level": LOGLEVEL}, + "scheduler": {"handlers": ["console"], "level": LOGLEVEL}, + "sensor": {"handlers": ["console"], "level": LOGLEVEL}, + "status": {"handlers": ["console"], "level": LOGLEVEL}, }, - 'handlers': { - 'console': { - 'class': 'logging.StreamHandler', - 'formatter': 'simple' - }, - }, - 'loggers': { - 'actions': { - 'handlers': ['console'], - 'level': LOGLEVEL - }, - 'acquisitions': { - 'handlers': ['console'], - 'level': LOGLEVEL - }, - 'capabilities': { - 'handlers': ['console'], - 'level': LOGLEVEL - }, - 'hardware': { - 'handlers': ['console'], - 'level': LOGLEVEL - }, - 'schedule': { - 'handlers': ['console'], - 'level': LOGLEVEL - }, - 'scheduler': { - 'handlers': ['console'], - 'level': LOGLEVEL - }, - 'sensor': { - 'handlers': ['console'], - 'level': LOGLEVEL - }, - 'status': { - 'handlers': ['console'], - 'level': LOGLEVEL - } - } } -SENTRY_DSN = environ.get('SENTRY_DSN') +SENTRY_DSN = environ.get("SENTRY_DSN") if SENTRY_DSN: import raven - RAVEN_CONFIG = { - 'dsn': SENTRY_DSN, - 'release': raven.fetch_git_sha(REPO_ROOT), - } + RAVEN_CONFIG = {"dsn": SENTRY_DSN, "release": raven.fetch_git_sha(REPO_ROOT)} diff --git a/src/sensor/tests/test_api_docs.py b/src/sensor/tests/test_api_docs.py index 48ac12b4..5f73b507 100644 --- a/src/sensor/tests/test_api_docs.py +++ b/src/sensor/tests/test_api_docs.py @@ -1,13 +1,11 @@ import json from os import path -import pytest from rest_framework.reverse import reverse from sensor import V1, settings -@pytest.mark.update_api_docs def test_api_docs_up_to_date(admin_client): """Ensure that docs/openapi.json is up-to-date.""" @@ -17,9 +15,9 @@ def test_api_docs_up_to_date(admin_client): print("{} doesn't exist, not in src tree.".format(docs_dir)) return True - schema_url = reverse('api_schema', kwargs=V1) + '?format=openapi' + schema_url = reverse("api_schema", kwargs=V1) + "?format=openapi" response = admin_client.get(schema_url) - with open(settings.OPENAPI_FILE, 'w+') as openapi_file: + with open(settings.OPENAPI_FILE, "w+") as openapi_file: openapi_json = json.loads(response.content) json.dump(openapi_json, openapi_file, indent=4) diff --git a/src/sensor/tests/test_api_root_view.py b/src/sensor/tests/test_api_root_view.py index 668f1834..6de541a0 100644 --- a/src/sensor/tests/test_api_root_view.py +++ b/src/sensor/tests/test_api_root_view.py @@ -1,15 +1,14 @@ from rest_framework.reverse import reverse from sensor import V1 -from .utils import validate_response, HTTPS_KWARG -API_ROOT_ENDPOINTS = { - 'acquisitions', 'users', 'schedule', 'status', 'capabilities', 'results' -} +from .utils import HTTPS_KWARG, validate_response + +API_ROOT_ENDPOINTS = {"users", "schedule", "status", "capabilities", "tasks"} def test_index(user_client): - response = user_client.get(reverse('api-root', kwargs=V1), **HTTPS_KWARG) + response = user_client.get(reverse("api-root", kwargs=V1), **HTTPS_KWARG) rjson = validate_response(response) assert rjson.keys() == API_ROOT_ENDPOINTS diff --git a/src/sensor/tests/utils.py b/src/sensor/tests/utils.py index 764a9323..cf327955 100644 --- a/src/sensor/tests/utils.py +++ b/src/sensor/tests/utils.py @@ -1,6 +1,6 @@ from rest_framework import status -HTTPS_KWARG = {'wsgi.url_scheme': 'https'} +HTTPS_KWARG = 
{"wsgi.url_scheme": "https"} def validate_response(response, expected_code=None): @@ -11,6 +11,6 @@ def validate_response(response, expected_code=None): else: assert actual_code == expected_code, response.data - if actual_code not in (status.HTTP_204_NO_CONTENT, ): + if actual_code not in (status.HTTP_204_NO_CONTENT,): rjson = response.json() return rjson diff --git a/src/sensor/urls.py b/src/sensor/urls.py index f88eac61..23925ff8 100644 --- a/src/sensor/urls.py +++ b/src/sensor/urls.py @@ -17,54 +17,49 @@ """ -from __future__ import absolute_import - from django.contrib import admin from django.urls import include, path, re_path from django.views.generic import RedirectView from rest_framework.urlpatterns import format_suffix_patterns from . import settings -from .views import schema_view, api_v1_root - +from .views import api_v1_root, schema_view # Matches api/v1, api/v2, etc... -API_PREFIX = r'^api/(?Pv[0-9]+)/' -DEFAULT_API_VERSION = settings.REST_FRAMEWORK['DEFAULT_VERSION'] +API_PREFIX = r"^api/(?Pv[0-9]+)/" +DEFAULT_API_VERSION = settings.REST_FRAMEWORK["DEFAULT_VERSION"] api_urlpatterns = format_suffix_patterns( ( - path('', api_v1_root, name='api-root'), - path('acquisitions/', include('acquisitions.urls')), - path('capabilities/', include('capabilities.urls')), - path('schedule/', include('schedule.urls')), - path('status', include('status.urls')), - path('users/', include('authentication.urls')), - path('results/', include('results.urls')), - path('schema/', schema_view.with_ui('redoc', cache_timeout=0), - name='api_schema') + path("", api_v1_root, name="api-root"), + path("capabilities/", include("capabilities.urls")), + path("schedule/", include("schedule.urls")), + path("status", include("status.urls")), + path("users/", include("authentication.urls")), + path("tasks/", include("tasks.urls")), + path( + "schema/", schema_view.with_ui("redoc", cache_timeout=0), name="api_schema" + ), ) ) # Modify admin portal before including url # Text to put in each page's
<h1>
(and above login form). -admin.site.site_header = 'SCOS Sensor Configuration Portal' +admin.site.site_header = "SCOS Sensor Configuration Portal" # Text to put at the top of the admin index page. -admin.site.index_title = 'SCOS Sensor Configuration Portal' +admin.site.index_title = "SCOS Sensor Configuration Portal" urlpatterns = ( - path('', RedirectView.as_view(url='/api/')), - path('admin/', admin.site.urls), - path('api/', - RedirectView.as_view(url='/api/{}/'.format(DEFAULT_API_VERSION))), + path("", RedirectView.as_view(url="/api/")), + path("admin/", admin.site.urls), + path("api/", RedirectView.as_view(url="/api/{}/".format(DEFAULT_API_VERSION))), re_path(API_PREFIX, include(api_urlpatterns)), - path('api/auth/', include('rest_framework.urls')) + path("api/auth/", include("rest_framework.urls")), ) if settings.DEBUG: import debug_toolbar - urlpatterns = [ - path('__debug__/', include(debug_toolbar.urls)), - ] + list(urlpatterns) + + urlpatterns = [path("__debug__/", include(debug_toolbar.urls))] + list(urlpatterns) diff --git a/src/sensor/utils.py b/src/sensor/utils.py index c63ff809..4f663bdf 100644 --- a/src/sensor/utils.py +++ b/src/sensor/utils.py @@ -61,7 +61,7 @@ def get_timestamp_from_datetime(dt): def get_datetime_str_now(): - return datetime.isoformat(datetime.utcnow()) + 'Z' + return datetime.isoformat(datetime.utcnow()) + "Z" def parse_datetime_str(d): diff --git a/src/sensor/views.py b/src/sensor/views.py index 310610be..e2e6c98e 100644 --- a/src/sensor/views.py +++ b/src/sensor/views.py @@ -1,7 +1,7 @@ from functools import partial -from drf_yasg.views import get_schema_view from drf_yasg import openapi +from drf_yasg.views import get_schema_view from rest_framework import permissions from rest_framework.decorators import api_view from rest_framework.response import Response @@ -11,23 +11,22 @@ from . 
import settings -@api_view(('GET', )) +@api_view(("GET",)) def api_v1_root(request, version, format=None): """SCOS sensor API root.""" reverse_ = partial(reverse, request=request, format=format) list_endpoints = { - 'schedule': reverse_('schedule-list'), - 'acquisitions': reverse_('acquisitions-overview'), - 'status': reverse_('status'), - 'users': reverse_('user-list'), - 'capabilities': reverse_('capabilities'), - 'results': reverse_('results-overview') + "capabilities": reverse_("capabilities"), + "schedule": reverse_("schedule-list"), + "status": reverse_("status"), + "tasks": reverse_("task-root"), + "users": reverse_("user-list"), } # See note in settings:INTERNAL_IPS about why we do this here - nginx_container_ip = request.META['REMOTE_ADDR'] + nginx_container_ip = request.META["REMOTE_ADDR"] nginx_ip_set = nginx_container_ip in settings.INTERNAL_IPS - if (settings.IN_DOCKER and settings.DEBUG and not nginx_ip_set): + if settings.IN_DOCKER and settings.DEBUG and not nginx_ip_set: settings.INTERNAL_IPS.append(nginx_container_ip) return Response(list_endpoints) @@ -42,5 +41,5 @@ def api_v1_root(request, version, format=None): license=openapi.License(name="NTIA/ITS", url=settings.LICENSE_URL), ), public=False, - permission_classes=(permissions.IsAuthenticated, ), + permission_classes=(permissions.IsAuthenticated,), ) diff --git a/src/sensor/wsgi.py b/src/sensor/wsgi.py index 7dd94a07..a6e9bb7c 100644 --- a/src/sensor/wsgi.py +++ b/src/sensor/wsgi.py @@ -4,9 +4,10 @@ For more information on this file, see https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/ -""" -from __future__ import absolute_import +isort:skip_file + +""" import os @@ -16,8 +17,8 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sensor.settings") django.setup() # this is necessary because we need to handle our own thread -from sensor import settings # noqa from scheduler import scheduler # noqa +from sensor import settings # noqa application = get_wsgi_application() diff --git a/src/static/drf-yasg/redoc/redoc.min.js b/src/static/drf-yasg/redoc/redoc.min.js index 39a7fd79..70f395e1 100644 --- a/src/static/drf-yasg/redoc/redoc.min.js +++ b/src/static/drf-yasg/redoc/redoc.min.js @@ -1,10 +1,10 @@ /*! 
* ReDoc - OpenAPI/Swagger-generated API Reference Documentation * ------------------------------------------------------------- - * Version: "2.0.0-rc.2" + * Version: "2.0.0-rc.4" * Repo: https://github.com/Rebilly/ReDoc */ [remainder of the diff to the minified redoc.min.js bundle omitted: vendored, machine-generated build artifact; the only human-reviewable change is the ReDoc version bump from 2.0.0-rc.2 to 2.0.0-rc.4 shown in the banner above]
Make sure MobX is loaded only once or use `configure({ isolateGlobalState: true })`")},1),new wt)}();function St(e){var t,n,r={name:e.name};return e.observing&&e.observing.length>0&&(r.dependencies=(t=e.observing,n=[],t.forEach(function(e){-1===n.indexOf(e)&&n.push(e)}),n).map(St)),r}function _t(e,t){var n=e.observers.length;n&&(e.observersIndexes[t.__mapid]=n),e.observers[n]=t,e.lowestObserverState>t.dependenciesState&&(e.lowestObserverState=t.dependenciesState)}function Ot(e,t){if(1===e.observers.length)e.observers.length=0,Ct(e);else{var n=e.observers,r=e.observersIndexes,o=n.pop();if(o!==t){var i=r[t.__mapid]||0;i?r[o.__mapid]=i:delete r[o.__mapid],n[i]=o}delete r[t.__mapid]}}function Ct(e){!1===e.isPendingUnobservation&&(e.isPendingUnobservation=!0,Et.pendingUnobservations.push(e))}function Tt(){Et.inBatch++}function jt(){if(0==--Et.inBatch){Yt();for(var e=Et.pendingUnobservations,t=0;t0&&Ct(e),!1)}function It(e,t){if(console.log("[mobx.trace] '"+e.name+"' is invalidated due to a change in: '"+t.name+"'"),e.isTracing===bt.BREAK){var n=[];!function e(t,n,r){if(n.length>=1e3)return void n.push("(and many more)");n.push(""+new Array(r).join("\t")+t.name);t.dependencies&&t.dependencies.forEach(function(t){return e(t,n,r+1)})}((r=e,St(Re(r,o))),n,1),new Function("debugger;\n/*\nTracing '"+e.name+"'\n\nYou are entering this break point because derivation '"+e.name+"' is being traced and '"+t.name+"' is now forcing it to update.\nJust follow the stacktrace you should now see in the devtools to see precisely what piece of your code is causing this update\nThe stackframe you are looking for is at least ~6-8 stack-frames up.\n\n"+(e instanceof F?e.derivation.toString():"")+"\n\nThe dependencies for this derivation are:\n\n"+n.join("\n")+"\n*/\n ")()}var r,o}!function(e){e[e.NOT_TRACKING=-1]="NOT_TRACKING",e[e.UP_TO_DATE=0]="UP_TO_DATE",e[e.POSSIBLY_STALE=1]="POSSIBLY_STALE",e[e.STALE=2]="STALE"}(vt||(vt={})),function(e){e[e.NONE=0]="NONE",e[e.LOG=1]="LOG",e[e.BREAK=2]="BREAK"}(bt||(bt={}));var Pt=function(){return function(e){this.cause=e}}();function Rt(e){return e instanceof Pt}function Nt(e){switch(e.dependenciesState){case vt.UP_TO_DATE:return!1;case vt.NOT_TRACKING:case vt.STALE:return!0;case vt.POSSIBLY_STALE:for(var t=Ut(),n=e.observing,r=n.length,o=0;o0;Et.computationDepth>0&&t&&ot(!1),Et.allowStateChanges||!t&&"strict"!==Et.enforceActions||ot(!1)}function Mt(e,t,n){zt(e),e.newObserving=new Array(e.observing.length+100),e.unboundDepsCount=0,e.runId=++Et.runId;var r,o=Et.trackingDerivation;if(Et.trackingDerivation=e,!0===Et.disableErrorBoundaries)r=t.call(n);else try{r=t.call(n)}catch(e){r=new Pt(e)}return Et.trackingDerivation=o,function(e){for(var t=e.observing,n=e.observing=e.newObserving,r=vt.UP_TO_DATE,o=0,i=e.unboundDepsCount,a=0;ar&&(r=s.dependenciesState)}n.length=o,e.newObserving=null,i=t.length;for(;i--;){var s=t[i];0===s.diffValue&&Ot(s,e),s.diffValue=0}for(;o--;){var s=n[o];1===s.diffValue&&(s.diffValue=0,_t(s,e))}r!==vt.UP_TO_DATE&&(e.dependenciesState=r,e.onBecomeStale())}(e),r}function Dt(e){var t=e.observing;e.observing=[];for(var n=t.length;n--;)Ot(t[n],e);e.dependenciesState=vt.NOT_TRACKING}function Ft(e){var t=Ut(),n=e();return Bt(t),n}function Ut(){var e=Et.trackingDerivation;return Et.trackingDerivation=null,e}function Bt(e){Et.trackingDerivation=e}function zt(e){if(e.dependenciesState!==vt.UP_TO_DATE){e.dependenciesState=vt.UP_TO_DATE;for(var t=e.observing,n=t.length;n--;)t[n].lowestObserverState=vt.UP_TO_DATE}}function $t(){for(var 
e=[],t=0;t0||Et.isRunningReactions||Ht(Vt)}function Vt(){Et.isRunningReactions=!0;for(var e=Et.pendingReactions,t=0;e.length>0;){++t===qt&&(console.error("Reaction doesn't converge to a stable state after "+qt+" iterations. Probably there is a cycle in the reactive function: "+e[0]),e.splice(0));for(var n=e.splice(0),r=0,o=n.length;r1)for(var n=1;n=0;s--)(o=e[s])&&(a=(i<3?o(a):i>3?o(t,n,a):o(t,n))||a);return i>3&&a&&Object.defineProperty(t,n,a),a}function l(e,t){return function(n,r){t(n,r,e)}}function u(e,t){if("object"==typeof Reflect&&"function"==typeof Reflect.metadata)return Reflect.metadata(e,t)}function c(e,t,n,r){return new(n||(n=Promise))(function(o,i){function a(e){try{l(r.next(e))}catch(e){i(e)}}function s(e){try{l(r.throw(e))}catch(e){i(e)}}function l(e){e.done?o(e.value):new n(function(t){t(e.value)}).then(a,s)}l((r=r.apply(e,t||[])).next())})}function f(e,t){var n,r,o,i,a={label:0,sent:function(){if(1&o[0])throw o[1];return o[1]},trys:[],ops:[]};return i={next:s(0),throw:s(1),return:s(2)},"function"==typeof Symbol&&(i[Symbol.iterator]=function(){return this}),i;function s(i){return function(s){return function(i){if(n)throw new TypeError("Generator is already executing.");for(;a;)try{if(n=1,r&&(o=2&i[0]?r.return:i[0]?r.throw||((o=r.return)&&o.call(r),0):r.next)&&!(o=o.call(r,i[1])).done)return o;switch(r=0,o&&(i=[2&i[0],o.value]),i[0]){case 0:case 1:o=i;break;case 4:return a.label++,{value:i[1],done:!1};case 5:a.label++,r=i[1],i=[0];continue;case 7:i=a.ops.pop(),a.trys.pop();continue;default:if(!(o=(o=a.trys).length>0&&o[o.length-1])&&(6===i[0]||2===i[0])){a=0;continue}if(3===i[0]&&(!o||i[1]>o[0]&&i[1]=e.length&&(e=void 0),{value:e&&e[n++],done:!e}}}}function h(e,t){var n="function"==typeof Symbol&&e[Symbol.iterator];if(!n)return e;var r,o,i=n.call(e),a=[];try{for(;(void 0===t||t-- >0)&&!(r=i.next()).done;)a.push(r.value)}catch(e){o={error:e}}finally{try{r&&!r.done&&(n=i.return)&&n.call(i)}finally{if(o)throw o.error}}return a}function m(){for(var e=[],t=0;t1||s(e,t)})})}function s(e,t){try{(n=o[e](t)).value instanceof v?Promise.resolve(n.value.v).then(l,u):c(i[0][2],n)}catch(e){c(i[0][3],e)}var n}function l(e){s("next",e)}function u(e){s("throw",e)}function c(e,t){e(t),i.shift(),i.length&&s(i[0][0],i[0][1])}}function y(e){var t,n;return t={},r("next"),r("throw",function(e){throw e}),r("return"),t[Symbol.iterator]=function(){return this},t;function r(r,o){t[r]=e[r]?function(t){return(n=!n)?{value:v(e[r](t)),done:"return"===r}:o?o(t):t}:o}}function b(e){if(!Symbol.asyncIterator)throw new TypeError("Symbol.asyncIterator is not defined.");var t,n=e[Symbol.asyncIterator];return n?n.call(e):(e=d(e),t={},r("next"),r("throw"),r("return"),t[Symbol.asyncIterator]=function(){return this},t);function r(n){t[n]=e[n]&&function(t){return new Promise(function(r,o){(function(e,t,n,r){Promise.resolve(r).then(function(t){e({value:t,done:n})},t)})(r,o,(t=e[n](t)).done,t.value)})}}}function w(e,t){return Object.defineProperty?Object.defineProperty(e,"raw",{value:t}):e.raw=t,e}function x(e){if(e&&e.__esModule)return e;var t={};if(null!=e)for(var n in e)Object.hasOwnProperty.call(e,n)&&(t[n]=e[n]);return t.default=e,t}function k(e){return e&&e.__esModule?e:{default:e}}},function(e,t,n){"use strict";(function(e,r){n.d(t,"a",function(){return tn}),n.d(t,"b",function(){return Fe}),n.d(t,"o",function(){return Ee}),n.d(t,"g",function(){return L}),n.d(t,"n",function(){return Ke}),n.d(t,"k",function(){return Ht}),n.d(t,"i",function(){return Pt}),n.d(t,"j",function(){return Lt}),n.d(t,"l",function(){return 
ee}),n.d(t,"e",function(){return oe}),n.d(t,"m",function(){return ct}),n.d(t,"d",function(){return Ze}),n.d(t,"f",function(){return it}),n.d(t,"h",function(){return Gt}),n.d(t,"c",function(){return le}); +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. All rights reserved. +Licensed under the Apache License, Version 2.0 (the "License"); you may not use +this file except in compliance with the License. You may obtain a copy of the +License at http://www.apache.org/licenses/LICENSE-2.0 + +THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED +WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, +MERCHANTABLITY OR NON-INFRINGEMENT. + +See the Apache Version 2.0 License for specific language governing permissions +and limitations under the License. +***************************************************************************** */ +var o=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,t){e.__proto__=t}||function(e,t){for(var n in t)t.hasOwnProperty(n)&&(e[n]=t[n])};function i(e,t){function n(){this.constructor=e}o(e,t),e.prototype=null===t?Object.create(t):(n.prototype=t.prototype,new n)}var a=Object.assign||function(e){for(var t,n=1,r=arguments.length;n0)&&!(r=i.next()).done;)a.push(r.value)}catch(e){o={error:e}}finally{try{r&&!r.done&&(n=i.return)&&n.call(i)}finally{if(o)throw o.error}}return a}function l(){for(var e=[],t=0;t2&&te("box");var n=V(t);return new fe(e,Q(n),n.name,!0,n.equals)},shallowBox:function(e,t){return arguments.length>2&&te("shallowBox"),ee.box(e,{name:t,deep:!1})},array:function(e,t){arguments.length>2&&te("array");var n=V(t);return new Ot(e,Q(n),n.name)},shallowArray:function(e,t){return arguments.length>2&&te("shallowArray"),ee.array(e,{name:t,deep:!1})},map:function(e,t){arguments.length>2&&te("map");var n=V(t);return new Rt(e,Q(n),n.name)},shallowMap:function(e,t){return arguments.length>2&&te("shallowMap"),ee.map(e,{name:t,deep:!1})},set:function(e,t){arguments.length>2&&te("set");var n=V(t);return new Dt(e,Q(n),n.name)},object:function(e,t,n){return"string"==typeof arguments[1]&&te("object"),at({},e,t,V(n))},shallowObject:function(e,t){return"string"==typeof arguments[1]&&te("shallowObject"),ee.object(e,{},{name:t,deep:!1})},ref:X,shallow:K,deep:G,struct:J},ee=function(e,t,n){if("string"==typeof arguments[1])return G.apply(null,arguments);if(ut(e))return e;var r=b(e)?ee.object(e,t,n):Array.isArray(e)?ee.array(e,t):E(e)?ee.map(e,t):_(e)?ee.set(e,t):e;if(r!==e)return r;h(!1)};function te(e){h("Expected one or two arguments to observable."+e+". 
Did you accidentally try to use observable."+e+" as decorator?")}Object.keys(Z).forEach(function(e){return ee[e]=Z[e]});var ne=z(!1,function(e,t,n,r,o){var i=n.get,s=n.set,l=o[0]||{};!function(e,t,n){var r=Bt(e);n.name=r.name+"."+t,n.context=e,r.values[t]=new pe(n),Object.defineProperty(e,t,function(e){return Wt[e]||(Wt[e]={configurable:!1,enumerable:!1,get:function(){return qt(this).read(this,e)},set:function(t){qt(this).write(this,e,t)}})}(t))}(e,t,a({get:i,set:s},l))}),re=ne({equals:D.structural}),oe=function(e,t,n){if("string"==typeof t)return ne.apply(null,arguments);if(null!==e&&"object"==typeof e&&1===arguments.length)return ne.apply(null,arguments);var r="object"==typeof t?t:{};return r.get=e,r.set="function"==typeof t?t:r.set,r.name=r.name||e.name||"",new pe(r)};function ie(e,t){var n=function(){return ae(e,t,this,arguments)};return n.isMobxAction=!0,n}function ae(e,t,n,r){var o=function(e,t,n,r){var o=Ye()&&!!e,i=0;if(o){i=Date.now();var a=r&&r.length||0,s=new Array(a);if(a>0)for(var l=0;l0;je.computationDepth>0&&t&&h(!1),je.allowStateChanges||!t&&"strict"!==je.enforceActions||h(!1)}function xe(e,t,n){Oe(e),e.newObserving=new Array(e.observing.length+100),e.unboundDepsCount=0,e.runId=++je.runId;var r,o=je.trackingDerivation;if(je.trackingDerivation=e,!0===je.disableErrorBoundaries)r=t.call(n);else try{r=t.call(n)}catch(e){r=new ge(e)}return je.trackingDerivation=o,function(e){for(var t=e.observing,n=e.observing=e.newObserving,r=de.UP_TO_DATE,o=0,i=e.unboundDepsCount,a=0;ar&&(r=s.dependenciesState)}n.length=o,e.newObserving=null,i=t.length;for(;i--;){var s=t[i];0===s.diffValue&&Pe(s,e),s.diffValue=0}for(;o--;){var s=n[o];1===s.diffValue&&(s.diffValue=0,Ie(s,e))}r!==de.UP_TO_DATE&&(e.dependenciesState=r,e.onBecomeStale())}(e),r}function ke(e){var t=e.observing;e.observing=[];for(var n=t.length;n--;)Pe(t[n],e);e.dependenciesState=de.NOT_TRACKING}function Ee(e){var t=_e(),n=e();return Se(t),n}function _e(){var e=je.trackingDerivation;return je.trackingDerivation=null,e}function Se(e){je.trackingDerivation=e}function Oe(e){if(e.dependenciesState!==de.UP_TO_DATE){e.dependenciesState=de.UP_TO_DATE;for(var t=e.observing,n=t.length;n--;)t[n].lowestObserverState=de.UP_TO_DATE}}var Te=function(){return function(){this.version=5,this.UNCHANGED={},this.trackingDerivation=null,this.computationDepth=0,this.runId=0,this.mobxGuid=0,this.inBatch=0,this.pendingUnobservations=[],this.pendingReactions=[],this.isRunningReactions=!1,this.allowStateChanges=!0,this.enforceActions=!1,this.spyListeners=[],this.globalReactionErrorHandlers=[],this.computedRequiresReaction=!1,this.disableErrorBoundaries=!1,this.suppressReactionErrors=!1}}(),Ce=!0,Ae=!1,je=function(){var e=p();return e.__mobxInstanceCount>0&&!e.__mobxGlobals&&(Ce=!1),e.__mobxGlobals&&e.__mobxGlobals.version!==(new Te).version&&(Ce=!1),Ce?e.__mobxGlobals?(e.__mobxInstanceCount+=1,e.__mobxGlobals.UNCHANGED||(e.__mobxGlobals.UNCHANGED={}),e.__mobxGlobals):(e.__mobxInstanceCount=1,e.__mobxGlobals=new Te):(setTimeout(function(){Ae||h("There are multiple, different versions of MobX active. 
Make sure MobX is loaded only once or use `configure({ isolateGlobalState: true })`")},1),new Te)}();function Ie(e,t){var n=e.observers.length;n&&(e.observersIndexes[t.__mapid]=n),e.observers[n]=t,e.lowestObserverState>t.dependenciesState&&(e.lowestObserverState=t.dependenciesState)}function Pe(e,t){if(1===e.observers.length)e.observers.length=0,Ne(e);else{var n=e.observers,r=e.observersIndexes,o=n.pop();if(o!==t){var i=r[t.__mapid]||0;i?r[o.__mapid]=i:delete r[o.__mapid],n[i]=o}delete r[t.__mapid]}}function Ne(e){!1===e.isPendingUnobservation&&(e.isPendingUnobservation=!0,je.pendingUnobservations.push(e))}function Re(){je.inBatch++}function Le(){if(0==--je.inBatch){ze();for(var e=je.pendingUnobservations,t=0;t0&&Ne(e),!1)}function De(e,t){if(console.log("[mobx.trace] '"+e.name+"' is invalidated due to a change in: '"+t.name+"'"),e.isTracing===me.BREAK){var n=[];!function e(t,n,r){if(n.length>=1e3)return void n.push("(and many more)");n.push(""+new Array(r).join("\t")+t.name);t.dependencies&&t.dependencies.forEach(function(t){return e(t,n,r+1)})}((r=e,st(Vt(r,o))),n,1),new Function("debugger;\n/*\nTracing '"+e.name+"'\n\nYou are entering this break point because derivation '"+e.name+"' is being traced and '"+t.name+"' is now forcing it to update.\nJust follow the stacktrace you should now see in the devtools to see precisely what piece of your code is causing this update\nThe stackframe you are looking for is at least ~6-8 stack-frames up.\n\n"+(e instanceof pe?e.derivation.toString().replace(/[*]\//g,"/"):"")+"\n\nThe dependencies for this derivation are:\n\n"+n.join("\n")+"\n*/\n ")()}var r,o}var Fe=function(){function e(e,t,n){void 0===e&&(e="Reaction@"+d()),this.name=e,this.onInvalidate=t,this.errorHandler=n,this.observing=[],this.newObserving=[],this.dependenciesState=de.NOT_TRACKING,this.diffValue=0,this.runId=0,this.unboundDepsCount=0,this.__mapid="#"+d(),this.isDisposed=!1,this._isScheduled=!1,this._isTrackPending=!1,this._isRunning=!1,this.isTracing=me.NONE}return e.prototype.onBecomeStale=function(){this.schedule()},e.prototype.schedule=function(){this._isScheduled||(this._isScheduled=!0,je.pendingReactions.push(this),ze())},e.prototype.isScheduled=function(){return this._isScheduled},e.prototype.runReaction=function(){if(!this.isDisposed){if(Re(),this._isScheduled=!1,be(this)){this._isTrackPending=!0;try{this.onInvalidate(),this._isTrackPending&&Ye()&&He({name:this.name,type:"scheduled-reaction"})}catch(e){this.reportExceptionInDerivation(e)}}Le()}},e.prototype.track=function(e){Re();var t,n=Ye();n&&(t=Date.now(),Ve({name:this.name,type:"reaction"})),this._isRunning=!0;var r=xe(this,e,void 0);this._isRunning=!1,this._isTrackPending=!1,this.isDisposed&&ke(this),ye(r)&&this.reportExceptionInDerivation(r.cause),n&&Ge({time:Date.now()-t}),Le()},e.prototype.reportExceptionInDerivation=function(e){var t=this;if(this.errorHandler)this.errorHandler(e,this);else{if(je.disableErrorBoundaries)throw e;var n="[mobx] Encountered an uncaught exception that was thrown by a reaction or observer component, in: '"+this+"'";je.suppressReactionErrors?console.warn("[mobx] (error in reaction '"+this.name+"' suppressed, fix error of causing action below)"):console.error(n,e),Ye()&&He({type:"error",name:this.name,message:n,error:""+e}),je.globalReactionErrorHandlers.forEach(function(n){return n(e,t)})}},e.prototype.dispose=function(){this.isDisposed||(this.isDisposed=!0,this._isRunning||(Re(),ke(this),Le()))},e.prototype.getDisposer=function(){var e=this.dispose.bind(this);return 
e.$mobx=this,e},e.prototype.toString=function(){return"Reaction["+this.name+"]"},e.prototype.trace=function(e){void 0===e&&(e=!1),function(){for(var e=[],t=0;t0||je.isRunningReactions||Be($e)}function $e(){je.isRunningReactions=!0;for(var e=je.pendingReactions,t=0;e.length>0;){++t===Ue&&(console.error("Reaction doesn't converge to a stable state after "+Ue+" iterations. Probably there is a cycle in the reactive function: "+e[0]),e.splice(0));for(var n=e.splice(0),r=0,o=n.length;r",e):2===arguments.length&&"function"==typeof t?ie(e,t):1===arguments.length&&"string"==typeof e?Je(e):!0!==r?Je(t).apply(null,arguments):void(e[t]=ie(e.name||t,n.value))};function et(e,t,n){w(e,t,ie(t,n.bind(e)))}function tt(e,t){void 0===t&&(t=f);var n,r=t&&t.name||e.name||"Autorun@"+d();if(!t.scheduler&&!t.delay)n=new Fe(r,function(){this.track(a)},t.onError);else{var o=rt(t),i=!1;n=new Fe(r,function(){i||(i=!0,o(function(){i=!1,n.isDisposed||n.track(a)}))},t.onError)}function a(){e(n)}return n.schedule(),n.getDisposer()}Ze.bound=function(e,t,n,r){return!0===r?(et(e,t,n.value),null):n?{configurable:!0,enumerable:!1,get:function(){return et(this,t,n.value||n.initializer.call(this)),this[t]},set:Xe}:{enumerable:!1,configurable:!0,set:function(e){et(this,t,e)},get:function(){}}};var nt=function(e){return e()};function rt(e){return e.scheduler?e.scheduler:e.delay?function(t){return setTimeout(t,e.delay)}:nt}function ot(e,t,n,r){var o="string"==typeof n?Vt(t,n):Vt(t),i="string"==typeof n?r:n,a=o[e];return"function"!=typeof a?h(!1):(o[e]=function(){a.call(this),i.call(this)},function(){o[e]=a})}function it(e){var t=e.enforceActions,n=e.computedRequiresReaction,r=e.disableErrorBoundaries,o=e.arrayBuffer,i=e.reactionScheduler;if(!0===e.isolateGlobalState&&((je.pendingReactions.length||je.inBatch||je.isRunningReactions)&&h("isolateGlobalState should be called before MobX is running any reactions"),Ae=!0,Ce&&(0==--p().__mobxInstanceCount&&(p().__mobxGlobals=void 0),je=new Te)),void 0!==t){var a=void 0;switch(t){case!0:case"observed":a=!0;break;case!1:case"never":a=!1;break;case"strict":case"always":a="strict";break;default:h("Invalid value for 'enforceActions': '"+t+"', expected 'never', 'always' or 'observed'")}je.enforceActions=a,je.allowStateChanges=!0!==a&&"strict"!==a}void 0!==n&&(je.computedRequiresReaction=!!n),void 0!==r&&(!0===r&&console.warn("WARNING: Debug feature only. 
MobX will NOT recover from errors if this is on."),je.disableErrorBoundaries=!!r),"number"==typeof o&&jt(o),i&&qe(i)}function at(e,t,n,r){var o=(r=V(r)).defaultDecorator||(!1===r.deep?X:G);B(e),Bt(e,r.name,o.enhancer),Re();try{for(var i in t){var a=Object.getOwnPropertyDescriptor(t,i);0;var s=(n&&i in n?n[i]:a.get?ne:o)(e,i,a,!0);s&&Object.defineProperty(e,i,s)}}finally{Le()}return e}function st(e){var t,n,r={name:e.name};return e.observing&&e.observing.length>0&&(r.dependencies=(t=e.observing,n=[],t.forEach(function(e){-1===n.indexOf(e)&&n.push(e)}),n).map(st)),r}function lt(e,t){if(null==e)return!1;if(void 0!==t){if(Ht(e)){var n=e.$mobx;return n.values&&!!n.values[t]}return!1}return Ht(e)||!!e.$mobx||R(e)||We(e)||ve(e)}function ut(e){return 1!==arguments.length&&h(!1),lt(e)}function ct(e,t,n,r){return"function"==typeof n?function(e,t,n,r){return Qt(e,t).observe(n,r)}(e,t,n,r):function(e,t,n){return Qt(e).observe(t,n)}(e,t,n)}function ft(e,t){void 0===t&&(t=void 0),Re();try{return e.apply(t)}finally{Le()}}function pt(e){return void 0!==e.interceptors&&e.interceptors.length>0}function dt(e,t){var n=e.interceptors||(e.interceptors=[]);return n.push(t),v(function(){var e=n.indexOf(t);-1!==e&&n.splice(e,1)})}function ht(e,t){var n=_e();try{var r=e.interceptors;if(r)for(var o=0,i=r.length;o0}function vt(e,t){var n=e.changeListeners||(e.changeListeners=[]);return n.push(t),v(function(){var e=n.indexOf(t);-1!==e&&n.splice(e,1)})}function gt(e,t){var n=_e(),r=e.changeListeners;if(r){for(var o=0,i=(r=r.slice()).length;o0?e.map(this.dehancer):e},e.prototype.intercept=function(e){return dt(this,e)},e.prototype.observe=function(e,t){return void 0===t&&(t=!1),t&&e({object:this.array,type:"splice",index:0,added:this.values.slice(),addedCount:this.values.length,removed:[],removedCount:0}),vt(this,e)},e.prototype.getArrayLength=function(){return this.atom.reportObserved(),this.values.length},e.prototype.setArrayLength=function(e){if("number"!=typeof e||e<0)throw new Error("[mobx.array] Out of range: "+e);var t=this.values.length;if(e!==t)if(e>t){for(var n=new Array(e-t),r=0;r0&&e+t+1>Et&&jt(e+t+1)},e.prototype.spliceWithArray=function(e,t,n){var r=this;we(this.atom);var o=this.values.length;if(void 0===e?e=0:e>o?e=o:e<0&&(e=Math.max(0,o+e)),t=1===arguments.length?o-e:null==t?0:Math.max(0,Math.min(t,o-e)),void 0===n&&(n=c),pt(this)){var i=ht(this,{object:this.array,type:"splice",index:e,removedCount:t,added:n});if(!i)return c;t=i.removedCount,n=i.added}var a=(n=0===n.length?n:n.map(function(e){return r.enhancer(e,void 0)})).length-t;this.updateArrayLength(o,a);var s=this.spliceItemsIntoValues(e,t,n);return 0===t&&0===n.length||this.notifyArraySplice(e,n,s),this.dehanceValues(s)},e.prototype.spliceItemsIntoValues=function(e,t,n){var r;if(n.length<1e4)return(r=this.values).splice.apply(r,l([e,t],n));var o=this.values.slice(e,e+t);return this.values=this.values.slice(0,e).concat(n,this.values.slice(e+t)),o},e.prototype.notifyArrayChildUpdate=function(e,t,n){var r=!this.owned&&Ye(),o=mt(this),i=o||r?{object:this.array,type:"update",index:e,newValue:t,oldValue:n}:null;r&&Ve(a({},i,{name:this.atom.name})),this.atom.reportChanged(),o&>(this,i),r&&Ge()},e.prototype.notifyArraySplice=function(e,t,n){var r=!this.owned&&Ye(),o=mt(this),i=o||r?{object:this.array,type:"splice",index:e,removed:n,added:t,removedCount:n.length,addedCount:t.length}:null;r&&Ve(a({},i,{name:this.atom.name})),this.atom.reportChanged(),o&>(this,i),r&&Ge()},e}(),Ot=function(e){function t(t,n,r,o){void 0===r&&(r="ObservableArray@"+d()),void 
0===o&&(o=!1);var i=e.call(this)||this,a=new St(r,n,i,o);if(x(i,"$mobx",a),t&&t.length){var s=ue(!0);i.spliceWithArray(0,0,t),ce(s)}return kt&&Object.defineProperty(a.array,"0",Tt),i}return i(t,e),t.prototype.intercept=function(e){return this.$mobx.intercept(e)},t.prototype.observe=function(e,t){return void 0===t&&(t=!1),this.$mobx.observe(e,t)},t.prototype.clear=function(){return this.splice(0)},t.prototype.concat=function(){for(var e=[],t=0;t-1&&(this.splice(t,1),!0)},t.prototype.move=function(e,t){function n(e){if(e<0)throw new Error("[mobx.array] Index out of bounds: "+e+" is negative");var t=this.$mobx.values.length;if(e>=t)throw new Error("[mobx.array] Index out of bounds: "+e+" is not smaller than "+t)}if(n.call(this,e),n.call(this,t),e!==t){var r,o=this.$mobx.values;r=e=t.length||"\n"===t[o]?r+"\n":r}},{key:"atDocumentBoundary",value:function(e,t,n){var r=e[t];if(!r)return!0;var o=e[t-1];if(o&&"\n"!==o)return!1;if(n){if(r!==n)return!1}else if(r!==c.DIRECTIVES_END&&r!==c.DOCUMENT_END)return!1;var i=e[t+1],a=e[t+2];if(i!==r||a!==r)return!1;var s=e[t+3];return!s||"\n"===s||"\t"===s||" "===s}},{key:"endOfIdentifier",value:function(e,t){for(var n=e[t],r="<"===n,o=r?["\n","\t"," ",">"]:["\n","\t"," ","[","]","{","}",","];n&&-1===o.indexOf(n);)n=e[t+=1];return r&&">"===n&&(t+=1),t}},{key:"endOfIndent",value:function(e,t){for(var n=e[t];" "===n;)n=e[t+=1];return t}},{key:"endOfLine",value:function(e,t){for(var n=e[t];n&&"\n"!==n;)n=e[t+=1];return t}},{key:"endOfWhiteSpace",value:function(e,t){for(var n=e[t];"\t"===n||" "===n;)n=e[t+=1];return t}},{key:"startOfLine",value:function(e,t){var n=e[t-1];if("\n"===n)return t;for(;n&&"\n"!==n;)n=e[t-=1];return t+1}},{key:"endOfBlockIndent",value:function(t,n,r){var o=e.endOfIndent(t,r);if(o>r+n)return o;var i=e.endOfWhiteSpace(t,o),a=t[i];return a&&"\n"!==a?null:i}},{key:"atBlank",value:function(e,t,n){var r=e[t];return"\n"===r||"\t"===r||" "===r||n&&!r}},{key:"atCollectionItem",value:function(t,n){var r=t[n];return("?"===r||":"===r||"-"===r)&&e.atBlank(t,n+1,!0)}},{key:"nextNodeIsIndented",value:function(e,t,n){return!(!e||t<0)&&(t>0||n&&"-"===e)}},{key:"normalizeOffset",value:function(t,n){var r=t[n];return r?"\n"!==r&&"\n"===t[n-1]?n-1:e.endOfWhiteSpace(t,n):n}},{key:"foldNewline",value:function(t,n,r){for(var o=0,i=!1,a="",s=t[n+1];" "===s||"\t"===s||"\n"===s;){switch(s){case"\n":o=0,n+=1,a+="\n";break;case"\t":o<=r&&(i=!0),n=e.endOfWhiteSpace(t,n+2)-1;break;case" ":o+=1,n+=1}s=t[n+1]}return a||(a=" "),s&&o<=r&&(i=!0),{fold:a,offset:n,error:i}}}]),(0,a.default)(e,[{key:"getPropValue",value:function(e,t,n){if(!this.context)return null;var r=this.context.src,o=this.props[e];return o&&r[o.start]===t?r.slice(o.start+(n?1:0),o.end):null}},{key:"commentHasRequiredWhitespace",value:function(t){var n=this.context.src;if(this.header&&t===this.header.end)return!1;if(!this.valueRange)return!1;var r=this.valueRange.end;return t!==r||e.atBlank(n,r-1)}},{key:"parseComment",value:function(t){var n=this.context.src;if(n[t]===c.COMMENT){var r=e.endOfLine(n,t+1),o=new l.default(t,r);return this.props.push(o),r}return t}},{key:"setOrigRanges",value:function(e,t){return this.range&&(t=this.range.setOrigRange(e,t)),this.valueRange&&this.valueRange.setOrigRange(e,t),this.props.forEach(function(n){return n.setOrigRange(e,t)}),t}},{key:"toString",value:function(){var t=this.context.src,n=this.range,r=this.value;if(null!=r)return r;var o=t.slice(n.start,n.end);return e.addStringTerminator(t,n.end,o)}},{key:"anchor",get:function(){for(var 
e=0;e0?e.join("\n"):null}},{key:"hasComment",get:function(){if(this.context)for(var e=this.context.src,t=0;t * @license MIT */ -var r=n(191),o=n(192),i=n(101);function a(){return l.TYPED_ARRAY_SUPPORT?2147483647:1073741823}function s(e,t){if(a()=a())throw new RangeError("Attempt to allocate Buffer larger than maximum size: 0x"+a().toString(16)+" bytes");return 0|e}function h(e,t){if(l.isBuffer(e))return e.length;if("undefined"!=typeof ArrayBuffer&&"function"==typeof ArrayBuffer.isView&&(ArrayBuffer.isView(e)||e instanceof ArrayBuffer))return e.byteLength;"string"!=typeof e&&(e=""+e);var n=e.length;if(0===n)return 0;for(var r=!1;;)switch(t){case"ascii":case"latin1":case"binary":return n;case"utf8":case"utf-8":case void 0:return B(e).length;case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return 2*n;case"hex":return n>>>1;case"base64":return z(e).length;default:if(r)return B(e).length;t=(""+t).toLowerCase(),r=!0}}function m(e,t,n){var r=e[t];e[t]=e[n],e[n]=r}function g(e,t,n,r,o){if(0===e.length)return-1;if("string"==typeof n?(r=n,n=0):n>2147483647?n=2147483647:n<-2147483648&&(n=-2147483648),n=+n,isNaN(n)&&(n=o?0:e.length-1),n<0&&(n=e.length+n),n>=e.length){if(o)return-1;n=e.length-1}else if(n<0){if(!o)return-1;n=0}if("string"==typeof t&&(t=l.from(t,r)),l.isBuffer(t))return 0===t.length?-1:y(e,t,n,r,o);if("number"==typeof t)return t&=255,l.TYPED_ARRAY_SUPPORT&&"function"==typeof Uint8Array.prototype.indexOf?o?Uint8Array.prototype.indexOf.call(e,t,n):Uint8Array.prototype.lastIndexOf.call(e,t,n):y(e,[t],n,r,o);throw new TypeError("val must be string, number or Buffer")}function y(e,t,n,r,o){var i,a=1,s=e.length,l=t.length;if(void 0!==r&&("ucs2"===(r=String(r).toLowerCase())||"ucs-2"===r||"utf16le"===r||"utf-16le"===r)){if(e.length<2||t.length<2)return-1;a=2,s/=2,l/=2,n/=2}function c(e,t){return 1===a?e[t]:e.readUInt16BE(t*a)}if(o){var u=-1;for(i=n;is&&(n=s-l),i=n;i>=0;i--){for(var p=!0,f=0;fo&&(r=o):r=o;var i=t.length;if(i%2!=0)throw new TypeError("Invalid hex string");r>i/2&&(r=i/2);for(var a=0;a>8,o=n%256,i.push(o),i.push(r);return i}(t,e.length-n),e,n,r)}function S(e,t,n){return 0===t&&n===e.length?r.fromByteArray(e):r.fromByteArray(e.slice(t,n))}function _(e,t,n){n=Math.min(e.length,n);for(var r=[],o=t;o239?4:c>223?3:c>191?2:1;if(o+p<=n)switch(p){case 1:c<128&&(u=c);break;case 2:128==(192&(i=e[o+1]))&&(l=(31&c)<<6|63&i)>127&&(u=l);break;case 3:i=e[o+1],a=e[o+2],128==(192&i)&&128==(192&a)&&(l=(15&c)<<12|(63&i)<<6|63&a)>2047&&(l<55296||l>57343)&&(u=l);break;case 4:i=e[o+1],a=e[o+2],s=e[o+3],128==(192&i)&&128==(192&a)&&128==(192&s)&&(l=(15&c)<<18|(63&i)<<12|(63&a)<<6|63&s)>65535&&l<1114112&&(u=l)}null===u?(u=65533,p=1):u>65535&&(u-=65536,r.push(u>>>10&1023|55296),u=56320|1023&u),r.push(u),o+=p}return function(e){var t=e.length;if(t<=O)return String.fromCharCode.apply(String,e);var n="",r=0;for(;rthis.length)return"";if((void 0===n||n>this.length)&&(n=this.length),n<=0)return"";if((n>>>=0)<=(t>>>=0))return"";for(e||(e="utf8");;)switch(e){case"hex":return j(this,t,n);case"utf8":case"utf-8":return _(this,t,n);case"ascii":return C(this,t,n);case"latin1":case"binary":return T(this,t,n);case"base64":return S(this,t,n);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return A(this,t,n);default:if(r)throw new TypeError("Unknown encoding: "+e);e=(e+"").toLowerCase(),r=!0}}.apply(this,arguments)},l.prototype.equals=function(e){if(!l.isBuffer(e))throw new TypeError("Argument must be a Buffer");return this===e||0===l.compare(this,e)},l.prototype.inspect=function(){var 
e="",n=t.INSPECT_MAX_BYTES;return this.length>0&&(e=this.toString("hex",0,n).match(/.{2}/g).join(" "),this.length>n&&(e+=" ... ")),""},l.prototype.compare=function(e,t,n,r,o){if(!l.isBuffer(e))throw new TypeError("Argument must be a Buffer");if(void 0===t&&(t=0),void 0===n&&(n=e?e.length:0),void 0===r&&(r=0),void 0===o&&(o=this.length),t<0||n>e.length||r<0||o>this.length)throw new RangeError("out of range index");if(r>=o&&t>=n)return 0;if(r>=o)return-1;if(t>=n)return 1;if(this===e)return 0;for(var i=(o>>>=0)-(r>>>=0),a=(n>>>=0)-(t>>>=0),s=Math.min(i,a),c=this.slice(r,o),u=e.slice(t,n),p=0;po)&&(n=o),e.length>0&&(n<0||t<0)||t>this.length)throw new RangeError("Attempt to write outside buffer bounds");r||(r="utf8");for(var i=!1;;)switch(r){case"hex":return v(this,e,t,n);case"utf8":case"utf-8":return b(this,e,t,n);case"ascii":return w(this,e,t,n);case"latin1":case"binary":return x(this,e,t,n);case"base64":return k(this,e,t,n);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return E(this,e,t,n);default:if(i)throw new TypeError("Unknown encoding: "+r);r=(""+r).toLowerCase(),i=!0}},l.prototype.toJSON=function(){return{type:"Buffer",data:Array.prototype.slice.call(this._arr||this,0)}};var O=4096;function C(e,t,n){var r="";n=Math.min(e.length,n);for(var o=t;or)&&(n=r);for(var o="",i=t;in)throw new RangeError("Trying to access beyond buffer length")}function P(e,t,n,r,o,i){if(!l.isBuffer(e))throw new TypeError('"buffer" argument must be a Buffer instance');if(t>o||te.length)throw new RangeError("Index out of range")}function R(e,t,n,r){t<0&&(t=65535+t+1);for(var o=0,i=Math.min(e.length-n,2);o>>8*(r?o:1-o)}function N(e,t,n,r){t<0&&(t=4294967295+t+1);for(var o=0,i=Math.min(e.length-n,4);o>>8*(r?o:3-o)&255}function L(e,t,n,r,o,i){if(n+r>e.length)throw new RangeError("Index out of range");if(n<0)throw new RangeError("Index out of range")}function M(e,t,n,r,i){return i||L(e,0,n,4),o.write(e,t,n,r,23,4),n+4}function D(e,t,n,r,i){return i||L(e,0,n,8),o.write(e,t,n,r,52,8),n+8}l.prototype.slice=function(e,t){var n,r=this.length;if((e=~~e)<0?(e+=r)<0&&(e=0):e>r&&(e=r),(t=void 0===t?r:~~t)<0?(t+=r)<0&&(t=0):t>r&&(t=r),t0&&(o*=256);)r+=this[e+--t]*o;return r},l.prototype.readUInt8=function(e,t){return t||I(e,1,this.length),this[e]},l.prototype.readUInt16LE=function(e,t){return t||I(e,2,this.length),this[e]|this[e+1]<<8},l.prototype.readUInt16BE=function(e,t){return t||I(e,2,this.length),this[e]<<8|this[e+1]},l.prototype.readUInt32LE=function(e,t){return t||I(e,4,this.length),(this[e]|this[e+1]<<8|this[e+2]<<16)+16777216*this[e+3]},l.prototype.readUInt32BE=function(e,t){return t||I(e,4,this.length),16777216*this[e]+(this[e+1]<<16|this[e+2]<<8|this[e+3])},l.prototype.readIntLE=function(e,t,n){e|=0,t|=0,n||I(e,t,this.length);for(var r=this[e],o=1,i=0;++i=(o*=128)&&(r-=Math.pow(2,8*t)),r},l.prototype.readIntBE=function(e,t,n){e|=0,t|=0,n||I(e,t,this.length);for(var r=t,o=1,i=this[e+--r];r>0&&(o*=256);)i+=this[e+--r]*o;return i>=(o*=128)&&(i-=Math.pow(2,8*t)),i},l.prototype.readInt8=function(e,t){return t||I(e,1,this.length),128&this[e]?-1*(255-this[e]+1):this[e]},l.prototype.readInt16LE=function(e,t){t||I(e,2,this.length);var n=this[e]|this[e+1]<<8;return 32768&n?4294901760|n:n},l.prototype.readInt16BE=function(e,t){t||I(e,2,this.length);var n=this[e+1]|this[e]<<8;return 32768&n?4294901760|n:n},l.prototype.readInt32LE=function(e,t){return t||I(e,4,this.length),this[e]|this[e+1]<<8|this[e+2]<<16|this[e+3]<<24},l.prototype.readInt32BE=function(e,t){return 
t||I(e,4,this.length),this[e]<<24|this[e+1]<<16|this[e+2]<<8|this[e+3]},l.prototype.readFloatLE=function(e,t){return t||I(e,4,this.length),o.read(this,e,!0,23,4)},l.prototype.readFloatBE=function(e,t){return t||I(e,4,this.length),o.read(this,e,!1,23,4)},l.prototype.readDoubleLE=function(e,t){return t||I(e,8,this.length),o.read(this,e,!0,52,8)},l.prototype.readDoubleBE=function(e,t){return t||I(e,8,this.length),o.read(this,e,!1,52,8)},l.prototype.writeUIntLE=function(e,t,n,r){(e=+e,t|=0,n|=0,r)||P(this,e,t,n,Math.pow(2,8*n)-1,0);var o=1,i=0;for(this[t]=255&e;++i=0&&(i*=256);)this[t+o]=e/i&255;return t+n},l.prototype.writeUInt8=function(e,t,n){return e=+e,t|=0,n||P(this,e,t,1,255,0),l.TYPED_ARRAY_SUPPORT||(e=Math.floor(e)),this[t]=255&e,t+1},l.prototype.writeUInt16LE=function(e,t,n){return e=+e,t|=0,n||P(this,e,t,2,65535,0),l.TYPED_ARRAY_SUPPORT?(this[t]=255&e,this[t+1]=e>>>8):R(this,e,t,!0),t+2},l.prototype.writeUInt16BE=function(e,t,n){return e=+e,t|=0,n||P(this,e,t,2,65535,0),l.TYPED_ARRAY_SUPPORT?(this[t]=e>>>8,this[t+1]=255&e):R(this,e,t,!1),t+2},l.prototype.writeUInt32LE=function(e,t,n){return e=+e,t|=0,n||P(this,e,t,4,4294967295,0),l.TYPED_ARRAY_SUPPORT?(this[t+3]=e>>>24,this[t+2]=e>>>16,this[t+1]=e>>>8,this[t]=255&e):N(this,e,t,!0),t+4},l.prototype.writeUInt32BE=function(e,t,n){return e=+e,t|=0,n||P(this,e,t,4,4294967295,0),l.TYPED_ARRAY_SUPPORT?(this[t]=e>>>24,this[t+1]=e>>>16,this[t+2]=e>>>8,this[t+3]=255&e):N(this,e,t,!1),t+4},l.prototype.writeIntLE=function(e,t,n,r){if(e=+e,t|=0,!r){var o=Math.pow(2,8*n-1);P(this,e,t,n,o-1,-o)}var i=0,a=1,s=0;for(this[t]=255&e;++i>0)-s&255;return t+n},l.prototype.writeIntBE=function(e,t,n,r){if(e=+e,t|=0,!r){var o=Math.pow(2,8*n-1);P(this,e,t,n,o-1,-o)}var i=n-1,a=1,s=0;for(this[t+i]=255&e;--i>=0&&(a*=256);)e<0&&0===s&&0!==this[t+i+1]&&(s=1),this[t+i]=(e/a>>0)-s&255;return t+n},l.prototype.writeInt8=function(e,t,n){return e=+e,t|=0,n||P(this,e,t,1,127,-128),l.TYPED_ARRAY_SUPPORT||(e=Math.floor(e)),e<0&&(e=255+e+1),this[t]=255&e,t+1},l.prototype.writeInt16LE=function(e,t,n){return e=+e,t|=0,n||P(this,e,t,2,32767,-32768),l.TYPED_ARRAY_SUPPORT?(this[t]=255&e,this[t+1]=e>>>8):R(this,e,t,!0),t+2},l.prototype.writeInt16BE=function(e,t,n){return e=+e,t|=0,n||P(this,e,t,2,32767,-32768),l.TYPED_ARRAY_SUPPORT?(this[t]=e>>>8,this[t+1]=255&e):R(this,e,t,!1),t+2},l.prototype.writeInt32LE=function(e,t,n){return e=+e,t|=0,n||P(this,e,t,4,2147483647,-2147483648),l.TYPED_ARRAY_SUPPORT?(this[t]=255&e,this[t+1]=e>>>8,this[t+2]=e>>>16,this[t+3]=e>>>24):N(this,e,t,!0),t+4},l.prototype.writeInt32BE=function(e,t,n){return e=+e,t|=0,n||P(this,e,t,4,2147483647,-2147483648),e<0&&(e=4294967295+e+1),l.TYPED_ARRAY_SUPPORT?(this[t]=e>>>24,this[t+1]=e>>>16,this[t+2]=e>>>8,this[t+3]=255&e):N(this,e,t,!1),t+4},l.prototype.writeFloatLE=function(e,t,n){return M(this,e,t,!0,n)},l.prototype.writeFloatBE=function(e,t,n){return M(this,e,t,!1,n)},l.prototype.writeDoubleLE=function(e,t,n){return D(this,e,t,!0,n)},l.prototype.writeDoubleBE=function(e,t,n){return D(this,e,t,!1,n)},l.prototype.copy=function(e,t,n,r){if(n||(n=0),r||0===r||(r=this.length),t>=e.length&&(t=e.length),t||(t=0),r>0&&r=this.length)throw new RangeError("sourceStart out of bounds");if(r<0)throw new RangeError("sourceEnd out of bounds");r>this.length&&(r=this.length),e.length-t=0;--o)e[o+t]=this[o+n];else if(i<1e3||!l.TYPED_ARRAY_SUPPORT)for(o=0;o>>=0,n=void 0===n?this.length:n>>>0,e||(e=0),"number"==typeof 
e)for(i=t;i55295&&n<57344){if(!o){if(n>56319){(t-=3)>-1&&i.push(239,191,189);continue}if(a+1===r){(t-=3)>-1&&i.push(239,191,189);continue}o=n;continue}if(n<56320){(t-=3)>-1&&i.push(239,191,189),o=n;continue}n=65536+(o-55296<<10|n-56320)}else o&&(t-=3)>-1&&i.push(239,191,189);if(o=null,n<128){if((t-=1)<0)break;i.push(n)}else if(n<2048){if((t-=2)<0)break;i.push(n>>6|192,63&n|128)}else if(n<65536){if((t-=3)<0)break;i.push(n>>12|224,n>>6&63|128,63&n|128)}else{if(!(n<1114112))throw new Error("Invalid code point");if((t-=4)<0)break;i.push(n>>18|240,n>>12&63|128,n>>6&63|128,63&n|128)}}return i}function z(e){return r.toByteArray(function(e){if((e=function(e){return e.trim?e.trim():e.replace(/^\s+|\s+$/g,"")}(e).replace(F,"")).length<2)return"";for(;e.length%4!=0;)e+="=";return e}(e))}function $(e,t,n,r){for(var o=0;o=t.length||o>=e.length);++o)t[o+n]=e[o];return o}}).call(this,n(4))},function(e,t,n){"use strict";var r=n(184),o=n(186);function i(){this.protocol=null,this.slashes=null,this.auth=null,this.host=null,this.port=null,this.hostname=null,this.hash=null,this.search=null,this.query=null,this.pathname=null,this.path=null,this.href=null}t.parse=b,t.resolve=function(e,t){return b(e,!1,!0).resolve(t)},t.resolveObject=function(e,t){return e?b(e,!1,!0).resolveObject(t):t},t.format=function(e){o.isString(e)&&(e=b(e));return e instanceof i?e.format():i.prototype.format.call(e)},t.Url=i;var a=/^([a-z0-9.+-]+:)/i,s=/:[0-9]*$/,l=/^(\/\/?(?!\/)[^\?\s]*)(\?[^\s]*)?$/,c=["{","}","|","\\","^","`"].concat(["<",">",'"',"`"," ","\r","\n","\t"]),u=["'"].concat(c),p=["%","/","?",";","#"].concat(u),f=["/","?","#"],d=/^[+a-z0-9A-Z_-]{0,63}$/,h=/^([+a-z0-9A-Z_-]{0,63})(.*)$/,m={javascript:!0,"javascript:":!0},g={javascript:!0,"javascript:":!0},y={http:!0,https:!0,ftp:!0,gopher:!0,file:!0,"http:":!0,"https:":!0,"ftp:":!0,"gopher:":!0,"file:":!0},v=n(187);function b(e,t,n){if(e&&o.isObject(e)&&e instanceof i)return e;var r=new i;return r.parse(e,t,n),r}i.prototype.parse=function(e,t,n){if(!o.isString(e))throw new TypeError("Parameter 'url' must be a string, not "+typeof e);var i=e.indexOf("?"),s=-1!==i&&i127?R+="x":R+=P[N];if(!R.match(d)){var M=A.slice(0,C),D=A.slice(C+1),F=P.match(h);F&&(M.push(F[1]),D.unshift(F[2])),D.length&&(b="/"+D.join(".")+b),this.hostname=M.join(".");break}}}this.hostname.length>255?this.hostname="":this.hostname=this.hostname.toLowerCase(),j||(this.hostname=r.toASCII(this.hostname));var U=this.port?":"+this.port:"",B=this.hostname||"";this.host=B+U,this.href+=this.host,j&&(this.hostname=this.hostname.substr(1,this.hostname.length-2),"/"!==b[0]&&(b="/"+b))}if(!m[k])for(C=0,I=u.length;C0)&&n.host.split("@"))&&(n.auth=j.shift(),n.host=n.hostname=j.shift());return n.search=e.search,n.query=e.query,o.isNull(n.pathname)&&o.isNull(n.search)||(n.path=(n.pathname?n.pathname:"")+(n.search?n.search:"")),n.href=n.format(),n}if(!E.length)return n.pathname=null,n.search?n.path="/"+n.search:n.path=null,n.href=n.format(),n;for(var _=E.slice(-1)[0],O=(n.host||e.host||E.length>1)&&("."===_||".."===_)||""===_,C=0,T=E.length;T>=0;T--)"."===(_=E[T])?E.splice(T,1):".."===_?(E.splice(T,1),C++):C&&(E.splice(T,1),C--);if(!x&&!k)for(;C--;C)E.unshift("..");!x||""===E[0]||E[0]&&"/"===E[0].charAt(0)||E.unshift(""),O&&"/"!==E.join("/").substr(-1)&&E.push("");var 
j,A=""===E[0]||E[0]&&"/"===E[0].charAt(0);S&&(n.hostname=n.host=A?"":E.length?E.shift():"",(j=!!(n.host&&n.host.indexOf("@")>0)&&n.host.split("@"))&&(n.auth=j.shift(),n.host=n.hostname=j.shift()));return(x=x||n.host&&E.length)&&!A&&E.unshift(""),E.length?n.pathname=E.join("/"):(n.pathname=null,n.path=null),o.isNull(n.pathname)&&o.isNull(n.search)||(n.path=(n.pathname?n.pathname:"")+(n.search?n.search:"")),n.auth=e.auth||n.auth,n.slashes=n.slashes||e.slashes,n.href=n.format(),n},i.prototype.parseHost=function(){var e=this.host,t=s.exec(e);t&&(":"!==(t=t[0])&&(this.port=t.substr(1)),e=e.substr(0,e.length-t.length)),e&&(this.hostname=e)}},function(e,t,n){e.exports=n(182)()},function(e,t,n){var r; +var r=n(235),o=n(236),i=n(136);function a(){return l.TYPED_ARRAY_SUPPORT?2147483647:1073741823}function s(e,t){if(a()=a())throw new RangeError("Attempt to allocate Buffer larger than maximum size: 0x"+a().toString(16)+" bytes");return 0|e}function h(e,t){if(l.isBuffer(e))return e.length;if("undefined"!=typeof ArrayBuffer&&"function"==typeof ArrayBuffer.isView&&(ArrayBuffer.isView(e)||e instanceof ArrayBuffer))return e.byteLength;"string"!=typeof e&&(e=""+e);var n=e.length;if(0===n)return 0;for(var r=!1;;)switch(t){case"ascii":case"latin1":case"binary":return n;case"utf8":case"utf-8":case void 0:return B(e).length;case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return 2*n;case"hex":return n>>>1;case"base64":return z(e).length;default:if(r)return B(e).length;t=(""+t).toLowerCase(),r=!0}}function m(e,t,n){var r=e[t];e[t]=e[n],e[n]=r}function v(e,t,n,r,o){if(0===e.length)return-1;if("string"==typeof n?(r=n,n=0):n>2147483647?n=2147483647:n<-2147483648&&(n=-2147483648),n=+n,isNaN(n)&&(n=o?0:e.length-1),n<0&&(n=e.length+n),n>=e.length){if(o)return-1;n=e.length-1}else if(n<0){if(!o)return-1;n=0}if("string"==typeof t&&(t=l.from(t,r)),l.isBuffer(t))return 0===t.length?-1:g(e,t,n,r,o);if("number"==typeof t)return t&=255,l.TYPED_ARRAY_SUPPORT&&"function"==typeof Uint8Array.prototype.indexOf?o?Uint8Array.prototype.indexOf.call(e,t,n):Uint8Array.prototype.lastIndexOf.call(e,t,n):g(e,[t],n,r,o);throw new TypeError("val must be string, number or Buffer")}function g(e,t,n,r,o){var i,a=1,s=e.length,l=t.length;if(void 0!==r&&("ucs2"===(r=String(r).toLowerCase())||"ucs-2"===r||"utf16le"===r||"utf-16le"===r)){if(e.length<2||t.length<2)return-1;a=2,s/=2,l/=2,n/=2}function u(e,t){return 1===a?e[t]:e.readUInt16BE(t*a)}if(o){var c=-1;for(i=n;is&&(n=s-l),i=n;i>=0;i--){for(var f=!0,p=0;po&&(r=o):r=o;var i=t.length;if(i%2!=0)throw new TypeError("Invalid hex string");r>i/2&&(r=i/2);for(var a=0;a>8,o=n%256,i.push(o),i.push(r);return i}(t,e.length-n),e,n,r)}function _(e,t,n){return 0===t&&n===e.length?r.fromByteArray(e):r.fromByteArray(e.slice(t,n))}function S(e,t,n){n=Math.min(e.length,n);for(var r=[],o=t;o239?4:u>223?3:u>191?2:1;if(o+f<=n)switch(f){case 1:u<128&&(c=u);break;case 2:128==(192&(i=e[o+1]))&&(l=(31&u)<<6|63&i)>127&&(c=l);break;case 3:i=e[o+1],a=e[o+2],128==(192&i)&&128==(192&a)&&(l=(15&u)<<12|(63&i)<<6|63&a)>2047&&(l<55296||l>57343)&&(c=l);break;case 4:i=e[o+1],a=e[o+2],s=e[o+3],128==(192&i)&&128==(192&a)&&128==(192&s)&&(l=(15&u)<<18|(63&i)<<12|(63&a)<<6|63&s)>65535&&l<1114112&&(c=l)}null===c?(c=65533,f=1):c>65535&&(c-=65536,r.push(c>>>10&1023|55296),c=56320|1023&c),r.push(c),o+=f}return function(e){var t=e.length;if(t<=O)return String.fromCharCode.apply(String,e);var n="",r=0;for(;rthis.length)return"";if((void 
0===n||n>this.length)&&(n=this.length),n<=0)return"";if((n>>>=0)<=(t>>>=0))return"";for(e||(e="utf8");;)switch(e){case"hex":return A(this,t,n);case"utf8":case"utf-8":return S(this,t,n);case"ascii":return T(this,t,n);case"latin1":case"binary":return C(this,t,n);case"base64":return _(this,t,n);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return j(this,t,n);default:if(r)throw new TypeError("Unknown encoding: "+e);e=(e+"").toLowerCase(),r=!0}}.apply(this,arguments)},l.prototype.equals=function(e){if(!l.isBuffer(e))throw new TypeError("Argument must be a Buffer");return this===e||0===l.compare(this,e)},l.prototype.inspect=function(){var e="",n=t.INSPECT_MAX_BYTES;return this.length>0&&(e=this.toString("hex",0,n).match(/.{2}/g).join(" "),this.length>n&&(e+=" ... ")),""},l.prototype.compare=function(e,t,n,r,o){if(!l.isBuffer(e))throw new TypeError("Argument must be a Buffer");if(void 0===t&&(t=0),void 0===n&&(n=e?e.length:0),void 0===r&&(r=0),void 0===o&&(o=this.length),t<0||n>e.length||r<0||o>this.length)throw new RangeError("out of range index");if(r>=o&&t>=n)return 0;if(r>=o)return-1;if(t>=n)return 1;if(this===e)return 0;for(var i=(o>>>=0)-(r>>>=0),a=(n>>>=0)-(t>>>=0),s=Math.min(i,a),u=this.slice(r,o),c=e.slice(t,n),f=0;fo)&&(n=o),e.length>0&&(n<0||t<0)||t>this.length)throw new RangeError("Attempt to write outside buffer bounds");r||(r="utf8");for(var i=!1;;)switch(r){case"hex":return y(this,e,t,n);case"utf8":case"utf-8":return b(this,e,t,n);case"ascii":return w(this,e,t,n);case"latin1":case"binary":return x(this,e,t,n);case"base64":return k(this,e,t,n);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return E(this,e,t,n);default:if(i)throw new TypeError("Unknown encoding: "+r);r=(""+r).toLowerCase(),i=!0}},l.prototype.toJSON=function(){return{type:"Buffer",data:Array.prototype.slice.call(this._arr||this,0)}};var O=4096;function T(e,t,n){var r="";n=Math.min(e.length,n);for(var o=t;or)&&(n=r);for(var o="",i=t;in)throw new RangeError("Trying to access beyond buffer length")}function P(e,t,n,r,o,i){if(!l.isBuffer(e))throw new TypeError('"buffer" argument must be a Buffer instance');if(t>o||te.length)throw new RangeError("Index out of range")}function N(e,t,n,r){t<0&&(t=65535+t+1);for(var o=0,i=Math.min(e.length-n,2);o>>8*(r?o:1-o)}function R(e,t,n,r){t<0&&(t=4294967295+t+1);for(var o=0,i=Math.min(e.length-n,4);o>>8*(r?o:3-o)&255}function L(e,t,n,r,o,i){if(n+r>e.length)throw new RangeError("Index out of range");if(n<0)throw new RangeError("Index out of range")}function M(e,t,n,r,i){return i||L(e,0,n,4),o.write(e,t,n,r,23,4),n+4}function D(e,t,n,r,i){return i||L(e,0,n,8),o.write(e,t,n,r,52,8),n+8}l.prototype.slice=function(e,t){var n,r=this.length;if((e=~~e)<0?(e+=r)<0&&(e=0):e>r&&(e=r),(t=void 0===t?r:~~t)<0?(t+=r)<0&&(t=0):t>r&&(t=r),t0&&(o*=256);)r+=this[e+--t]*o;return r},l.prototype.readUInt8=function(e,t){return t||I(e,1,this.length),this[e]},l.prototype.readUInt16LE=function(e,t){return t||I(e,2,this.length),this[e]|this[e+1]<<8},l.prototype.readUInt16BE=function(e,t){return t||I(e,2,this.length),this[e]<<8|this[e+1]},l.prototype.readUInt32LE=function(e,t){return t||I(e,4,this.length),(this[e]|this[e+1]<<8|this[e+2]<<16)+16777216*this[e+3]},l.prototype.readUInt32BE=function(e,t){return t||I(e,4,this.length),16777216*this[e]+(this[e+1]<<16|this[e+2]<<8|this[e+3])},l.prototype.readIntLE=function(e,t,n){e|=0,t|=0,n||I(e,t,this.length);for(var r=this[e],o=1,i=0;++i=(o*=128)&&(r-=Math.pow(2,8*t)),r},l.prototype.readIntBE=function(e,t,n){e|=0,t|=0,n||I(e,t,this.length);for(var 