diff --git a/.flake8 b/.flake8 index b5bbd68..b0d5088 100644 --- a/.flake8 +++ b/.flake8 @@ -1,5 +1,5 @@ [flake8] -extend-ignore = E124,E128,E301,E302,E305,E402,E501,E261 +extend-ignore = E124,E128,E301,E302,E305,E402,E501,E261,W504 # E124: closing bracket does not match visual indentation # E128: continuation line under-indented for visual indent # E301: expected 1 blank line, found 0 @@ -7,4 +7,5 @@ extend-ignore = E124,E128,E301,E302,E305,E402,E501,E261 # E305: expected 2 blank lines after class or function definition, found 1 # E402: module level import not at top of file # E501: line too long (82 > 79 characters) -# E261: at least two spaces before inline comment \ No newline at end of file +# E261: at least two spaces before inline comment +# W504: line break after binary operator \ No newline at end of file diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index 347be89..0000000 --- a/.pylintrc +++ /dev/null @@ -1,651 +0,0 @@ -[MAIN] - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Clear in-memory caches upon conclusion of linting. Useful if running pylint -# in a server-like mode. -clear-cache-post-run=no - -# Load and enable all available extensions. Use --list-extensions to see a list -# all available extensions. -#enable-all-extensions= - -# In error mode, messages with a category besides ERROR or FATAL are -# suppressed, and no reports are done by default. Error mode is compatible with -# disabling specific errors. -#errors-only= - -# Always return a 0 (non-error) status code, even if lint errors are found. -# This is primarily useful in continuous integration scripts. -#exit-zero= - -# A comma-separated list of package or module names from where C extensions may -# be loaded. 
Extensions are loading into the active Python interpreter and may -# run arbitrary code. -extension-pkg-allow-list= - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. (This is an alternative name to extension-pkg-allow-list -# for backward compatibility.) -extension-pkg-whitelist= - -# Return non-zero exit code if any of these messages/categories are detected, -# even if score is above --fail-under value. Syntax same as enable. Messages -# specified are enabled, while categories only check already-enabled messages. -fail-on= - -# Specify a score threshold under which the program will exit with error. -fail-under=10 - -# Interpret the stdin as a python script, whose filename needs to be passed as -# the module_or_package argument. -#from-stdin= - -# Files or directories to be skipped. They should be base names, not paths. -ignore=CVS - -# Add files or directories matching the regular expressions patterns to the -# ignore-list. The regex matches against paths and can be in Posix or Windows -# format. Because '\\' represents the directory delimiter on Windows systems, -# it can't be used as an escape character. -ignore-paths= - -# Files or directories matching the regular expression patterns are skipped. -# The regex matches against base names, not paths. The default value ignores -# Emacs file locks -ignore-patterns=^\.# - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. 
Specifying 0 will auto-detect the -# number of processors available to use, and will cap the count on Windows to -# avoid hangs. -jobs=1 - -# Control the amount of potential inferred values when inferring a single -# object. This can help the performance when dealing with large functions or -# complex, nested conditions. -limit-inference-results=100 - -# List of plugins (as comma separated values of python module names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Minimum Python version to use for version dependent checks. Will default to -# the version used to run pylint. -py-version=3.8 - -# Discover python modules and packages in the file system subtree. -recursive=no - -# Add paths to the list of the source roots. Supports globbing patterns. The -# source root is an absolute path or a path relative to the current working -# directory used to determine a package namespace for modules located under the -# source root. -source-roots= - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - -# In verbose mode, extra non-checker-related info will be displayed. -#verbose= - - -[BASIC] - -# Naming style matching correct argument names. -argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. If left empty, argument names will be checked with the set -# naming style. -#argument-rgx= - -# Naming style matching correct attribute names. -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. If left empty, attribute names will be checked with the set naming -# style. 
-#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Bad variable names regexes, separated by a comma. If names match any regex, -# they will always be refused -bad-names-rgxs= - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. If left empty, class attribute names will be checked -# with the set naming style. -#class-attribute-rgx= - -# Naming style matching correct class constant names. -class-const-naming-style=UPPER_CASE - -# Regular expression matching correct class constant names. Overrides class- -# const-naming-style. If left empty, class constant names will be checked with -# the set naming style. -#class-const-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. If left empty, class names will be checked with the set naming style. -#class-rgx= - -# Naming style matching correct constant names. -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style. If left empty, constant names will be checked with the set naming -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style. If left empty, function names will be checked with the set -# naming style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=i, - j, - k, - ex, - Run, - _ - -# Good variable names regexes, separated by a comma. 
If names match any regex, -# they will always be accepted -good-names-rgxs= - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. If left empty, inline iteration names will be checked -# with the set naming style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style. If left empty, method names will be checked with the set naming style. -#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style. If left empty, module names will be checked with the set naming style. -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Regular expression matching correct type alias names. If left empty, type -# alias names will be checked with the set naming style. -#typealias-rgx= - -# Regular expression matching correct type variable names. If left empty, type -# variable names will be checked with the set naming style. -#typevar-rgx= - -# Naming style matching correct variable names. -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. 
If left empty, variable names will be checked with the set -# naming style. -#variable-rgx= - - -[CLASSES] - -# Warn about protected attribute access inside special methods -check-protected-access-in-special-methods=no - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp, - asyncSetUp, - __post_init__ - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs - - -[DESIGN] - -# List of regular expressions of class ancestor names to ignore when counting -# public methods (see R0903) -exclude-too-few-public-methods= - -# List of qualified class names to ignore when counting class parents (see -# R0901) -ignored-parents= - -# Maximum number of arguments for function / method. -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in an if statement (see R0916). -max-bool-expr=5 - -# Maximum number of branch for function / method body. -max-branches=12 - -# Maximum number of locals for function / method body. -max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body. -max-returns=6 - -# Maximum number of statements in function / method body. -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when caught. 
-overgeneral-exceptions=builtins.BaseException,builtins.Exception - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=100 - -# Maximum number of lines in a module. -max-module-lines=1000 - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[IMPORTS] - -# List of modules that can be imported at any level, not just the top level -# one. -allow-any-import-level= - -# Allow explicit reexports by alias from a package __init__. -allow-reexport-from-package=no - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules= - -# Output a graph (.gv or any supported image format) of external dependencies -# to the given file (report RP0402 must not be disabled). -ext-import-graph= - -# Output a graph (.gv or any supported image format) of all (i.e. internal and -# external) dependencies to the given file (report RP0402 must not be -# disabled). -import-graph= - -# Output a graph (.gv or any supported image format) of internal dependencies -# to the given file (report RP0402 must not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. 
-known-third-party=enchant - -# Couples of modules and preferred modules, separated by a comma. -preferred-modules= - - -[LOGGING] - -# The type of string formatting that logging methods do. `old` means using % -# formatting, `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, -# UNDEFINED. -confidence=HIGH, - CONTROL_FLOW, - INFERENCE, - INFERENCE_FAILURE, - UNDEFINED - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then re-enable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". 
-disable=raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - useless-suppression, - deprecated-pragma, - use-implicit-booleaness-not-comparison-to-string, - use-implicit-booleaness-not-comparison-to-zero, - use-symbolic-message-instead, - missing-module-docstring, - line-too-long, - wrong-import-position, - wrong-import-order, - protected-access, - missing-function-docstring, - missing-class-docstring, - invalid-name, - superfluous-parens, - import-outside-toplevel, - redefined-builtin, - too-many-arguments, - too-few-public-methods, - too-many-instance-attributes, - too-many-locals, - duplicate-code, - no-else-return - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable= - - -[METHOD_ARGS] - -# List of qualified names (i.e., library.method) which require a timeout -# parameter e.g. 'requests.api.get,requests.api.post' -timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - -# Regular expression of note tags to take in consideration. -notes-rgx= - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. 
-never-returning-functions=sys.exit,argparse.parse_error - - -[REPORTS] - -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'fatal', 'error', 'warning', 'refactor', -# 'convention', and 'info' which contain the number of messages in each -# category, as well as 'statement' which is the total number of statements -# analyzed. This score is used by the global evaluation report (RP0004). -evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -msg-template= - -# Set the output format. Available formats are: text, parseable, colorized, -# json2 (improved json format), json (old json format) and msvs (visual -# studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -#output-format= - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[SIMILARITIES] - -# Comments are removed from the similarity computation -ignore-comments=yes - -# Docstrings are removed from the similarity computation -ignore-docstrings=yes - -# Imports are removed from the similarity computation -ignore-imports=yes - -# Signatures are removed from the similarity computation -ignore-signatures=yes - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. No available dictionaries : You need to install -# both the python package and the system dependency for enchant to work. -spelling-dict= - -# List of comma separated words that should be considered directives if they -# appear at the beginning of a comment and should not be checked. 
-spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains the private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -spelling-store-unknown-words=no - - -[STRING] - -# This flag controls whether inconsistent-quotes generates a warning when the -# character used as a quote delimiter is used inconsistently within a module. -check-quote-consistency=no - -# This flag controls whether the implicit-str-concat should generate a warning -# on implicit string concatenation in sequences defined over several lines. -check-str-concat-over-line-jumps=no - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of symbolic message names to ignore for Mixin members. 
-ignored-checks-for-mixins=no-member, - not-async-context-manager, - not-context-manager, - attribute-defined-outside-init - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - -# Regex pattern to define which classes are considered mixins. -mixin-class-rgx=.*[Mm]ixin - -# List of decorators that change the signature of a decorated function. -signature-mutators= - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of names allowed to shadow builtins -allowed-redefined-builtins= - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. 
-init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io diff --git a/api/index.py b/api/index.py index 4504a67..c0da17a 100755 --- a/api/index.py +++ b/api/index.py @@ -7,11 +7,11 @@ # See https://github.com/orgs/vercel/discussions/46 import sys -sys.path.append("../python/protocaas") -from api_helpers.routers.processor.router import router as processor_router -from api_helpers.routers.compute_resource.router import router as compute_resource_router -from api_helpers.routers.client.router import router as client_router -from api_helpers.routers.gui.router import router as gui_router +sys.path.append("../python") +from protocaas.api_helpers.routers.processor.router import router as processor_router +from protocaas.api_helpers.routers.compute_resource.router import router as compute_resource_router +from protocaas.api_helpers.routers.client.router import router as client_router +from protocaas.api_helpers.routers.gui.router import router as gui_router app = FastAPI() diff --git a/python/protocaas/api_helpers/clients/MockMongoClient.py b/python/protocaas/api_helpers/clients/MockMongoClient.py index f2cf881..e043fa2 100644 --- a/python/protocaas/api_helpers/clients/MockMongoClient.py +++ b/python/protocaas/api_helpers/clients/MockMongoClient.py @@ -9,6 +9,8 @@ def __getitem__(self, key: str): if key not in self._dbs: self._dbs[key] = MockMongoDatabase() return self._dbs[key] + def clear_databases(self): + self._dbs = {} class MockMongoDatabase: def __init__(self): diff --git a/python/protocaas/api_helpers/clients/_get_mongo_client.py b/python/protocaas/api_helpers/clients/_get_mongo_client.py index 96e6071..66ebc62 100644 --- a/python/protocaas/api_helpers/clients/_get_mongo_client.py +++ b/python/protocaas/api_helpers/clients/_get_mongo_client.py @@ -3,17 +3,13 @@ from motor.motor_asyncio import AsyncIOMotorClient from ..core.settings 
import get_settings from .MockMongoClient import MockMongoClient +from ...mock import using_mock _globals = { 'mock_mongo_client': None } -print('----- a1') - -def _set_use_mock_mongo_client(use_mock: bool) -> None: # For testing - _globals['mock_mongo_client'] = MockMongoClient() if use_mock else None # type: ignore - def _get_mongo_client() -> Union[AsyncIOMotorClient, MockMongoClient]: # We want one async mongo client per event loop loop = asyncio.get_event_loop() @@ -21,8 +17,11 @@ def _get_mongo_client() -> Union[AsyncIOMotorClient, MockMongoClient]: return loop._mongo_client # type: ignore # If we're using a mock client, return it - if _globals['mock_mongo_client']: + if using_mock(): client = _globals['mock_mongo_client'] # type: ignore + if client is None: + client = MockMongoClient() + _globals['mock_mongo_client'] = client # type: ignore else: # Otherwise, create a new client and store it in the global variable mongo_uri = get_settings().MONGO_URI @@ -35,3 +34,8 @@ def _get_mongo_client() -> Union[AsyncIOMotorClient, MockMongoClient]: setattr(loop, '_mongo_client', client) return client + +def _clear_mock_mongo_databases(): + client: MockMongoClient = _globals['mock_mongo_client'] # type: ignore + if client is not None: + client.clear_databases() diff --git a/python/protocaas/api_helpers/clients/pubsub.py b/python/protocaas/api_helpers/clients/pubsub.py index 8fcf4a7..cafda74 100644 --- a/python/protocaas/api_helpers/clients/pubsub.py +++ b/python/protocaas/api_helpers/clients/pubsub.py @@ -1,19 +1,15 @@ import json import aiohttp import urllib.parse -import asyncio from ..core.settings import get_settings +from ...mock import using_mock class PubsubError(Exception): pass -def _set_use_mock_pubsub_client(use_mock: bool) -> None: # For testing - loop = asyncio.get_event_loop() - setattr(loop, '_use_mock_pubsub_client', use_mock) - async def publish_pubsub_message(*, channel: str, message: dict): - if hasattr(asyncio.get_event_loop(), 
'_use_mock_pubsub_client') and asyncio.get_event_loop()._use_mock_pubsub_client: # type: ignore + if using_mock(): # don't actually publish the message for the mock case return True diff --git a/python/protocaas/api_helpers/routers/compute_resource/router.py b/python/protocaas/api_helpers/routers/compute_resource/router.py index 17a3c1e..2fd27a2 100644 --- a/python/protocaas/api_helpers/routers/compute_resource/router.py +++ b/python/protocaas/api_helpers/routers/compute_resource/router.py @@ -6,15 +6,10 @@ from ...core.protocaas_types import ProtocaasComputeResourceApp, ProtocaasJob, ComputeResourceSpec, PubsubSubscription from ...clients.db import fetch_compute_resource, fetch_compute_resource_jobs, update_compute_resource_node, set_compute_resource_spec from ...core.settings import get_settings +from ....mock import using_mock router = APIRouter() -_globals = { - '_use_compute_resource_mock_pubsub': False -} -def _set_use_compute_resource_mock_pubsub(use_mock_pubsub: bool): - _globals['_use_compute_resource_mock_pubsub'] = use_mock_pubsub - # get apps class GetAppsResponse(BaseModel): apps: List[ProtocaasComputeResourceApp] @@ -72,7 +67,7 @@ async def compute_resource_get_pubsub_subscription( compute_resource = await fetch_compute_resource(compute_resource_id) if compute_resource is None: raise ComputeResourceNotFoundException(f"No compute resource with ID {compute_resource_id}") - if _globals['_use_compute_resource_mock_pubsub']: + if using_mock(): subscription = PubsubSubscription( pubnubSubscribeKey='mock-subscribe-key', pubnubChannel=compute_resource_id, diff --git a/python/protocaas/api_helpers/routers/gui/compute_resource_routes.py b/python/protocaas/api_helpers/routers/gui/compute_resource_routes.py index a7e0d87..ec2bd86 100644 --- a/python/protocaas/api_helpers/routers/gui/compute_resource_routes.py +++ b/python/protocaas/api_helpers/routers/gui/compute_resource_routes.py @@ -9,16 +9,11 @@ from ...clients.db import fetch_compute_resource, 
fetch_compute_resources_for_user, update_compute_resource, fetch_compute_resource_jobs from ...clients.db import register_compute_resource as db_register_compute_resource from ...core.settings import get_settings +from ....mock import using_mock router = APIRouter() -_globals = { - '_use_gui_mock_pubsub': False -} -def _set_use_gui_mock_pubsub(use_mock_pubsub: bool): - _globals['_use_gui_mock_pubsub'] = use_mock_pubsub - # get compute resource class GetComputeResourceResponse(BaseModel): computeResource: ProtocaasComputeResource @@ -133,7 +128,7 @@ async def get_pubsub_subscription(compute_resource_id): if compute_resource is None: raise ComputeResourceNotFoundException(f"No compute resource with ID {compute_resource_id}") - if _globals['_use_gui_mock_pubsub']: + if using_mock(): subscription = PubsubSubscription( pubnubSubscribeKey='mock-subscribe-key', pubnubChannel=compute_resource_id, diff --git a/python/protocaas/mock.py b/python/protocaas/mock.py new file mode 100644 index 0000000..48eb55d --- /dev/null +++ b/python/protocaas/mock.py @@ -0,0 +1,9 @@ +_globals = { + 'use_mock': False +} + +def using_mock() -> bool: + return _globals['use_mock'] + +def set_use_mock(use_mock: bool): + _globals['use_mock'] = use_mock diff --git a/python/protocaas/sdk/App.py b/python/protocaas/sdk/App.py index 645a38e..bd6886b 100644 --- a/python/protocaas/sdk/App.py +++ b/python/protocaas/sdk/App.py @@ -1,4 +1,4 @@ -from typing import List, Union +from typing import List, Union, Type import os import json import shutil @@ -9,6 +9,7 @@ from ._run_job import _run_job from ..common.protocaas_types import ComputeResourceSlurmOpts from ._load_spec_from_uri import _load_spec_from_uri +from .ProcessorBase import ProcessorBase class ProtocaasAppException(Exception): @@ -38,15 +39,13 @@ def __init__(self, name: str, *, help: str, app_image: Union[str, None] = None, self._aws_batch_job_definition: Union[str, None] = None self._slurm_opts: Union[ComputeResourceSlurmOpts, None] = None - def 
add_processor(self, processor_func): - """Add a processor function to the app + def add_processor(self, processor_class: Type[ProcessorBase]): + """Add a processor to the app Args: - processor_func: The processor function which needs to have been decorated to be a protocaas processor. + processor_class (Type[ProcessorBase]): The processor class for the processor """ - if not hasattr(processor_func, 'protocaas_processor'): - raise ProtocaasAppException('The processor function must be decorated with @processor') - P = AppProcessor.from_func(processor_func) + P = AppProcessor.from_processor_class(processor_class) self._processors.append(P) def run(self): @@ -55,6 +54,13 @@ def run(self): JOB_PRIVATE_KEY = os.environ.get('JOB_PRIVATE_KEY', None) JOB_INTERNAL = os.environ.get('JOB_INTERNAL', None) APP_EXECUTABLE = os.environ.get('APP_EXECUTABLE', None) + SPEC_OUTPUT_FILE = os.environ.get('SPEC_OUTPUT_FILE', None) + if SPEC_OUTPUT_FILE is not None: + if JOB_ID is not None: + raise Exception('Cannot set both JOB_ID and SPEC_OUTPUT_FILE') + with open(SPEC_OUTPUT_FILE, 'w') as f: + json.dump(self.get_spec(), f, indent=4) + return if JOB_ID is not None: if JOB_PRIVATE_KEY is None: raise KeyError('JOB_PRIVATE_KEY is not set') @@ -136,18 +142,18 @@ def _run_job(self, *, job_id: str, job_private_key: str): processor_name = job.processor_name processor = next((p for p in self._processors if p._name == processor_name), None) assert processor, f'Processor not found: {processor_name}' - assert hasattr(processor, '_processor_func'), f'Processor does not have a _processor_func attribute: {processor_name}' - processor_func = processor._processor_func - assert processor_func is not None, f'Processor function is None: {processor_name}' + if not processor._processor_class: + raise Exception(f'Processor does not have a processor_class: {processor_name}') + processor_class = processor._processor_class - # Assemble the kwargs for the processor function - kwargs = {} + # Assemble the context 
for the processor function + context = type('ProcessorContext', (), {})() for input in processor._inputs: if not input.list: # this input is not a list input_file = next((i for i in job.inputs if i._name == input.name), None) assert input_file, f'Input not found: {input.name}' - kwargs[input.name] = input_file + setattr(context, input.name, input_file) else: # this input is a list the_list: List[InputFile] = [] @@ -160,20 +166,21 @@ def _run_job(self, *, job_id: str, job_private_key: str): break the_list.append(input_file) ii += 1 - kwargs[input.name] = the_list + setattr(context, input.name, the_list) for output in processor._outputs: output_file = next((o for o in job.outputs if o._name == output.name), None) assert output_file is not None, f'Output not found: {output.name}' - kwargs[output.name] = output_file + setattr(context, output.name, output_file) for parameter in processor._parameters: job_parameter = next((p for p in job.parameters if p.name == parameter.name), None) if job_parameter is None: - kwargs[parameter.name] = parameter.default + # The parameter was not set, so use the default + setattr(context, parameter.name, parameter.default) else: - kwargs[parameter.name] = job_parameter.value + setattr(context, parameter.name, job_parameter.value) # Run the processor function - processor_func(**kwargs) + processor_class.run(context) # Check that all outputs were set for output in processor._outputs: diff --git a/python/protocaas/sdk/AppProcessor.py b/python/protocaas/sdk/AppProcessor.py index c9a4879..6181e93 100644 --- a/python/protocaas/sdk/AppProcessor.py +++ b/python/protocaas/sdk/AppProcessor.py @@ -1,15 +1,18 @@ -from typing import Any, List, Union, Dict +from typing import Any, List, Union, Dict, Type from dataclasses import dataclass +import inspect - -_NO_DEFAULT = object() +from ._get_context_attributes_for_dataclass import _get_context_attributes_for_dataclass +from .ProcessorBase import ProcessorBase, _default_not_specified +from .InputFile import InputFile +from 
.OutputFile import OutputFile @dataclass class AppProcessorInput: """An input file of a processor in an app""" name: str help: str - list: str + list: bool def get_spec(self): ret: Dict[str, Any] = { 'name': self.name, @@ -58,7 +61,7 @@ def get_spec(self): 'help': self.help, 'type': _type_to_string(self.type) } - if self.default != _NO_DEFAULT: + if self.default != _default_not_specified: ret['default'] = self.default if self.options is not None: ret['options'] = self.options @@ -67,7 +70,7 @@ def get_spec(self): return ret @staticmethod def from_spec(spec): - default = spec.get('default', _NO_DEFAULT) + default = spec.get('default', _default_not_specified) options = spec.get('options', None) secret = spec.get('secret', False) return AppProcessorParameter( @@ -118,25 +121,28 @@ class AppProcessor: def __init__(self, *, name: str, help: str, + label: str, inputs: List[AppProcessorInput], outputs: List[AppProcessorOutput], parameters: List[AppProcessorParameter], attributes: List[AppProcessorAttribute], tags: List[AppProcessorTag], - func=None + processor_class: Union[Type[ProcessorBase], None] = None ) -> None: self._name = name self._help = help + self._label = label self._inputs = inputs self._outputs = outputs self._parameters = parameters self._attributes = attributes self._tags = tags - self._processor_func = func + self._processor_class = processor_class def get_spec(self): return { 'name': self._name, 'help': self._help, + 'label': self._label, 'inputs': [i.get_spec() for i in self._inputs], 'outputs': [o.get_spec() for o in self._outputs], 'parameters': [p.get_spec() for p in self._parameters], @@ -153,6 +159,7 @@ def from_spec(spec): return AppProcessor( name=spec['name'], help=spec['help'], + label=spec['label'], inputs=inputs, outputs=outputs, parameters=parameters, @@ -160,61 +167,128 @@ def from_spec(spec): tags=tags ) @staticmethod - def from_func(processor_func): - pp = getattr(processor_func, 'protocaas_processor', None) - if pp is None: - raise 
AppProcessorException('Processor function must be decorated with @processor') - name = pp['name'] - help = pp['help'] - inputs = getattr(processor_func, 'protocaas_inputs', []) - outputs = getattr(processor_func, 'protocaas_outputs', []) - parameters = getattr(processor_func, 'protocaas_parameters', []) - attributes = getattr(processor_func, 'protocaas_attributes', []) - tags = getattr(processor_func, 'protocaas_tags', []) - _inputs = [AppProcessorInput(name=i['name'], help=i['help'], list=i['list']) for i in inputs] - _outputs = [AppProcessorOutput(name=o['name'], help=o['help']) for o in outputs] - _parameters = [AppProcessorParameter(name=p['name'], help=p['help'], type=p['type'], default=p['default'], options=p.get('options', None), secret=p.get('secret', False)) for p in parameters] - _attributes = [AppProcessorAttribute(name=a['name'], value=a['value']) for a in attributes] - _tags = [AppProcessorTag(tag=t) for t in tags] + def from_processor_class(processor_class: Type[ProcessorBase]): + name = processor_class.name + help = processor_class.help + label = processor_class.label + tags = processor_class.tags + attributes = processor_class.attributes + + _attributes: List[AppProcessorAttribute] = [] + for key, value in attributes.items(): + _attributes.append(AppProcessorAttribute( + name=key, + value=value + )) + _tags = [AppProcessorTag(tag=tag) for tag in tags] + + inputs, outputs, parameters = _get_context_inputs_outputs_parameters_for_processor(processor_class) + return AppProcessor( name=name, help=help, - inputs=_inputs, - outputs=_outputs, - parameters=_parameters, + label=label, + inputs=inputs, + outputs=outputs, + parameters=parameters, attributes=_attributes, tags=_tags, - func=processor_func + processor_class=processor_class ) +def _get_context_inputs_outputs_parameters_for_processor(processor_class: Type[ProcessorBase]): + run_signature = inspect.signature(processor_class.run) + run_parameters = run_signature.parameters + if len(run_parameters) != 
1: + raise Exception('The run method should have exactly one parameter') + context_param = list(run_parameters.values())[0] + + return _get_context_inputs_outputs_parameters_for_dataclass(context_param.annotation) + +def _get_context_inputs_outputs_parameters_for_dataclass(x: Type[Any]): + context_attributes = _get_context_attributes_for_dataclass(x) + + inputs: List[AppProcessorInput] = [] + outputs: List[AppProcessorOutput] = [] + parameters: List[AppProcessorParameter] = [] + for context_attribute in context_attributes: + if context_attribute.type_hint == InputFile or context_attribute.type_hint == List[InputFile]: + is_list = context_attribute.type_hint == List[InputFile] + field = context_attribute.field + inputs.append(AppProcessorInput( + name=context_attribute.name, + help=field.help, + list=is_list + )) + # check to make sure other fields are not set + if field.options is not None: + raise AppProcessorException(f"Input {context_attribute.name} has options set - only parameters can have options") + if field.secret is not None: + raise AppProcessorException(f"Input {context_attribute.name} has secret set - only parameters can have secret set") + if field.default is not _default_not_specified: + raise AppProcessorException(f"Input {context_attribute.name} has default set - only parameters can have default set") + elif context_attribute.type_hint == OutputFile: + field = context_attribute.field + outputs.append(AppProcessorOutput( + name=context_attribute.name, + help=field.help + )) + # check to make sure other fields are not set + if field.options is not None: + raise AppProcessorException(f"Output {context_attribute.name} has options set - only parameters can have options") + if field.secret is not None: + raise AppProcessorException(f"Output {context_attribute.name} has secret set - only parameters can have secret set") + if field.default is not _default_not_specified: + raise AppProcessorException(f"Output {context_attribute.name} has default set - only 
parameters can have default set") + elif _is_valid_parameter_type(context_attribute.type_hint): + parameters.append(AppProcessorParameter( + name=context_attribute.name, + help=context_attribute.field.help, + type=context_attribute.type_hint, + default=context_attribute.field.default, + options=context_attribute.field.options, + secret=context_attribute.field.secret if context_attribute.field.secret is not None else False + )) + elif _is_dataclass(context_attribute.type_hint): + inputs0, outputs0, parameters0 = _get_context_inputs_outputs_parameters_for_dataclass(context_attribute.type_hint) + for input0 in inputs0: + input0.name = f'{context_attribute.name}.{input0.name}' + inputs.append(input0) + for output0 in outputs0: + output0.name = f'{context_attribute.name}.{output0.name}' + outputs.append(output0) + for parameter0 in parameters0: + parameter0.name = f'{context_attribute.name}.{parameter0.name}' + parameters.append(parameter0) + else: + raise AppProcessorException(f"Unsupported type for {context_attribute.name}: {context_attribute.type_hint}") + return inputs, outputs, parameters + +def _is_dataclass(type: Any): + return hasattr(type, '__dataclass_fields__') + +_type_map = { + 'str': str, + 'int': int, + 'float': float, + 'bool': bool, + 'List[str]': List[str], + 'List[int]': List[int], + 'List[float]': List[float], + 'List[bool]': List[bool] +} + def _type_to_string(type: Any): - type_map = { - str: 'str', - int: 'int', - float: 'float', - bool: 'bool', - List[str]: 'List[str]', - List[int]: 'List[int]', - List[float]: 'List[float]', - List[bool]: 'List[bool]' - } - try: - return type_map[type] - except KeyError as exc: - raise ValueError(f'Unexpected type: {type}') from exc + for key, value in _type_map.items(): + if value == type: + return key + raise ValueError(f'Unexpected type: {type}') def _type_from_string(type: str): - type_map = { - 'str': str, - 'int': int, - 'float': float, - 'bool': bool, - 'List[str]': List[str], - 'List[int]': List[int], - 
'List[float]': List[float], - 'List[bool]': List[bool] - } try: - return type_map[type] + return _type_map[type] except KeyError as exc: raise ValueError(f'Unexpected type: {type}') from exc + +def _is_valid_parameter_type(type: Any): + return type in _type_map.values() diff --git a/python/protocaas/sdk/ProcessorBase.py b/python/protocaas/sdk/ProcessorBase.py new file mode 100644 index 0000000..0b73141 --- /dev/null +++ b/python/protocaas/sdk/ProcessorBase.py @@ -0,0 +1,43 @@ +from typing import Any, Optional, List + +_default_not_specified = object() + + +class _Field: + def __init__(self, *, + help: str = '', + default: Optional[Any] = _default_not_specified, # only applies to parameters + options: Optional[List[Any]] = None, # only applies to parameters + secret: Optional[bool] = None # only applies to parameters + ): + self.default = default + self.help = help + self.options = options + self.secret = secret + +# We need to use a function here rather than a class so that we can return the Any type +def field(*, + help: str = '', + default: Optional[Any] = _default_not_specified, # only applies to parameters + options: Optional[List[Any]] = None, # only applies to parameters + secret: Optional[bool] = None # only applies to parameters +) -> Any: # it's important that this returns Any so that the linter is okay with using it + return _Field( + help=help, + default=default, + options=options, + secret=secret + ) + +class ProcessorBase: + name: str + label: str + help: str + tags: List[str] + attributes: dict + + @staticmethod + def run( + context: Any + ): + raise NotImplementedError() diff --git a/python/protocaas/sdk/__init__.py b/python/protocaas/sdk/__init__.py index 5826684..f0d21fc 100644 --- a/python/protocaas/sdk/__init__.py +++ b/python/protocaas/sdk/__init__.py @@ -4,4 +4,4 @@ from .OutputFile import OutputFile from .App import App -from .decorators import processor, input, output, parameter, attribute, tags, input_list +from .ProcessorBase import 
ProcessorBase, field diff --git a/python/protocaas/sdk/_get_context_attributes_for_dataclass.py b/python/protocaas/sdk/_get_context_attributes_for_dataclass.py new file mode 100644 index 0000000..49922fc --- /dev/null +++ b/python/protocaas/sdk/_get_context_attributes_for_dataclass.py @@ -0,0 +1,32 @@ +from typing import get_type_hints, Any, Type, List +from dataclasses import dataclass +import inspect +from protocaas.sdk.ProcessorBase import _Field + + +@dataclass +class ContextAttribute: + name: str + field: _Field + type_hint: Any = None + +def _get_context_attributes_for_dataclass(x: Type[Any]) -> List[ContextAttribute]: + context_attributes = inspect.getmembers(x, lambda a: not (inspect.isroutine(a))) # exclude methods + context_attributes = [ + ContextAttribute( + name=attribute[0], + field=attribute[1] + ) + for attribute in context_attributes + if not (attribute[0].startswith('__') and attribute[0].endswith('__')) # exclude attributes such as __module__, __dict__, etc. + ] + for ca in context_attributes: + if not isinstance(ca.field, _Field): + raise Exception(f"Attribute {ca.name} is not a _Field") + type_hints = get_type_hints(x) + for attr_name, attr_type in type_hints.items(): + ca = next((ca for ca in context_attributes if ca.name == attr_name), None) + if not ca: + raise Exception(f"Unexpected: Attribute {attr_name} is not defined in the context class") + ca.type_hint = attr_type + return context_attributes diff --git a/python/protocaas/sdk/_make_spec_file.py b/python/protocaas/sdk/_make_spec_file.py index 5b08c06..c657d76 100644 --- a/python/protocaas/sdk/_make_spec_file.py +++ b/python/protocaas/sdk/_make_spec_file.py @@ -1,44 +1,48 @@ -from pathlib import Path -from typing import Optional -import importlib.util import os - -import protocaas.sdk as pr +import subprocess +from pathlib import Path -def make_app_spec_file_function(app_dir: str, spec_output_file: Optional[str] = None): +def make_app_spec_file_function(app_dir: str, spec_output_file: 
str): # Ensure the directory path is an absolute path app_dir_path = Path(app_dir).resolve() if spec_output_file is None: spec_output_file = str(app_dir_path / 'spec.json') - # Construct the absolute path to main.py in the specified directory - main_module_path = app_dir_path / 'main.py' - - # Check if main.py exists - if not main_module_path.exists(): - raise FileNotFoundError(f"main.py not found in {app_dir_path}") - - # Create a module name from the directory path - module_name = app_dir_path.name - - # Use importlib to load the module - os.environ['PROTOCAAS_GENERATE_SPEC'] = '1' - spec = importlib.util.spec_from_file_location(module_name, str(main_module_path)) - if spec is None: - raise ImportError(f"Unable to get spec for module {module_name} from {main_module_path}") - module = importlib.util.module_from_spec(spec) - if spec.loader is None: - raise ImportError(f"Unable to get loader for module {module_name} from {main_module_path}") - spec.loader.exec_module(module) - - # Check if the App class exists in the loaded module - if hasattr(module, 'app') and isinstance(getattr(module, 'app'), pr.App): - # Create an instance of the App class - app_instance = module.app - - # Call the make_spec_file method - app_instance.make_spec_file(spec_output_file) - else: - raise AttributeError("App class not found in main.py") + executable_path = str(app_dir_path / 'main.py') + + env = os.environ.copy() + env['SPEC_OUTPUT_FILE'] = spec_output_file + subprocess.run([executable_path], env=env) + + # When we do it the following way, the inspection of type hints in the processor context does not work + + # # Construct the absolute path to main.py in the specified directory + # main_module_path = app_dir_path / 'main.py' + + # # Check if main.py exists + # if not main_module_path.exists(): + # raise FileNotFoundError(f"main.py not found in {app_dir_path}") + + # # Create a module name from the directory path + # module_name = app_dir_path.name + + # # Use importlib to load the 
module + # spec = importlib.util.spec_from_file_location(module_name, str(main_module_path)) + # if spec is None: + # raise ImportError(f"Unable to get spec for module {module_name} from {main_module_path}") + # module = importlib.util.module_from_spec(spec) + # if spec.loader is None: + # raise ImportError(f"Unable to get loader for module {module_name} from {main_module_path}") + # spec.loader.exec_module(module) + + # # Check if the App class exists in the loaded module + # if hasattr(module, 'app') and isinstance(getattr(module, 'app'), pr.App): + # # Create an instance of the App class + # app_instance = module.app + + # # Call the make_spec_file method + # app_instance.make_spec_file(spec_output_file) + # else: + # raise AttributeError("App class not found in main.py") diff --git a/python/protocaas/sdk/decorators.py b/python/protocaas/sdk/decorators.py deleted file mode 100644 index d17e44d..0000000 --- a/python/protocaas/sdk/decorators.py +++ /dev/null @@ -1,76 +0,0 @@ -from typing import List, Optional, Any -from .AppProcessor import _NO_DEFAULT - - -# This decorator is used to define a processor -def processor(name, help: Optional[str] = None): - def decorator(func): - setattr(func, 'protocaas_processor', {'name': name, 'help': help}) - return func - return decorator - -# This decorator is used to add an attribute to a processor -def attribute(name: str, value: Any): - def decorator(func): - if not hasattr(func, 'protocaas_attributes'): - setattr(func, 'protocaas_attributes', []) - attributes: list = getattr(func, 'protocaas_attributes') - attributes.insert(0, {'name': name, 'value': value}) - return func - return decorator - -# This decorator is used to add tags to a processor -def tags(tag_list: List[str]): - def decorator(func): - if not hasattr(func, 'protocaas_tags'): - setattr(func, 'protocaas_tags', []) - tags0: list = getattr(func, 'protocaas_tags') - for tag in tag_list[::-1]: - tags0.insert(0, tag) - return func - return decorator - -# This 
decorator is used to add an input to a processor -def input(name, help=None): - def decorator(func): - if not hasattr(func, 'protocaas_inputs'): - setattr(func, 'protocaas_inputs', []) - inputs: list = getattr(func, 'protocaas_inputs') - inputs.insert(0, {'name': name, 'help': help, 'list': False}) - return func - return decorator - -# This decorator is used to add an input list to a processor -def input_list(name, help=None): - def decorator(func): - if not hasattr(func, 'protocaas_inputs'): - setattr(func, 'protocaas_inputs', []) - inputs: list = getattr(func, 'protocaas_inputs') - inputs.insert(0, {'name': name, 'help': help, 'list': True}) - return func - return decorator - -# This decorator is used to add an output to a processor -def output(name, help=None): - def decorator(func): - if not hasattr(func, 'protocaas_outputs'): - setattr(func, 'protocaas_outputs', []) - outputs: list = getattr(func, 'protocaas_outputs') - outputs.insert(0, {'name': name, 'help': help}) - return func - return decorator - -# This decorator is used to add a parameter to a processor -def parameter(name, *, help: str, type, default=_NO_DEFAULT, options: Optional[list] = None, secret: bool = False): - def decorator(func): - if not hasattr(func, 'protocaas_parameters'): - setattr(func, 'protocaas_parameters', []) - parameters: list = getattr(func, 'protocaas_parameters') - pp = {'name': name, 'help': help, 'type': type, 'default': default} - if options is not None: - pp['options'] = options - if secret: - pp['secret'] = True - parameters.insert(0, pp) - return func - return decorator diff --git a/python/tests/test_api.py b/python/tests/test_api.py index daf4703..e40e85e 100644 --- a/python/tests/test_api.py +++ b/python/tests/test_api.py @@ -9,8 +9,6 @@ async def test_api(): # important to put the tests inside so we don't get an import error when running the non-api tests from protocaas.api_helpers.core.protocaas_types import ProtocaasProjectUser, ComputeResourceSpecProcessor - from 
protocaas.api_helpers.clients._get_mongo_client import _set_use_mock_mongo_client - from protocaas.api_helpers.clients.pubsub import _set_use_mock_pubsub_client from protocaas.api_helpers.routers.gui._authenticate_gui_request import _create_mock_github_access_token from protocaas.common._crypto_keys import _sign_message_str, sign_message from protocaas.api_helpers.routers.gui.project_routes import create_project, CreateProjectRequest @@ -25,26 +23,22 @@ async def test_api(): from protocaas.api_helpers.routers.gui.project_routes import delete_project from protocaas.api_helpers.routers.gui.project_routes import get_jobs from protocaas.api_helpers.routers.gui.create_job_route import create_job_handler, CreateJobRequest - from protocaas.api_helpers.routers.gui.compute_resource_routes import _set_use_gui_mock_pubsub from protocaas.api_helpers.routers.gui.job_routes import get_job from protocaas.api_helpers.routers.gui.job_routes import delete_job from protocaas.api_helpers.routers.compute_resource.router import compute_resource_get_unfinished_jobs - from protocaas.api_helpers.routers.compute_resource.router import _set_use_compute_resource_mock_pubsub from protocaas.api_helpers.routers.processor.router import processor_update_job_status, ProcessorUpdateJobStatusRequest from protocaas.compute_resource.register_compute_resource import register_compute_resource from protocaas.compute_resource.start_compute_resource import start_compute_resource from protocaas.common._api_request import _use_api_test_client from protocaas.api_helpers.routers.gui.compute_resource_routes import register_compute_resource as register_compute_resource_handler, RegisterComputeResourceRequest + from protocaas.mock import set_use_mock + from protocaas.api_helpers.clients._get_mongo_client import _clear_mock_mongo_databases from fastapi.testclient import TestClient app = _get_fastapi_app() test_client = TestClient(app) _use_api_test_client(test_client) - - _set_use_mock_mongo_client(True) - 
_set_use_mock_pubsub_client(True) - _set_use_compute_resource_mock_pubsub(True) - _set_use_gui_mock_pubsub(True) + set_use_mock(True) github_access_token = _create_mock_github_access_token() try: @@ -297,10 +291,8 @@ async def test_api(): assert len(jobs) == 0 finally: _use_api_test_client(None) - _set_use_mock_mongo_client(False) - _set_use_mock_pubsub_client(False) - _set_use_gui_mock_pubsub(False) - _set_use_compute_resource_mock_pubsub(False) + set_use_mock(False) + _clear_mock_mongo_databases() def _get_fastapi_app(): from fastapi import FastAPI