Change poe tasks naming #604

Merged: 1 commit, Feb 8, 2023
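
This PR renames the project's poethepoet (poe) tasks so related tasks share a common prefix (checks_*, tests_*). The task definitions themselves live in pyproject.toml, which is not part of this excerpt; as a rough sketch under that assumption, the renamed section might look something like this (exact commands and paths may differ):

[tool.poe.tasks]
# Hypothetical definitions -- the real pyproject.toml is not shown in this diff.
checks_codestyle = "black --check ."           # assumed: replaces the old codeformat/codestyle tasks
tests_unit = "pytest test/unit"                # assumed: replaces unit_test; test path is a guess
tests_integration = "pytest test/integration"  # assumed: replaces integration_test; test path is a guess

Locally, the workflow steps below would then map to `poetry run poe checks_codestyle`, `poetry run poe tests_unit`, and `poetry run poe tests_integration`.
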
25 changes: 0 additions & 25 deletions .flake8

This file was deleted.
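
The deleted .flake8 file, together with the source changes further down (multi-line calls collapsed into single lines of up to roughly 100 characters), suggests the line-length limit was raised to 100 and the remaining formatter configuration moved into pyproject.toml. That file is not included in this excerpt; a minimal guess at the relevant setting:

# Assumed, not shown in this diff: formatter configuration in pyproject.toml.
[tool.black]
line-length = 100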

7 changes: 2 additions & 5 deletions .github/workflows/codestyle.yml
@@ -32,8 +32,5 @@ jobs:
 # TODO install dev dependencies only (https://github.com/python-poetry/poetry/issues/2572)
 run: poetry install --no-root
 
-- name: Run black formatter in check mode
-run: poetry run poe codeformat
-
-- name: Run flake8 codestyle checker
-run: poetry run poe codestyle
+- name: Run formatters in check mode
+run: poetry run poe checks_codestyle
4 changes: 2 additions & 2 deletions .github/workflows/integration.yml
@@ -48,10 +48,10 @@ jobs:
 - name: Log in to GitHub Docker repository
 run: echo ${{ secrets.GITHUB_TOKEN }} | docker login docker.pkg.github.com -u ${{github.actor}} --password-stdin
 
-- name: Run unit tests
+- name: Run integration tests
 env:
 GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-run: poetry run poe integration_test
+run: poetry run poe tests_integration
 
 - name: Upload test logs
 uses: actions/upload-artifact@v2
9 changes: 3 additions & 6 deletions .github/workflows/publish.yml
@@ -26,14 +26,11 @@ jobs:
 - name: Install dependencies
 run: poetry install
 
-- name: Run black formatter in check mode
-run: poetry run poe codeformat
-
-- name: Run flake8 codestyle checker
-run: poetry run poe codestyle
+- name: Run formatters in check mode
+run: poetry run poe checks_codestyle
 
 - name: Run unit tests
-run: poetry run poe unit_test
+run: poetry run poe tests_unit
 
 build:
 needs: [test]
2 changes: 1 addition & 1 deletion .github/workflows/unit.yml
@@ -31,4 +31,4 @@ jobs:
 run: poetry install
 
 - name: Run unit tests
-run: poetry run poe unit_test
+run: poetry run poe tests_unit
4 changes: 1 addition & 3 deletions goth/address.py
@@ -29,9 +29,7 @@ def __init__(self, template: str, default: Dict[str, object]):
 
 def substitute(self, mapping: Optional[Mapping[str, object]] = None, **kwargs):
 """Replace values in string with `mapping`, merge default and mapping first."""
-return super(DefaultTemplate, self).substitute(
-self._with_default(mapping or {}), **kwargs
-)
+return super(DefaultTemplate, self).substitute(self._with_default(mapping or {}), **kwargs)
 
 def safe_substitute(self, mapping: Optional[Mapping[str, object]] = None, **kwargs):
 """Replace values in string with `mapping`, merge default and mapping first.
17 changes: 4 additions & 13 deletions goth/api_monitor/api_events.py
@@ -107,10 +107,7 @@ def content(self) -> str:
 return self.http_response.content.decode("utf-8")
 
 def __str__(self) -> str:
-return (
-f"[response ({self.status_code})] "
-f"{self.request.header_str}; body: {self.content}"
-)
+return f"[response ({self.status_code})] {self.request.header_str}; body: {self.content}"
 
 
 class APIError(APIEvent):
@@ -176,9 +173,7 @@ def is_collect_demands_request(event: APIEvent, sub_id: str = "") -> bool:
 """Check if `event` is a request of CollectDemants operation."""
 
 sub_id_re = sub_id if sub_id else "[^/]+"
-return _match_event(
-event, APIRequest, "GET", f"^/market-api/v1/offers/{sub_id_re}/events"
-)
+return _match_event(event, APIRequest, "GET", f"^/market-api/v1/offers/{sub_id_re}/events")
 
 
 def is_subscribe_offer_request(event: APIEvent) -> bool:
@@ -191,9 +186,7 @@ def is_unsubscribe_offer_request(event: APIEvent, sub_id: str = "") -> bool:
 """Check if `event` is a request of UnsubscribeOffer operation."""
 
 sub_id_re = sub_id if sub_id else "[^/]+"
-return _match_event(
-event, APIRequest, "DELETE", f"^/market-api/v1/offers/{sub_id_re}$"
-)
+return _match_event(event, APIRequest, "DELETE", f"^/market-api/v1/offers/{sub_id_re}$")
 
 
 def is_subscribe_offer_response(event: APIEvent) -> bool:
@@ -205,9 +198,7 @@ def is_subscribe_offer_response(event: APIEvent) -> bool:
 def is_invoice_send_response(event: APIEvent) -> bool:
 """Check if `event` is a response for InvoiceSend operation."""
 
-return _match_event(
-event, APIResponse, "POST", "^/payment-api/v1/provider/invoices/.*/send$"
-)
+return _match_event(event, APIResponse, "POST", "^/payment-api/v1/provider/invoices/.*/send$")
 
 
 def get_response_json(event: APIEvent):
4 changes: 1 addition & 3 deletions goth/api_monitor/router_addon.py
@@ -46,9 +46,7 @@ class RouterAddon:
 `name_to_port` should be injective so the inverse map should be well defined.
 """
 
-def __init__(
-self, node_names: Mapping[str, str], ports: Mapping[str, Mapping[int, int]]
-):
+def __init__(self, node_names: Mapping[str, str], ports: Mapping[str, Mapping[int, int]]):
 self._logger = logging.getLogger(__name__)
 self._node_names = node_names
 self._name_to_port = {}
12 changes: 2 additions & 10 deletions goth/assertions/assertions.py
@@ -180,20 +180,12 @@ def done(self) -> bool:
 @property
 def accepted(self) -> bool:
 """Return `True` iff this assertion finished execution successfuly."""
-return (
-self._task is not None
-and self._task.done()
-and self._task.exception() is None
-)
+return self._task is not None and self._task.done() and self._task.exception() is None
 
 @property
 def failed(self) -> bool:
 """Return `True` iff this assertion finished execution by failing."""
-return (
-self._task is not None
-and self._task.done()
-and self._task.exception() is not None
-)
+return self._task is not None and self._task.done() and self._task.exception() is not None
 
 def result(self) -> Any:
 """Return the result of this assertion.
8 changes: 2 additions & 6 deletions goth/assertions/monitor.py
@@ -305,9 +305,7 @@ async def _check_assertions(self, events_ended: bool) -> None:
 """
 
 event_descr = (
-f"#{len(self._events)} ({self._events[-1]})"
-if not events_ended
-else "EndOfEvents"
+f"#{len(self._events)} ({self._events[-1]})" if not events_ended else "EndOfEvents"
 )
 
 # Notify all active (not done) assertions about the new event.
@@ -323,9 +321,7 @@ async def _check_assertions(self, events_ended: bool) -> None:
 continue
 
 self._reported.add(a)
-self._logger.debug(
-"Assertion '%s' finished after event %s", a.name, event_descr
-)
+self._logger.debug("Assertion '%s' finished after event %s", a.name, event_descr)
 if a.accepted:
 result = a.result()
 msg = colors.green("Assertion '%s' succeeded; result: %s", style="bold")
4 changes: 1 addition & 3 deletions goth/configuration.py
@@ -299,9 +299,7 @@ def load_yaml(
 type_name = node["type"]
 use_proxy = node.get("use-proxy", False)
 
-payment_config_name = node.get(
-"payment-config", DEFAULT_PAYMENT_CONFIG_NAME
-)
+payment_config_name = node.get("payment-config", DEFAULT_PAYMENT_CONFIG_NAME)
 payment_config = get_payment_config(payment_config_name)
 
 class_, volumes, privileged_mode, env_dict = node_types[type_name]
16 changes: 4 additions & 12 deletions goth/gftp.py
@@ -96,9 +96,7 @@ def run_gftp_server(gftp_container: str, gftp_volume: Path):
 
 # Start the command and create a socket
 api_client = docker.APIClient()
-exec_id = api_client.exec_create(
-gftp_container, "gftp server", stdin=True, tty=False
-)
+exec_id = api_client.exec_create(gftp_container, "gftp server", stdin=True, tty=False)
 socket = api_client.exec_start(exec_id["Id"], socket=True)._sock
 
 def container_path_to_volume_path(container_path: Path) -> Path:
@@ -151,9 +149,7 @@ def response_reader():
 # it's from the command's stderr
 logger.debug("stderr: %s", response.strip())
 else:
-raise ValueError(
-f"Unexpected stream type in a frame header: {stream_type}"
-)
+raise ValueError(f"Unexpected stream type in a frame header: {stream_type}")
 
 # Reading responses needs to be done in a separate thread, otherwise it'll block
 reader_thread = threading.Thread(target=response_reader, daemon=True)
@@ -168,16 +164,12 @@ def response_reader():
 if method == "publish":
 files = params["files"]
 container_files = copy_files_to_volume(files)
-logger.debug(
-"replaced `files`; original: %s, new: %s", files, container_files
-)
+logger.debug("replaced `files`; original: %s, new: %s", files, container_files)
 params["files"] = container_files
 line = json.dumps(msg) + "\n"
 elif method == "receive":
 output_file = Path(params["output_file"])
-container_file = (
-Path(CONTAINER_MOUNT_POINT) / "out" / _mangle_path(output_file)
-)
+container_file = Path(CONTAINER_MOUNT_POINT) / "out" / _mangle_path(output_file)
 logger.debug(
 "replaced `output_file`; original: %s, new: %s",
 output_file,
20 changes: 5 additions & 15 deletions goth/runner/__init__.py
@@ -123,13 +123,9 @@ def __init__(
 )
 self._nginx_service_address = None
 self._pending_api_assertions = []
-self._web_server = (
-WebServer(web_root_path, web_server_port) if web_root_path else None
-)
+self._web_server = WebServer(web_root_path, web_server_port) if web_root_path else None
 
-def get_probes(
-self, probe_type: Type[ProbeType], name: str = ""
-) -> List[ProbeType]:
+def get_probes(self, probe_type: Type[ProbeType], name: str = "") -> List[ProbeType]:
 """Get probes by name or type.
 
 `probe_type` can be a type directly inheriting from `Probe`, as well as a
@@ -141,9 +137,7 @@ def get_probes(
 probes = [p for p in probes if isinstance(p, probe_type)]
 return cast(List[ProbeType], probes)
 
-def add_api_assertion(
-self, func: AssertionFunction, name=None
-) -> Assertion[APIEvent]:
+def add_api_assertion(self, func: AssertionFunction, name=None) -> Assertion[APIEvent]:
 """Add an assertion for API events to this runner proxy.
 
 If the proxy is already running, the returned assertion will be started.
@@ -170,9 +164,7 @@ def check_assertion_errors(self, *extra_monitors: EventMonitor) -> None:
 extra_monitors,
 )
 )
-failed = chain.from_iterable(
-monitor.failed for monitor in monitors if monitor is not None
-)
+failed = chain.from_iterable(monitor.failed for monitor in monitors if monitor is not None)
 for assertion in failed:
 # We assume all failed assertions were already reported
 # in their corresponding log files. Now we only need to raise
@@ -245,9 +237,7 @@ async def _start_nodes(self):
 awaitables = [probe.start_agents() for probe in self.probes]
 await asyncio.gather(*awaitables)
 
-async def _start_proxy(
-self, node_names: Dict[str, str], ports: Dict[str, dict]
-) -> None:
+async def _start_proxy(self, node_names: Dict[str, str], ports: Dict[str, dict]) -> None:
 
 self.proxy = Proxy(
 node_names=node_names,
4 changes: 1 addition & 3 deletions goth/runner/cli/__init__.py
@@ -7,9 +7,7 @@
 from goth.runner.container import DockerContainer
 
 
-class YagnaDockerCli(
-DockerJSONCommandRunner, YagnaAppKeyMixin, YagnaIdMixin, YagnaPaymentMixin
-):
+class YagnaDockerCli(DockerJSONCommandRunner, YagnaAppKeyMixin, YagnaIdMixin, YagnaPaymentMixin):
 """A class for running the `yagna` command inside a docker container."""
 
 def __init__(self, container: DockerContainer):
8 changes: 2 additions & 6 deletions goth/runner/cli/yagna_app_key_cmd.py
@@ -34,9 +34,7 @@ def app_key_create(
 Return the application key parsed from the command's output.
 """
 
-args = make_args(
-"app-key", "create", name, role=role, id=alias_or_addr, data_dir=data_dir
-)
+args = make_args("app-key", "create", name, role=role, id=alias_or_addr, data_dir=data_dir)
 try:
 output = self.run_json_command(str, *args)
 return output
@@ -45,9 +43,7 @@ def app_key_create(
 raise KeyAlreadyExistsError(name)
 raise ce
 
-def app_key_drop(
-self: CommandRunner, name: str, address: str = "", data_dir: str = ""
-) -> str:
+def app_key_drop(self: CommandRunner, name: str, address: str = "", data_dir: str = "") -> str:
 """Run `<cmd> app-key drop <name>` with optional extra args.
 
 Return the command's output.
12 changes: 3 additions & 9 deletions goth/runner/cli/yagna_id_cmd.py
@@ -34,9 +34,7 @@ def id_create(
 )
 output = self.run_json_command(Dict, *args)
 result = unwrap_ok_err_json(output)
-return Identity(
-result["alias"], result["isDefault"], result["isLocked"], result["nodeId"]
-)
+return Identity(result["alias"], result["isDefault"], result["isLocked"], result["nodeId"])
 
 def id_show(
 self: CommandRunner, data_dir: str = "", alias_or_addr: str = ""
@@ -74,11 +72,7 @@ def id_update(
 """Return the output of `<yagna-cmd> id update`."""
 
 set_default_str = "--set-default" if set_default else None
-args = make_args(
-"id", "update", alias_or_addr, set_default_str, data_dir=data_dir
-)
+args = make_args("id", "update", alias_or_addr, set_default_str, data_dir=data_dir)
 output = self.run_json_command(Dict, *args)
 result = unwrap_ok_err_json(output)
-return Identity(
-result["alias"], result["isDefault"], result["isLocked"], result["nodeId"]
-)
+return Identity(result["alias"], result["isDefault"], result["isLocked"], result["nodeId"])
8 changes: 2 additions & 6 deletions goth/runner/cli/yagna_payment_cmd.py
@@ -31,12 +31,8 @@ def from_dict(source: dict) -> "PaymentStatus":
 """Parse a dict into an instance of `PaymentStatus`."""
 return PaymentStatus(
 amount=float(source["amount"]),
-incoming=Payments(
-**{key: float(value) for key, value in source["incoming"].items()}
-),
-outgoing=Payments(
-**{key: float(value) for key, value in source["outgoing"].items()}
-),
+incoming=Payments(**{key: float(value) for key, value in source["incoming"].items()}),
+outgoing=Payments(**{key: float(value) for key, value in source["outgoing"].items()}),
 reserved=float(source["reserved"]),
 )
 
12 changes: 3 additions & 9 deletions goth/runner/container/build.py
@@ -168,9 +168,7 @@ def _find_expected_binaries(root_path: Path) -> List[Path]:
 return binary_paths
 
 
-def _setup_build_context(
-context_dir: Path, env: YagnaBuildEnvironment, dockerfile: Path
-) -> None:
+def _setup_build_context(context_dir: Path, env: YagnaBuildEnvironment, dockerfile: Path) -> None:
 """Set up the build context for `docker build` command.
 
 This function prepares a directory to be used as build context for
@@ -180,9 +178,7 @@ def _setup_build_context(
 """
 env_dict: dict = asdict(env)
 filtered_env = {k: v for k, v in env_dict.items() if v is not None}
-logger.info(
-"Setting up Docker build context. path=%s, env=%s", context_dir, filtered_env
-)
+logger.info("Setting up Docker build context. path=%s, env=%s", context_dir, filtered_env)
 
 context_binary_dir: Path = context_dir / "bin"
 context_deb_dir: Path = context_dir / "deb"
@@ -222,7 +218,5 @@ def _setup_build_context(
 for repo in DEB_RELEASE_REPOS:
 _download_release(context_deb_dir, repo)
 
-logger.debug(
-"Copying Dockerfile. source=%s, destination=%s", dockerfile, context_dir
-)
+logger.debug("Copying Dockerfile. source=%s, destination=%s", dockerfile, context_dir)
 shutil.copy2(dockerfile, context_dir / "Dockerfile")