2 changes: 1 addition & 1 deletion .cookiecutterrc
@@ -35,4 +35,4 @@ default_context:
project_slug: 'osparc-python-runner'
project_type: 'computational'
release_date: '2020'
version: '1.3.0'
version: '2.0.0'
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
1.3.0
2.0.0
2 changes: 1 addition & 1 deletion VERSION_INTEGRATION
@@ -1 +1 @@
1.0.0
1.1.0
21 changes: 9 additions & 12 deletions docker-compose-meta.yml
@@ -17,24 +17,21 @@ services:
input data file", "type": "data:*/*"}, "input_5": {"displayOrder": 5, "label":
"Additional input data - optional", "description": "Any additional input
data file", "type": "data:*/*"}}}'
io.simcore.integration-version: '{"integration-version": "1.0.0"}'
io.simcore.integration-version: '{"integration-version": "1.1.0"}'
io.simcore.key: '{"key": "simcore/services/comp/osparc-python-runner"}'
io.simcore.name: '{"name": "oSparc Python Runner"}'
io.simcore.outputs: '{"outputs": {"output_1": {"displayOrder": 1, "label":
"Output data", "description": "The data produced by the script and saved
under OUTPUT_FOLDER/output_1 as output_1.zip", "type": "data:*/*", "fileToKeyMap":
{"output_1.zip": "output_1"}}, "output_2": {"displayOrder": 2, "label":
under OUTPUT_FOLDER/output_1 as output_1.zip", "type": "data:*/*"}, "output_2":
{"displayOrder": 2, "label": "Output data", "description": "The data produced
by the script and saved under OUTPUT_FOLDER/output_2 as output_2.zip", "type":
"data:*/*"}, "output_3": {"displayOrder": 3, "label": "Output data", "description":
"The data produced by the script and saved under OUTPUT_FOLDER/output_3
as output_3.zip", "type": "data:*/*"}, "output_4": {"displayOrder": 4, "label":
"Output data", "description": "The data produced by the script and saved
under OUTPUT_FOLDER/output_2 as output_2.zip", "type": "data:*/*", "fileToKeyMap":
{"output_2.zip": "output_2"}}, "output_3": {"displayOrder": 3, "label":
"Output data", "description": "The data produced by the script and saved
under OUTPUT_FOLDER/output_3 as output_3.zip", "type": "data:*/*", "fileToKeyMap":
{"output_3.zip": "output_3"}}, "output_4": {"displayOrder": 4, "label":
"Output data", "description": "The data produced by the script and saved
under OUTPUT_FOLDER/output_4 as output_4.zip", "type": "data:*/*", "fileToKeyMap":
{"output_4.zip": "output_4"}}}}'
under OUTPUT_FOLDER/output_4 as output_4.zip", "type": "data:*/*"}}}'
io.simcore.type: '{"type": "computational"}'
io.simcore.version: '{"version": "1.3.0"}'
io.simcore.version: '{"version": "2.0.0"}'
org.label-schema.build-date: ${BUILD_DATE}
org.label-schema.schema-version: '1.0'
org.label-schema.vcs-ref: ${VCS_REF}
12 changes: 2 additions & 10 deletions metadata/metadata.yml
@@ -1,8 +1,8 @@
name: oSparc Python Runner
key: simcore/services/comp/osparc-python-runner
type: computational
integration-version: 1.0.0
version: 1.3.0
integration-version: 1.1.0
version: 2.0.0
description: oSparc Python Runner
contact: anderegg@itis.swiss
authors:
@@ -44,26 +44,18 @@ outputs:
label: Output data
description: The data produced by the script and saved under OUTPUT_FOLDER/output_1 as output_1.zip
type: data:*/*
fileToKeyMap:
output_1.zip: output_1
output_2:
displayOrder: 2
label: Output data
description: The data produced by the script and saved under OUTPUT_FOLDER/output_2 as output_2.zip
type: data:*/*
fileToKeyMap:
output_2.zip: output_2
output_3:
displayOrder: 3
label: Output data
description: The data produced by the script and saved under OUTPUT_FOLDER/output_3 as output_3.zip
type: data:*/*
fileToKeyMap:
output_3.zip: output_3
output_4:
displayOrder: 4
label: Output data
description: The data produced by the script and saved under OUTPUT_FOLDER/output_4 as output_4.zip
type: data:*/*
fileToKeyMap:
output_4.zip: output_4
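Note: with fileToKeyMap dropped from every output port (and the zipping teardown removed in main.py below), user scripts presumably write their results straight into the per-port output folders instead of relying on the runner to pack them into output_N.zip. A minimal, hypothetical user-script sketch, assuming OUTPUT_1 is exported as in service.cli/run and points at a writable folder; the file name result.json is purely illustrative:

import json
import os
from pathlib import Path

# Hypothetical user script: results are picked up from the per-port output
# folder itself rather than from an output_1.zip archive built by the runner.
output_1 = Path(os.environ["OUTPUT_1"])          # exported by service.cli/run
output_1.mkdir(parents=True, exist_ok=True)      # in case the folder is not pre-created
(output_1 / "result.json").write_text(json.dumps({"answer": 42}), encoding="utf-8")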
1 change: 1 addition & 0 deletions requirements.in
@@ -8,6 +8,7 @@ black
coverage
docker
jsonschema
pylint
pytest
pytest-cookies
pytest-cov
66 changes: 48 additions & 18 deletions requirements.txt
@@ -4,9 +4,11 @@
#
# pip-compile --output-file=requirements.txt
#
arrow==1.2.2
arrow==1.2.3
# via jinja2-time
attrs==21.4.0
astroid==2.12.9
# via pylint
attrs==22.1.0
# via
# jsonschema
# pytest
@@ -18,54 +20,70 @@ bump2version==1.0.1
# via bumpversion
bumpversion==0.6.0
# via -r requirements.in
certifi==2021.10.8
certifi==2022.9.14
# via requests
chardet==4.0.0
chardet==5.0.0
# via binaryornot
charset-normalizer==2.0.12
charset-normalizer==2.1.1
# via requests
click==8.1.0
click==8.1.3
# via
# black
# cookiecutter
cookiecutter==2.1.1
# via pytest-cookies
coverage[toml]==6.3.2
coverage[toml]==6.4.4
# via
# -r requirements.in
# pytest-cov
dill==0.3.5.1
# via pylint
docker==6.0.0
# via -r requirements.in
idna==3.3
idna==3.4
# via requests
importlib-resources==5.9.0
# via jsonschema
iniconfig==1.1.1
# via pytest
jinja2==3.1.1
isort==5.10.1
# via pylint
jinja2==3.1.2
# via
# cookiecutter
# jinja2-time
jinja2-time==0.2.0
# via cookiecutter
jsonschema==4.16.0
# via -r requirements.in
lazy-object-proxy==1.7.1
# via astroid
markupsafe==2.1.1
# via jinja2
mccabe==0.7.0
# via pylint
mypy-extensions==0.4.3
# via black
packaging==21.3
# via
# docker
# pytest
# pytest-sugar
pathspec==0.9.0
# via black
platformdirs==2.5.1
pathspec==0.10.1
# via black
pkgutil-resolve-name==1.3.10
# via jsonschema
platformdirs==2.5.2
# via
# black
# pylint
pluggy==1.0.0
# via pytest
py==1.11.0
# via pytest
pyparsing==3.0.7
pylint==2.15.2
# via -r requirements.in
pyparsing==3.0.9
# via packaging
pyrsistent==0.18.1
# via jsonschema
@@ -89,30 +107,42 @@ pytest-sugar==0.9.5
# via -r requirements.in
python-dateutil==2.8.2
# via arrow
python-slugify==6.1.1
python-slugify==6.1.2
# via cookiecutter
pyyaml==6.0
# via
# -r requirements.in
# cookiecutter
requests==2.27.1
requests==2.28.1
# via
# cookiecutter
# docker
six==1.16.0
# via python-dateutil
termcolor==1.1.0
termcolor==2.0.1
# via pytest-sugar
text-unidecode==1.3
# via python-slugify
tomli==2.0.1
# via
# black
# coverage
# pylint
# pytest
urllib3==1.26.9
tomlkit==0.11.4
# via pylint
typing-extensions==4.3.0
# via
# astroid
# black
# pylint
urllib3==1.26.12
# via
# docker
# requests
websocket-client==1.3.2
websocket-client==1.4.1
# via docker
wrapt==1.14.1
# via astroid
zipp==3.8.1
# via importlib-resources
8 changes: 8 additions & 0 deletions service.cli/run
@@ -19,6 +19,14 @@ INPUT_4=$INPUT_FOLDER/input_4
export INPUT_4
INPUT_5=$INPUT_FOLDER/input_5
export INPUT_5
OUTPUT_1=$OUTPUT_FOLDER/output_1
export OUTPUT_1
OUTPUT_2=$OUTPUT_FOLDER/output_2
export OUTPUT_2
OUTPUT_3=$OUTPUT_FOLDER/output_3
export OUTPUT_3
OUTPUT_4=$OUTPUT_FOLDER/output_4
export OUTPUT_4

exec execute.sh
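The new OUTPUT_1..OUTPUT_4 exports follow the same convention as the existing INPUT_1..INPUT_5 ones: each variable is simply $OUTPUT_FOLDER/output_N. A sketch reproducing that mapping in Python, e.g. for exercising user code locally outside the runner; it assumes INPUT_FOLDER and OUTPUT_FOLDER are already set, and the 5/4 port counts mirror metadata.yml:

import os
from pathlib import Path

# Reproduce the env-var convention from service.cli/run (assumption: same layout).
input_folder = Path(os.environ["INPUT_FOLDER"])
output_folder = Path(os.environ["OUTPUT_FOLDER"])

for n in range(1, 6):                                   # INPUT_1 .. INPUT_5
    os.environ[f"INPUT_{n}"] = str(input_folder / f"input_{n}")
for n in range(1, 5):                                   # OUTPUT_1 .. OUTPUT_4
    os.environ[f"OUTPUT_{n}"] = str(output_folder / f"output_{n}")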

88 changes: 23 additions & 65 deletions src/osparc_python_runner/main.py
@@ -1,28 +1,15 @@
import json
import logging
import os
import shutil
import subprocess
import sys
from pathlib import Path
from typing import Dict

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("osparc-python-main")


ENVIRONS = ["INPUT_FOLDER", "OUTPUT_FOLDER"]
try:
INPUT_FOLDER, OUTPUT_FOLDER = [Path(os.environ[v]) for v in ENVIRONS]
except KeyError:
raise ValueError("Required env vars {ENVIRONS} were not set")

# NOTE: sync with schema in metadata!!
NUM_INPUTS = 5
NUM_OUTPUTS = 4
OUTPUT_SUBFOLDER_ENV_TEMPLATE = "OUTPUT_{}"
OUTPUT_SUBFOLDER_TEMPLATE = "output_{}"
OUTPUT_FILE_TEMPLATE = "output_{}.zip"
INPUT_1 = Path(os.environ["INPUT_1"])


def _find_user_code_entrypoint(code_dir: Path) -> Path:
@@ -58,59 +45,44 @@ def _ensure_pip_requirements(code_dir: Path) -> Path:
f"pipreqs --savepath={requirements} --force {code_dir}".split(),
shell=False,
check=True,
cwd=INPUT_FOLDER,
cwd=INPUT_1,
)

# TODO log subprocess.run

else:
requirements = requirements[0]
logger.info(f"Found: {requirements}")
logger.info("Found: %s", requirements)
return requirements


# TODO: Next version of integration will take care of this and maybe the ENVs as well
def _ensure_output_subfolders_exist() -> Dict[str, str]:
output_envs = {}
for n in range(1, NUM_OUTPUTS + 1):
output_sub_folder_env = f"OUTPUT_{n}"
output_sub_folder = OUTPUT_FOLDER / OUTPUT_SUBFOLDER_TEMPLATE.format(n)
# NOTE: exist_ok for forward compatibility in case they are already created
output_sub_folder.mkdir(parents=True, exist_ok=True)
output_envs[output_sub_folder_env] = f"{output_sub_folder}"
logger.info(
"Output ENVs available: %s",
json.dumps(output_envs, indent=2),
)
return output_envs


def _ensure_input_environment() -> Dict[str, str]:
input_envs = {
f"INPUT_{n}": os.environ[f"INPUT_{n}"] for n in range(1, NUM_INPUTS + 1)
}
logger.info(
"Input ENVs available: %s",
json.dumps(input_envs, indent=2),
)
return input_envs
def _show_io_environments() -> None:
for io_type in ["input", "output"]:
logger.info(
"%s ENVs available: %s",
io_type.capitalize(),
json.dumps(
list(
filter(
lambda x, io_type=io_type: f"{io_type.upper()}_" in x,
os.environ,
)
),
indent=2,
),
)


def setup():
input_envs = _ensure_input_environment()
output_envs = _ensure_output_subfolders_exist()
_show_io_environments()
logger.info("Available data:")
os.system("ls -tlah")

user_code_entrypoint = _find_user_code_entrypoint(INPUT_FOLDER)
requirements_txt = _ensure_pip_requirements(INPUT_FOLDER)
user_code_entrypoint = _find_user_code_entrypoint(INPUT_1)
requirements_txt = _ensure_pip_requirements(INPUT_1)

logger.info("Preparing launch script ...")
venv_dir = Path.home() / ".venv"
bash_input_env_export = [f"export {env}={path}" for env, path in input_envs.items()]
bash_output_env_export = [
f"export {env}='{path}'" for env, path in output_envs.items()
]
script = [
"#!/bin/sh",
"set -o errexit",
Expand All @@ -120,30 +92,16 @@ def setup():
f'python3 -m venv --system-site-packages --symlinks --upgrade "{venv_dir}"',
f'"{venv_dir}/bin/pip" install -U pip wheel setuptools',
f'"{venv_dir}/bin/pip" install -r "{requirements_txt}"',
"\n".join(bash_input_env_export),
"\n".join(bash_output_env_export),
f'echo "Executing code {user_code_entrypoint.name}..."',
f'"{venv_dir}/bin/python3" "{user_code_entrypoint}"',
'echo "DONE ..."',
]
main_script_path = Path("main.sh")
main_script_path.write_text("\n".join(script))
main_script_path.write_text("\n".join(script), encoding="utf-8")


def teardown():
logger.info("Zipping output...")
for n in range(1, NUM_OUTPUTS + 1):
output_path = OUTPUT_FOLDER / f"output_{n}"
archive_file_path = OUTPUT_FOLDER / OUTPUT_FILE_TEMPLATE.format(n)
logger.info("Zipping %s into %s...", output_path, archive_file_path)
shutil.make_archive(
f"{(archive_file_path.parent / archive_file_path.stem)}",
format="zip",
root_dir=output_path,
logger=logger,
)
logger.info("Zipping %s into %s done", output_path, archive_file_path)
logger.info("Zipping done.")
logger.info("Completed")


if __name__ == "__main__":
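The new _show_io_environments only logs which INPUT_*/OUTPUT_* variables exist; unlike the two helpers it replaces, it no longer creates output sub-folders or returns a mapping (those exports now come from service.cli/run). A self-contained illustration of the same substring filter over a made-up environment (values are invented, and note the filter also matches INPUT_FOLDER/OUTPUT_FOLDER):

import json
import os

# Made-up environment for illustration only.
os.environ.update({"INPUT_1": "/tmp/inputs/input_1", "OUTPUT_1": "/tmp/outputs/output_1"})

for io_type in ["input", "output"]:
    # Same substring test as the lambda used in _show_io_environments.
    names = [name for name in os.environ if f"{io_type.upper()}_" in name]
    print(io_type.capitalize(), "ENVs available:", json.dumps(names, indent=2))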