From dec48765345c3c4daa88697d7511e0ac8b2c9a01 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Thu, 22 May 2025 15:54:12 +0200 Subject: [PATCH 01/26] initial working configuration --- .gitignore | 7 + .osparc/common/docker-compose.overwrite.yml | 5 + .osparc/common/metadata.yml | 14 ++ .osparc/common/runtime.yml | 1 + .../docker-compose.overwrite.yml | 7 + .../osparc-python-runner-pytorch/metadata.yml | 46 ++++ .../osparc-python-runner-pytorch/runtime.yml | 14 ++ .../docker-compose.overwrite.yml | 7 + .../metadata.yml | 46 ++++ .../runtime.yml | 14 ++ Makefile | 86 ++++++++ common/Dockerfile | 76 +++++++ common/entrypoint.sh | 73 +++++++ common/main.py | 201 ++++++++++++++++++ osparc-python-runner-pytorch/Dockerfile | 14 ++ .../requirements.txt | 4 + .../service.cli/execute.sh | 17 ++ osparc-python-runner-pytorch/service.cli/run | 25 +++ osparc-python-runner-tensorflow/Dockerfile | 14 ++ .../requirements.txt | 1 + .../service.cli/execute.sh | 17 ++ .../service.cli/run | 25 +++ 22 files changed, 714 insertions(+) create mode 100644 .gitignore create mode 100644 .osparc/common/docker-compose.overwrite.yml create mode 100644 .osparc/common/metadata.yml create mode 100644 .osparc/common/runtime.yml create mode 100644 .osparc/osparc-python-runner-pytorch/docker-compose.overwrite.yml create mode 100644 .osparc/osparc-python-runner-pytorch/metadata.yml create mode 100644 .osparc/osparc-python-runner-pytorch/runtime.yml create mode 100644 .osparc/osparc-python-runner-tensorflow/docker-compose.overwrite.yml create mode 100644 .osparc/osparc-python-runner-tensorflow/metadata.yml create mode 100644 .osparc/osparc-python-runner-tensorflow/runtime.yml create mode 100644 Makefile create mode 100644 common/Dockerfile create mode 100755 common/entrypoint.sh create mode 100644 common/main.py create mode 100644 osparc-python-runner-pytorch/Dockerfile create mode 100644 osparc-python-runner-pytorch/osparc_python_runner_pytorch/requirements.txt create mode 100755 
osparc-python-runner-pytorch/service.cli/execute.sh create mode 100755 osparc-python-runner-pytorch/service.cli/run create mode 100644 osparc-python-runner-tensorflow/Dockerfile create mode 100644 osparc-python-runner-tensorflow/osparc_python_runner_tensorflow/requirements.txt create mode 100755 osparc-python-runner-tensorflow/service.cli/execute.sh create mode 100755 osparc-python-runner-tensorflow/service.cli/run diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..63b8de7 --- /dev/null +++ b/.gitignore @@ -0,0 +1,7 @@ +# not interested in storing this file +docker-compose.yml + + +*.ignore.* + +.venv/ \ No newline at end of file diff --git a/.osparc/common/docker-compose.overwrite.yml b/.osparc/common/docker-compose.overwrite.yml new file mode 100644 index 0000000..706cc8e --- /dev/null +++ b/.osparc/common/docker-compose.overwrite.yml @@ -0,0 +1,5 @@ +services: + common: + build: + context: ./common + dockerfile: Dockerfile diff --git a/.osparc/common/metadata.yml b/.osparc/common/metadata.yml new file mode 100644 index 0000000..5091782 --- /dev/null +++ b/.osparc/common/metadata.yml @@ -0,0 +1,14 @@ +name: "Base runner-ml image" +thumbnail: +description: "used only for building not to be published" +key: simcore/services/comp/common +version: 0.0.0 +integration-version: 2.0.0 +type: dynamic +authors: + - name: "Andrei Neagu" + email: "neagu@itis.swiss" + affiliation: "IT'IS Foundation" +contact: "neagu@itis.swiss" +inputs: {} +outputs: {} diff --git a/.osparc/common/runtime.yml b/.osparc/common/runtime.yml new file mode 100644 index 0000000..24e49db --- /dev/null +++ b/.osparc/common/runtime.yml @@ -0,0 +1 @@ +settings: [] diff --git a/.osparc/osparc-python-runner-pytorch/docker-compose.overwrite.yml b/.osparc/osparc-python-runner-pytorch/docker-compose.overwrite.yml new file mode 100644 index 0000000..855dc59 --- /dev/null +++ b/.osparc/osparc-python-runner-pytorch/docker-compose.overwrite.yml @@ -0,0 +1,7 @@ +services: + 
osparc-python-runner-pytorch: + depends_on: + - common + build: + context: ./osparc-python-runner-pytorch + dockerfile: Dockerfile diff --git a/.osparc/osparc-python-runner-pytorch/metadata.yml b/.osparc/osparc-python-runner-pytorch/metadata.yml new file mode 100644 index 0000000..b7eb203 --- /dev/null +++ b/.osparc/osparc-python-runner-pytorch/metadata.yml @@ -0,0 +1,46 @@ +name: oSparc Python Runner PyTorch +key: simcore/services/comp/osparc-python-runner-pytorch +type: computational +integration-version: 2.0.0 +version: 1.0.4 +description: oSparc Python Runner PyTorch +contact: guidon@itis.swiss +thumbnail: https://i.imgur.com/iKoFIN0.png +authors: + - name: Manuel Guidon + email: guidon@itis.swiss + affiliation: IT'IS Foundation +inputs: + input_1: + displayOrder: 1 + label: Input data + description: Any code, requirements or data file + type: data:*/* + input_2: + displayOrder: 2 + label: Additional input data - optional + description: Any additional input data file + type: data:*/* + input_3: + displayOrder: 3 + label: Additional input data - optional + description: Any additional input data file + type: data:*/* + input_4: + displayOrder: 4 + label: Additional input data - optional + description: Any additional input data file + type: data:*/* + input_5: + displayOrder: 5 + label: Additional input data - optional + description: Any additional input data file + type: data:*/* +outputs: + output_1: + displayOrder: 1 + label: Output data + description: All data produced by the script is zipped as output_data.zip + type: data:*/* + fileToKeyMap: + output_data.zip: output_1 diff --git a/.osparc/osparc-python-runner-pytorch/runtime.yml b/.osparc/osparc-python-runner-pytorch/runtime.yml new file mode 100644 index 0000000..d29bfd9 --- /dev/null +++ b/.osparc/osparc-python-runner-pytorch/runtime.yml @@ -0,0 +1,14 @@ +settings: + - name: Resources + type: Resources + value: + Limits: + NanoCPUs: 0 + MemoryBytes: 0 + Reservations: + NanoCPUs: 0 + MemoryBytes: 0 + 
GenericResources: + - DiscreteResourceSpec: + Kind: VRAM + Value: 1 \ No newline at end of file diff --git a/.osparc/osparc-python-runner-tensorflow/docker-compose.overwrite.yml b/.osparc/osparc-python-runner-tensorflow/docker-compose.overwrite.yml new file mode 100644 index 0000000..3f9fdf4 --- /dev/null +++ b/.osparc/osparc-python-runner-tensorflow/docker-compose.overwrite.yml @@ -0,0 +1,7 @@ +services: + osparc-python-runner-tensorflow: + depends_on: + - common + build: + context: ./osparc-python-runner-tensorflow + dockerfile: Dockerfile diff --git a/.osparc/osparc-python-runner-tensorflow/metadata.yml b/.osparc/osparc-python-runner-tensorflow/metadata.yml new file mode 100644 index 0000000..3be42e0 --- /dev/null +++ b/.osparc/osparc-python-runner-tensorflow/metadata.yml @@ -0,0 +1,46 @@ +name: oSparc Python Runner Tensorflow +key: simcore/services/comp/osparc-python-runner-tensorflow +type: computational +integration-version: 2.0.0 +version: 1.0.4 +description: oSparc Python Runner Tensorflow +contact: guidon@itis.swiss +thumbnail: https://i.imgur.com/iKoFIN0.png +authors: + - name: Manuel Guidon + email: guidon@itis.swiss + affiliation: IT'IS Foundation +inputs: + input_1: + displayOrder: 1 + label: Input data + description: Any code, requirements or data file + type: data:*/* + input_2: + displayOrder: 2 + label: Additional input data - optional + description: Any additional input data file + type: data:*/* + input_3: + displayOrder: 3 + label: Additional input data - optional + description: Any additional input data file + type: data:*/* + input_4: + displayOrder: 4 + label: Additional input data - optional + description: Any additional input data file + type: data:*/* + input_5: + displayOrder: 5 + label: Additional input data - optional + description: Any additional input data file + type: data:*/* +outputs: + output_1: + displayOrder: 1 + label: Output data + description: All data produced by the script is zipped as output_data.zip + type: data:*/* + 
fileToKeyMap: + output_data.zip: output_1 diff --git a/.osparc/osparc-python-runner-tensorflow/runtime.yml b/.osparc/osparc-python-runner-tensorflow/runtime.yml new file mode 100644 index 0000000..d29bfd9 --- /dev/null +++ b/.osparc/osparc-python-runner-tensorflow/runtime.yml @@ -0,0 +1,14 @@ +settings: + - name: Resources + type: Resources + value: + Limits: + NanoCPUs: 0 + MemoryBytes: 0 + Reservations: + NanoCPUs: 0 + MemoryBytes: 0 + GenericResources: + - DiscreteResourceSpec: + Kind: VRAM + Value: 1 \ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..975010c --- /dev/null +++ b/Makefile @@ -0,0 +1,86 @@ +# minimalistic utility to test and develop locally + +SHELL = /bin/sh +.DEFAULT_GOAL := help + +export IMAGE_PYTORCH=osparc-python-runner-pytorch +export IMAGE_TENSORFLOW=osparc-python-runner-tensorflow +export TAG_PYTORCH=1.0.4 +export TAG_TENSORFLOW=1.0.4 + +# PYTHON ENVIRON --------------------------------------------------------------------------------------- +.PHONY: devenv +.venv: + @python3 --version + python3 -m venv $@ + # upgrading package managers + $@/bin/pip install --upgrade uv + +devenv: .venv ## create a python virtual environment with tools to dev, run and tests cookie-cutter + # installing extra tools + @$ /dev/null 2>&1 || \ + (echo "ERROR: You must mount '${INPUT_FOLDER}' to deduce user and group ids" && exit 1) +stat "${OUTPUT_FOLDER}" > /dev/null 2>&1 || \ + (echo "ERROR: You must mount '${OUTPUT_FOLDER}' to deduce user and group ids" && exit 1) + +# NOTE: expects docker run ... -v /path/to/input/folder:${INPUT_FOLDER} +# check input/output folders are owned by the same user +if [ "$(stat -c %u "${INPUT_FOLDER}")" -ne "$(stat -c %u "${OUTPUT_FOLDER}")" ] +then + echo "ERROR: '${INPUT_FOLDER}' and '${OUTPUT_FOLDER}' have different user id's. 
not allowed" && exit 1 +fi +# check input/outputfolders are owned by the same group +if [ "$(stat -c %g "${INPUT_FOLDER}")" -ne "$(stat -c %g "${OUTPUT_FOLDER}")" ] +then + echo "ERROR: '${INPUT_FOLDER}' and '${OUTPUT_FOLDER}' have different group id's. not allowed" && exit 1 +fi + +echo "setting correct user id/group id..." +HOST_USERID=$(stat --format=%u "${INPUT_FOLDER}") +HOST_GROUPID=$(stat --format=%g "${INPUT_FOLDER}") +CONT_GROUPNAME=$(getent group "${HOST_GROUPID}" | cut --delimiter=: --fields=1) +if [ "$HOST_USERID" -eq 0 ] +then + echo "Warning: Folder mounted owned by root user... adding $SC_USER_NAME to root..." + adduser "$SC_USER_NAME" root +else + echo "Folder mounted owned by user $HOST_USERID:$HOST_GROUPID-'$CONT_GROUPNAME'..." + # take host's credentials in $SC_USER_NAME + if [ -z "$CONT_GROUPNAME" ] + then + echo "Creating new group my$SC_USER_NAME" + CONT_GROUPNAME=my$SC_USER_NAME + addgroup --gid "$HOST_GROUPID" "$CONT_GROUPNAME" + else + echo "group already exists" + fi + echo "adding $SC_USER_NAME to group $CONT_GROUPNAME..." + adduser "$SC_USER_NAME" "$CONT_GROUPNAME" + + echo "changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)" + usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" + + echo "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" + find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; + # change user property of files already around + echo "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" + find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; +fi + +echo "Starting $* ..." 
+echo " $SC_USER_NAME rights : $(id "$SC_USER_NAME")" +echo " local dir : $(ls -al)" +echo " input dir : $(ls -al "${INPUT_FOLDER}")" +echo " output dir : $(ls -al "${OUTPUT_FOLDER}")" + +su --command "export PATH=${PATH}:/home/$SC_USER_NAME/service.cli; $*" "$SC_USER_NAME" \ No newline at end of file diff --git a/common/main.py b/common/main.py new file mode 100644 index 0000000..12ad5b9 --- /dev/null +++ b/common/main.py @@ -0,0 +1,201 @@ +import logging +import os +import shutil +import subprocess +import sys +import tempfile +import zipfile +from pathlib import Path + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger("osparc-python-main") + + +ENVIRONS = ["INPUT_FOLDER", "OUTPUT_FOLDER"] +input_dir, output_dir = [Path(os.environ.get(v, None)) for v in ENVIRONS] + +# TODO: sync with schema in metadata!! +OUTPUT_FILE = "output_data.zip" + +FILE_DIR = os.path.realpath(__file__) + + +def copy(src, dest): + try: + src, dest = str(src), str(dest) + shutil.copytree( + src, dest, ignore=shutil.ignore_patterns("*.zip", "__pycache__", ".*") + ) + except OSError as err: + # If the error was caused because the source wasn't a directory + if err.errno == shutil.errno.ENOTDIR: + shutil.copy(src, dest) + else: + logger.error("Directory not copied. Error: %s", err) + + +def clean_dir(dirpath: Path): + for root, dirs, files in os.walk(dirpath): + for f in files: + os.unlink(os.path.join(root, f)) + for d in dirs: + shutil.rmtree(os.path.join(root, d)) + + +def run_cmd(cmd: str): + subprocess.run(cmd.split(), shell=False, check=True, cwd=input_dir) + # TODO: deal with stdout, log? and error?? 
+ + +def unzip_dir(parent: Path): + for filepath in list(parent.rglob("*.zip")): + if zipfile.is_zipfile(filepath): + with zipfile.ZipFile(filepath) as zf: + zf.extractall(filepath.parent) + + +def zipdir(dirpath: Path, ziph: zipfile.ZipFile): + """Zips directory and archives files relative to dirpath""" + for root, dirs, files in os.walk(dirpath): + for filename in files: + filepath = os.path.join(root, filename) + ziph.write(filepath, arcname=os.path.relpath(filepath, dirpath)) + dirs[:] = [name for name in dirs if not name.startswith(".")] + + +def ensure_main_entrypoint(code_dir: Path) -> Path: + code_files = list(code_dir.rglob("*.py")) + + if not code_files: + raise ValueError("No python code found") + + if len(code_files) > 1: + code_files = list(code_dir.rglob("main.py")) + if not code_files: + raise ValueError("No entrypoint found (e.g. main.py)") + if len(code_files) > 1: + raise ValueError(f"Many entrypoints found: {code_files}") + + main_py = code_files[0] + return main_py + + +def ensure_requirements(code_dir: Path) -> Path: + requirements = list(code_dir.rglob("requirements.txt")) + if len(requirements) > 1: + raise ValueError(f"Many requirements found: {requirements}") + + elif not requirements: + # deduce requirements using pipreqs + logger.info("Not found. 
Recreating requirements ...") + requirements = code_dir / "requirements.txt" + run_cmd(f"pipreqs --savepath={requirements} --force {code_dir}") + + else: + requirements = requirements[0] + + # we want to make sure that no already installed libraries are being touched + + # the requirements file of this service + runner_requirements = Path(FILE_DIR).parent / "requirements.txt" + + # this will be the one from the user augmented by a constraint to the runner one + requirements_in = code_dir / "requirements.in" + + # tmp file for creating the new one + tmp_requirements = code_dir / "requirements.tmp" + + with open(requirements, "r") as f: + with open(tmp_requirements, "w") as f2: + f2.write(f"-c {runner_requirements}\n") + f2.write(f.read()) + + os.rename(tmp_requirements, requirements_in) + + return Path(requirements_in) + + +def setup(): + logger.info("Cleaning output ...") + clean_dir(output_dir) + + # TODO: snapshot_before = list(input_dir.rglob("*")) + + # NOTE The inputs defined in ${INPUT_FOLDER}/inputs.json are available as env variables by their key in capital letters + # For example: input_1 -> $INPUT_1 + # + + logger.info("Processing input ...") + unzip_dir(input_dir) + + # logger.info("Copying input to output ...") + # copy(input_dir, code_dir) + + logger.info("Searching main entrypoint ...") + main_py = ensure_main_entrypoint(input_dir) + logger.info("Found %s as main entrypoint", main_py) + + logger.info("Searching requirements ...") + logger.info(input_dir) + requirements_in = ensure_requirements(input_dir) + + requirements = requirements_in.parent / "requirements.compiled" + + logger.info("Preparing launch script ...") + venv_dir = Path.home() / ".venv" + script = [ + "#!/bin/sh", + "set -o errexit", + "set -o nounset", + "IFS=$(printf '\\n\\t')", + f"echo compiling {requirements_in} into {requirements} ...", + f"pip-compile --upgrade --build-isolation --output-file {requirements} {requirements_in}", + 'echo "Creating virtual environment ..."', + 
f"python3 -m venv --system-site-packages --symlinks --upgrade {venv_dir}", + f"{venv_dir}/bin/pip install -U pip wheel setuptools", + f"{venv_dir}/bin/pip install -r {requirements}", + f'echo "Executing code {main_py.name}..."', + f"{venv_dir}/bin/python3 {main_py}", + 'echo "DONE ..."', + ] + main_script_path = Path("main.sh") + with main_script_path.open("w") as fp: + for line in script: + print(f"{line}\n", file=fp) + + # # TODO: take snapshot + # logger.info("Creating virtual environment ...") + # run_cmd("python3 -m venv --system-site-packages --symlinks --upgrade .venv") + # run_cmd(".venv/bin/pip install -U pip wheel setuptools") + # run_cmd(f".venv/bin/pip install -r {requirements}") + + # # TODO: take snapshot + # logger.info("Executing code ...") + # run_cmd(f".venv/bin/python3 {main_py}") + + +def teardown(): + logger.info("Zipping output ....") + + # TODO: sync zipped name with docker/labels/outputs.json + with tempfile.TemporaryDirectory() as tmpdir: + zipped_file = Path(f"{tmpdir}/{OUTPUT_FILE}") + with zipfile.ZipFile(str(zipped_file), "w", zipfile.ZIP_DEFLATED) as zh: + zipdir(output_dir, zh) + + logger.info("Cleaning output") + clean_dir(output_dir) + + logger.info("Moving %s", zipped_file.name) + shutil.move(str(zipped_file), str(output_dir)) + + +if __name__ == "__main__": + action = "setup" if len(sys.argv) == 1 else sys.argv[1] + try: + if action == "setup": + setup() + else: + teardown() + except Exception as err: # pylint: disable=broad-except + logger.error("%s . 
Stopping %s", err, action) diff --git a/osparc-python-runner-pytorch/Dockerfile b/osparc-python-runner-pytorch/Dockerfile new file mode 100644 index 0000000..1b147ac --- /dev/null +++ b/osparc-python-runner-pytorch/Dockerfile @@ -0,0 +1,14 @@ +FROM simcore/services/comp/common:0.0.0 AS service-base + +LABEL maintainer="Andrei Neagu " + +# copy simcore service cli +COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} service.cli/ service.cli/ + +# copy binaries from build +COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} osparc_python_runner_pytorch osparc_python_runner_pytorch + +# copy commong main.py +RUN mv main.py osparc_python_runner_pytorch/main.py + +RUN uv pip install --system -r osparc_python_runner_pytorch/requirements.txt \ No newline at end of file diff --git a/osparc-python-runner-pytorch/osparc_python_runner_pytorch/requirements.txt b/osparc-python-runner-pytorch/osparc_python_runner_pytorch/requirements.txt new file mode 100644 index 0000000..62c2049 --- /dev/null +++ b/osparc-python-runner-pytorch/osparc_python_runner_pytorch/requirements.txt @@ -0,0 +1,4 @@ +--index-url https://download.pytorch.org/whl/cu128 +torch==2.7.0 +torchaudio==2.7.0 +torchvision==0.22.0 \ No newline at end of file diff --git a/osparc-python-runner-pytorch/service.cli/execute.sh b/osparc-python-runner-pytorch/service.cli/execute.sh new file mode 100755 index 0000000..2b49c45 --- /dev/null +++ b/osparc-python-runner-pytorch/service.cli/execute.sh @@ -0,0 +1,17 @@ +#!/bin/sh +# set sh strict mode +set -o errexit +set -o nounset +IFS=$(printf '\n\t') + +cd /home/scu/osparc_python_runner_pytorch + +echo "starting service as" +echo User : "$(id "$(whoami)")" +echo Workdir : "$(pwd)" +echo "..." 
+echo + +python3 main.py setup +/bin/sh main.sh +python3 main.py teardown \ No newline at end of file diff --git a/osparc-python-runner-pytorch/service.cli/run b/osparc-python-runner-pytorch/service.cli/run new file mode 100755 index 0000000..e2bd973 --- /dev/null +++ b/osparc-python-runner-pytorch/service.cli/run @@ -0,0 +1,25 @@ + +#!/bin/sh +#--------------------------------------------------------------- +# AUTO-GENERATED CODE, do not modify this will be overwritten!!! +#--------------------------------------------------------------- +# shell strict mode: +set -o errexit +set -o nounset +IFS=$(printf '\n\t') +cd "$(dirname "$0")" +json_input=$INPUT_FOLDER/inputs.json + +INPUT_1=$INPUT_FOLDER/input_1 +export INPUT_1 +INPUT_2=$INPUT_FOLDER/input_2 +export INPUT_2 +INPUT_3=$INPUT_FOLDER/input_3 +export INPUT_3 +INPUT_4=$INPUT_FOLDER/input_4 +export INPUT_4 +INPUT_5=$INPUT_FOLDER/input_5 +export INPUT_5 + +exec execute.sh + \ No newline at end of file diff --git a/osparc-python-runner-tensorflow/Dockerfile b/osparc-python-runner-tensorflow/Dockerfile new file mode 100644 index 0000000..6596cc5 --- /dev/null +++ b/osparc-python-runner-tensorflow/Dockerfile @@ -0,0 +1,14 @@ +FROM simcore/services/comp/common:0.0.0 AS service-base + +LABEL maintainer="Andrei Neagu " + +# copy simcore service cli +COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} service.cli/ service.cli/ + +# copy binaries from build +COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} osparc_python_runner_tensorflow osparc_python_runner_tensorflow + +# copy commong main.py +RUN mv main.py osparc_python_runner_tensorflow/main.py + +RUN uv pip install --system -r osparc_python_runner_tensorflow/requirements.txt \ No newline at end of file diff --git a/osparc-python-runner-tensorflow/osparc_python_runner_tensorflow/requirements.txt b/osparc-python-runner-tensorflow/osparc_python_runner_tensorflow/requirements.txt new file mode 100644 index 0000000..eced62a --- /dev/null +++ 
b/osparc-python-runner-tensorflow/osparc_python_runner_tensorflow/requirements.txt @@ -0,0 +1 @@ +tensorflow[and-cuda]==2.18.1 \ No newline at end of file diff --git a/osparc-python-runner-tensorflow/service.cli/execute.sh b/osparc-python-runner-tensorflow/service.cli/execute.sh new file mode 100755 index 0000000..f723e29 --- /dev/null +++ b/osparc-python-runner-tensorflow/service.cli/execute.sh @@ -0,0 +1,17 @@ +#!/bin/sh +# set sh strict mode +set -o errexit +set -o nounset +IFS=$(printf '\n\t') + +cd /home/scu/osparc_python_runner_tensorflow + +echo "starting service as" +echo User : "$(id "$(whoami)")" +echo Workdir : "$(pwd)" +echo "..." +echo + +python3 main.py setup +/bin/sh main.sh +python3 main.py teardown \ No newline at end of file diff --git a/osparc-python-runner-tensorflow/service.cli/run b/osparc-python-runner-tensorflow/service.cli/run new file mode 100755 index 0000000..e2bd973 --- /dev/null +++ b/osparc-python-runner-tensorflow/service.cli/run @@ -0,0 +1,25 @@ + +#!/bin/sh +#--------------------------------------------------------------- +# AUTO-GENERATED CODE, do not modify this will be overwritten!!! 
+#--------------------------------------------------------------- +# shell strict mode: +set -o errexit +set -o nounset +IFS=$(printf '\n\t') +cd "$(dirname "$0")" +json_input=$INPUT_FOLDER/inputs.json + +INPUT_1=$INPUT_FOLDER/input_1 +export INPUT_1 +INPUT_2=$INPUT_FOLDER/input_2 +export INPUT_2 +INPUT_3=$INPUT_FOLDER/input_3 +export INPUT_3 +INPUT_4=$INPUT_FOLDER/input_4 +export INPUT_4 +INPUT_5=$INPUT_FOLDER/input_5 +export INPUT_5 + +exec execute.sh + \ No newline at end of file From 51e0f7a7308cbc1ba34381f83e21f50b4de2d9f7 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 09:01:45 +0200 Subject: [PATCH 02/26] added to ignore --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 63b8de7..4093972 100644 --- a/.gitignore +++ b/.gitignore @@ -4,4 +4,5 @@ docker-compose.yml *.ignore.* -.venv/ \ No newline at end of file +.venv/ +docker-compose.yml \ No newline at end of file From 31bbd905b4ba129b1aacc55742def62e93cbb9b8 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 09:09:01 +0200 Subject: [PATCH 03/26] updated assets with links --- .osparc/osparc-python-runner-pytorch/metadata.yml | 5 +++-- .osparc/osparc-python-runner-tensorflow/metadata.yml | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.osparc/osparc-python-runner-pytorch/metadata.yml b/.osparc/osparc-python-runner-pytorch/metadata.yml index b7eb203..acc3e2d 100644 --- a/.osparc/osparc-python-runner-pytorch/metadata.yml +++ b/.osparc/osparc-python-runner-pytorch/metadata.yml @@ -3,9 +3,10 @@ key: simcore/services/comp/osparc-python-runner-pytorch type: computational integration-version: 2.0.0 version: 1.0.4 -description: oSparc Python Runner PyTorch +description: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/refs/heads/main/app/full/services/simcore_services_comp_osparc-python-runner-pytorch.md +icon: 
https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/icons/s4l/simcore_services_comp_osparc-python-runner-pytorch.png +thumbnail: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/thumbnails/s4l/simcore_services_comp_osparc-python-runner-pytorch.png contact: guidon@itis.swiss -thumbnail: https://i.imgur.com/iKoFIN0.png authors: - name: Manuel Guidon email: guidon@itis.swiss diff --git a/.osparc/osparc-python-runner-tensorflow/metadata.yml b/.osparc/osparc-python-runner-tensorflow/metadata.yml index 3be42e0..286b396 100644 --- a/.osparc/osparc-python-runner-tensorflow/metadata.yml +++ b/.osparc/osparc-python-runner-tensorflow/metadata.yml @@ -3,9 +3,10 @@ key: simcore/services/comp/osparc-python-runner-tensorflow type: computational integration-version: 2.0.0 version: 1.0.4 -description: oSparc Python Runner Tensorflow +description: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/refs/heads/main/app/full/services/simcore_services_comp_osparc-python-runner-tensorflow.md +icon: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/icons/s4l/simcore_services_comp_osparc-python-runner-tensorflow.png +thumbnail: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/thumbnails/s4l/simcore_services_comp_osparc-python-runner-tensorflow.png contact: guidon@itis.swiss -thumbnail: https://i.imgur.com/iKoFIN0.png authors: - name: Manuel Guidon email: guidon@itis.swiss From eccb2c77e930e1f9aad1292f86e5c80fe7735ee3 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 11:30:18 +0200 Subject: [PATCH 04/26] revamped Dockerfile for faster builds --- common/Dockerfile | 43 ++++------------------ osparc-python-runner-pytorch/Dockerfile | 2 +- osparc-python-runner-tensorflow/Dockerfile | 2 +- 3 files changed, 9 insertions(+), 38 deletions(-) diff --git a/common/Dockerfile b/common/Dockerfile index 4bfd3d4..faf62ab 100644 --- a/common/Dockerfile +++ b/common/Dockerfile @@ -11,35 +11,15 @@ RUN adduser \ 
--gecos "" \ --shell /bin/sh --home /home/${SC_USER_NAME} ${SC_USER_NAME} -RUN apt-get update \ - && apt-get -y install --no-install-recommends \ - build-essential \ - ca-certificates \ - curl \ - git \ - jq \ - libbz2-dev \ - libffi-dev \ - liblzma-dev \ - libncurses5-dev \ - libreadline-dev \ - libsqlite3-dev \ - libssl-dev \ - libxml2-dev \ - libxmlsec1-dev \ - llvm \ - make \ - python3-minimal \ - python3-pip \ - python3-venv \ - tk-dev \ - wget \ - xz-utils \ - zlib1g-dev \ - && rm --recursive --force /var/lib/apt/lists/* - COPY --from=ghcr.io/astral-sh/uv:0.7.6 /uv /uvx /bin/ +# installing python and setting as default also creating a default system wide venv +# NOTE: make sure the same version of python is used when running make requirements +ARG PYTHON_VERSION=3.12.10 +RUN uv python install ${PYTHON_VERSION} --default --preview && \ + uv venv .venv +ENV PATH="/home/${SC_USER_NAME}/.venv/bin:$PATH" + FROM base AS production ENV SC_BUILD_TARGET=production @@ -61,15 +41,6 @@ ENV LD_LIBRARY_PATH=/usr/local/cuda/lib64:${LD_LIBRARY_PATH} ENV LC_ALL=C.UTF-8 ENV LANG=C.UTF-8 -# installing python via pyenv and setting it as default -# NOTE: make sure the same version of python is used when running make requirements -ARG PYTHON_VERSION=3.12.10 -ENV PYENV_ROOT=/home/${SC_USER_NAME}/.pyenv -ENV PATH="${PYENV_ROOT}/shims:${PYENV_ROOT}/bin:${PATH}" -RUN git clone https://github.com/pyenv/pyenv.git /home/${SC_USER_NAME}/.pyenv && \ - git clone https://github.com/pyenv/pyenv-virtualenv.git $(pyenv root)/plugins/pyenv-virtualenv && \ - pyenv install ${PYTHON_VERSION} && \ - pyenv global ${PYTHON_VERSION} ENTRYPOINT [ "/bin/sh", "docker/entrypoint.sh", "/bin/sh", "-c" ] CMD ["run"] diff --git a/osparc-python-runner-pytorch/Dockerfile b/osparc-python-runner-pytorch/Dockerfile index 1b147ac..776013e 100644 --- a/osparc-python-runner-pytorch/Dockerfile +++ b/osparc-python-runner-pytorch/Dockerfile @@ -11,4 +11,4 @@ COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} 
osparc_python_runner_pytorch osparc # copy commong main.py RUN mv main.py osparc_python_runner_pytorch/main.py -RUN uv pip install --system -r osparc_python_runner_pytorch/requirements.txt \ No newline at end of file +RUN uv pip install -r osparc_python_runner_pytorch/requirements.txt \ No newline at end of file diff --git a/osparc-python-runner-tensorflow/Dockerfile b/osparc-python-runner-tensorflow/Dockerfile index 6596cc5..4b24563 100644 --- a/osparc-python-runner-tensorflow/Dockerfile +++ b/osparc-python-runner-tensorflow/Dockerfile @@ -11,4 +11,4 @@ COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} osparc_python_runner_tensorflow osp # copy commong main.py RUN mv main.py osparc_python_runner_tensorflow/main.py -RUN uv pip install --system -r osparc_python_runner_tensorflow/requirements.txt \ No newline at end of file +RUN uv pip install -r osparc_python_runner_tensorflow/requirements.txt \ No newline at end of file From 082d40009736ef67039dad94aa3d03a427d0ed12 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 11:53:03 +0200 Subject: [PATCH 05/26] fixed entrypoint and uv --- common/Dockerfile | 1 + common/entrypoint.sh | 14 ++++++++++---- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/common/Dockerfile b/common/Dockerfile index faf62ab..83670dd 100644 --- a/common/Dockerfile +++ b/common/Dockerfile @@ -19,6 +19,7 @@ ARG PYTHON_VERSION=3.12.10 RUN uv python install ${PYTHON_VERSION} --default --preview && \ uv venv .venv ENV PATH="/home/${SC_USER_NAME}/.venv/bin:$PATH" +ENV UV_HTTP_TIMEOUT=120 FROM base AS production diff --git a/common/entrypoint.sh b/common/entrypoint.sh index 1708634..1cb7f2f 100755 --- a/common/entrypoint.sh +++ b/common/entrypoint.sh @@ -51,11 +51,17 @@ else else echo "group already exists" fi - echo "adding $SC_USER_NAME to group $CONT_GROUPNAME..." 
- adduser "$SC_USER_NAME" "$CONT_GROUPNAME" + + # add user if missing + if id "$SC_USER_NAME" >/dev/null 2>&1; then + echo "User $SC_USER_NAME already exists" + else + echo "adding $SC_USER_NAME to group $CONT_GROUPNAME..." + adduser "$SC_USER_NAME" "$CONT_GROUPNAME" - echo "changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)" - usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" + echo "changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)" + usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" + fi echo "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; From 0d1f39db3e1b3aad6f729fce97574c9e9d805633 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 15:15:18 +0200 Subject: [PATCH 06/26] added mandatory outputs for validation --- validation-pytorch/outputs/output_1/.gitkeep | 0 validation-pytorch/outputs/output_2/.gitkeep | 0 validation-pytorch/outputs/output_3/.gitkeep | 0 validation-pytorch/outputs/output_4/.gitkeep | 0 validation-tensorflow/outputs/output_1/.gitkeep | 0 validation-tensorflow/outputs/output_2/.gitkeep | 0 validation-tensorflow/outputs/output_3/.gitkeep | 0 validation-tensorflow/outputs/output_4/.gitkeep | 0 8 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 validation-pytorch/outputs/output_1/.gitkeep create mode 100644 validation-pytorch/outputs/output_2/.gitkeep create mode 100644 validation-pytorch/outputs/output_3/.gitkeep create mode 100644 validation-pytorch/outputs/output_4/.gitkeep create mode 100644 validation-tensorflow/outputs/output_1/.gitkeep create mode 100644 validation-tensorflow/outputs/output_2/.gitkeep create mode 100644 validation-tensorflow/outputs/output_3/.gitkeep create mode 100644 
validation-tensorflow/outputs/output_4/.gitkeep diff --git a/validation-pytorch/outputs/output_1/.gitkeep b/validation-pytorch/outputs/output_1/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/validation-pytorch/outputs/output_2/.gitkeep b/validation-pytorch/outputs/output_2/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/validation-pytorch/outputs/output_3/.gitkeep b/validation-pytorch/outputs/output_3/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/validation-pytorch/outputs/output_4/.gitkeep b/validation-pytorch/outputs/output_4/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/validation-tensorflow/outputs/output_1/.gitkeep b/validation-tensorflow/outputs/output_1/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/validation-tensorflow/outputs/output_2/.gitkeep b/validation-tensorflow/outputs/output_2/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/validation-tensorflow/outputs/output_3/.gitkeep b/validation-tensorflow/outputs/output_3/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/validation-tensorflow/outputs/output_4/.gitkeep b/validation-tensorflow/outputs/output_4/.gitkeep new file mode 100644 index 0000000..e69de29 From ab2a0c26ca7f99d513a3cf3dd5f62a0a710d3392 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 15:33:52 +0200 Subject: [PATCH 07/26] added more outputs --- .../osparc-python-runner-pytorch/metadata.yml | 25 +++++++++++++++++-- .../metadata.yml | 25 +++++++++++++++++-- 2 files changed, 46 insertions(+), 4 deletions(-) diff --git a/.osparc/osparc-python-runner-pytorch/metadata.yml b/.osparc/osparc-python-runner-pytorch/metadata.yml index acc3e2d..78d683c 100644 --- a/.osparc/osparc-python-runner-pytorch/metadata.yml +++ b/.osparc/osparc-python-runner-pytorch/metadata.yml @@ -41,7 +41,28 @@ outputs: output_1: displayOrder: 1 label: Output data - description: All data produced by the script is zipped as 
output_data.zip + description: The data produced by the script and saved under OUTPUT_FOLDER/output_1 as output_1.zip type: data:*/* fileToKeyMap: - output_data.zip: output_1 + output_1.zip: output_1 + output_2: + displayOrder: 2 + label: Output data + description: The data produced by the script and saved under OUTPUT_FOLDER/output_2 as output_2.zip + type: data:*/* + fileToKeyMap: + output_2.zip: output_2 + output_3: + displayOrder: 3 + label: Output data + description: The data produced by the script and saved under OUTPUT_FOLDER/output_3 as output_3.zip + type: data:*/* + fileToKeyMap: + output_3.zip: output_3 + output_4: + displayOrder: 4 + label: Output data + description: The data produced by the script and saved under OUTPUT_FOLDER/output_4 as output_4.zip + type: data:*/* + fileToKeyMap: + output_4.zip: output_4 \ No newline at end of file diff --git a/.osparc/osparc-python-runner-tensorflow/metadata.yml b/.osparc/osparc-python-runner-tensorflow/metadata.yml index 286b396..43d275c 100644 --- a/.osparc/osparc-python-runner-tensorflow/metadata.yml +++ b/.osparc/osparc-python-runner-tensorflow/metadata.yml @@ -41,7 +41,28 @@ outputs: output_1: displayOrder: 1 label: Output data - description: All data produced by the script is zipped as output_data.zip + description: The data produced by the script and saved under OUTPUT_FOLDER/output_1 as output_1.zip type: data:*/* fileToKeyMap: - output_data.zip: output_1 + output_1.zip: output_1 + output_2: + displayOrder: 2 + label: Output data + description: The data produced by the script and saved under OUTPUT_FOLDER/output_2 as output_2.zip + type: data:*/* + fileToKeyMap: + output_2.zip: output_2 + output_3: + displayOrder: 3 + label: Output data + description: The data produced by the script and saved under OUTPUT_FOLDER/output_3 as output_3.zip + type: data:*/* + fileToKeyMap: + output_3.zip: output_3 + output_4: + displayOrder: 4 + label: Output data + description: The data produced by the script and saved under 
OUTPUT_FOLDER/output_4 as output_4.zip + type: data:*/* + fileToKeyMap: + output_4.zip: output_4 \ No newline at end of file From a7385ab25c4f9def5e0b34e8a0a032ed639eb6c4 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 15:35:03 +0200 Subject: [PATCH 08/26] made to build and execute --- common/Dockerfile | 21 +- common/main.py | 217 +++++++----------- osparc-python-runner-pytorch/Dockerfile | 5 - .../service.cli/execute.sh | 8 +- osparc-python-runner-tensorflow/Dockerfile | 5 - .../service.cli/execute.sh | 8 +- 6 files changed, 102 insertions(+), 162 deletions(-) diff --git a/common/Dockerfile b/common/Dockerfile index 83670dd..8db1f1d 100644 --- a/common/Dockerfile +++ b/common/Dockerfile @@ -4,22 +4,24 @@ LABEL maintainer="Andrei Neagu " ENV SC_USER_ID=8004 ENV SC_USER_NAME=scu - RUN adduser \ --uid ${SC_USER_ID} \ --disabled-password \ --gecos "" \ --shell /bin/sh --home /home/${SC_USER_NAME} ${SC_USER_NAME} +# Install uv tool COPY --from=ghcr.io/astral-sh/uv:0.7.6 /uv /uvx /bin/ +ENV UV_HTTP_TIMEOUT=120 -# installing python and setting as default also creating a default system wide venv -# NOTE: make sure the same version of python is used when running make requirements +# Install Python globally for all users ARG PYTHON_VERSION=3.12.10 -RUN uv python install ${PYTHON_VERSION} --default --preview && \ - uv venv .venv -ENV PATH="/home/${SC_USER_NAME}/.venv/bin:$PATH" -ENV UV_HTTP_TIMEOUT=120 +ENV UV_PYTHON_INSTALL_DIR=/opt/uv-python +RUN mkdir -p /opt/uv-python \ + && uv venv /opt/venv --python=python${PYTHON_VERSION%.*} \ + && chmod -R a+rx /opt/uv-python \ + && chown -R ${SC_USER_NAME}:${SC_USER_NAME} /opt/venv +ENV PATH="/opt/venv/bin:$PATH" FROM base AS production @@ -31,18 +33,13 @@ ENV PATH="/home/${SC_USER_NAME}/service.cli:${PATH}" WORKDIR /home/${SC_USER_NAME} -# copy docker bootup scripts COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} entrypoint.sh docker/ - -# copy main.py COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} main.py main.py 
ENV LD_LIBRARY_PATH=/usr/local/cuda/lib64:${LD_LIBRARY_PATH} - ENV LC_ALL=C.UTF-8 ENV LANG=C.UTF-8 - ENTRYPOINT [ "/bin/sh", "docker/entrypoint.sh", "/bin/sh", "-c" ] CMD ["run"] diff --git a/common/main.py b/common/main.py index 12ad5b9..c70aeaa 100644 --- a/common/main.py +++ b/common/main.py @@ -1,69 +1,32 @@ +import json import logging import os import shutil import subprocess import sys -import tempfile -import zipfile from pathlib import Path +from typing import Dict logging.basicConfig(level=logging.INFO) logger = logging.getLogger("osparc-python-main") ENVIRONS = ["INPUT_FOLDER", "OUTPUT_FOLDER"] -input_dir, output_dir = [Path(os.environ.get(v, None)) for v in ENVIRONS] +try: + INPUT_FOLDER, OUTPUT_FOLDER = [Path(os.environ[v]) for v in ENVIRONS] +except KeyError: + raise ValueError("Required env vars {ENVIRONS} were not set") -# TODO: sync with schema in metadata!! -OUTPUT_FILE = "output_data.zip" +# NOTE: sync with schema in metadata!! +NUM_INPUTS = 5 +NUM_OUTPUTS = 4 +OUTPUT_SUBFOLDER_ENV_TEMPLATE = "OUTPUT_{}" +OUTPUT_SUBFOLDER_TEMPLATE = "output_{}" +OUTPUT_FILE_TEMPLATE = "output_{}.zip" -FILE_DIR = os.path.realpath(__file__) - -def copy(src, dest): - try: - src, dest = str(src), str(dest) - shutil.copytree( - src, dest, ignore=shutil.ignore_patterns("*.zip", "__pycache__", ".*") - ) - except OSError as err: - # If the error was caused because the source wasn't a directory - if err.errno == shutil.errno.ENOTDIR: - shutil.copy(src, dest) - else: - logger.error("Directory not copied. Error: %s", err) - - -def clean_dir(dirpath: Path): - for root, dirs, files in os.walk(dirpath): - for f in files: - os.unlink(os.path.join(root, f)) - for d in dirs: - shutil.rmtree(os.path.join(root, d)) - - -def run_cmd(cmd: str): - subprocess.run(cmd.split(), shell=False, check=True, cwd=input_dir) - # TODO: deal with stdout, log? and error?? 
- - -def unzip_dir(parent: Path): - for filepath in list(parent.rglob("*.zip")): - if zipfile.is_zipfile(filepath): - with zipfile.ZipFile(filepath) as zf: - zf.extractall(filepath.parent) - - -def zipdir(dirpath: Path, ziph: zipfile.ZipFile): - """Zips directory and archives files relative to dirpath""" - for root, dirs, files in os.walk(dirpath): - for filename in files: - filepath = os.path.join(root, filename) - ziph.write(filepath, arcname=os.path.relpath(filepath, dirpath)) - dirs[:] = [name for name in dirs if not name.startswith(".")] - - -def ensure_main_entrypoint(code_dir: Path) -> Path: +def _find_user_code_entrypoint(code_dir: Path) -> Path: + logger.info("Searching for script main entrypoint ...") code_files = list(code_dir.rglob("*.py")) if not code_files: @@ -77,10 +40,12 @@ def ensure_main_entrypoint(code_dir: Path) -> Path: raise ValueError(f"Many entrypoints found: {code_files}") main_py = code_files[0] + logger.info("Found %s as main entrypoint", main_py) return main_py -def ensure_requirements(code_dir: Path) -> Path: +def _ensure_pip_requirements(code_dir: Path) -> Path: + logger.info("Searching for requirements file ...") requirements = list(code_dir.rglob("requirements.txt")) if len(requirements) > 1: raise ValueError(f"Many requirements found: {requirements}") @@ -89,105 +54,93 @@ def ensure_requirements(code_dir: Path) -> Path: # deduce requirements using pipreqs logger.info("Not found. 
Recreating requirements ...") requirements = code_dir / "requirements.txt" - run_cmd(f"pipreqs --savepath={requirements} --force {code_dir}") + subprocess.run( + f"pipreqs --savepath={requirements} --force {code_dir}".split(), + shell=False, + check=True, + cwd=INPUT_FOLDER, + ) + + # TODO log subprocess.run else: requirements = requirements[0] - - # we want to make sure that no already installed libraries are being touched - - # the requirements file of this service - runner_requirements = Path(FILE_DIR).parent / "requirements.txt" - - # this will be the one from the user augmented by a constraint to the runner one - requirements_in = code_dir / "requirements.in" - - # tmp file for creating the new one - tmp_requirements = code_dir / "requirements.tmp" - - with open(requirements, "r") as f: - with open(tmp_requirements, "w") as f2: - f2.write(f"-c {runner_requirements}\n") - f2.write(f.read()) - - os.rename(tmp_requirements, requirements_in) - - return Path(requirements_in) + logger.info(f"Found: {requirements}") + return requirements + + +# TODO: Next version of integration will take care of this and maybe the ENVs as well +def _ensure_output_subfolders_exist() -> Dict[str, str]: + output_envs = {} + for n in range(1, NUM_OUTPUTS + 1): + output_sub_folder_env = f"OUTPUT_{n}" + output_sub_folder = OUTPUT_FOLDER / OUTPUT_SUBFOLDER_TEMPLATE.format(n) + # NOTE: exist_ok for forward compatibility in case they are already created + output_sub_folder.mkdir(parents=True, exist_ok=True) + output_envs[output_sub_folder_env] = f"{output_sub_folder}" + logger.info( + "Output ENVs available: %s", + json.dumps(output_envs, indent=2), + ) + return output_envs + + +def _ensure_input_environment() -> Dict[str, str]: + input_envs = { + f"INPUT_{n}": os.environ[f"INPUT_{n}"] for n in range(1, NUM_INPUTS + 1) + } + logger.info( + "Input ENVs available: %s", + json.dumps(input_envs, indent=2), + ) + return input_envs def setup(): - logger.info("Cleaning output ...") - 
clean_dir(output_dir) + input_envs = _ensure_input_environment() + output_envs = _ensure_output_subfolders_exist() + logger.info("Available data:") + os.system("ls -tlah") - # TODO: snapshot_before = list(input_dir.rglob("*")) - - # NOTE The inputs defined in ${INPUT_FOLDER}/inputs.json are available as env variables by their key in capital letters - # For example: input_1 -> $INPUT_1 - # - - logger.info("Processing input ...") - unzip_dir(input_dir) - - # logger.info("Copying input to output ...") - # copy(input_dir, code_dir) - - logger.info("Searching main entrypoint ...") - main_py = ensure_main_entrypoint(input_dir) - logger.info("Found %s as main entrypoint", main_py) - - logger.info("Searching requirements ...") - logger.info(input_dir) - requirements_in = ensure_requirements(input_dir) - - requirements = requirements_in.parent / "requirements.compiled" + user_code_entrypoint = _find_user_code_entrypoint(INPUT_FOLDER) + requirements_txt = _ensure_pip_requirements(INPUT_FOLDER) logger.info("Preparing launch script ...") - venv_dir = Path.home() / ".venv" + bash_input_env_export = [f"export {env}={path}" for env, path in input_envs.items()] + bash_output_env_export = [ + f"export {env}='{path}'" for env, path in output_envs.items() + ] script = [ "#!/bin/sh", "set -o errexit", "set -o nounset", "IFS=$(printf '\\n\\t')", - f"echo compiling {requirements_in} into {requirements} ...", - f"pip-compile --upgrade --build-isolation --output-file {requirements} {requirements_in}", - 'echo "Creating virtual environment ..."', - f"python3 -m venv --system-site-packages --symlinks --upgrade {venv_dir}", - f"{venv_dir}/bin/pip install -U pip wheel setuptools", - f"{venv_dir}/bin/pip install -r {requirements}", - f'echo "Executing code {main_py.name}..."', - f"{venv_dir}/bin/python3 {main_py}", + f'uv pip install -r "{requirements_txt}"', + "\n".join(bash_input_env_export), + "\n".join(bash_output_env_export), + f'echo "Executing code {user_code_entrypoint.name}..."', + 
f'"python" "{user_code_entrypoint}"', 'echo "DONE ..."', ] - main_script_path = Path("main.sh") - with main_script_path.open("w") as fp: - for line in script: - print(f"{line}\n", file=fp) - - # # TODO: take snapshot - # logger.info("Creating virtual environment ...") - # run_cmd("python3 -m venv --system-site-packages --symlinks --upgrade .venv") - # run_cmd(".venv/bin/pip install -U pip wheel setuptools") - # run_cmd(f".venv/bin/pip install -r {requirements}") - - # # TODO: take snapshot - # logger.info("Executing code ...") - # run_cmd(f".venv/bin/python3 {main_py}") + main_sh_path = Path("main.sh") + logger.info("main_sh_path: %s", main_sh_path.absolute()) # TODO: remove this line + main_sh_path.write_text("\n".join(script)) def teardown(): - logger.info("Zipping output ....") - - # TODO: sync zipped name with docker/labels/outputs.json - with tempfile.TemporaryDirectory() as tmpdir: - zipped_file = Path(f"{tmpdir}/{OUTPUT_FILE}") - with zipfile.ZipFile(str(zipped_file), "w", zipfile.ZIP_DEFLATED) as zh: - zipdir(output_dir, zh) - - logger.info("Cleaning output") - clean_dir(output_dir) - - logger.info("Moving %s", zipped_file.name) - shutil.move(str(zipped_file), str(output_dir)) + logger.info("Zipping output...") + for n in range(1, NUM_OUTPUTS + 1): + output_path = OUTPUT_FOLDER / f"output_{n}" + archive_file_path = OUTPUT_FOLDER / OUTPUT_FILE_TEMPLATE.format(n) + logger.info("Zipping %s into %s...", output_path, archive_file_path) + shutil.make_archive( + f"{(archive_file_path.parent / archive_file_path.stem)}", + format="zip", + root_dir=output_path, + logger=logger, + ) + logger.info("Zipping %s into %s done", output_path, archive_file_path) + logger.info("Zipping done.") if __name__ == "__main__": diff --git a/osparc-python-runner-pytorch/Dockerfile b/osparc-python-runner-pytorch/Dockerfile index 776013e..f2ce4b9 100644 --- a/osparc-python-runner-pytorch/Dockerfile +++ b/osparc-python-runner-pytorch/Dockerfile @@ -2,13 +2,8 @@ FROM 
simcore/services/comp/common:0.0.0 AS service-base LABEL maintainer="Andrei Neagu " -# copy simcore service cli COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} service.cli/ service.cli/ - -# copy binaries from build COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} osparc_python_runner_pytorch osparc_python_runner_pytorch - -# copy commong main.py RUN mv main.py osparc_python_runner_pytorch/main.py RUN uv pip install -r osparc_python_runner_pytorch/requirements.txt \ No newline at end of file diff --git a/osparc-python-runner-pytorch/service.cli/execute.sh b/osparc-python-runner-pytorch/service.cli/execute.sh index 2b49c45..d098890 100755 --- a/osparc-python-runner-pytorch/service.cli/execute.sh +++ b/osparc-python-runner-pytorch/service.cli/execute.sh @@ -7,11 +7,11 @@ IFS=$(printf '\n\t') cd /home/scu/osparc_python_runner_pytorch echo "starting service as" -echo User : "$(id "$(whoami)")" -echo Workdir : "$(pwd)" +echo " User : $(id "$(whoami)")" +echo " Workdir : $(pwd)" echo "..." echo -python3 main.py setup +python main.py setup /bin/sh main.sh -python3 main.py teardown \ No newline at end of file +python main.py teardown \ No newline at end of file diff --git a/osparc-python-runner-tensorflow/Dockerfile b/osparc-python-runner-tensorflow/Dockerfile index 4b24563..c61a446 100644 --- a/osparc-python-runner-tensorflow/Dockerfile +++ b/osparc-python-runner-tensorflow/Dockerfile @@ -2,13 +2,8 @@ FROM simcore/services/comp/common:0.0.0 AS service-base LABEL maintainer="Andrei Neagu " -# copy simcore service cli COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} service.cli/ service.cli/ - -# copy binaries from build COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} osparc_python_runner_tensorflow osparc_python_runner_tensorflow - -# copy commong main.py RUN mv main.py osparc_python_runner_tensorflow/main.py RUN uv pip install -r osparc_python_runner_tensorflow/requirements.txt \ No newline at end of file diff --git a/osparc-python-runner-tensorflow/service.cli/execute.sh 
b/osparc-python-runner-tensorflow/service.cli/execute.sh index f723e29..35558cf 100755 --- a/osparc-python-runner-tensorflow/service.cli/execute.sh +++ b/osparc-python-runner-tensorflow/service.cli/execute.sh @@ -7,11 +7,11 @@ IFS=$(printf '\n\t') cd /home/scu/osparc_python_runner_tensorflow echo "starting service as" -echo User : "$(id "$(whoami)")" -echo Workdir : "$(pwd)" +echo " User : $(id "$(whoami)")" +echo " Workdir : $(pwd)" echo "..." echo -python3 main.py setup +python main.py setup /bin/sh main.sh -python3 main.py teardown \ No newline at end of file +python main.py teardown \ No newline at end of file From 51cbd949487c15b7ee749a9be787587b9a64d962 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 15:37:01 +0200 Subject: [PATCH 09/26] validation setup --- Makefile | 22 ++++++++++++++++------ docker-compose-local.yml | 9 +++++++++ 2 files changed, 25 insertions(+), 6 deletions(-) create mode 100644 docker-compose-local.yml diff --git a/Makefile b/Makefile index 975010c..8e6066e 100644 --- a/Makefile +++ b/Makefile @@ -66,16 +66,26 @@ build: | compose-spec ## build docker image docker compose build # To test built service locally ------------------------------------------------------------------------- -.PHONY: run-local -run-local: ## runs image with local configuration +.PHONY: run-pytorch-local +run-pytorch-local: ## runs pytorch image with local configuration + IMAGE_TO_RUN=${IMAGE_PYTORCH} \ + TAG_TO_RUN=${TAG_PYTORCH} \ + VALIDATION_DIR=validation-pytorch \ + docker compose --file docker-compose-local.yml up + +.PHONY: run-tensorflow-local +run-tensorflow-local: ## runs tensorflow image with local configuration + IMAGE_TO_RUN=${IMAGE_TENSORFLOW} \ + TAG_TO_RUN=${TAG_TENSORFLOW} \ + VALIDATION_DIR=validation-tensorflow \ docker compose --file docker-compose-local.yml up .PHONY: publish-local publish-local: ## push to local throw away registry to test integration - docker tag simcore/services/comp/${IMAGE_PYTORCH}:${DOCKER_IMAGE_TAG} 
registry:5000/simcore/services/comp/$(IMAGE_PYTORCH):$(DOCKER_IMAGE_TAG) - docker push registry:5000/simcore/services/comp/$(IMAGE_PYTORCH):$(DOCKER_IMAGE_TAG) - docker tag simcore/services/comp/${IMAGE_TENSORFLOW}:${DOCKER_IMAGE_TAG} registry:5000/simcore/services/comp/$(IMAGE_TENSORFLOW):$(DOCKER_IMAGE_TAG) - docker push registry:5000/simcore/services/comp/$(IMAGE_TENSORFLOW):$(DOCKER_IMAGE_TAG) + docker tag simcore/services/comp/${IMAGE_PYTORCH}:${TAG_PYTORCH} registry:5000/simcore/services/comp/${IMAGE_PYTORCH}:${TAG_PYTORCH} + docker push registry:5000/simcore/services/comp/${IMAGE_PYTORCH}:${TAG_PYTORCH} + docker tag simcore/services/comp/${IMAGE_TENSORFLOW}:${TAG_TENSORFLOW} registry:5000/simcore/services/comp/${IMAGE_TENSORFLOW}:${TAG_TENSORFLOW} + docker push registry:5000/simcore/services/comp/${IMAGE_TENSORFLOW}:${TAG_TENSORFLOW} @curl registry:5000/v2/_catalog | jq .PHONY: help diff --git a/docker-compose-local.yml b/docker-compose-local.yml new file mode 100644 index 0000000..1f630e1 --- /dev/null +++ b/docker-compose-local.yml @@ -0,0 +1,9 @@ +services: + runner-ml: + image: simcore/services/comp/${IMAGE_TO_RUN}:${TAG_TO_RUN} + environment: + - INPUT_FOLDER=/tmp/inputs + - OUTPUT_FOLDER=/tmp/outputs + volumes: + - ${PWD}/${VALIDATION_DIR}/inputs:/tmp/inputs + - ${PWD}/${VALIDATION_DIR}/outputs:/tmp/outputs From 7894fe2adbb17e7c5c5d95def54b499a4aa9e512 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 15:38:38 +0200 Subject: [PATCH 10/26] added ci --- .github/workflows/check-image.yml | 39 +++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 .github/workflows/check-image.yml diff --git a/.github/workflows/check-image.yml b/.github/workflows/check-image.yml new file mode 100644 index 0000000..a5441f2 --- /dev/null +++ b/.github/workflows/check-image.yml @@ -0,0 +1,39 @@ +name: Build and check image + +on: + # Run the workflow for pushes to the main branch + push: + branches: + - master + + # Run the 
workflow for pull requests (from forks or within the repo) + pull_request: + types: + - opened + - synchronize + - reopened + +jobs: + verify-image-build: + runs-on: ubuntu-latest + steps: + - name: Checkout repo content + uses: actions/checkout@v2 + - name: ooil version + uses: docker://itisfoundation/ci-service-integration-library:v2.0.11 + with: + args: ooil --version + - name: Assemble docker compose spec + uses: docker://itisfoundation/ci-service-integration-library:v2.0.11 + with: + args: ooil compose + - name: Build all images if multiple + uses: docker://itisfoundation/ci-service-integration-library:v2.0.11 + with: + args: docker compose build + - name: test Tensorflow + with: + args: make run-tensorflow-local + - name: test PyTorch + with: + args: make run-pytorch-local From 29fff44f90bd92f5a68893977db17890cbcb9e6a Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 15:38:59 +0200 Subject: [PATCH 11/26] added scripts to check functionality --- validation-pytorch/inputs/input_1/main.py | 16 ++++++++++++++++ .../inputs/input_1/requirements.txt | 2 ++ 2 files changed, 18 insertions(+) create mode 100644 validation-pytorch/inputs/input_1/main.py create mode 100644 validation-pytorch/inputs/input_1/requirements.txt diff --git a/validation-pytorch/inputs/input_1/main.py b/validation-pytorch/inputs/input_1/main.py new file mode 100644 index 0000000..4931969 --- /dev/null +++ b/validation-pytorch/inputs/input_1/main.py @@ -0,0 +1,16 @@ +import torch +import os +import requests +from pathlib import Path + +response = requests.get("https://www.google.com") +print(response) + +msg = torch.tensor("Hello, PyTorch!") +print(msg) +# Example tensor operation +a = torch.tensor([1.0, 2.0, 3.0]) +b = torch.tensor([4.0, 5.0, 6.0]) +print("Sum:", a + b) + + diff --git a/validation-pytorch/inputs/input_1/requirements.txt b/validation-pytorch/inputs/input_1/requirements.txt new file mode 100644 index 0000000..3e73491 --- /dev/null +++ 
b/validation-pytorch/inputs/input_1/requirements.txt @@ -0,0 +1,2 @@ +requests +numpy From 0cd9febaf5f64e129120a69dd7ead21f8da5f12b Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 15:39:18 +0200 Subject: [PATCH 12/26] added tensorflow validation --- validation-tensorflow/inputs/input_1/main.py | 15 +++++++++++++++ .../inputs/input_1/requirements.txt | 2 ++ 2 files changed, 17 insertions(+) create mode 100644 validation-tensorflow/inputs/input_1/main.py create mode 100644 validation-tensorflow/inputs/input_1/requirements.txt diff --git a/validation-tensorflow/inputs/input_1/main.py b/validation-tensorflow/inputs/input_1/main.py new file mode 100644 index 0000000..4dc16f8 --- /dev/null +++ b/validation-tensorflow/inputs/input_1/main.py @@ -0,0 +1,15 @@ +import os + +from pathlib import Path +import requests +import tensorflow as tf + +response = requests.get("https://www.google.com") +print(response) + +msg = tf.constant("Hello, TensorFlow!") +tf.print(msg) + +(Path(os.environ["OUTPUT_FOLDER"]) / "output_1" / "a_file.txt").write_text( + "Hello, TensorFlow!" 
+) diff --git a/validation-tensorflow/inputs/input_1/requirements.txt b/validation-tensorflow/inputs/input_1/requirements.txt new file mode 100644 index 0000000..3e73491 --- /dev/null +++ b/validation-tensorflow/inputs/input_1/requirements.txt @@ -0,0 +1,2 @@ +requests +numpy From 91ecfc5878fc73175d6a9d29dd47e845b0aede49 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 15:39:58 +0200 Subject: [PATCH 13/26] proper validation --- validation-pytorch/inputs/input_1/main.py | 1 - validation-tensorflow/inputs/input_1/main.py | 4 ---- 2 files changed, 5 deletions(-) diff --git a/validation-pytorch/inputs/input_1/main.py b/validation-pytorch/inputs/input_1/main.py index 4931969..51b1601 100644 --- a/validation-pytorch/inputs/input_1/main.py +++ b/validation-pytorch/inputs/input_1/main.py @@ -13,4 +13,3 @@ b = torch.tensor([4.0, 5.0, 6.0]) print("Sum:", a + b) - diff --git a/validation-tensorflow/inputs/input_1/main.py b/validation-tensorflow/inputs/input_1/main.py index 4dc16f8..43c3bc6 100644 --- a/validation-tensorflow/inputs/input_1/main.py +++ b/validation-tensorflow/inputs/input_1/main.py @@ -9,7 +9,3 @@ msg = tf.constant("Hello, TensorFlow!") tf.print(msg) - -(Path(os.environ["OUTPUT_FOLDER"]) / "output_1" / "a_file.txt").write_text( - "Hello, TensorFlow!" 
-) From 39c03652a76bfd04223408637c0f779b24c7edf0 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 15:42:39 +0200 Subject: [PATCH 14/26] updated workflow --- .github/workflows/check-image.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/check-image.yml b/.github/workflows/check-image.yml index a5441f2..ae812d9 100644 --- a/.github/workflows/check-image.yml +++ b/.github/workflows/check-image.yml @@ -32,8 +32,6 @@ jobs: with: args: docker compose build - name: test Tensorflow - with: - args: make run-tensorflow-local + run: make run-tensorflow-local - name: test PyTorch - with: - args: make run-pytorch-local + run: make run-pytorch-local From 227fc48ee7295850617dc91fa9a07c1ee98febf6 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 15:43:58 +0200 Subject: [PATCH 15/26] using tooling --- .github/workflows/check-image.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/check-image.yml b/.github/workflows/check-image.yml index ae812d9..fe2065d 100644 --- a/.github/workflows/check-image.yml +++ b/.github/workflows/check-image.yml @@ -32,6 +32,10 @@ jobs: with: args: docker compose build - name: test Tensorflow - run: make run-tensorflow-local + uses: docker://itisfoundation/ci-service-integration-library:v2.0.11 + with: + args: make run-tensorflow-local - name: test PyTorch - run: make run-pytorch-local + uses: docker://itisfoundation/ci-service-integration-library:v2.0.11 + with: + args: make run-pytorch-local From 2f4c1d4d49ec147bcff6a6ece01fdd41d98e5a1a Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 15:54:39 +0200 Subject: [PATCH 16/26] updated actions --- .github/workflows/check-image.yml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/.github/workflows/check-image.yml b/.github/workflows/check-image.yml index fe2065d..ae812d9 100644 --- a/.github/workflows/check-image.yml +++ 
b/.github/workflows/check-image.yml @@ -32,10 +32,6 @@ jobs: with: args: docker compose build - name: test Tensorflow - uses: docker://itisfoundation/ci-service-integration-library:v2.0.11 - with: - args: make run-tensorflow-local + run: make run-tensorflow-local - name: test PyTorch - uses: docker://itisfoundation/ci-service-integration-library:v2.0.11 - with: - args: make run-pytorch-local + run: make run-pytorch-local From 76e8def0f4daefdcde43c06b1a067a0ae39f7cf1 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 23 May 2025 16:03:41 +0200 Subject: [PATCH 17/26] check if CI fails --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 8e6066e..7816a3b 100644 --- a/Makefile +++ b/Makefile @@ -71,14 +71,14 @@ run-pytorch-local: ## runs pytorch image with local configuration IMAGE_TO_RUN=${IMAGE_PYTORCH} \ TAG_TO_RUN=${TAG_PYTORCH} \ VALIDATION_DIR=validation-pytorch \ - docker compose --file docker-compose-local.yml up + docker compose --file docker-compose-local.yml up --abort-on-container-exit --exit-code-from runner-ml-1 .PHONY: run-tensorflow-local run-tensorflow-local: ## runs tensorflow image with local configuration IMAGE_TO_RUN=${IMAGE_TENSORFLOW} \ TAG_TO_RUN=${TAG_TENSORFLOW} \ VALIDATION_DIR=validation-tensorflow \ - docker compose --file docker-compose-local.yml up + docker compose --file docker-compose-local.yml up --abort-on-container-exit --exit-code-from runner-ml-1 .PHONY: publish-local publish-local: ## push to local throw away registry to test integration From 97b5642ba82f6ea19cfb8120afb212199de1c0c8 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Mon, 26 May 2025 14:11:24 +0200 Subject: [PATCH 18/26] refactor --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 7816a3b..22cbf8a 100644 --- a/Makefile +++ b/Makefile @@ -71,14 +71,14 @@ run-pytorch-local: ## runs pytorch image with local configuration IMAGE_TO_RUN=${IMAGE_PYTORCH} \ 
TAG_TO_RUN=${TAG_PYTORCH} \ VALIDATION_DIR=validation-pytorch \ - docker compose --file docker-compose-local.yml up --abort-on-container-exit --exit-code-from runner-ml-1 + docker compose --file docker-compose-local.yml up --abort-on-container-exit --exit-code-from runner-ml .PHONY: run-tensorflow-local run-tensorflow-local: ## runs tensorflow image with local configuration IMAGE_TO_RUN=${IMAGE_TENSORFLOW} \ TAG_TO_RUN=${TAG_TENSORFLOW} \ VALIDATION_DIR=validation-tensorflow \ - docker compose --file docker-compose-local.yml up --abort-on-container-exit --exit-code-from runner-ml-1 + docker compose --file docker-compose-local.yml up --abort-on-container-exit --exit-code-from runner-ml .PHONY: publish-local publish-local: ## push to local throw away registry to test integration From 5a2e0fdfede1c271d25b8532ae5a273daf5590e8 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Mon, 26 May 2025 14:16:20 +0200 Subject: [PATCH 19/26] refactor tests --- validation-pytorch/inputs/input_1/main.py | 4 +--- validation-tensorflow/inputs/input_1/main.py | 3 --- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/validation-pytorch/inputs/input_1/main.py b/validation-pytorch/inputs/input_1/main.py index 51b1601..d18f231 100644 --- a/validation-pytorch/inputs/input_1/main.py +++ b/validation-pytorch/inputs/input_1/main.py @@ -1,12 +1,10 @@ import torch -import os import requests -from pathlib import Path response = requests.get("https://www.google.com") print(response) -msg = torch.tensor("Hello, PyTorch!") +msg = torch.tensor([[1, 2, 3], [4, 5, 6]]) print(msg) # Example tensor operation a = torch.tensor([1.0, 2.0, 3.0]) diff --git a/validation-tensorflow/inputs/input_1/main.py b/validation-tensorflow/inputs/input_1/main.py index 43c3bc6..3a73510 100644 --- a/validation-tensorflow/inputs/input_1/main.py +++ b/validation-tensorflow/inputs/input_1/main.py @@ -1,6 +1,3 @@ -import os - -from pathlib import Path import requests import tensorflow as tf From 
cd4b2a8b4691f6c287e20c0527a5f81ef53a9c34 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Mon, 26 May 2025 14:54:01 +0200 Subject: [PATCH 20/26] added bump2version config --- .bumpversion-pytorch.cfg | 13 +++++++++++++ .bumpversion-tensorflow.cfg | 13 +++++++++++++ 2 files changed, 26 insertions(+) create mode 100644 .bumpversion-pytorch.cfg create mode 100644 .bumpversion-tensorflow.cfg diff --git a/.bumpversion-pytorch.cfg b/.bumpversion-pytorch.cfg new file mode 100644 index 0000000..6d4f59f --- /dev/null +++ b/.bumpversion-pytorch.cfg @@ -0,0 +1,13 @@ +[bumpversion] +current_version = 1.0.4 +commit = False +message = service version: {current_version} → {new_version} +tag = False + +[bumpversion:file:.osparc/osparc-python-runner-pytorch/metadata.yml] +search = version: {current_version} +replace = version: {new_version} + +[bumpversion:file:Makefile] +search = TAG_PYTORCH={current_version} +replace = TAG_PYTORCH={new_version} diff --git a/.bumpversion-tensorflow.cfg b/.bumpversion-tensorflow.cfg new file mode 100644 index 0000000..06d222e --- /dev/null +++ b/.bumpversion-tensorflow.cfg @@ -0,0 +1,13 @@ +[bumpversion] +current_version = 1.0.4 +commit = False +message = service version: {current_version} → {new_version} +tag = False + +[bumpversion:file:.osparc/osparc-python-runner-tensorflow/metadata.yml] +search = version: {current_version} +replace = version: {new_version} + +[bumpversion:file:Makefile] +search = TAG_TENSORFLOW={current_version} +replace = TAG_TENSORFLOW={new_version} From 4b24aebfff5c33b13a21586bf083f264e562cf7e Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Mon, 26 May 2025 14:54:55 +0200 Subject: [PATCH 21/26] added commands to bump --- Makefile | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 22cbf8a..7f955d1 100644 --- a/Makefile +++ b/Makefile @@ -32,10 +32,16 @@ define _bumpversion sh -c "cd /ml-runner && bump2version --verbose --list --config-file $(1) $(subst $(2),,$@)" endef 
-.PHONY: version-patch version-minor version-major -version-patch version-minor version-major: .bumpversion.cfg ## increases service's version +.PHONY: version-tensorflow-patch version-tensorflow-minor version-tensorflow-major +version-tensorflow-patch version-tensorflow-minor version-tensorflow-major: .bumpversion-tensorflow.cfg ## increases tensroflow service's version @make compose-spec - @$(call _bumpversion,$<,version-) + @$(call _bumpversion,$<,version-tensorflow-) + @make compose-spec + +.PHONY: version-pytorch-patch version-pytorch-minor version-pytorch-major +version-pytorch-patch version-pytorch-minor version-pytorch-major: .bumpversion-pytorch.cfg ## increases pytorchservice's version + @make compose-spec + @$(call _bumpversion,$<,version-pytorch-) @make compose-spec From b4359098b8da39c6c1f1fe7d1ab452a20913fc04 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Mon, 26 May 2025 14:55:40 +0200 Subject: [PATCH 22/26] bumped version pytorch --- .bumpversion-pytorch.cfg | 2 +- .osparc/osparc-python-runner-pytorch/metadata.yml | 2 +- Makefile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.bumpversion-pytorch.cfg b/.bumpversion-pytorch.cfg index 6d4f59f..5b3d4f7 100644 --- a/.bumpversion-pytorch.cfg +++ b/.bumpversion-pytorch.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.0.4 +current_version = 1.1.0 commit = False message = service version: {current_version} → {new_version} tag = False diff --git a/.osparc/osparc-python-runner-pytorch/metadata.yml b/.osparc/osparc-python-runner-pytorch/metadata.yml index 78d683c..79188b9 100644 --- a/.osparc/osparc-python-runner-pytorch/metadata.yml +++ b/.osparc/osparc-python-runner-pytorch/metadata.yml @@ -2,7 +2,7 @@ name: oSparc Python Runner PyTorch key: simcore/services/comp/osparc-python-runner-pytorch type: computational integration-version: 2.0.0 -version: 1.0.4 +version: 1.1.0 description: 
https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/refs/heads/main/app/full/services/simcore_services_comp_osparc-python-runner-pytorch.md icon: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/icons/s4l/simcore_services_comp_osparc-python-runner-pytorch.png thumbnail: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/thumbnails/s4l/simcore_services_comp_osparc-python-runner-pytorch.png diff --git a/Makefile b/Makefile index 7f955d1..b9ce728 100644 --- a/Makefile +++ b/Makefile @@ -5,7 +5,7 @@ SHELL = /bin/sh export IMAGE_PYTORCH=osparc-python-runner-pytorch export IMAGE_TENSORFLOW=osparc-python-runner-tensorflow -export TAG_PYTORCH=1.0.4 +export TAG_PYTORCH=1.1.0 export TAG_TENSORFLOW=1.0.4 # PYTHON ENVIRON --------------------------------------------------------------------------------------- From 34c159696e32c2a85ce63c98f94b2d9cb34e96d0 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Mon, 26 May 2025 14:56:01 +0200 Subject: [PATCH 23/26] bumped version tensorflow --- .bumpversion-tensorflow.cfg | 2 +- .osparc/osparc-python-runner-tensorflow/metadata.yml | 2 +- Makefile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.bumpversion-tensorflow.cfg b/.bumpversion-tensorflow.cfg index 06d222e..234e05b 100644 --- a/.bumpversion-tensorflow.cfg +++ b/.bumpversion-tensorflow.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.0.4 +current_version = 1.1.0 commit = False message = service version: {current_version} → {new_version} tag = False diff --git a/.osparc/osparc-python-runner-tensorflow/metadata.yml b/.osparc/osparc-python-runner-tensorflow/metadata.yml index 43d275c..513303a 100644 --- a/.osparc/osparc-python-runner-tensorflow/metadata.yml +++ b/.osparc/osparc-python-runner-tensorflow/metadata.yml @@ -2,7 +2,7 @@ name: oSparc Python Runner Tensorflow key: simcore/services/comp/osparc-python-runner-tensorflow type: computational integration-version: 2.0.0 -version: 1.0.4 +version: 1.1.0 
description: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/refs/heads/main/app/full/services/simcore_services_comp_osparc-python-runner-tensorflow.md icon: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/icons/s4l/simcore_services_comp_osparc-python-runner-tensorflow.png thumbnail: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/thumbnails/s4l/simcore_services_comp_osparc-python-runner-tensorflow.png diff --git a/Makefile b/Makefile index b9ce728..84d1784 100644 --- a/Makefile +++ b/Makefile @@ -6,7 +6,7 @@ SHELL = /bin/sh export IMAGE_PYTORCH=osparc-python-runner-pytorch export IMAGE_TENSORFLOW=osparc-python-runner-tensorflow export TAG_PYTORCH=1.1.0 -export TAG_TENSORFLOW=1.0.4 +export TAG_TENSORFLOW=1.1.0 # PYTHON ENVIRON --------------------------------------------------------------------------------------- .PHONY: devenv From c386fbd8095539a4dbd40cea7c3c6ef4ef03448f Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Mon, 26 May 2025 15:42:53 +0200 Subject: [PATCH 24/26] refactor --- README.md | 3 +++ common/main.py | 45 +++++++++++++++++++++++---------------------- 2 files changed, 26 insertions(+), 22 deletions(-) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 0000000..9045b28 --- /dev/null +++ b/README.md @@ -0,0 +1,3 @@ +# TODOs: +- [ ] MaG ask for what limits to put on this comp service, previous versions of `simcore/services/comp/osparc-python-runner-tensorflow` <= `1.0.4` were unbound and should be retired! 
+- [ ] bump service version diff --git a/common/main.py b/common/main.py index c70aeaa..00d12ea 100644 --- a/common/main.py +++ b/common/main.py @@ -8,7 +8,7 @@ from typing import Dict logging.basicConfig(level=logging.INFO) -logger = logging.getLogger("osparc-python-main") +_logger = logging.getLogger("osparc-python-main") ENVIRONS = ["INPUT_FOLDER", "OUTPUT_FOLDER"] @@ -26,7 +26,7 @@ def _find_user_code_entrypoint(code_dir: Path) -> Path: - logger.info("Searching for script main entrypoint ...") + _logger.info("Searching for script main entrypoint ...") code_files = list(code_dir.rglob("*.py")) if not code_files: @@ -40,19 +40,19 @@ def _find_user_code_entrypoint(code_dir: Path) -> Path: raise ValueError(f"Many entrypoints found: {code_files}") main_py = code_files[0] - logger.info("Found %s as main entrypoint", main_py) + _logger.info("Found %s as main entrypoint", main_py) return main_py def _ensure_pip_requirements(code_dir: Path) -> Path: - logger.info("Searching for requirements file ...") + _logger.info("Searching for requirements file ...") requirements = list(code_dir.rglob("requirements.txt")) if len(requirements) > 1: raise ValueError(f"Many requirements found: {requirements}") elif not requirements: # deduce requirements using pipreqs - logger.info("Not found. Recreating requirements ...") + _logger.info("Not found. 
Recreating requirements ...") requirements = code_dir / "requirements.txt" subprocess.run( f"pipreqs --savepath={requirements} --force {code_dir}".split(), @@ -65,7 +65,7 @@ def _ensure_pip_requirements(code_dir: Path) -> Path: else: requirements = requirements[0] - logger.info(f"Found: {requirements}") + _logger.info(f"Found: {requirements}") return requirements @@ -78,7 +78,7 @@ def _ensure_output_subfolders_exist() -> Dict[str, str]: # NOTE: exist_ok for forward compatibility in case they are already created output_sub_folder.mkdir(parents=True, exist_ok=True) output_envs[output_sub_folder_env] = f"{output_sub_folder}" - logger.info( + _logger.info( "Output ENVs available: %s", json.dumps(output_envs, indent=2), ) @@ -89,7 +89,7 @@ def _ensure_input_environment() -> Dict[str, str]: input_envs = { f"INPUT_{n}": os.environ[f"INPUT_{n}"] for n in range(1, NUM_INPUTS + 1) } - logger.info( + _logger.info( "Input ENVs available: %s", json.dumps(input_envs, indent=2), ) @@ -99,13 +99,13 @@ def _ensure_input_environment() -> Dict[str, str]: def setup(): input_envs = _ensure_input_environment() output_envs = _ensure_output_subfolders_exist() - logger.info("Available data:") + _logger.info("Available data:") os.system("ls -tlah") user_code_entrypoint = _find_user_code_entrypoint(INPUT_FOLDER) requirements_txt = _ensure_pip_requirements(INPUT_FOLDER) - logger.info("Preparing launch script ...") + _logger.info("Preparing launch script ...") bash_input_env_export = [f"export {env}={path}" for env, path in input_envs.items()] bash_output_env_export = [ f"export {env}='{path}'" for env, path in output_envs.items() @@ -123,24 +123,25 @@ def setup(): 'echo "DONE ..."', ] main_sh_path = Path("main.sh") - logger.info("main_sh_path: %s", main_sh_path.absolute()) # TODO: remove this line + _logger.info("main_sh_path: %s", main_sh_path.absolute()) # TODO: remove this line main_sh_path.write_text("\n".join(script)) def teardown(): - logger.info("Zipping output...") + 
_logger.info("Zipping output...") for n in range(1, NUM_OUTPUTS + 1): output_path = OUTPUT_FOLDER / f"output_{n}" archive_file_path = OUTPUT_FOLDER / OUTPUT_FILE_TEMPLATE.format(n) - logger.info("Zipping %s into %s...", output_path, archive_file_path) - shutil.make_archive( - f"{(archive_file_path.parent / archive_file_path.stem)}", - format="zip", - root_dir=output_path, - logger=logger, - ) - logger.info("Zipping %s into %s done", output_path, archive_file_path) - logger.info("Zipping done.") + if any(output_path.iterdir()): # Only zip if directory is not empty + _logger.info("Zipping %s into %s...", output_path, archive_file_path) + shutil.make_archive( + f"{(archive_file_path.parent / archive_file_path.stem)}", + format="zip", + root_dir=output_path, + logger=_logger, + ) + _logger.info("Zipping %s into %s done", output_path, archive_file_path) + _logger.info("Zipping done.") if __name__ == "__main__": @@ -151,4 +152,4 @@ def teardown(): else: teardown() except Exception as err: # pylint: disable=broad-except - logger.error("%s . Stopping %s", err, action) + _logger.error("%s . Stopping %s", err, action) From 19e6663e9f4997ee0946b6a7cc93526d666764d6 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Mon, 26 May 2025 15:44:10 +0200 Subject: [PATCH 25/26] refactor --- README.md | 49 ++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 46 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 9045b28..e2ec549 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,46 @@ -# TODOs: -- [ ] MaG ask for what limits to put on this comp service, previous versions of `simcore/services/comp/osparc-python-runner-tensorflow` <= `1.0.4` were unbound and should be retired! 
-- [ ] bump service version + +# osparc-python-runner-pytorch and osparc-python-runner-tensorflow + +This repository contains the source code for two o²S²PARC Services: osparc-python-runner-pytorch and osparc-python-runner-tensorflow + +Building the docker images: + +```shell +make build +``` + + +Test the built images locally: + + +**pytorch** +```shell +make run-pytorch-local +``` + +**tensorflow** +```shell +make run-tensorflow-local +``` + + +Raising the version can be achieved via one for three methods. The `major`,`minor` or `patch` can be bumped, for example: + + +**pytorch** +```shell +make version-pytorch-patch +``` + +**tensorflow** +```shell +make version-tensorflow-patch +``` + + +If you already have a local copy of **o2S2PARC** running and wish to push data to the local registry: + +```shell +make publish-local +``` + From d8574ef7cb0e3efafd85d4f98f75eda03cbba20d Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Mon, 26 May 2025 16:14:19 +0200 Subject: [PATCH 26/26] reverted change --- common/main.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/common/main.py b/common/main.py index 00d12ea..db9c7d2 100644 --- a/common/main.py +++ b/common/main.py @@ -132,15 +132,14 @@ def teardown(): for n in range(1, NUM_OUTPUTS + 1): output_path = OUTPUT_FOLDER / f"output_{n}" archive_file_path = OUTPUT_FOLDER / OUTPUT_FILE_TEMPLATE.format(n) - if any(output_path.iterdir()): # Only zip if directory is not empty - _logger.info("Zipping %s into %s...", output_path, archive_file_path) - shutil.make_archive( - f"{(archive_file_path.parent / archive_file_path.stem)}", - format="zip", - root_dir=output_path, - logger=_logger, - ) - _logger.info("Zipping %s into %s done", output_path, archive_file_path) + _logger.info("Zipping %s into %s...", output_path, archive_file_path) + shutil.make_archive( + f"{(archive_file_path.parent / archive_file_path.stem)}", + format="zip", + root_dir=output_path, + logger=_logger, + ) + 
_logger.info("Zipping %s into %s done", output_path, archive_file_path) _logger.info("Zipping done.")