diff --git a/.bumpversion-pytorch.cfg b/.bumpversion-pytorch.cfg
new file mode 100644
index 0000000..5b3d4f7
--- /dev/null
+++ b/.bumpversion-pytorch.cfg
@@ -0,0 +1,13 @@
+[bumpversion]
+current_version = 1.1.0
+commit = False
+message = service version: {current_version} → {new_version}
+tag = False
+
+[bumpversion:file:.osparc/osparc-python-runner-pytorch/metadata.yml]
+search = version: {current_version}
+replace = version: {new_version}
+
+[bumpversion:file:Makefile]
+search = TAG_PYTORCH={current_version}
+replace = TAG_PYTORCH={new_version}
diff --git a/.bumpversion-tensorflow.cfg b/.bumpversion-tensorflow.cfg
new file mode 100644
index 0000000..234e05b
--- /dev/null
+++ b/.bumpversion-tensorflow.cfg
@@ -0,0 +1,13 @@
+[bumpversion]
+current_version = 1.1.0
+commit = False
+message = service version: {current_version} → {new_version}
+tag = False
+
+[bumpversion:file:.osparc/osparc-python-runner-tensorflow/metadata.yml]
+search = version: {current_version}
+replace = version: {new_version}
+
+[bumpversion:file:Makefile]
+search = TAG_TENSORFLOW={current_version}
+replace = TAG_TENSORFLOW={new_version}
diff --git a/.github/workflows/check-image.yml b/.github/workflows/check-image.yml
new file mode 100644
index 0000000..ae812d9
--- /dev/null
+++ b/.github/workflows/check-image.yml
@@ -0,0 +1,37 @@
+name: Build and check image
+
+on:
+ # Run the workflow for pushes to the main branch
+ push:
+ branches:
+ - master
+
+ # Run the workflow for pull requests (from forks or within the repo)
+ pull_request:
+ types:
+ - opened
+ - synchronize
+ - reopened
+
+jobs:
+ verify-image-build:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo content
+ uses: actions/checkout@v2
+ - name: ooil version
+ uses: docker://itisfoundation/ci-service-integration-library:v2.0.11
+ with:
+ args: ooil --version
+ - name: Assemble docker compose spec
+ uses: docker://itisfoundation/ci-service-integration-library:v2.0.11
+ with:
+ args: ooil compose
+ - name: Build all images if multiple
+ uses: docker://itisfoundation/ci-service-integration-library:v2.0.11
+ with:
+ args: docker compose build
+ - name: test Tensorflow
+ run: make run-tensorflow-local
+ - name: test PyTorch
+ run: make run-pytorch-local
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..4093972
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,8 @@
+# not interested in storing this file
+docker-compose.yml
+
+
+*.ignore.*
+
+.venv/
+docker-compose.yml
\ No newline at end of file
diff --git a/.osparc/common/docker-compose.overwrite.yml b/.osparc/common/docker-compose.overwrite.yml
new file mode 100644
index 0000000..706cc8e
--- /dev/null
+++ b/.osparc/common/docker-compose.overwrite.yml
@@ -0,0 +1,5 @@
+services:
+ common:
+ build:
+ context: ./common
+ dockerfile: Dockerfile
diff --git a/.osparc/common/metadata.yml b/.osparc/common/metadata.yml
new file mode 100644
index 0000000..5091782
--- /dev/null
+++ b/.osparc/common/metadata.yml
@@ -0,0 +1,14 @@
+name: "Base runner-ml image"
+thumbnail:
+description: "used only for building; not to be published"
+key: simcore/services/comp/common
+version: 0.0.0
+integration-version: 2.0.0
+type: dynamic
+authors:
+ - name: "Andrei Neagu"
+ email: "neagu@itis.swiss"
+ affiliation: "IT'IS Foundation"
+contact: "neagu@itis.swiss"
+inputs: {}
+outputs: {}
diff --git a/.osparc/common/runtime.yml b/.osparc/common/runtime.yml
new file mode 100644
index 0000000..24e49db
--- /dev/null
+++ b/.osparc/common/runtime.yml
@@ -0,0 +1 @@
+settings: []
diff --git a/.osparc/osparc-python-runner-pytorch/docker-compose.overwrite.yml b/.osparc/osparc-python-runner-pytorch/docker-compose.overwrite.yml
new file mode 100644
index 0000000..855dc59
--- /dev/null
+++ b/.osparc/osparc-python-runner-pytorch/docker-compose.overwrite.yml
@@ -0,0 +1,7 @@
+services:
+ osparc-python-runner-pytorch:
+ depends_on:
+ - common
+ build:
+ context: ./osparc-python-runner-pytorch
+ dockerfile: Dockerfile
diff --git a/.osparc/osparc-python-runner-pytorch/metadata.yml b/.osparc/osparc-python-runner-pytorch/metadata.yml
new file mode 100644
index 0000000..79188b9
--- /dev/null
+++ b/.osparc/osparc-python-runner-pytorch/metadata.yml
@@ -0,0 +1,68 @@
+name: oSparc Python Runner PyTorch
+key: simcore/services/comp/osparc-python-runner-pytorch
+type: computational
+integration-version: 2.0.0
+version: 1.1.0
+description: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/refs/heads/main/app/full/services/simcore_services_comp_osparc-python-runner-pytorch.md
+icon: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/icons/s4l/simcore_services_comp_osparc-python-runner-pytorch.png
+thumbnail: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/thumbnails/s4l/simcore_services_comp_osparc-python-runner-pytorch.png
+contact: guidon@itis.swiss
+authors:
+ - name: Manuel Guidon
+ email: guidon@itis.swiss
+ affiliation: IT'IS Foundation
+inputs:
+ input_1:
+ displayOrder: 1
+ label: Input data
+ description: Any code, requirements or data file
+ type: data:*/*
+ input_2:
+ displayOrder: 2
+ label: Additional input data - optional
+ description: Any additional input data file
+ type: data:*/*
+ input_3:
+ displayOrder: 3
+ label: Additional input data - optional
+ description: Any additional input data file
+ type: data:*/*
+ input_4:
+ displayOrder: 4
+ label: Additional input data - optional
+ description: Any additional input data file
+ type: data:*/*
+ input_5:
+ displayOrder: 5
+ label: Additional input data - optional
+ description: Any additional input data file
+ type: data:*/*
+outputs:
+ output_1:
+ displayOrder: 1
+ label: Output data
+ description: The data produced by the script and saved under OUTPUT_FOLDER/output_1 as output_1.zip
+ type: data:*/*
+ fileToKeyMap:
+ output_1.zip: output_1
+ output_2:
+ displayOrder: 2
+ label: Output data
+ description: The data produced by the script and saved under OUTPUT_FOLDER/output_2 as output_2.zip
+ type: data:*/*
+ fileToKeyMap:
+ output_2.zip: output_2
+ output_3:
+ displayOrder: 3
+ label: Output data
+ description: The data produced by the script and saved under OUTPUT_FOLDER/output_3 as output_3.zip
+ type: data:*/*
+ fileToKeyMap:
+ output_3.zip: output_3
+ output_4:
+ displayOrder: 4
+ label: Output data
+ description: The data produced by the script and saved under OUTPUT_FOLDER/output_4 as output_4.zip
+ type: data:*/*
+ fileToKeyMap:
+ output_4.zip: output_4
\ No newline at end of file
diff --git a/.osparc/osparc-python-runner-pytorch/runtime.yml b/.osparc/osparc-python-runner-pytorch/runtime.yml
new file mode 100644
index 0000000..d29bfd9
--- /dev/null
+++ b/.osparc/osparc-python-runner-pytorch/runtime.yml
@@ -0,0 +1,14 @@
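+# request one unit of the VRAM generic resource (GPU)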
+settings:
+ - name: Resources
+ type: Resources
+ value:
+ Limits:
+ NanoCPUs: 0
+ MemoryBytes: 0
+ Reservations:
+ NanoCPUs: 0
+ MemoryBytes: 0
+ GenericResources:
+ - DiscreteResourceSpec:
+ Kind: VRAM
+ Value: 1
\ No newline at end of file
diff --git a/.osparc/osparc-python-runner-tensorflow/docker-compose.overwrite.yml b/.osparc/osparc-python-runner-tensorflow/docker-compose.overwrite.yml
new file mode 100644
index 0000000..3f9fdf4
--- /dev/null
+++ b/.osparc/osparc-python-runner-tensorflow/docker-compose.overwrite.yml
@@ -0,0 +1,7 @@
+services:
+ osparc-python-runner-tensorflow:
+ depends_on:
+ - common
+ build:
+ context: ./osparc-python-runner-tensorflow
+ dockerfile: Dockerfile
diff --git a/.osparc/osparc-python-runner-tensorflow/metadata.yml b/.osparc/osparc-python-runner-tensorflow/metadata.yml
new file mode 100644
index 0000000..513303a
--- /dev/null
+++ b/.osparc/osparc-python-runner-tensorflow/metadata.yml
@@ -0,0 +1,68 @@
+name: oSparc Python Runner Tensorflow
+key: simcore/services/comp/osparc-python-runner-tensorflow
+type: computational
+integration-version: 2.0.0
+version: 1.1.0
+description: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/refs/heads/main/app/full/services/simcore_services_comp_osparc-python-runner-tensorflow.md
+icon: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/icons/s4l/simcore_services_comp_osparc-python-runner-tensorflow.png
+thumbnail: https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/thumbnails/s4l/simcore_services_comp_osparc-python-runner-tensorflow.png
+contact: guidon@itis.swiss
+authors:
+ - name: Manuel Guidon
+ email: guidon@itis.swiss
+ affiliation: IT'IS Foundation
+inputs:
+ input_1:
+ displayOrder: 1
+ label: Input data
+ description: Any code, requirements or data file
+ type: data:*/*
+ input_2:
+ displayOrder: 2
+ label: Additional input data - optional
+ description: Any additional input data file
+ type: data:*/*
+ input_3:
+ displayOrder: 3
+ label: Additional input data - optional
+ description: Any additional input data file
+ type: data:*/*
+ input_4:
+ displayOrder: 4
+ label: Additional input data - optional
+ description: Any additional input data file
+ type: data:*/*
+ input_5:
+ displayOrder: 5
+ label: Additional input data - optional
+ description: Any additional input data file
+ type: data:*/*
+outputs:
+ output_1:
+ displayOrder: 1
+ label: Output data
+ description: The data produced by the script and saved under OUTPUT_FOLDER/output_1 as output_1.zip
+ type: data:*/*
+ fileToKeyMap:
+ output_1.zip: output_1
+ output_2:
+ displayOrder: 2
+ label: Output data
+ description: The data produced by the script and saved under OUTPUT_FOLDER/output_2 as output_2.zip
+ type: data:*/*
+ fileToKeyMap:
+ output_2.zip: output_2
+ output_3:
+ displayOrder: 3
+ label: Output data
+ description: The data produced by the script and saved under OUTPUT_FOLDER/output_3 as output_3.zip
+ type: data:*/*
+ fileToKeyMap:
+ output_3.zip: output_3
+ output_4:
+ displayOrder: 4
+ label: Output data
+ description: The data produced by the script and saved under OUTPUT_FOLDER/output_4 as output_4.zip
+ type: data:*/*
+ fileToKeyMap:
+ output_4.zip: output_4
\ No newline at end of file
diff --git a/.osparc/osparc-python-runner-tensorflow/runtime.yml b/.osparc/osparc-python-runner-tensorflow/runtime.yml
new file mode 100644
index 0000000..d29bfd9
--- /dev/null
+++ b/.osparc/osparc-python-runner-tensorflow/runtime.yml
@@ -0,0 +1,14 @@
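+# request one unit of the VRAM generic resource (GPU)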
+settings:
+ - name: Resources
+ type: Resources
+ value:
+ Limits:
+ NanoCPUs: 0
+ MemoryBytes: 0
+ Reservations:
+ NanoCPUs: 0
+ MemoryBytes: 0
+ GenericResources:
+ - DiscreteResourceSpec:
+ Kind: VRAM
+ Value: 1
\ No newline at end of file
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..84d1784
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,102 @@
+# minimalistic utility to test and develop locally
+
+SHELL = /bin/sh
+.DEFAULT_GOAL := help
+
+export IMAGE_PYTORCH=osparc-python-runner-pytorch
+export IMAGE_TENSORFLOW=osparc-python-runner-tensorflow
+export TAG_PYTORCH=1.1.0
+export TAG_TENSORFLOW=1.1.0
+
+# PYTHON ENVIRON ---------------------------------------------------------------------------------------
+.PHONY: devenv
+.venv:
+ @python3 --version
+ python3 -m venv $@
+ # upgrading package managers
+ $@/bin/pip install --upgrade uv
+
+devenv: .venv ## create a python virtual environment with tools to develop, run and test
+ # installing extra tools
+
+If you have **o²S²PARC** running and wish to push data to the local registry:
+
+```shell
+make publish-local
+```
+
diff --git a/common/Dockerfile b/common/Dockerfile
new file mode 100644
index 0000000..8db1f1d
--- /dev/null
+++ b/common/Dockerfile
@@ -0,0 +1,45 @@
+FROM nvidia/cuda:12.8.0-cudnn-runtime-ubuntu24.04 AS base
+
+LABEL maintainer="Andrei Neagu "
+
+ENV SC_USER_ID=8004
+ENV SC_USER_NAME=scu
+RUN adduser \
+ --uid ${SC_USER_ID} \
+ --disabled-password \
+ --gecos "" \
+ --shell /bin/sh --home /home/${SC_USER_NAME} ${SC_USER_NAME}
+
+# Install uv tool
+COPY --from=ghcr.io/astral-sh/uv:0.7.6 /uv /uvx /bin/
+ENV UV_HTTP_TIMEOUT=120
+
+# Install Python globally for all users
+ARG PYTHON_VERSION=3.12.10
+ENV UV_PYTHON_INSTALL_DIR=/opt/uv-python
+RUN mkdir -p /opt/uv-python \
+ && uv venv /opt/venv --python=python${PYTHON_VERSION%.*} \
+ && chmod -R a+rx /opt/uv-python \
+ && chown -R ${SC_USER_NAME}:${SC_USER_NAME} /opt/venv
+ENV PATH="/opt/venv/bin:$PATH"
+
+FROM base AS production
+
+ENV SC_BUILD_TARGET=production
+ENV SC_BOOT_MODE=production
+ENV INPUT_FOLDER="/input" \
+ OUTPUT_FOLDER="/output"
+ENV PATH="/home/${SC_USER_NAME}/service.cli:${PATH}"
+
+WORKDIR /home/${SC_USER_NAME}
+
+COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} entrypoint.sh docker/
+COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} main.py main.py
+
+ENV LD_LIBRARY_PATH=/usr/local/cuda/lib64:${LD_LIBRARY_PATH}
+ENV LC_ALL=C.UTF-8
+ENV LANG=C.UTF-8
+
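+# entrypoint.sh (see common/entrypoint.sh) remaps the scu user to the uid/gid of
+# the mounted input/output folders before handing control to the CMD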
+ENTRYPOINT [ "/bin/sh", "docker/entrypoint.sh", "/bin/sh", "-c" ]
+CMD ["run"]
+
diff --git a/common/entrypoint.sh b/common/entrypoint.sh
new file mode 100755
index 0000000..1cb7f2f
--- /dev/null
+++ b/common/entrypoint.sh
@@ -0,0 +1,79 @@
+#!/bin/sh
+set -o errexit
+set -o nounset
+
+IFS=$(printf '\n\t')
+# This entrypoint script:
+#
+# - Executes *inside* of the container upon start as --user [default root]
+# - Notice that the container *starts* as --user [default root] but
+# *runs* as non-root user [$SC_USER_NAME]
+#
+echo Entrypoint for stage "${SC_BUILD_TARGET}" ...
+echo User : "$(id "$(whoami)")"
+echo Workdir : "$(pwd)"
+
+
+# expect input/output folders to be mounted
+stat "${INPUT_FOLDER}" > /dev/null 2>&1 || \
+ (echo "ERROR: You must mount '${INPUT_FOLDER}' to deduce user and group ids" && exit 1)
+stat "${OUTPUT_FOLDER}" > /dev/null 2>&1 || \
+ (echo "ERROR: You must mount '${OUTPUT_FOLDER}' to deduce user and group ids" && exit 1)
+
+# NOTE: expects docker run ... -v /path/to/input/folder:${INPUT_FOLDER}
+# check input/output folders are owned by the same user
+if [ "$(stat -c %u "${INPUT_FOLDER}")" -ne "$(stat -c %u "${OUTPUT_FOLDER}")" ]
+then
+ echo "ERROR: '${INPUT_FOLDER}' and '${OUTPUT_FOLDER}' have different user id's. not allowed" && exit 1
+fi
+# check input/outputfolders are owned by the same group
+if [ "$(stat -c %g "${INPUT_FOLDER}")" -ne "$(stat -c %g "${OUTPUT_FOLDER}")" ]
+then
+ echo "ERROR: '${INPUT_FOLDER}' and '${OUTPUT_FOLDER}' have different group id's. not allowed" && exit 1
+fi
+
+echo "setting correct user id/group id..."
+HOST_USERID=$(stat --format=%u "${INPUT_FOLDER}")
+HOST_GROUPID=$(stat --format=%g "${INPUT_FOLDER}")
+CONT_GROUPNAME=$(getent group "${HOST_GROUPID}" | cut --delimiter=: --fields=1)
+if [ "$HOST_USERID" -eq 0 ]
+then
+ echo "Warning: Folder mounted owned by root user... adding $SC_USER_NAME to root..."
+ adduser "$SC_USER_NAME" root
+else
+ echo "Folder mounted owned by user $HOST_USERID:$HOST_GROUPID-'$CONT_GROUPNAME'..."
+ # take host's credentials in $SC_USER_NAME
+ if [ -z "$CONT_GROUPNAME" ]
+ then
+ echo "Creating new group my$SC_USER_NAME"
+ CONT_GROUPNAME=my$SC_USER_NAME
+ addgroup --gid "$HOST_GROUPID" "$CONT_GROUPNAME"
+ else
+ echo "group already exists"
+ fi
+
+ # add user if missing
+ if id "$SC_USER_NAME" >/dev/null 2>&1; then
+ echo "User $SC_USER_NAME already exists"
+ else
+ echo "adding $SC_USER_NAME to group $CONT_GROUPNAME..."
+ adduser "$SC_USER_NAME" "$CONT_GROUPNAME"
+
+ echo "changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)"
+ usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME"
+ fi
+
+ echo "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME"
+ find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \;
+ # change user property of files already around
+	echo "Changing ownership properties of files around from $SC_USER_ID to user $SC_USER_NAME"
+ find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \;
+fi
+
+echo "Starting $* ..."
+echo " $SC_USER_NAME rights : $(id "$SC_USER_NAME")"
+echo " local dir : $(ls -al)"
+echo " input dir : $(ls -al "${INPUT_FOLDER}")"
+echo " output dir : $(ls -al "${OUTPUT_FOLDER}")"
+
+su --command "export PATH=${PATH}:/home/$SC_USER_NAME/service.cli; $*" "$SC_USER_NAME"
\ No newline at end of file
diff --git a/common/main.py b/common/main.py
new file mode 100644
index 0000000..db9c7d2
--- /dev/null
+++ b/common/main.py
@@ -0,0 +1,154 @@
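+# Helper invoked by service.cli/execute.sh in two phases:
+#   "python main.py setup"    - collects the user code from INPUT_FOLDER, resolves
+#                               requirements and generates a main.sh launch script
+#   "python main.py teardown" - zips each OUTPUT_FOLDER/output_N folder into output_N.zip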
+import json
+import logging
+import os
+import shutil
+import subprocess
+import sys
+from pathlib import Path
+from typing import Dict
+
+logging.basicConfig(level=logging.INFO)
+_logger = logging.getLogger("osparc-python-main")
+
+
+ENVIRONS = ["INPUT_FOLDER", "OUTPUT_FOLDER"]
+try:
+ INPUT_FOLDER, OUTPUT_FOLDER = [Path(os.environ[v]) for v in ENVIRONS]
+except KeyError:
+    raise ValueError(f"Required env vars {ENVIRONS} were not set")
+
+# NOTE: sync with schema in metadata!!
+NUM_INPUTS = 5
+NUM_OUTPUTS = 4
+OUTPUT_SUBFOLDER_ENV_TEMPLATE = "OUTPUT_{}"
+OUTPUT_SUBFOLDER_TEMPLATE = "output_{}"
+OUTPUT_FILE_TEMPLATE = "output_{}.zip"
+
+
+def _find_user_code_entrypoint(code_dir: Path) -> Path:
+ _logger.info("Searching for script main entrypoint ...")
+ code_files = list(code_dir.rglob("*.py"))
+
+ if not code_files:
+ raise ValueError("No python code found")
+
+ if len(code_files) > 1:
+ code_files = list(code_dir.rglob("main.py"))
+ if not code_files:
+ raise ValueError("No entrypoint found (e.g. main.py)")
+ if len(code_files) > 1:
+ raise ValueError(f"Many entrypoints found: {code_files}")
+
+ main_py = code_files[0]
+ _logger.info("Found %s as main entrypoint", main_py)
+ return main_py
+
+
+def _ensure_pip_requirements(code_dir: Path) -> Path:
+ _logger.info("Searching for requirements file ...")
+ requirements = list(code_dir.rglob("requirements.txt"))
+ if len(requirements) > 1:
+ raise ValueError(f"Many requirements found: {requirements}")
+
+ elif not requirements:
+ # deduce requirements using pipreqs
+ _logger.info("Not found. Recreating requirements ...")
+ requirements = code_dir / "requirements.txt"
+ subprocess.run(
+ f"pipreqs --savepath={requirements} --force {code_dir}".split(),
+ shell=False,
+ check=True,
+ cwd=INPUT_FOLDER,
+ )
+
+ # TODO log subprocess.run
+
+ else:
+ requirements = requirements[0]
+ _logger.info(f"Found: {requirements}")
+ return requirements
+
+
+# TODO: Next version of integration will take care of this and maybe the ENVs as well
+def _ensure_output_subfolders_exist() -> Dict[str, str]:
+ output_envs = {}
+ for n in range(1, NUM_OUTPUTS + 1):
+        output_sub_folder_env = OUTPUT_SUBFOLDER_ENV_TEMPLATE.format(n)
+ output_sub_folder = OUTPUT_FOLDER / OUTPUT_SUBFOLDER_TEMPLATE.format(n)
+ # NOTE: exist_ok for forward compatibility in case they are already created
+ output_sub_folder.mkdir(parents=True, exist_ok=True)
+ output_envs[output_sub_folder_env] = f"{output_sub_folder}"
+ _logger.info(
+ "Output ENVs available: %s",
+ json.dumps(output_envs, indent=2),
+ )
+ return output_envs
+
+
+def _ensure_input_environment() -> Dict[str, str]:
+ input_envs = {
+ f"INPUT_{n}": os.environ[f"INPUT_{n}"] for n in range(1, NUM_INPUTS + 1)
+ }
+ _logger.info(
+ "Input ENVs available: %s",
+ json.dumps(input_envs, indent=2),
+ )
+ return input_envs
+
+
+def setup():
+ input_envs = _ensure_input_environment()
+ output_envs = _ensure_output_subfolders_exist()
+ _logger.info("Available data:")
+ os.system("ls -tlah")
+
+ user_code_entrypoint = _find_user_code_entrypoint(INPUT_FOLDER)
+ requirements_txt = _ensure_pip_requirements(INPUT_FOLDER)
+
+ _logger.info("Preparing launch script ...")
+ bash_input_env_export = [f"export {env}={path}" for env, path in input_envs.items()]
+ bash_output_env_export = [
+ f"export {env}='{path}'" for env, path in output_envs.items()
+ ]
+ script = [
+ "#!/bin/sh",
+ "set -o errexit",
+ "set -o nounset",
+ "IFS=$(printf '\\n\\t')",
+ f'uv pip install -r "{requirements_txt}"',
+ "\n".join(bash_input_env_export),
+ "\n".join(bash_output_env_export),
+ f'echo "Executing code {user_code_entrypoint.name}..."',
+ f'"python" "{user_code_entrypoint}"',
+ 'echo "DONE ..."',
+ ]
+ main_sh_path = Path("main.sh")
+ _logger.info("main_sh_path: %s", main_sh_path.absolute()) # TODO: remove this line
+ main_sh_path.write_text("\n".join(script))
+
+
+def teardown():
+ _logger.info("Zipping output...")
+ for n in range(1, NUM_OUTPUTS + 1):
+        output_path = OUTPUT_FOLDER / OUTPUT_SUBFOLDER_TEMPLATE.format(n)
+ archive_file_path = OUTPUT_FOLDER / OUTPUT_FILE_TEMPLATE.format(n)
+ _logger.info("Zipping %s into %s...", output_path, archive_file_path)
+ shutil.make_archive(
+ f"{(archive_file_path.parent / archive_file_path.stem)}",
+ format="zip",
+ root_dir=output_path,
+ logger=_logger,
+ )
+ _logger.info("Zipping %s into %s done", output_path, archive_file_path)
+ _logger.info("Zipping done.")
+
+
+if __name__ == "__main__":
+ action = "setup" if len(sys.argv) == 1 else sys.argv[1]
+ try:
+ if action == "setup":
+ setup()
+ else:
+ teardown()
+ except Exception as err: # pylint: disable=broad-except
+ _logger.error("%s . Stopping %s", err, action)
diff --git a/docker-compose-local.yml b/docker-compose-local.yml
new file mode 100644
index 0000000..1f630e1
--- /dev/null
+++ b/docker-compose-local.yml
@@ -0,0 +1,9 @@
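+# Compose file used only for local testing. IMAGE_TO_RUN, TAG_TO_RUN and
+# VALIDATION_DIR are expected in the environment, presumably exported by the
+# Makefile's run-*-local targets.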
+services:
+ runner-ml:
+ image: simcore/services/comp/${IMAGE_TO_RUN}:${TAG_TO_RUN}
+ environment:
+ - INPUT_FOLDER=/tmp/inputs
+ - OUTPUT_FOLDER=/tmp/outputs
+ volumes:
+ - ${PWD}/${VALIDATION_DIR}/inputs:/tmp/inputs
+ - ${PWD}/${VALIDATION_DIR}/outputs:/tmp/outputs
diff --git a/osparc-python-runner-pytorch/Dockerfile b/osparc-python-runner-pytorch/Dockerfile
new file mode 100644
index 0000000..f2ce4b9
--- /dev/null
+++ b/osparc-python-runner-pytorch/Dockerfile
@@ -0,0 +1,9 @@
+FROM simcore/services/comp/common:0.0.0 AS service-base
+
+LABEL maintainer="Andrei Neagu "
+
+COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} service.cli/ service.cli/
+COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} osparc_python_runner_pytorch osparc_python_runner_pytorch
+RUN mv main.py osparc_python_runner_pytorch/main.py
+
+RUN uv pip install -r osparc_python_runner_pytorch/requirements.txt
\ No newline at end of file
diff --git a/osparc-python-runner-pytorch/osparc_python_runner_pytorch/requirements.txt b/osparc-python-runner-pytorch/osparc_python_runner_pytorch/requirements.txt
new file mode 100644
index 0000000..62c2049
--- /dev/null
+++ b/osparc-python-runner-pytorch/osparc_python_runner_pytorch/requirements.txt
@@ -0,0 +1,4 @@
+--index-url https://download.pytorch.org/whl/cu128
+torch==2.7.0
+torchaudio==2.7.0
+torchvision==0.22.0
\ No newline at end of file
diff --git a/osparc-python-runner-pytorch/service.cli/execute.sh b/osparc-python-runner-pytorch/service.cli/execute.sh
new file mode 100755
index 0000000..d098890
--- /dev/null
+++ b/osparc-python-runner-pytorch/service.cli/execute.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+# set sh strict mode
+set -o errexit
+set -o nounset
+IFS=$(printf '\n\t')
+
+cd /home/scu/osparc_python_runner_pytorch
+
+echo "starting service as"
+echo " User : $(id "$(whoami)")"
+echo " Workdir : $(pwd)"
+echo "..."
+echo
+
+python main.py setup
+/bin/sh main.sh
+python main.py teardown
\ No newline at end of file
diff --git a/osparc-python-runner-pytorch/service.cli/run b/osparc-python-runner-pytorch/service.cli/run
new file mode 100755
index 0000000..e2bd973
--- /dev/null
+++ b/osparc-python-runner-pytorch/service.cli/run
@@ -0,0 +1,25 @@
+#!/bin/sh
+#---------------------------------------------------------------
+# AUTO-GENERATED CODE, do not modify this will be overwritten!!!
+#---------------------------------------------------------------
+# shell strict mode:
+set -o errexit
+set -o nounset
+IFS=$(printf '\n\t')
+cd "$(dirname "$0")"
+json_input=$INPUT_FOLDER/inputs.json
+
+INPUT_1=$INPUT_FOLDER/input_1
+export INPUT_1
+INPUT_2=$INPUT_FOLDER/input_2
+export INPUT_2
+INPUT_3=$INPUT_FOLDER/input_3
+export INPUT_3
+INPUT_4=$INPUT_FOLDER/input_4
+export INPUT_4
+INPUT_5=$INPUT_FOLDER/input_5
+export INPUT_5
+
+exec execute.sh
+
\ No newline at end of file
diff --git a/osparc-python-runner-tensorflow/Dockerfile b/osparc-python-runner-tensorflow/Dockerfile
new file mode 100644
index 0000000..c61a446
--- /dev/null
+++ b/osparc-python-runner-tensorflow/Dockerfile
@@ -0,0 +1,9 @@
+FROM simcore/services/comp/common:0.0.0 AS service-base
+
+LABEL maintainer="Andrei Neagu "
+
+COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} service.cli/ service.cli/
+COPY --chown=${SC_USER_NAME}:${SC_USER_NAME} osparc_python_runner_tensorflow osparc_python_runner_tensorflow
+RUN mv main.py osparc_python_runner_tensorflow/main.py
+
+RUN uv pip install -r osparc_python_runner_tensorflow/requirements.txt
\ No newline at end of file
diff --git a/osparc-python-runner-tensorflow/osparc_python_runner_tensorflow/requirements.txt b/osparc-python-runner-tensorflow/osparc_python_runner_tensorflow/requirements.txt
new file mode 100644
index 0000000..eced62a
--- /dev/null
+++ b/osparc-python-runner-tensorflow/osparc_python_runner_tensorflow/requirements.txt
@@ -0,0 +1 @@
+tensorflow[and-cuda]==2.18.1
\ No newline at end of file
diff --git a/osparc-python-runner-tensorflow/service.cli/execute.sh b/osparc-python-runner-tensorflow/service.cli/execute.sh
new file mode 100755
index 0000000..35558cf
--- /dev/null
+++ b/osparc-python-runner-tensorflow/service.cli/execute.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+# set sh strict mode
+set -o errexit
+set -o nounset
+IFS=$(printf '\n\t')
+
+cd /home/scu/osparc_python_runner_tensorflow
+
+echo "starting service as"
+echo " User : $(id "$(whoami)")"
+echo " Workdir : $(pwd)"
+echo "..."
+echo
+
+python main.py setup
+/bin/sh main.sh
+python main.py teardown
\ No newline at end of file
diff --git a/osparc-python-runner-tensorflow/service.cli/run b/osparc-python-runner-tensorflow/service.cli/run
new file mode 100755
index 0000000..e2bd973
--- /dev/null
+++ b/osparc-python-runner-tensorflow/service.cli/run
@@ -0,0 +1,25 @@
+#!/bin/sh
+#---------------------------------------------------------------
+# AUTO-GENERATED CODE, do not modify this will be overwritten!!!
+#---------------------------------------------------------------
+# shell strict mode:
+set -o errexit
+set -o nounset
+IFS=$(printf '\n\t')
+cd "$(dirname "$0")"
+json_input=$INPUT_FOLDER/inputs.json
+
+INPUT_1=$INPUT_FOLDER/input_1
+export INPUT_1
+INPUT_2=$INPUT_FOLDER/input_2
+export INPUT_2
+INPUT_3=$INPUT_FOLDER/input_3
+export INPUT_3
+INPUT_4=$INPUT_FOLDER/input_4
+export INPUT_4
+INPUT_5=$INPUT_FOLDER/input_5
+export INPUT_5
+
+exec execute.sh
+
\ No newline at end of file
diff --git a/validation-pytorch/inputs/input_1/main.py b/validation-pytorch/inputs/input_1/main.py
new file mode 100644
index 0000000..d18f231
--- /dev/null
+++ b/validation-pytorch/inputs/input_1/main.py
@@ -0,0 +1,13 @@
+import torch
+import requests
+
+response = requests.get("https://www.google.com")
+print(response)
+
+msg = torch.tensor([[1, 2, 3], [4, 5, 6]])
+print(msg)
+# Example tensor operation
+a = torch.tensor([1.0, 2.0, 3.0])
+b = torch.tensor([4.0, 5.0, 6.0])
+print("Sum:", a + b)
+
diff --git a/validation-pytorch/inputs/input_1/requirements.txt b/validation-pytorch/inputs/input_1/requirements.txt
new file mode 100644
index 0000000..3e73491
--- /dev/null
+++ b/validation-pytorch/inputs/input_1/requirements.txt
@@ -0,0 +1,2 @@
+requests
+numpy
diff --git a/validation-pytorch/outputs/output_1/.gitkeep b/validation-pytorch/outputs/output_1/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/validation-pytorch/outputs/output_2/.gitkeep b/validation-pytorch/outputs/output_2/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/validation-pytorch/outputs/output_3/.gitkeep b/validation-pytorch/outputs/output_3/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/validation-pytorch/outputs/output_4/.gitkeep b/validation-pytorch/outputs/output_4/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/validation-tensorflow/inputs/input_1/main.py b/validation-tensorflow/inputs/input_1/main.py
new file mode 100644
index 0000000..3a73510
--- /dev/null
+++ b/validation-tensorflow/inputs/input_1/main.py
@@ -0,0 +1,8 @@
+import requests
+import tensorflow as tf
+
+response = requests.get("https://www.google.com")
+print(response)
+
+msg = tf.constant("Hello, TensorFlow!")
+tf.print(msg)
diff --git a/validation-tensorflow/inputs/input_1/requirements.txt b/validation-tensorflow/inputs/input_1/requirements.txt
new file mode 100644
index 0000000..3e73491
--- /dev/null
+++ b/validation-tensorflow/inputs/input_1/requirements.txt
@@ -0,0 +1,2 @@
+requests
+numpy
diff --git a/validation-tensorflow/outputs/output_1/.gitkeep b/validation-tensorflow/outputs/output_1/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/validation-tensorflow/outputs/output_2/.gitkeep b/validation-tensorflow/outputs/output_2/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/validation-tensorflow/outputs/output_3/.gitkeep b/validation-tensorflow/outputs/output_3/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/validation-tensorflow/outputs/output_4/.gitkeep b/validation-tensorflow/outputs/output_4/.gitkeep
new file mode 100644
index 0000000..e69de29