From f2cdaebd2d366aa4141b8312e82a2c12837489e7 Mon Sep 17 00:00:00 2001
From: ali <117142933+muhammad-ali-e@users.noreply.github.com>
Date: Fri, 11 Apr 2025 19:33:33 +0530
Subject: [PATCH] Sidecar implementation (#1242)
* resolved file skipping during API-deployment execution
* sidecar implementation
* update docker build yaml
* tool version updates
* remove docker tool-sidecar build
* minor updates on runner
* update docker-tool-build workflow with sidecar
* update docker-tool-build workflow with sidecar
* update docker-tool-build workflow with sidecar
* Commit pdm.lock changes
* removed tool-sidecar from workflow build
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
* validation on image name existence and minor changes in docker file
* format fixes
* tool dockerfile cmd to entrypoint
* updated tool version in public tools
---------
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
.../workflows/docker-tools-build-push.yaml | 5 +-
docker/docker-compose.build.yaml | 5 +
docker/dockerfiles/tool-sidecar.Dockerfile | 50 ++
.../tool-sidecar.Dockerfile.dockerignore | 66 +++
runner/sample.env | 7 +-
.../clients/{docker.py => docker_client.py} | 160 +++++-
runner/src/unstract/runner/clients/helper.py | 2 +-
.../src/unstract/runner/clients/interface.py | 54 +-
.../unstract/runner/clients/test_docker.py | 124 +++-
runner/src/unstract/runner/constants.py | 9 +
runner/src/unstract/runner/runner.py | 175 +++++-
runner/src/unstract/runner/utils.py | 59 ++
tool-sidecar/README.md | 13 +
tool-sidecar/entrypoint.sh | 6 +
tool-sidecar/pdm.lock | 533 ++++++++++++++++++
tool-sidecar/pyproject.toml | 31 +
tool-sidecar/src/unstract/__init__.py | 0
.../src/unstract/tool_sidecar/__init__.py | 0
.../src/unstract/tool_sidecar/constants.py | 28 +
tool-sidecar/src/unstract/tool_sidecar/dto.py | 16 +
.../unstract/tool_sidecar/log_processor.py | 276 +++++++++
tools/classifier/Dockerfile | 9 +-
tools/classifier/src/config/properties.json | 2 +-
tools/structure/Dockerfile | 4 +-
tools/structure/src/config/properties.json | 2 +-
tools/text_extractor/Dockerfile | 9 +-
.../text_extractor/src/config/properties.json | 2 +-
.../databases/postgresql/postgresql.py | 2 +-
unstract/core/src/unstract/core/constants.py | 1 +
.../tool_registry_config/public_tools.json | 12 +-
30 files changed, 1592 insertions(+), 70 deletions(-)
create mode 100644 docker/dockerfiles/tool-sidecar.Dockerfile
create mode 100644 docker/dockerfiles/tool-sidecar.Dockerfile.dockerignore
rename runner/src/unstract/runner/clients/{docker.py => docker_client.py} (52%)
create mode 100644 tool-sidecar/README.md
create mode 100755 tool-sidecar/entrypoint.sh
create mode 100644 tool-sidecar/pdm.lock
create mode 100644 tool-sidecar/pyproject.toml
create mode 100644 tool-sidecar/src/unstract/__init__.py
create mode 100644 tool-sidecar/src/unstract/tool_sidecar/__init__.py
create mode 100644 tool-sidecar/src/unstract/tool_sidecar/constants.py
create mode 100644 tool-sidecar/src/unstract/tool_sidecar/dto.py
create mode 100644 tool-sidecar/src/unstract/tool_sidecar/log_processor.py
diff --git a/.github/workflows/docker-tools-build-push.yaml b/.github/workflows/docker-tools-build-push.yaml
index c9253818..049f9630 100644
--- a/.github/workflows/docker-tools-build-push.yaml
+++ b/.github/workflows/docker-tools-build-push.yaml
@@ -16,6 +16,7 @@ on:
- tool-classifier
- tool-structure
- tool-text-extractor
+ - tool-sidecar
run-name: "[${{ inputs.service_name }}:${{ inputs.tag }}] Docker Image Build and Push (Development)"
@@ -44,6 +45,8 @@ jobs:
- name: Build tool-text-extractor
if: github.event.inputs.service_name=='tool-text-extractor'
run: docker build -t unstract/${{github.event.inputs.service_name}}:${{ github.event.inputs.tag }} ./tools/text_extractor
-
+ - name: Build tool-sidecar
+ if: github.event.inputs.service_name=='tool-sidecar'
+ run: docker build -t unstract/${{github.event.inputs.service_name}}:${{ github.event.inputs.tag }} -f docker/dockerfiles/tool-sidecar.Dockerfile .
- name: Push Docker image to Docker Hub
run: docker push unstract/${{ github.event.inputs.service_name }}:${{ github.event.inputs.tag }}
diff --git a/docker/docker-compose.build.yaml b/docker/docker-compose.build.yaml
index 34294962..4963ea80 100644
--- a/docker/docker-compose.build.yaml
+++ b/docker/docker-compose.build.yaml
@@ -14,6 +14,11 @@ services:
build:
dockerfile: docker/dockerfiles/runner.Dockerfile
context: ..
+ tool-sidecar:
+ image: unstract/tool-sidecar:${VERSION}
+ build:
+ dockerfile: docker/dockerfiles/tool-sidecar.Dockerfile
+ context: ..
platform-service:
image: unstract/platform-service:${VERSION}
build:
diff --git a/docker/dockerfiles/tool-sidecar.Dockerfile b/docker/dockerfiles/tool-sidecar.Dockerfile
new file mode 100644
index 00000000..201d99aa
--- /dev/null
+++ b/docker/dockerfiles/tool-sidecar.Dockerfile
@@ -0,0 +1,50 @@
+# Use Python 3.9 slim for minimal size
+FROM python:3.9-slim
+LABEL maintainer="Zipstack Inc."
+
+ENV \
+ PYTHONDONTWRITEBYTECODE=1 \
+ PYTHONUNBUFFERED=1 \
+ LOG_PATH=/shared/logs/logs.txt \
+ LOG_LEVEL=INFO \
+ PYTHONPATH=/unstract \
+ BUILD_CONTEXT_PATH=tool-sidecar \
+ BUILD_PACKAGES_PATH=unstract \
+ PDM_VERSION=2.16.1 \
+ # OpenTelemetry configuration (disabled by default, enable in docker-compose)
+ OTEL_TRACES_EXPORTER=none \
+ OTEL_METRICS_EXPORTER=none \
+ OTEL_LOGS_EXPORTER=none \
+ OTEL_SERVICE_NAME=unstract_tool_sidecar \
+ PATH="/app/.venv/bin:$PATH"
+
+RUN apt-get update \
+ && apt-get --no-install-recommends install -y docker \
+ && apt-get clean && rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/* \
+ \
+ && pip install --no-cache-dir -U pip pdm~=${PDM_VERSION}
+
+WORKDIR /app
+
+# Create the virtualenv and activate it (dependencies are installed later via pdm)
+RUN pdm venv create -w virtualenv --with-pip && \
+ . .venv/bin/activate
+
+# Copy local dependency packages first
+COPY ${BUILD_PACKAGES_PATH}/core /unstract/core
+
+# Copy application files
+COPY ${BUILD_CONTEXT_PATH} /app/
+
+# Ensure correct package structure
+RUN touch /app/src/unstract/__init__.py && \
+ cd /unstract/core && pip install --no-cache-dir -e . && cd /app && \
+ pip install --no-cache-dir -e . && \
+ . .venv/bin/activate && \
+ pdm sync --prod --no-editable --with deploy && \
+ opentelemetry-bootstrap -a install
+
+COPY ${BUILD_CONTEXT_PATH}/entrypoint.sh /app/
+RUN chmod +x /app/entrypoint.sh
+
+CMD ["./entrypoint.sh"]
diff --git a/docker/dockerfiles/tool-sidecar.Dockerfile.dockerignore b/docker/dockerfiles/tool-sidecar.Dockerfile.dockerignore
new file mode 100644
index 00000000..51dc2dd3
--- /dev/null
+++ b/docker/dockerfiles/tool-sidecar.Dockerfile.dockerignore
@@ -0,0 +1,66 @@
+**/__pycache__
+**/.pytest_cache
+**/.python-version
+**/.pyc
+**/.pyo
+**/.venv
+**/.classpath
+**/.dockerignore
+**/.env
+**/.git
+**/.gitignore
+**/.gitkeep
+**/.project
+**/.settings
+**/.toolstarget
+**/.vs
+**/.vscode
+**/*.*proj.user
+**/*.dbmdl
+**/*.jfm
+**/bin
+**/charts
+**/docker-compose*
+**/compose*
+**/Dockerfile*
+**/build
+**/dist
+**/node_modules
+**/npm-debug.log
+**/obj
+**/secrets.dev.yaml
+**/values.dev.yaml
+**/.db
+**/.sqlite3
+**/.log
+**/*-log.txt
+**/*.drawio
+**/.tmp
+**/.swp
+**/.swo
+**/.bak
+*.idea
+*.vscode
+*.git
+**/.pdm.toml
+**/.pdm-build
+**/.pdm-python
+!LICENSE
+*.md
+!README.md
+.jshintrc
+.pre-commit-config.yaml
+**/tests
+test*.py
+**/*.egg-info
+
+backend
+frontend
+platform-service
+prompt-service
+tools
+unstract
+!unstract/core
+!unstract/flags
+!runner
+x2text-service
diff --git a/runner/sample.env b/runner/sample.env
index 67659df8..c1368039 100644
--- a/runner/sample.env
+++ b/runner/sample.env
@@ -15,7 +15,7 @@ LOG_LEVEL="INFO"
REMOVE_CONTAINER_ON_EXIT=True
# Client module path of the container engine to be used.
-CONTAINER_CLIENT_PATH=unstract.runner.clients.docker
+CONTAINER_CLIENT_PATH=unstract.runner.clients.docker_client
# Logs Expiry of 24 hours
LOGS_EXPIRATION_TIME_IN_SECOND=86400
@@ -45,3 +45,8 @@ FLASK_ENV=production
FLASK_RUN_HOST=0.0.0.0
FLASK_RUN_PORT=5002
FLASK_APP=unstract.runner:app
+
+# Tool Sidecar
+TOOL_SIDECAR_ENABLED=False
+TOOL_SIDECAR_IMAGE_NAME="unstract/tool-sidecar"
+TOOL_SIDECAR_IMAGE_TAG="0.0.1"
diff --git a/runner/src/unstract/runner/clients/docker.py b/runner/src/unstract/runner/clients/docker_client.py
similarity index 52%
rename from runner/src/unstract/runner/clients/docker.py
rename to runner/src/unstract/runner/clients/docker_client.py
index 8b505020..0f95a903 100644
--- a/runner/src/unstract/runner/clients/docker.py
+++ b/runner/src/unstract/runner/clients/docker_client.py
@@ -17,8 +17,9 @@ from unstract.core.utilities import UnstractUtils
class DockerContainer(ContainerInterface):
- def __init__(self, container: Container) -> None:
+ def __init__(self, container: Container, logger: logging.Logger) -> None:
self.container: Container = container
+ self.logger = logger
@property
def name(self):
@@ -28,21 +29,51 @@ class DockerContainer(ContainerInterface):
for line in self.container.logs(stream=True, follow=follow):
yield line.decode().strip()
- def cleanup(self) -> None:
+ def wait_until_stop(
+ self, main_container_status: Optional[dict[str, Any]] = None
+ ) -> dict:
+ """Wait until the container stops and return the status.
+
+ Returns:
+ dict: Container exit status containing 'StatusCode' and 'Error' if any
+ """
+ if main_container_status and main_container_status.get("StatusCode", 0) != 0:
+ self.logger.info(
+ f"Main container exited with status {main_container_status}, "
+ "stopping sidecar"
+ )
+ timeout = Utils.get_sidecar_wait_timeout()
+ return self.container.wait(timeout=timeout)
+
+ return self.container.wait()
+
+ def cleanup(self, client: Optional[ContainerClientInterface] = None) -> None:
if not self.container or not Utils.remove_container_on_exit():
return
try:
self.container.remove(force=True)
+ if client:
+ client.cleanup_volume()
except Exception as remove_error:
self.logger.error(f"Failed to remove docker container: {remove_error}")
class Client(ContainerClientInterface):
- def __init__(self, image_name: str, image_tag: str, logger: logging.Logger) -> None:
+ def __init__(
+ self,
+ image_name: str,
+ image_tag: str,
+ logger: logging.Logger,
+ sidecar_enabled: bool = False,
+ ) -> None:
self.image_name = image_name
+ self.sidecar_image_name = os.getenv(Env.TOOL_SIDECAR_IMAGE_NAME)
+ self.sidecar_image_tag = os.getenv(Env.TOOL_SIDECAR_IMAGE_TAG)
# If no image_tag is provided will assume the `latest` tag
self.image_tag = image_tag or "latest"
self.logger = logger
+ self.sidecar_enabled = sidecar_enabled
+ self.volume_name: Optional[str] = None
# Create a Docker client that communicates with
# the Docker daemon in the host environment
@@ -119,20 +150,34 @@ class Client(ContainerClientInterface):
self.logger.error(f"An API error occurred: {e}")
return False
- def get_image(self) -> str:
+ def cleanup_volume(self) -> None:
+ """Cleans up the shared volume after both containers are stopped."""
+ try:
+ if self.volume_name:
+ self.client.volumes.get(self.volume_name).remove(force=True)
+ self.logger.info(f"Removed log volume: {self.volume_name}")
+ except Exception as e:
+ self.logger.warning(f"Failed to remove log volume: {e}")
+
+ def get_image(self, sidecar: bool = False) -> str:
"""Will check if image exists locally and pulls the image using
`self.image_name` and `self.image_tag` if necessary.
Returns:
str: image string combining repo name and tag like `ubuntu:22.04`
"""
- image_name_with_tag = f"{self.image_name}:{self.image_tag}"
+ if sidecar:
+ image_name_with_tag = f"{self.sidecar_image_name}:{self.sidecar_image_tag}"
+ repository = self.sidecar_image_name
+ else:
+ image_name_with_tag = f"{self.image_name}:{self.image_tag}"
+ repository = self.image_name
if self.__image_exists(image_name_with_tag):
return image_name_with_tag
self.logger.info("Pulling the container: %s", image_name_with_tag)
resp = self.client.api.pull(
- repository=self.image_name,
+ repository=repository,
tag=self.image_tag,
stream=True,
decode=True,
@@ -159,13 +204,36 @@ class Client(ContainerClientInterface):
self,
command: list[str],
file_execution_id: str,
+ shared_log_dir: str,
container_name: Optional[str] = None,
envs: Optional[dict[str, Any]] = None,
auto_remove: bool = False,
+ sidecar: bool = False,
+ **kwargs,
) -> dict[str, Any]:
if envs is None:
envs = {}
- mounts = []
+
+ # Create shared volume for logs
+ volume_name = f"logs-{file_execution_id}"
+ self.volume_name = volume_name
+
+ # Ensure we're mounting to a directory
+ mount_target = shared_log_dir
+ if not mount_target.endswith("/"):
+ mount_target = os.path.dirname(mount_target)
+ if self.sidecar_enabled:
+ mounts = [
+ {
+ "Type": "volume",
+ "Source": volume_name,
+ "Target": mount_target,
+ }
+ ]
+ stream_logs = False
+ else:
+ mounts = []
+ stream_logs = True
if not container_name:
container_name = UnstractUtils.build_tool_container_name(
@@ -174,12 +242,15 @@ class Client(ContainerClientInterface):
file_execution_id=file_execution_id,
)
+ if sidecar:
+ container_name = f"{container_name}-sidecar"
+
return {
"name": container_name,
- "image": self.get_image(),
- "command": command,
+ "image": self.get_image(sidecar=sidecar),
+ "entrypoint": command,
"detach": True,
- "stream": True,
+ "stream": stream_logs,
"auto_remove": auto_remove,
"environment": envs,
"stderr": True,
@@ -188,6 +259,69 @@ class Client(ContainerClientInterface):
"mounts": mounts,
}
- def run_container(self, config: dict[Any, Any]) -> Any:
- self.logger.info(f"Docker config: {config}")
- return DockerContainer(self.client.containers.run(**config))
+ def run_container(self, container_config: dict[str, Any]) -> DockerContainer:
+ """Run a container with the given configuration.
+
+ Args:
+ container_config (dict[str, Any]): Container configuration
+
+ Returns:
+ DockerContainer: Running container instance
+ """
+ try:
+ self.logger.info("Running container with config: %s", container_config)
+ container = self.client.containers.run(**container_config)
+ return DockerContainer(
+ container=container,
+ logger=self.logger,
+ )
+ except ImageNotFound:
+ self.logger.error(f"Image {self.image_name}:{self.image_tag} not found")
+ raise
+
+ def run_container_with_sidecar(
+ self, container_config: dict[str, Any], sidecar_config: dict[str, Any]
+ ) -> tuple[DockerContainer, Optional[DockerContainer]]:
+ """Run a container with sidecar.
+
+ Args:
+ container_config (dict[str, Any]): Container configuration
+ sidecar_config (dict[str, Any]): Sidecar configuration
+
+ Returns:
+ tuple[DockerContainer, Optional[DockerContainer]]:
+ Running container and sidecar instance
+ """
+ try:
+ self.logger.info("Running container with config: %s", container_config)
+ container = self.client.containers.run(**container_config)
+ self.logger.info("Running sidecar with config: %s", sidecar_config)
+ sidecar = self.client.containers.run(**sidecar_config)
+ return DockerContainer(
+ container=container,
+ logger=self.logger,
+ ), DockerContainer(
+ container=sidecar,
+ logger=self.logger,
+ )
+ except ImageNotFound:
+ self.logger.error(f"Image {self.image_name}:{self.image_tag} not found")
+ raise
+
+ def wait_for_container_stop(
+ self,
+ container: Optional[DockerContainer],
+ main_container_status: Optional[dict[str, Any]] = None,
+ ) -> Optional[dict]:
+ """Wait for the container to stop and return its exit status.
+
+ Args:
+ container (DockerContainer): Container to wait for
+
+ Returns:
+ Optional[dict]:
+ Container exit status containing 'StatusCode' and 'Error' if any
+ """
+ if not container:
+ return None
+ return container.wait_until_stop(main_container_status)
diff --git a/runner/src/unstract/runner/clients/helper.py b/runner/src/unstract/runner/clients/helper.py
index ec399413..b2bed8a0 100644
--- a/runner/src/unstract/runner/clients/helper.py
+++ b/runner/src/unstract/runner/clients/helper.py
@@ -11,7 +11,7 @@ class ContainerClientHelper:
@staticmethod
def get_container_client() -> ContainerClientInterface:
client_path = os.getenv(
- "CONTAINER_CLIENT_PATH", "unstract.runner.clients.docker"
+ "CONTAINER_CLIENT_PATH", "unstract.runner.clients.docker_client"
)
logger.info("Loading the container client from path:", client_path)
return import_module(client_path).Client
diff --git a/runner/src/unstract/runner/clients/interface.py b/runner/src/unstract/runner/clients/interface.py
index 2ad65fb4..ea0f576b 100644
--- a/runner/src/unstract/runner/clients/interface.py
+++ b/runner/src/unstract/runner/clients/interface.py
@@ -24,7 +24,7 @@ class ContainerInterface(ABC):
pass
@abstractmethod
- def cleanup(self) -> None:
+ def cleanup(self, client: Optional["ContainerClientInterface"] = None) -> None:
"""Stops and removes the running container."""
pass
@@ -32,7 +32,13 @@ class ContainerInterface(ABC):
class ContainerClientInterface(ABC):
@abstractmethod
- def __init__(self, image_name: str, image_tag: str, logger: logging.Logger) -> None:
+ def __init__(
+ self,
+ image_name: str,
+ image_tag: str,
+ logger: logging.Logger,
+ sidecar_enabled: bool = False,
+ ) -> None:
pass
@abstractmethod
@@ -48,9 +54,26 @@ class ContainerClientInterface(ABC):
"""
pass
+ @abstractmethod
+ def run_container_with_sidecar(
+ self, container_config: dict[Any, Any], sidecar_config: dict[Any, Any]
+ ) -> tuple[ContainerInterface, Optional[ContainerInterface]]:
+ """Method to run a container with provided config. This method will run
+ the container.
+
+ Args:
+ container_config (dict[Any, Any]): Configuration for container.
+ sidecar_config (dict[Any, Any]): Configuration for sidecar.
+
+ Returns:
+ tuple[ContainerInterface, Optional[ContainerInterface]]:
+ Returns a Container instance.
+ """
+ pass
+
@abstractmethod
def get_image(self) -> str:
- """Consturct image name with tag and repo name. Pulls the image if
+ """Construct image name with tag and repo name. Pulls the image if
needed.
Returns:
@@ -58,14 +81,34 @@ class ContainerClientInterface(ABC):
"""
pass
+ @abstractmethod
+ def wait_for_container_stop(
+ self,
+ container: Optional[ContainerInterface],
+ main_container_status: Optional[dict] = None,
+ ) -> Optional[dict]:
+ """Wait for the container to stop and return the exit code.
+
+ Args:
+ container (Optional[ContainerInterface]): The container to wait for.
+ main_container_status (Optional[dict]): The status of the main container.
+
+ Returns:
+ str: The exit code of the container.
+ """
+ pass
+
@abstractmethod
def get_container_run_config(
self,
command: list[str],
file_execution_id: str,
+ shared_log_dir: str,
container_name: Optional[str] = None,
envs: Optional[dict[str, Any]] = None,
auto_remove: bool = False,
+ sidecar: bool = False,
+ **kwargs,
) -> dict[str, Any]:
"""Generate the configuration dictionary to run the container.
@@ -73,3 +116,8 @@ class ContainerClientInterface(ABC):
dict[str, Any]: Configuration for running the container.
"""
pass
+
+ @abstractmethod
+ def cleanup_volume(self) -> None:
+ """Cleans up the shared volume"""
+ pass
diff --git a/runner/src/unstract/runner/clients/test_docker.py b/runner/src/unstract/runner/clients/test_docker.py
index c275e15e..0d789fb7 100644
--- a/runner/src/unstract/runner/clients/test_docker.py
+++ b/runner/src/unstract/runner/clients/test_docker.py
@@ -6,15 +6,16 @@ import pytest
from docker.errors import ImageNotFound
from unstract.runner.constants import Env
-from .docker import Client, DockerContainer
+from .docker_client import Client, DockerContainer
-DOCKER_MODULE = "unstract.runner.clients.docker"
+DOCKER_MODULE = "unstract.runner.clients.docker_client"
@pytest.fixture
def docker_container():
container = MagicMock()
- return DockerContainer(container)
+ logger = logging.getLogger("test-logger")
+ return DockerContainer(container, logger)
@pytest.fixture
@@ -22,7 +23,15 @@ def docker_client():
image_name = "test-image"
image_tag = "latest"
logger = logging.getLogger("test-logger")
- return Client(image_name, image_tag, logger)
+ return Client(image_name, image_tag, logger, sidecar_enabled=False)
+
+
+@pytest.fixture
+def docker_client_with_sidecar():
+ image_name = "test-image"
+ image_tag = "latest"
+ logger = logging.getLogger("test-logger")
+ return Client(image_name, image_tag, logger, sidecar_enabled=True)
def test_logs(docker_container, mocker):
@@ -110,6 +119,7 @@ def test_get_container_run_config(docker_client, mocker):
"""Test the get_container_run_config method."""
command = ["echo", "hello"]
file_execution_id = "run123"
+ shared_log_dir = "/shared/logs"
mocker.patch.object(docker_client, "_Client__image_exists", return_value=True)
mocker_normalize = mocker.patch(
@@ -117,7 +127,11 @@ def test_get_container_run_config(docker_client, mocker):
return_value="test-image",
)
config = docker_client.get_container_run_config(
- command, file_execution_id, envs={"KEY": "VALUE"}, auto_remove=True
+ command,
+ file_execution_id,
+ shared_log_dir,
+ envs={"KEY": "VALUE"},
+ auto_remove=True,
)
mocker_normalize.assert_called_once_with(
@@ -127,7 +141,7 @@ def test_get_container_run_config(docker_client, mocker):
)
assert config["name"] == "test-image"
assert config["image"] == "test-image:latest"
- assert config["command"] == ["echo", "hello"]
+ assert config["entrypoint"] == ["echo", "hello"]
assert config["environment"] == {"KEY": "VALUE"}
assert config["mounts"] == []
@@ -137,6 +151,7 @@ def test_get_container_run_config_without_mount(docker_client, mocker):
os.environ[Env.EXECUTION_DATA_DIR] = "/source"
command = ["echo", "hello"]
file_execution_id = "run123"
+ shared_log_dir = "/shared/logs"
mocker.patch.object(docker_client, "_Client__image_exists", return_value=True)
mocker_normalize = mocker.patch(
@@ -144,7 +159,7 @@ def test_get_container_run_config_without_mount(docker_client, mocker):
return_value="test-image",
)
config = docker_client.get_container_run_config(
- command, file_execution_id, auto_remove=True
+ command, file_execution_id, shared_log_dir, auto_remove=True
)
mocker_normalize.assert_called_once_with(
@@ -154,7 +169,7 @@ def test_get_container_run_config_without_mount(docker_client, mocker):
)
assert config["name"] == "test-image"
assert config["image"] == "test-image:latest"
- assert config["command"] == ["echo", "hello"]
+ assert config["entrypoint"] == ["echo", "hello"]
assert config["environment"] == {}
assert config["mounts"] == []
@@ -167,7 +182,7 @@ def test_run_container(docker_client, mocker):
config = {
"name": "test-image",
"image": "test-image:latest",
- "command": ["echo", "hello"],
+ "entrypoint": ["echo", "hello"],
"detach": True,
"stream": True,
"auto_remove": True,
@@ -182,5 +197,96 @@ def test_run_container(docker_client, mocker):
mock_client.containers.run.assert_called_once_with(**config)
+def test_get_image_for_sidecar(docker_client_with_sidecar, mocker):
+ """Test the get_image method."""
+ # Mock environment variables
+ mocker.patch.dict(
+ os.environ,
+ {
+ Env.TOOL_SIDECAR_IMAGE_NAME: "test-sidecar-image",
+ Env.TOOL_SIDECAR_IMAGE_TAG: "latest",
+ },
+ )
+
+ # Re-initialize client to pick up mocked env vars
+ docker_client_with_sidecar.sidecar_image_name = os.getenv(
+ Env.TOOL_SIDECAR_IMAGE_NAME
+ )
+ docker_client_with_sidecar.sidecar_image_tag = os.getenv(Env.TOOL_SIDECAR_IMAGE_TAG)
+
+ # Patch the client object to control its behavior
+ mock_client = mocker.patch.object(docker_client_with_sidecar, "client")
+ # Patch the images attribute of the client to control its behavior
+ mock_images = mocker.MagicMock()
+ mock_client.images = mock_images
+
+ # Case 1: Image exists
+ mock_images.get.side_effect = MagicMock()
+ assert (
+ docker_client_with_sidecar.get_image(sidecar=True)
+ == "test-sidecar-image:latest"
+ )
+ mock_images.get.assert_called_once_with("test-sidecar-image:latest")
+
+ # Case 2: Image does not exist
+ mock_images.get.side_effect = ImageNotFound("Image not found")
+ mock_pull = mocker.patch.object(docker_client_with_sidecar.client.api, "pull")
+ mock_pull.return_value = iter([{"status": "pulling"}])
+ assert (
+ docker_client_with_sidecar.get_image(sidecar=True)
+ == "test-sidecar-image:latest"
+ )
+ mock_pull.assert_called_once_with(
+ repository="test-sidecar-image",
+ tag="latest",
+ stream=True,
+ decode=True,
+ )
+
+
+def test_sidecar_container(docker_client_with_sidecar, mocker):
+ """Test the sidecar_container method."""
+ # Patch the client object to control its behavior
+ mock_client = mocker.patch.object(docker_client_with_sidecar, "client")
+
+ config = {
+ "name": "test-image",
+ "image": "test-image:latest",
+ "entrypoint": ["echo", "hello"],
+ "detach": True,
+ "stream": False,
+ "auto_remove": True,
+ "environment": {"KEY": "VALUE"},
+ "stderr": True,
+ "stdout": True,
+ "network": "",
+ "mounts": [
+ {
+ "Type": "volume",
+ "Source": "logs-test-id",
+ "Target": "/shared",
+ }
+ ],
+ }
+
+ shared_log_dir = "/shared/logs"
+ test_config = docker_client_with_sidecar.get_container_run_config(
+ command=["echo", "hello"],
+ file_execution_id="test-id",
+ shared_log_dir=shared_log_dir,
+ envs={"KEY": "VALUE"},
+ auto_remove=True,
+ sidecar=True,
+ )
+
+ # Test the actual configuration generated
+ assert test_config["stream"] is False
+ assert test_config["mounts"] == config["mounts"]
+ assert isinstance(
+ docker_client_with_sidecar.run_container(test_config), DockerContainer
+ )
+ mock_client.containers.run.assert_called_once_with(**test_config)
+
+
if __name__ == "__main__":
pytest.main()
diff --git a/runner/src/unstract/runner/constants.py b/runner/src/unstract/runner/constants.py
index d2533962..9aeb1baf 100644
--- a/runner/src/unstract/runner/constants.py
+++ b/runner/src/unstract/runner/constants.py
@@ -31,3 +31,12 @@ class Env:
)
EXECUTION_DATA_DIR = "EXECUTION_DATA_DIR"
FLIPT_SERVICE_AVAILABLE = "FLIPT_SERVICE_AVAILABLE"
+ TOOL_SIDECAR_ENABLED = "TOOL_SIDECAR_ENABLED"
+ TOOL_SIDECAR_IMAGE_NAME = "TOOL_SIDECAR_IMAGE_NAME"
+ TOOL_SIDECAR_CONTAINER_WAIT_TIMEOUT = "TOOL_SIDECAR_CONTAINER_WAIT_TIMEOUT"
+ TOOL_SIDECAR_IMAGE_TAG = "TOOL_SIDECAR_IMAGE_TAG"
+ REDIS_HOST = "REDIS_HOST"
+ REDIS_PORT = "REDIS_PORT"
+ REDIS_USER = "REDIS_USER"
+ REDIS_PASSWORD = "REDIS_PASSWORD"
+ CELERY_BROKER_URL = "CELERY_BROKER_URL"
diff --git a/runner/src/unstract/runner/runner.py b/runner/src/unstract/runner/runner.py
index 98c339cf..f59f7bd0 100644
--- a/runner/src/unstract/runner/runner.py
+++ b/runner/src/unstract/runner/runner.py
@@ -13,12 +13,13 @@ from unstract.runner.clients.interface import (
)
from unstract.runner.constants import Env, LogLevel, LogType, ToolKey
from unstract.runner.exception import ToolRunException
+from unstract.runner.utils import Utils
from unstract.core.constants import LogFieldName
from unstract.core.pubsub_helper import LogPublisher
load_dotenv()
-# Loads the container clinet class.
+# Loads the container client class.
client_class = ContainerClientHelper.get_container_client()
@@ -28,8 +29,9 @@ class UnstractRunner:
# If no image_tag is provided will assume the `latest` tag
self.image_tag = image_tag or "latest"
self.logger = app.logger
+ self.sidecar_enabled = Utils.is_sidecar_enabled()
self.client: ContainerClientInterface = client_class(
- self.image_name, self.image_tag, self.logger
+ self.image_name, self.image_tag, self.logger, self.sidecar_enabled
)
# Function to stream logs
@@ -191,6 +193,45 @@ class UnstractRunner:
return additional_envs
+ def _get_sidecar_container_config(
+ self,
+ container_name: str,
+ shared_log_dir: str,
+ shared_log_file: str,
+ file_execution_id: str,
+ execution_id: str,
+ organization_id: str,
+ messaging_channel: str,
+ tool_instance_id: str,
+ ) -> dict[str, Any]:
+ """
+ Returns the container configuration for the sidecar container.
+ """
+
+ sidecar_env = {
+ "LOG_PATH": shared_log_file,
+ "REDIS_HOST": os.getenv(Env.REDIS_HOST),
+ "REDIS_PORT": os.getenv(Env.REDIS_PORT),
+ "REDIS_USER": os.getenv(Env.REDIS_USER),
+ "REDIS_PASSWORD": os.getenv(Env.REDIS_PASSWORD),
+ "TOOL_INSTANCE_ID": tool_instance_id,
+ "EXECUTION_ID": execution_id,
+ "ORGANIZATION_ID": organization_id,
+ "FILE_EXECUTION_ID": file_execution_id,
+ "MESSAGING_CHANNEL": messaging_channel,
+ "LOG_LEVEL": os.getenv(Env.LOG_LEVEL, "INFO"),
+ "CELERY_BROKER_URL": os.getenv(Env.CELERY_BROKER_URL),
+ }
+ sidecar_config = self.client.get_container_run_config(
+ command=[],
+ file_execution_id=file_execution_id,
+ shared_log_dir=shared_log_dir,
+ container_name=container_name,
+ envs=sidecar_env,
+ sidecar=True,
+ )
+ return sidecar_config
+
def run_command(self, command: str) -> Optional[Any]:
"""Runs any given command on the container.
@@ -228,6 +269,38 @@ class UnstractRunner:
container.cleanup()
return None
+ def _get_container_command(
+ self, shared_log_dir: str, shared_log_file: str, settings: dict[str, Any]
+ ):
+ """Returns the container command to run the tool."""
+
+ settings_json = json.dumps(settings).replace("'", "\\'")
+ # Prepare the tool execution command
+ tool_cmd = (
+ f"opentelemetry-instrument python main.py --command RUN "
+ f"--settings '{settings_json}' --log-level DEBUG"
+ )
+
+ if not self.sidecar_enabled:
+ return tool_cmd
+
+ # Shell script components
+ mkdir_cmd = f"mkdir -p {shared_log_dir}"
+ run_tool_fn = (
+ "run_tool() { "
+ f"{tool_cmd}; "
+ "exit_code=$?; "
+ f'echo "{LogFieldName.TOOL_TERMINATION_MARKER} with exit code $exit_code" '
+ f">> {shared_log_file}; "
+ "return $exit_code; "
+ "}"
+ )
+ execute_cmd = f"run_tool 2>&1 | tee -a {shared_log_file}"
+
+ # Combine all commands
+ shell_script = f"{mkdir_cmd} && {run_tool_fn}; {execute_cmd}"
+ return shell_script
+
def run_container(
self,
organization_id: str,
@@ -264,20 +337,41 @@ class UnstractRunner:
# Get additional environment variables to pass to the container
additional_env = self._parse_additional_envs()
+ tool_instance_id = str(settings.get(ToolKey.TOOL_INSTANCE_ID))
+ shared_log_dir = "/shared/logs" # Mount directory, not file
+ shared_log_file = os.path.join(shared_log_dir, "logs.txt")
+ container_command = self._get_container_command(
+ shared_log_dir=shared_log_dir,
+ shared_log_file=shared_log_file,
+ settings=settings,
+ )
container_config = self.client.get_container_run_config(
- command=[
- "--command",
- "RUN",
- "--settings",
- json.dumps(settings),
- "--log-level",
- "DEBUG",
- ],
+ command=["/bin/sh", "-c", container_command],
file_execution_id=file_execution_id,
+ shared_log_dir=shared_log_dir, # Pass directory for mounting
container_name=container_name,
envs={**envs, **additional_env},
+ organization_id=organization_id,
+ workflow_id=workflow_id,
+ execution_id=execution_id,
+ messaging_channel=messaging_channel,
+ tool_instance_id=tool_instance_id,
)
+
+ sidecar_config: Optional[dict[str, Any]] = None
+ if self.sidecar_enabled:
+ sidecar_config = self._get_sidecar_container_config(
+ container_name=container_name,
+ shared_log_dir=shared_log_dir,
+ shared_log_file=shared_log_file,
+ file_execution_id=file_execution_id,
+ execution_id=execution_id,
+ organization_id=organization_id,
+ messaging_channel=messaging_channel,
+ tool_instance_id=tool_instance_id,
+ )
+
# Add labels to container for logging with Loki.
# This only required for observability.
try:
@@ -288,27 +382,54 @@ class UnstractRunner:
# Run the Docker container
container = None
+ sidecar = None
result = {"type": "RESULT", "result": None}
try:
self.logger.info(
f"Execution ID: {execution_id}, running docker "
f"container: {container_name}"
)
- container: ContainerInterface = self.client.run_container(container_config)
- tool_instance_id = str(settings.get(ToolKey.TOOL_INSTANCE_ID))
- # Stream logs
- self.stream_logs(
- container=container,
- tool_instance_id=tool_instance_id,
- channel=messaging_channel,
- execution_id=execution_id,
- organization_id=organization_id,
- file_execution_id=file_execution_id,
- )
- self.logger.info(
- f"Execution ID: {execution_id}, docker "
- f"container: {container_name} ran successfully"
- )
+ if sidecar_config:
+ containers: tuple[ContainerInterface, Optional[ContainerInterface]] = (
+ self.client.run_container_with_sidecar(
+ container_config, sidecar_config
+ )
+ )
+ container, sidecar = containers
+ status = self.client.wait_for_container_stop(container)
+ self.logger.info(
+ f"Execution ID: {execution_id}, docker "
+ f"container: {container_name} ran with status: {status}"
+ )
+ self.client.wait_for_container_stop(
+ sidecar, main_container_status=status
+ )
+ self.logger.info(
+ f"Execution ID: {execution_id}, docker "
+ f"container: {container_name} completed execution"
+ )
+ else:
+ container: ContainerInterface = self.client.run_container(
+ container_config
+ )
+ self.logger.info(
+ f"Execution ID: {execution_id}, docker "
+ f"container: {container_name} streaming logs..."
+ )
+ # Stream logs
+ self.stream_logs(
+ container=container,
+ tool_instance_id=tool_instance_id,
+ channel=messaging_channel,
+ execution_id=execution_id,
+ organization_id=organization_id,
+ file_execution_id=file_execution_id,
+ )
+ self.logger.info(
+ f"Execution ID: {execution_id}, docker "
+ f"container: {container_name} ran successfully"
+ )
+
except ToolRunException as te:
self.logger.error(
"Error while running docker container"
@@ -323,5 +444,7 @@ class UnstractRunner:
)
result = {"type": "RESULT", "result": None, "error": str(e)}
if container:
- container.cleanup()
+ container.cleanup(client=self.client)
+ if sidecar:
+ sidecar.cleanup(client=self.client)
return result
diff --git a/runner/src/unstract/runner/utils.py b/runner/src/unstract/runner/utils.py
index dffccef2..9a03f5ad 100644
--- a/runner/src/unstract/runner/utils.py
+++ b/runner/src/unstract/runner/utils.py
@@ -1,4 +1,6 @@
+import logging
import os
+from typing import Optional
from dotenv import load_dotenv
from unstract.runner.constants import Env
@@ -7,6 +9,9 @@ from unstract.runner.enum import LogLevel
load_dotenv()
+logger = logging.getLogger(__name__)
+
+
class Utils:
@staticmethod
def str_to_bool(string: str) -> bool:
@@ -25,6 +30,27 @@ class Utils:
else:
raise ValueError("Invalid boolean string")
+ @staticmethod
+ def str_to_int(value: Optional[str], default: int) -> int:
+ """
+ Safely convert a string to an integer, returning a default if conversion fails.
+
+ Args:
+ value (Optional[str]): The string to convert.
+ default (int): The fallback value if conversion fails.
+
+ Returns:
+ int: Parsed integer or the default value.
+ """
+ if not value:
+ return default
+
+ try:
+ return int(value)
+ except ValueError:
+ logger.warning(f"Invalid integer value '{value}'; using default: {default}")
+ return default
+
@staticmethod
def get_log_level() -> LogLevel:
"""Get log level from environment variable.
@@ -46,3 +72,36 @@ class Utils:
bool
"""
return Utils.str_to_bool(os.getenv(Env.REMOVE_CONTAINER_ON_EXIT, "true"))
+
+ @staticmethod
+ def is_sidecar_enabled() -> bool:
+ """Get sidecar enabled from environment variable.
+
+ Returns:
+ bool
+ """
+ if not Utils.str_to_bool(os.getenv(Env.TOOL_SIDECAR_ENABLED, "false")):
+ return False
+
+ image_name = os.getenv(Env.TOOL_SIDECAR_IMAGE_NAME)
+ image_tag = os.getenv(Env.TOOL_SIDECAR_IMAGE_TAG)
+
+ if not image_name or not image_tag:
+ logger.warning(
+ "Sidecar is enabled but configuration is incomplete: "
+ f"image_name={'missing' if not image_name else 'set'}, "
+ f"image_tag={'missing' if not image_tag else 'set'}"
+ )
+ return False
+ return True
+
+ @staticmethod
+ def get_sidecar_wait_timeout() -> int:
+ """
+        Retrieve the sidecar container wait timeout from environment variables.
+
+ Returns:
+ int: Timeout in seconds, defaulting to 5 if not set or invalid.
+ """
+ raw_timeout = os.getenv(Env.TOOL_SIDECAR_CONTAINER_WAIT_TIMEOUT)
+ return Utils.str_to_int(raw_timeout, default=5)
diff --git a/tool-sidecar/README.md b/tool-sidecar/README.md
new file mode 100644
index 00000000..b4492ce9
--- /dev/null
+++ b/tool-sidecar/README.md
@@ -0,0 +1,13 @@
+# Unstract Tool Sidecar
+
+A companion container that runs alongside main tool containers in Unstract for log processing, monitoring, and real-time streaming.
+
+## Key Features
+- Log processing and real-time streaming to Redis
+- Monitors tool container output and completion signals
+- Runs in the same pod as the tool container (for Kubernetes deployments)
+- Handles organization- and execution-specific logging
+- Part of the containerized tool execution infrastructure
+
+## Architecture
+The sidecar container operates as part of Unstract's containerized infrastructure, working in tandem with the main tool containers to provide real-time monitoring and log management capabilities. It uses Redis for efficient log publishing and streaming.
diff --git a/tool-sidecar/entrypoint.sh b/tool-sidecar/entrypoint.sh
new file mode 100755
index 00000000..558b4d2a
--- /dev/null
+++ b/tool-sidecar/entrypoint.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+# Activate virtual environment
+. /app/.venv/bin/activate
+
+# Run the application with OpenTelemetry instrumentation
+exec opentelemetry-instrument python -m unstract.tool_sidecar.log_processor
diff --git a/tool-sidecar/pdm.lock b/tool-sidecar/pdm.lock
new file mode 100644
index 00000000..87ca33f9
--- /dev/null
+++ b/tool-sidecar/pdm.lock
@@ -0,0 +1,533 @@
+# This file is @generated by PDM.
+# It is not intended for manual editing.
+
+[metadata]
+groups = ["default", "deploy"]
+strategy = ["cross_platform", "inherit_metadata"]
+lock_version = "4.4.2"
+content_hash = "sha256:14a3589a7ff7190ce3cce5293d91c02603d9514f205f5b9ad04a5c23ff1411cf"
+
+[[package]]
+name = "async-timeout"
+version = "5.0.1"
+requires_python = ">=3.8"
+summary = "Timeout context manager for asyncio programs"
+groups = ["default"]
+marker = "python_full_version < \"3.11.3\""
+files = [
+ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"},
+ {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"},
+]
+
+[[package]]
+name = "certifi"
+version = "2025.1.31"
+requires_python = ">=3.6"
+summary = "Python package for providing Mozilla's CA Bundle."
+groups = ["deploy"]
+files = [
+ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"},
+ {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.1"
+requires_python = ">=3.7"
+summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+groups = ["deploy"]
+files = [
+ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"},
+ {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"},
+ {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"},
+]
+
+[[package]]
+name = "deprecated"
+version = "1.2.18"
+requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
+summary = "Python @deprecated decorator to deprecate old python classes, functions or methods."
+groups = ["deploy"]
+dependencies = [
+ "wrapt<2,>=1.10",
+]
+files = [
+ {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"},
+ {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"},
+]
+
+[[package]]
+name = "googleapis-common-protos"
+version = "1.69.2"
+requires_python = ">=3.7"
+summary = "Common protobufs used in Google APIs"
+groups = ["deploy"]
+dependencies = [
+ "protobuf!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<7.0.0,>=3.20.2",
+]
+files = [
+ {file = "googleapis_common_protos-1.69.2-py3-none-any.whl", hash = "sha256:0b30452ff9c7a27d80bfc5718954063e8ab53dd3697093d3bc99581f5fd24212"},
+ {file = "googleapis_common_protos-1.69.2.tar.gz", hash = "sha256:3e1b904a27a33c821b4b749fd31d334c0c9c30e6113023d495e48979a3dc9c5f"},
+]
+
+[[package]]
+name = "grpcio"
+version = "1.71.0"
+requires_python = ">=3.9"
+summary = "HTTP/2-based RPC framework"
+groups = ["deploy"]
+marker = "python_version < \"3.13\""
+files = [
+ {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"},
+ {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"},
+ {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea"},
+ {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69"},
+ {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73"},
+ {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804"},
+ {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6"},
+ {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5"},
+ {file = "grpcio-1.71.0-cp310-cp310-win32.whl", hash = "sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509"},
+ {file = "grpcio-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a"},
+ {file = "grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef"},
+ {file = "grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7"},
+ {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7"},
+ {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7"},
+ {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e"},
+ {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b"},
+ {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7"},
+ {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3"},
+ {file = "grpcio-1.71.0-cp311-cp311-win32.whl", hash = "sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444"},
+ {file = "grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b"},
+ {file = "grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537"},
+ {file = "grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7"},
+ {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec"},
+ {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594"},
+ {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c"},
+ {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67"},
+ {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db"},
+ {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79"},
+ {file = "grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a"},
+ {file = "grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8"},
+ {file = "grpcio-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d"},
+ {file = "grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e"},
+ {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033"},
+ {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97"},
+ {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d"},
+ {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41"},
+ {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3"},
+ {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32"},
+ {file = "grpcio-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455"},
+ {file = "grpcio-1.71.0-cp39-cp39-win_amd64.whl", hash = "sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a"},
+ {file = "grpcio-1.71.0.tar.gz", hash = "sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c"},
+]
+
+[[package]]
+name = "idna"
+version = "3.10"
+requires_python = ">=3.6"
+summary = "Internationalized Domain Names in Applications (IDNA)"
+groups = ["deploy"]
+files = [
+ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
+ {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
+]
+
+[[package]]
+name = "importlib-metadata"
+version = "8.6.1"
+requires_python = ">=3.9"
+summary = "Read metadata from Python packages"
+groups = ["deploy"]
+dependencies = [
+ "zipp>=3.20",
+]
+files = [
+ {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"},
+ {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"},
+]
+
+[[package]]
+name = "opentelemetry-api"
+version = "1.31.1"
+requires_python = ">=3.8"
+summary = "OpenTelemetry Python API"
+groups = ["deploy"]
+dependencies = [
+ "deprecated>=1.2.6",
+ "importlib-metadata<8.7.0,>=6.0",
+]
+files = [
+ {file = "opentelemetry_api-1.31.1-py3-none-any.whl", hash = "sha256:1511a3f470c9c8a32eeea68d4ea37835880c0eed09dd1a0187acc8b1301da0a1"},
+ {file = "opentelemetry_api-1.31.1.tar.gz", hash = "sha256:137ad4b64215f02b3000a0292e077641c8611aab636414632a9b9068593b7e91"},
+]
+
+[[package]]
+name = "opentelemetry-distro"
+version = "0.52b1"
+requires_python = ">=3.8"
+summary = "OpenTelemetry Python Distro"
+groups = ["deploy"]
+dependencies = [
+ "opentelemetry-api~=1.12",
+ "opentelemetry-instrumentation==0.52b1",
+ "opentelemetry-sdk~=1.13",
+]
+files = [
+ {file = "opentelemetry_distro-0.52b1-py3-none-any.whl", hash = "sha256:5562a039e4c36524d0dbb45a0857f8acfda3afbef7e8462513c7946309eb5c8c"},
+ {file = "opentelemetry_distro-0.52b1.tar.gz", hash = "sha256:cb8df34a95034c7d038fd245556fb732853dc66473746d652bee6c5c2fb7dfc6"},
+]
+
+[[package]]
+name = "opentelemetry-exporter-otlp"
+version = "1.31.1"
+requires_python = ">=3.8"
+summary = "OpenTelemetry Collector Exporters"
+groups = ["deploy"]
+dependencies = [
+ "opentelemetry-exporter-otlp-proto-grpc==1.31.1",
+ "opentelemetry-exporter-otlp-proto-http==1.31.1",
+]
+files = [
+ {file = "opentelemetry_exporter_otlp-1.31.1-py3-none-any.whl", hash = "sha256:36286c28709cbfba5177129ec30bfe4de67bdec8f375c1703014e0eea44322c6"},
+ {file = "opentelemetry_exporter_otlp-1.31.1.tar.gz", hash = "sha256:004db12bfafb9e07b79936783d91db214b1e208a152b5c36b1f2ef2264940692"},
+]
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-common"
+version = "1.31.1"
+requires_python = ">=3.8"
+summary = "OpenTelemetry Protobuf encoding"
+groups = ["deploy"]
+dependencies = [
+ "opentelemetry-proto==1.31.1",
+]
+files = [
+ {file = "opentelemetry_exporter_otlp_proto_common-1.31.1-py3-none-any.whl", hash = "sha256:7cadf89dbab12e217a33c5d757e67c76dd20ce173f8203e7370c4996f2e9efd8"},
+ {file = "opentelemetry_exporter_otlp_proto_common-1.31.1.tar.gz", hash = "sha256:c748e224c01f13073a2205397ba0e415dcd3be9a0f95101ba4aace5fc730e0da"},
+]
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-grpc"
+version = "1.31.1"
+requires_python = ">=3.8"
+summary = "OpenTelemetry Collector Protobuf over gRPC Exporter"
+groups = ["deploy"]
+dependencies = [
+ "deprecated>=1.2.6",
+ "googleapis-common-protos~=1.52",
+ "grpcio<2.0.0,>=1.63.2; python_version < \"3.13\"",
+ "opentelemetry-api~=1.15",
+ "opentelemetry-exporter-otlp-proto-common==1.31.1",
+ "opentelemetry-proto==1.31.1",
+ "opentelemetry-sdk~=1.31.1",
+]
+files = [
+ {file = "opentelemetry_exporter_otlp_proto_grpc-1.31.1-py3-none-any.whl", hash = "sha256:f4055ad2c9a2ea3ae00cbb927d6253233478b3b87888e197d34d095a62305fae"},
+ {file = "opentelemetry_exporter_otlp_proto_grpc-1.31.1.tar.gz", hash = "sha256:c7f66b4b333c52248dc89a6583506222c896c74824d5d2060b818ae55510939a"},
+]
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-http"
+version = "1.31.1"
+requires_python = ">=3.8"
+summary = "OpenTelemetry Collector Protobuf over HTTP Exporter"
+groups = ["deploy"]
+dependencies = [
+ "deprecated>=1.2.6",
+ "googleapis-common-protos~=1.52",
+ "opentelemetry-api~=1.15",
+ "opentelemetry-exporter-otlp-proto-common==1.31.1",
+ "opentelemetry-proto==1.31.1",
+ "opentelemetry-sdk~=1.31.1",
+ "requests~=2.7",
+]
+files = [
+ {file = "opentelemetry_exporter_otlp_proto_http-1.31.1-py3-none-any.whl", hash = "sha256:5dee1f051f096b13d99706a050c39b08e3f395905f29088bfe59e54218bd1cf4"},
+ {file = "opentelemetry_exporter_otlp_proto_http-1.31.1.tar.gz", hash = "sha256:723bd90eb12cfb9ae24598641cb0c92ca5ba9f1762103902f6ffee3341ba048e"},
+]
+
+[[package]]
+name = "opentelemetry-instrumentation"
+version = "0.52b1"
+requires_python = ">=3.8"
+summary = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python"
+groups = ["deploy"]
+dependencies = [
+ "opentelemetry-api~=1.4",
+ "opentelemetry-semantic-conventions==0.52b1",
+ "packaging>=18.0",
+ "wrapt<2.0.0,>=1.0.0",
+]
+files = [
+ {file = "opentelemetry_instrumentation-0.52b1-py3-none-any.whl", hash = "sha256:8c0059c4379d77bbd8015c8d8476020efe873c123047ec069bb335e4b8717477"},
+ {file = "opentelemetry_instrumentation-0.52b1.tar.gz", hash = "sha256:739f3bfadbbeec04dd59297479e15660a53df93c131d907bb61052e3d3c1406f"},
+]
+
+[[package]]
+name = "opentelemetry-proto"
+version = "1.31.1"
+requires_python = ">=3.8"
+summary = "OpenTelemetry Python Proto"
+groups = ["deploy"]
+dependencies = [
+ "protobuf<6.0,>=5.0",
+]
+files = [
+ {file = "opentelemetry_proto-1.31.1-py3-none-any.whl", hash = "sha256:1398ffc6d850c2f1549ce355744e574c8cd7c1dba3eea900d630d52c41d07178"},
+ {file = "opentelemetry_proto-1.31.1.tar.gz", hash = "sha256:d93e9c2b444e63d1064fb50ae035bcb09e5822274f1683886970d2734208e790"},
+]
+
+[[package]]
+name = "opentelemetry-sdk"
+version = "1.31.1"
+requires_python = ">=3.8"
+summary = "OpenTelemetry Python SDK"
+groups = ["deploy"]
+dependencies = [
+ "opentelemetry-api==1.31.1",
+ "opentelemetry-semantic-conventions==0.52b1",
+ "typing-extensions>=3.7.4",
+]
+files = [
+ {file = "opentelemetry_sdk-1.31.1-py3-none-any.whl", hash = "sha256:882d021321f223e37afaca7b4e06c1d8bbc013f9e17ff48a7aa017460a8e7dae"},
+ {file = "opentelemetry_sdk-1.31.1.tar.gz", hash = "sha256:c95f61e74b60769f8ff01ec6ffd3d29684743404603df34b20aa16a49dc8d903"},
+]
+
+[[package]]
+name = "opentelemetry-semantic-conventions"
+version = "0.52b1"
+requires_python = ">=3.8"
+summary = "OpenTelemetry Semantic Conventions"
+groups = ["deploy"]
+dependencies = [
+ "deprecated>=1.2.6",
+ "opentelemetry-api==1.31.1",
+]
+files = [
+ {file = "opentelemetry_semantic_conventions-0.52b1-py3-none-any.whl", hash = "sha256:72b42db327e29ca8bb1b91e8082514ddf3bbf33f32ec088feb09526ade4bc77e"},
+ {file = "opentelemetry_semantic_conventions-0.52b1.tar.gz", hash = "sha256:7b3d226ecf7523c27499758a58b542b48a0ac8d12be03c0488ff8ec60c5bae5d"},
+]
+
+[[package]]
+name = "packaging"
+version = "24.2"
+requires_python = ">=3.8"
+summary = "Core utilities for Python packages"
+groups = ["deploy"]
+files = [
+ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
+ {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
+]
+
+[[package]]
+name = "protobuf"
+version = "5.29.4"
+requires_python = ">=3.8"
+summary = ""
+groups = ["deploy"]
+files = [
+ {file = "protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7"},
+ {file = "protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d"},
+ {file = "protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0"},
+ {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e"},
+ {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922"},
+ {file = "protobuf-5.29.4-cp39-cp39-win32.whl", hash = "sha256:fd32223020cb25a2cc100366f1dedc904e2d71d9322403224cdde5fdced0dabe"},
+ {file = "protobuf-5.29.4-cp39-cp39-win_amd64.whl", hash = "sha256:678974e1e3a9b975b8bc2447fca458db5f93a2fb6b0c8db46b6675b5b5346812"},
+ {file = "protobuf-5.29.4-py3-none-any.whl", hash = "sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862"},
+ {file = "protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99"},
+]
+
+[[package]]
+name = "python-dotenv"
+version = "1.1.0"
+requires_python = ">=3.9"
+summary = "Read key-value pairs from a .env file and set them as environment variables"
+groups = ["default"]
+files = [
+ {file = "python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d"},
+ {file = "python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5"},
+]
+
+[[package]]
+name = "python-json-logger"
+version = "3.3.0"
+requires_python = ">=3.8"
+summary = "JSON Log Formatter for the Python Logging Package"
+groups = ["default"]
+dependencies = [
+ "typing-extensions; python_version < \"3.10\"",
+]
+files = [
+ {file = "python_json_logger-3.3.0-py3-none-any.whl", hash = "sha256:dd980fae8cffb24c13caf6e158d3d61c0d6d22342f932cb6e9deedab3d35eec7"},
+ {file = "python_json_logger-3.3.0.tar.gz", hash = "sha256:12b7e74b17775e7d565129296105bbe3910842d9d0eb083fc83a6a617aa8df84"},
+]
+
+[[package]]
+name = "redis"
+version = "5.2.1"
+requires_python = ">=3.8"
+summary = "Python client for Redis database and key-value store"
+groups = ["default"]
+dependencies = [
+ "async-timeout>=4.0.3; python_full_version < \"3.11.3\"",
+]
+files = [
+ {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"},
+ {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"},
+]
+
+[[package]]
+name = "requests"
+version = "2.32.3"
+requires_python = ">=3.8"
+summary = "Python HTTP for Humans."
+groups = ["deploy"]
+dependencies = [
+ "certifi>=2017.4.17",
+ "charset-normalizer<4,>=2",
+ "idna<4,>=2.5",
+ "urllib3<3,>=1.21.1",
+]
+files = [
+ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
+ {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.13.1"
+requires_python = ">=3.8"
+summary = "Backported and Experimental Type Hints for Python 3.8+"
+groups = ["default", "deploy"]
+files = [
+ {file = "typing_extensions-4.13.1-py3-none-any.whl", hash = "sha256:4b6cf02909eb5495cfbc3f6e8fd49217e6cc7944e145cdda8caa3734777f9e69"},
+ {file = "typing_extensions-4.13.1.tar.gz", hash = "sha256:98795af00fb9640edec5b8e31fc647597b4691f099ad75f469a2616be1a76dff"},
+]
+
+[[package]]
+name = "urllib3"
+version = "2.3.0"
+requires_python = ">=3.9"
+summary = "HTTP library with thread-safe connection pooling, file post, and more."
+groups = ["deploy"]
+files = [
+ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"},
+ {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"},
+]
+
+[[package]]
+name = "wrapt"
+version = "1.17.2"
+requires_python = ">=3.8"
+summary = "Module for decorators, wrappers and monkey patching."
+groups = ["deploy"]
+files = [
+ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"},
+ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"},
+ {file = "wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7"},
+ {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c"},
+ {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72"},
+ {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061"},
+ {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2"},
+ {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c"},
+ {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62"},
+ {file = "wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563"},
+ {file = "wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f"},
+ {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58"},
+ {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda"},
+ {file = "wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438"},
+ {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a"},
+ {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000"},
+ {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6"},
+ {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b"},
+ {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662"},
+ {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72"},
+ {file = "wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317"},
+ {file = "wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3"},
+ {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925"},
+ {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392"},
+ {file = "wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40"},
+ {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d"},
+ {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b"},
+ {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98"},
+ {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82"},
+ {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae"},
+ {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9"},
+ {file = "wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9"},
+ {file = "wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991"},
+ {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99039fa9e6306880572915728d7f6c24a86ec57b0a83f6b2491e1d8ab0235b9a"},
+ {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2696993ee1eebd20b8e4ee4356483c4cb696066ddc24bd70bcbb80fa56ff9061"},
+ {file = "wrapt-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:612dff5db80beef9e649c6d803a8d50c409082f1fedc9dbcdfde2983b2025b82"},
+ {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c2caa1585c82b3f7a7ab56afef7b3602021d6da34fbc1cf234ff139fed3cd9"},
+ {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c958bcfd59bacc2d0249dcfe575e71da54f9dcf4a8bdf89c4cb9a68a1170d73f"},
+ {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc78a84e2dfbc27afe4b2bd7c80c8db9bca75cc5b85df52bfe634596a1da846b"},
+ {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba0f0eb61ef00ea10e00eb53a9129501f52385c44853dbd6c4ad3f403603083f"},
+ {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1e1fe0e6ab7775fd842bc39e86f6dcfc4507ab0ffe206093e76d61cde37225c8"},
+ {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c86563182421896d73858e08e1db93afdd2b947a70064b813d515d66549e15f9"},
+ {file = "wrapt-1.17.2-cp39-cp39-win32.whl", hash = "sha256:f393cda562f79828f38a819f4788641ac7c4085f30f1ce1a68672baa686482bb"},
+ {file = "wrapt-1.17.2-cp39-cp39-win_amd64.whl", hash = "sha256:36ccae62f64235cf8ddb682073a60519426fdd4725524ae38874adf72b5f2aeb"},
+ {file = "wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8"},
+ {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"},
+]
+
+[[package]]
+name = "zipp"
+version = "3.21.0"
+requires_python = ">=3.9"
+summary = "Backport of pathlib-compatible object wrapper for zip files"
+groups = ["deploy"]
+files = [
+ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"},
+ {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"},
+]
diff --git a/tool-sidecar/pyproject.toml b/tool-sidecar/pyproject.toml
new file mode 100644
index 00000000..0540592d
--- /dev/null
+++ b/tool-sidecar/pyproject.toml
@@ -0,0 +1,31 @@
+[project]
+name = "unstract-tool-sidecar"
+version = "0.0.1"
+description = "Container Side Car for Unstract"
+authors = [
+ {name = "Zipstack Inc.", email = "devsupport@zipstack.com"},
+]
+dependencies = [
+ "redis>=4.5.0",
+ "python-dotenv>=1.0.0",
+ "python-json-logger>=2.0.0",
+ "unstract-core @ file:///${PROJECT_ROOT}/../unstract/core"
+]
+requires-python = ">=3.9,<3.13"
+readme = "README.md"
+license = {text = "MIT"}
+
+[project.optional-dependencies]
+deploy = [
+ # OpenTelemetry for tracing and profiling
+ "opentelemetry-distro",
+ "opentelemetry-exporter-otlp",
+]
+
+[build-system]
+requires = ["pdm-backend"]
+build-backend = "pdm.backend"
+
+[tool.pdm.build]
+includes = ["src"]
+package-dir = "src"
diff --git a/tool-sidecar/src/unstract/__init__.py b/tool-sidecar/src/unstract/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tool-sidecar/src/unstract/tool_sidecar/__init__.py b/tool-sidecar/src/unstract/tool_sidecar/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tool-sidecar/src/unstract/tool_sidecar/constants.py b/tool-sidecar/src/unstract/tool_sidecar/constants.py
new file mode 100644
index 00000000..110d4f39
--- /dev/null
+++ b/tool-sidecar/src/unstract/tool_sidecar/constants.py
@@ -0,0 +1,28 @@
class LogType:
    """Types of structured log messages emitted by a tool container."""

    LOG = "LOG"
    UPDATE = "UPDATE"
    COST = "COST"
    RESULT = "RESULT"
    SINGLE_STEP = "SINGLE_STEP_MESSAGE"
+
+
class LogLevel:
    """Severity levels carried in tool log messages."""

    ERROR = "ERROR"
    WARN = "WARN"
    INFO = "INFO"
    DEBUG = "DEBUG"
+
+
class Env:
    """Names of the environment variables read by the sidecar process."""

    LOG_LEVEL = "LOG_LEVEL"
    LOG_PATH = "LOG_PATH"
    REDIS_HOST = "REDIS_HOST"
    REDIS_PORT = "REDIS_PORT"
    REDIS_USER = "REDIS_USER"
    REDIS_PASSWORD = "REDIS_PASSWORD"
    CELERY_BROKER_URL = "CELERY_BROKER_URL"
    TOOL_INSTANCE_ID = "TOOL_INSTANCE_ID"
    EXECUTION_ID = "EXECUTION_ID"
    ORGANIZATION_ID = "ORGANIZATION_ID"
    FILE_EXECUTION_ID = "FILE_EXECUTION_ID"
    MESSAGING_CHANNEL = "MESSAGING_CHANNEL"
diff --git a/tool-sidecar/src/unstract/tool_sidecar/dto.py b/tool-sidecar/src/unstract/tool_sidecar/dto.py
new file mode 100644
index 00000000..0d625bc9
--- /dev/null
+++ b/tool-sidecar/src/unstract/tool_sidecar/dto.py
@@ -0,0 +1,16 @@
+from dataclasses import dataclass
+from typing import Any, Optional
+
+
@dataclass
class LogLineDTO:
    """Flags describing what a single processed tool log line contained."""

    # True if the tool log has the termination marker
    is_terminated: bool = False
    # True if the tool log contains a result
    with_result: bool = False
    # Error message captured from an ERROR-level log line, if any
    error: Optional[str] = None

    def to_dict(self) -> dict[str, Any]:
        """Return the DTO's fields as a plain dictionary."""
        return dict(
            is_terminated=self.is_terminated,
            with_result=self.with_result,
            error=self.error,
        )
diff --git a/tool-sidecar/src/unstract/tool_sidecar/log_processor.py b/tool-sidecar/src/unstract/tool_sidecar/log_processor.py
new file mode 100644
index 00000000..59e4abbd
--- /dev/null
+++ b/tool-sidecar/src/unstract/tool_sidecar/log_processor.py
@@ -0,0 +1,276 @@
+"""
+Sidecar container implementation for log processing and result handling.
+This sidecar runs alongside the tool container in the same pod, monitoring
+the tool's output log file and streaming logs to Redis while watching for
+completion signals.
+"""
+
+import json
+import logging
+import os
+import time
+from datetime import datetime, timezone
+from typing import Any, Optional
+
+from unstract.core.constants import LogFieldName
+from unstract.core.pubsub_helper import LogPublisher
+
+from .constants import Env, LogLevel, LogType
+from .dto import LogLineDTO
+
+logging.basicConfig(
+ level=getattr(logging, os.getenv(Env.LOG_LEVEL, "INFO")),
+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+)
+logger = logging.getLogger(__name__)
+
+
+class LogProcessor:
+
+ TERMINATION_MARKER = "TOOL_EXECUTION_COMPLETE"
+
+ def __init__(
+ self,
+ log_path: str,
+ redis_host: str,
+ redis_port: str,
+ redis_user: str,
+ redis_password: str,
+ tool_instance_id: str,
+ execution_id: str,
+ organization_id: str,
+ file_execution_id: str,
+ messaging_channel: str,
+ ):
+ """
+ Initialize the log processor with necessary connections and paths.
+
+ Args:
+ log_path: Path to the log file to monitor
+ redis_url: URL for Redis connection
+ tool_instance_id: ID of the tool instance
+ execution_id: ID of the execution
+ organization_id: ID of the organization
+ file_execution_id: ID of the file execution
+ """
+ self.log_path = log_path
+ self.redis_host = redis_host
+ self.redis_port = redis_port
+ self.redis_user = redis_user
+ self.redis_password = redis_password
+ self.tool_instance_id = tool_instance_id
+ self.execution_id = execution_id
+ self.organization_id = organization_id
+ self.file_execution_id = file_execution_id
+ self.messaging_channel = messaging_channel
+
+ def wait_for_log_file(self, timeout: int = 300) -> bool:
+ """
+ Wait for the log file to be created by the tool container.
+
+ Args:
+ timeout: Maximum time to wait in seconds
+
+ Returns:
+ bool: True if file exists, False if timeout occurred
+ """
+ start_time = time.time()
+ while not os.path.exists(self.log_path):
+ if time.time() - start_time > timeout:
+ return False
+ time.sleep(0.1)
+ return True
+
+ def process_log_line(self, line: str) -> LogLineDTO:
+ """
+ Process a single log line, checking for completion signal.
+
+ Args:
+ line: Log line to process
+
+ Returns:
+ Optional[Dict]: Parsed JSON if line is a completion signal
+ """
+ # Stream log to Redis
+ if LogFieldName.TOOL_TERMINATION_MARKER in line:
+ logger.info(
+ "Tool container terminated with status "
+ f"{LogFieldName.TOOL_TERMINATION_MARKER}"
+ )
+ return LogLineDTO(is_terminated=True)
+ log_dict = self.get_valid_log_message(line)
+ if not log_dict:
+ return LogLineDTO()
+ log_type = log_dict.get("type")
+ log_level = log_dict.get("level")
+ if log_type == LogType.LOG and log_level == LogLevel.ERROR:
+ logger.error(f"Error in log: {log_dict.get('log')}")
+ return LogLineDTO(error=log_dict.get("log"))
+ if not self.is_valid_log_type(log_type):
+ logger.warning(
+ f"Received invalid logType: {log_type} with log message: {log_dict}"
+ )
+ return LogLineDTO()
+ if log_type == LogType.RESULT:
+ logger.info(
+ f"Tool {self.tool_instance_id} execution {self.execution_id} "
+ "completed successfully with result."
+ )
+ return LogLineDTO(with_result=True)
+ if log_type == LogType.UPDATE:
+ log_dict["component"] = self.tool_instance_id
+ log_dict[LogFieldName.EXECUTION_ID] = self.execution_id
+ log_dict[LogFieldName.ORGANIZATION_ID] = self.organization_id
+ log_dict[LogFieldName.TIMESTAMP] = self.get_log_timestamp(log_dict)
+ log_dict[LogFieldName.FILE_EXECUTION_ID] = self.file_execution_id
+ # Publish to channel of socket io
+ LogPublisher.publish(self.messaging_channel, log_dict)
+ return LogLineDTO()
+
+ def get_log_timestamp(self, log_dict: dict[str, Any]) -> float:
+ """Obtains the timestamp from the log dictionary.
+
+ Checks if the log dictionary has an `emitted_at` key and returns the
+ corresponding timestamp. If the key is not present, returns the current
+ timestamp.
+
+ Args:
+ log_dict (dict[str, Any]): Log message from tool
+
+ Returns:
+ float: Timestamp of the log message
+ """
+ # Use current timestamp if emitted_at is not present
+ if "emitted_at" not in log_dict:
+ return datetime.now(timezone.utc).timestamp()
+
+ emitted_at = log_dict["emitted_at"]
+ if isinstance(emitted_at, str):
+ # Convert ISO format string to UNIX timestamp
+ return datetime.fromisoformat(emitted_at).timestamp()
+ elif isinstance(emitted_at, (int, float)):
+ # Already a UNIX timestamp
+ return float(emitted_at)
+
+ def get_valid_log_message(self, log_message: str) -> Optional[dict[str, Any]]:
+ """Get a valid log message from the log message.
+
+ Args:
+ log_message (str): str
+
+ Returns:
+ Optional[dict[str, Any]]: json message
+ """
+ try:
+ log_dict = json.loads(log_message)
+ if isinstance(log_dict, dict):
+ return log_dict
+ except json.JSONDecodeError:
+ return None
+
+ def is_valid_log_type(self, log_type: Optional[str]) -> bool:
+ if log_type in {
+ LogType.LOG,
+ LogType.UPDATE,
+ LogType.COST,
+ LogType.RESULT,
+ LogType.SINGLE_STEP,
+ }:
+ return True
+ return False
+
+ def monitor_logs(self) -> None:
+ """
+ Main loop to monitor log file for new content and completion signals.
+ Uses file polling with position tracking to efficiently read new lines.
+ """
+ logger.info("Starting log monitoring...")
+ if not self.wait_for_log_file():
+ raise TimeoutError("Log file was not created within timeout period")
+
+ # Monitor the file for new content
+ with open(self.log_path) as f:
+ while True:
+ # Remember current position
+ where = f.tell()
+ line = f.readline()
+
+ if not line:
+ # No new data, check if tool container is done
+ if os.path.exists(
+ os.path.join(os.path.dirname(self.log_path), "completed")
+ ):
+ break
+
+ # Sleep briefly to avoid CPU spinning
+ time.sleep(0.1)
+ # Go back to where we were
+ f.seek(where)
+ continue
+
+ # Process the log line
+ log_line = self.process_log_line(line)
+ if log_line.is_terminated:
+ logger.info("Completion signal received")
+ break
+
+
def main():
    """
    Main entry point for the sidecar container.

    Reads configuration and execution parameters from environment
    variables, validates the required ones, then builds a LogProcessor
    and starts monitoring.
    """
    getenv = os.getenv

    # Infrastructure configuration
    log_path = getenv(Env.LOG_PATH, "/shared/logs/logs.txt")
    redis_host = getenv(Env.REDIS_HOST)
    redis_port = getenv(Env.REDIS_PORT)
    redis_user = getenv(Env.REDIS_USER)
    redis_password = getenv(Env.REDIS_PASSWORD)
    celery_broker_url = getenv(Env.CELERY_BROKER_URL)

    # Execution context
    tool_instance_id = getenv(Env.TOOL_INSTANCE_ID)
    execution_id = getenv(Env.EXECUTION_ID)
    organization_id = getenv(Env.ORGANIZATION_ID)
    file_execution_id = getenv(Env.FILE_EXECUTION_ID)
    messaging_channel = getenv(Env.MESSAGING_CHANNEL)

    # Everything below must be non-empty for the sidecar to operate
    # (redis_user / redis_password may legitimately be unset)
    required_params = {
        Env.TOOL_INSTANCE_ID: tool_instance_id,
        Env.EXECUTION_ID: execution_id,
        Env.ORGANIZATION_ID: organization_id,
        Env.FILE_EXECUTION_ID: file_execution_id,
        Env.MESSAGING_CHANNEL: messaging_channel,
        Env.LOG_PATH: log_path,
        Env.REDIS_HOST: redis_host,
        Env.REDIS_PORT: redis_port,
        Env.CELERY_BROKER_URL: celery_broker_url,
    }

    logger.info(f"Log processor started with params: {required_params}")

    missing_params = [name for name, value in required_params.items() if not value]
    if missing_params:
        raise ValueError(
            f"Missing required environment variables: {', '.join(missing_params)}"
        )

    # Hand off to the processor; blocks until the tool completes
    LogProcessor(
        log_path=log_path,
        redis_host=redis_host,
        redis_port=redis_port,
        redis_user=redis_user,
        redis_password=redis_password,
        messaging_channel=messaging_channel,
        tool_instance_id=tool_instance_id,
        execution_id=execution_id,
        organization_id=organization_id,
        file_execution_id=file_execution_id,
    ).monitor_logs()


if __name__ == "__main__":
    main()
diff --git a/tools/classifier/Dockerfile b/tools/classifier/Dockerfile
index a8b1f90a..5ed2b84b 100644
--- a/tools/classifier/Dockerfile
+++ b/tools/classifier/Dockerfile
@@ -12,10 +12,15 @@ RUN pip install --no-cache-dir -U pip
# Set the working directory in the container
WORKDIR /app
COPY requirements.txt /app/
-RUN pip install --no-cache-dir -r requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt && \
+ pip install --no-cache-dir \
+ opentelemetry-distro \
+ opentelemetry-exporter-otlp \
+    "platformdirs>=3.0.0" \
+ && opentelemetry-bootstrap -a install
# Copy the contents of your project directory into the container at /app
COPY src /app/src/
WORKDIR /app/src
-ENTRYPOINT ["python", "main.py"]
+ENTRYPOINT ["opentelemetry-instrument","python", "main.py"]
diff --git a/tools/classifier/src/config/properties.json b/tools/classifier/src/config/properties.json
index 355b14a0..203265bd 100644
--- a/tools/classifier/src/config/properties.json
+++ b/tools/classifier/src/config/properties.json
@@ -2,7 +2,7 @@
"schemaVersion": "0.0.1",
"displayName": "File Classifier",
"functionName": "classify",
- "toolVersion": "0.0.57",
+ "toolVersion": "0.0.58",
"description": "Classifies a file into a bin based on its contents",
"input": {
"description": "File to be classified"
diff --git a/tools/structure/Dockerfile b/tools/structure/Dockerfile
index bcc26061..360179da 100644
--- a/tools/structure/Dockerfile
+++ b/tools/structure/Dockerfile
@@ -27,8 +27,8 @@ RUN pip install --no-cache-dir -U pip
# Set the working directory in the container
WORKDIR /app
COPY requirements.txt /app/
-RUN pip install --no-cache-dir -r requirements.txt
-RUN pip install --no-cache-dir \
+RUN pip install --no-cache-dir -r requirements.txt && \
+ pip install --no-cache-dir \
opentelemetry-distro \
opentelemetry-exporter-otlp \
platformdirs>=3.0.0 \
diff --git a/tools/structure/src/config/properties.json b/tools/structure/src/config/properties.json
index 9d42b268..f9ffaf9e 100644
--- a/tools/structure/src/config/properties.json
+++ b/tools/structure/src/config/properties.json
@@ -2,7 +2,7 @@
"schemaVersion": "0.0.1",
"displayName": "Structure Tool",
"functionName": "structure_tool",
- "toolVersion": "0.0.73",
+ "toolVersion": "0.0.74",
"description": "This is a template tool which can answer set of input prompts designed in the Prompt Studio",
"input": {
"description": "File that needs to be indexed and parsed for answers"
diff --git a/tools/text_extractor/Dockerfile b/tools/text_extractor/Dockerfile
index a8b1f90a..f53b2214 100644
--- a/tools/text_extractor/Dockerfile
+++ b/tools/text_extractor/Dockerfile
@@ -12,10 +12,15 @@ RUN pip install --no-cache-dir -U pip
# Set the working directory in the container
WORKDIR /app
COPY requirements.txt /app/
-RUN pip install --no-cache-dir -r requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt && \
+ pip install --no-cache-dir \
+ opentelemetry-distro \
+ opentelemetry-exporter-otlp \
+    "platformdirs>=3.0.0" \
+ && opentelemetry-bootstrap -a install
# Copy the contents of your project directory into the container at /app
COPY src /app/src/
WORKDIR /app/src
-ENTRYPOINT ["python", "main.py"]
+ENTRYPOINT ["opentelemetry-instrument", "python", "main.py"]
diff --git a/tools/text_extractor/src/config/properties.json b/tools/text_extractor/src/config/properties.json
index d9e14f75..cd9adfd0 100644
--- a/tools/text_extractor/src/config/properties.json
+++ b/tools/text_extractor/src/config/properties.json
@@ -2,7 +2,7 @@
"schemaVersion": "0.0.1",
"displayName": "Text Extractor",
"functionName": "text_extractor",
- "toolVersion": "0.0.54",
+ "toolVersion": "0.0.55",
"description": "The Text Extractor is a powerful tool designed to convert documents to its text form or Extract texts from documents",
"input": {
"description": "Document"
diff --git a/unstract/connectors/src/unstract/connectors/databases/postgresql/postgresql.py b/unstract/connectors/src/unstract/connectors/databases/postgresql/postgresql.py
index b1217fcf..3c3a2924 100644
--- a/unstract/connectors/src/unstract/connectors/databases/postgresql/postgresql.py
+++ b/unstract/connectors/src/unstract/connectors/databases/postgresql/postgresql.py
@@ -85,7 +85,7 @@ class PostgreSQL(UnstractDB, PsycoPgHandler):
"connect_timeout": self.CONNECT_TIMEOUT,
"application_name": "unstract_connector",
}
-
+
# Standard PostgreSQL - use basic SSL if available
conn_params["sslmode"] = "prefer"
diff --git a/unstract/core/src/unstract/core/constants.py b/unstract/core/src/unstract/core/constants.py
index 7523cd01..67264f9c 100644
--- a/unstract/core/src/unstract/core/constants.py
+++ b/unstract/core/src/unstract/core/constants.py
@@ -6,6 +6,7 @@ class LogFieldName:
DATA = "data"
EVENT_TIME = "event_time"
FILE_EXECUTION_ID = "file_execution_id"
+ TOOL_TERMINATION_MARKER = "TOOL_EXECUTION_COMPLETE"
class LogEventArgument:
diff --git a/unstract/tool-registry/tool_registry_config/public_tools.json b/unstract/tool-registry/tool_registry_config/public_tools.json
index 53246d70..26f2f4c2 100644
--- a/unstract/tool-registry/tool_registry_config/public_tools.json
+++ b/unstract/tool-registry/tool_registry_config/public_tools.json
@@ -5,7 +5,7 @@
"schemaVersion": "0.0.1",
"displayName": "File Classifier",
"functionName": "classify",
- "toolVersion": "0.0.57",
+ "toolVersion": "0.0.58",
"description": "Classifies a file into a bin based on its contents",
"input": {
"description": "File to be classified"
@@ -106,9 +106,9 @@
"properties": {}
},
"icon": "\n\n",
- "image_url": "docker:unstract/tool-classifier:0.0.57",
+ "image_url": "docker:unstract/tool-classifier:0.0.58",
"image_name": "unstract/tool-classifier",
- "image_tag": "0.0.57"
+ "image_tag": "0.0.58"
},
"text_extractor": {
"tool_uid": "text_extractor",
@@ -116,7 +116,7 @@
"schemaVersion": "0.0.1",
"displayName": "Text Extractor",
"functionName": "text_extractor",
- "toolVersion": "0.0.54",
+ "toolVersion": "0.0.55",
"description": "The Text Extractor is a powerful tool designed to convert documents to its text form or Extract texts from documents",
"input": {
"description": "Document"
@@ -191,8 +191,8 @@
}
},
"icon": "\n\n",
- "image_url": "docker:unstract/tool-text-extractor:0.0.54",
+ "image_url": "docker:unstract/tool-text-extractor:0.0.55",
"image_name": "unstract/tool-text-extractor",
- "image_tag": "0.0.54"
+ "image_tag": "0.0.55"
}
}