diff --git a/dev-docker.sh b/dev-docker.sh
index 9ba69cd..a502b05 100755
--- a/dev-docker.sh
+++ b/dev-docker.sh
@@ -5,5 +5,5 @@
 # docker volume rm open-webui
 
 # Runs the containers with Ollama image for Open WebUI and the Pipelines endpoint in place
-docker run -d -p 9099:9099 --add-host=host.docker.internal:host-gateway -v pipelines:/app/pipelines --name pipelines --restart always --env-file .env pipelines #ghcr.io/open-webui/pipelines:latest
+docker run -d -p 9099:9099 --add-host=host.docker.internal:host-gateway -v pipelines:/app/pipelines --name pipelines --restart always --env-file .env ghcr.io/open-webui/pipelines:latest
 docker run -d -p 3000:8080 -v ~/.ollama:/root/.ollama -v open-webui:/app/backend/data --name open-webui --restart always -e OPENAI_API_BASE_URL=http://host.docker.internal:9099 -e OPENAI_API_KEY=0p3n-w3bu! ghcr.io/open-webui/open-webui:ollama
\ No newline at end of file
diff --git a/examples/filters/datadog_filter_pipeline.py b/examples/filters/datadog_filter_pipeline.py
index 6829b6c..af1d2de 100644
--- a/examples/filters/datadog_filter_pipeline.py
+++ b/examples/filters/datadog_filter_pipeline.py
@@ -109,8 +109,6 @@ class Pipeline:
 
     async def outlet(self, body: dict, user: Optional[dict] = None) -> dict:
         print(f"outlet:{__name__}")
-        if body["chat_id"] not in self.chat_generations:
-            return body
 
         self.LLMObs.annotate(
             span = self.llm_span,
diff --git a/requirements.txt b/requirements.txt
index 70a9145..c5700a7 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -29,7 +29,6 @@ chromadb
 
 # Observability
 langfuse
-#git+https://github.com/DataDog/dd-trace-py.git@main
 ddtrace
 
 # ML libraries