From 0afe4aa8498de1bcd2a9f421f6a34a83497dc71a Mon Sep 17 00:00:00 2001
From: Justin Hayes <52832301+justinh-rahb@users.noreply.github.com>
Date: Fri, 26 Jul 2024 11:07:14 -0400
Subject: [PATCH] fix: cast `user` as string

---
 examples/pipelines/providers/routellm_pipeline.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/examples/pipelines/providers/routellm_pipeline.py b/examples/pipelines/providers/routellm_pipeline.py
index 032c979..f7203c9 100644
--- a/examples/pipelines/providers/routellm_pipeline.py
+++ b/examples/pipelines/providers/routellm_pipeline.py
@@ -2,7 +2,7 @@
 title: RouteLLM Pipeline
 author: justinh-rahb
 date: 2024-07-25
-version: 0.2.1
+version: 0.2.2
 license: MIT
 description: A pipeline for routing LLM requests using RouteLLM framework, compatible with OpenAI API.
 requirements: routellm, pydantic, requests
@@ -55,7 +55,7 @@ class Pipeline:
         self.id = "routellm"
         self.name = f"RouteLLM/"
         self.controller = None
-
+
         self._initialize_controller()

     def pipelines(self) -> List[dict]:
@@ -102,12 +102,16 @@ class Pipeline:
         # Prepare parameters, excluding 'model' and 'messages' if they're in body
         params = {k: v for k, v in body.items() if k not in ['model', 'messages'] and v is not None}

+        # Ensure 'user' is a string if present
+        if 'user' in params and not isinstance(params['user'], str):
+            params['user'] = str(params['user'])
+
         response = self.controller.completion(
             model=model_name,
             messages=messages,
             **params
         )
-
+
         if body.get("stream", False):
             return (chunk for chunk in response)
         else:
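
For reference, the behavior this patch introduces can be exercised in isolation. Below is a minimal, runnable Python sketch of the parameter handling; the `normalize_params` helper name and the sample `body` are illustrative only, not part of the pipeline.

# Standalone sketch of the fix (illustrative, not from the patched file):
# OpenAI-compatible APIs expect the `user` field to be a string, but a
# caller may pass a non-string identifier (e.g. an integer ID), so the
# pipeline casts it before issuing the completion request.

def normalize_params(body: dict) -> dict:
    # Exclude 'model' and 'messages' and drop None values, mirroring
    # the pipeline's existing filter
    params = {
        k: v
        for k, v in body.items()
        if k not in ['model', 'messages'] and v is not None
    }
    # The change in this patch: ensure 'user' is a string if present
    if 'user' in params and not isinstance(params['user'], str):
        params['user'] = str(params['user'])
    return params

if __name__ == "__main__":
    body = {"model": "gpt-4o", "messages": [], "user": 52832301}
    print(normalize_params(body))  # -> {'user': '52832301'}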