Merge branch 'open-webui:main' into routellm-pipeline

commit 08c8379007
Justin Hayes, 2024-08-02 08:48:45 -04:00 (committed by GitHub)
2 changed files with 64 additions and 27 deletions

File: Anthropic Manifold Pipeline

@@ -1,21 +1,20 @@
 """
 title: Anthropic Manifold Pipeline
-author: justinh-rahb
+author: justinh-rahb, sriparashiva
 date: 2024-06-20
-version: 1.3
+version: 1.4
 license: MIT
 description: A pipeline for generating text and processing images using the Anthropic API.
-requirements: requests, anthropic
+requirements: requests, sseclient-py
 environment_variables: ANTHROPIC_API_KEY
 """
 
 import os
-from anthropic import Anthropic, RateLimitError, APIStatusError, APIConnectionError
+import requests
+import json
+from schemas import OpenAIChatMessage
 from typing import List, Union, Generator, Iterator
 from pydantic import BaseModel
-import requests
+import sseclient
 
 from utils.pipelines.main import pop_system_message
@@ -32,7 +31,15 @@ class Pipeline:
         self.valves = self.Valves(
             **{"ANTHROPIC_API_KEY": os.getenv("ANTHROPIC_API_KEY", "your-api-key-here")}
         )
-        self.client = Anthropic(api_key=self.valves.ANTHROPIC_API_KEY)
+        self.url = 'https://api.anthropic.com/v1/messages'
+        self.update_headers()
+
+    def update_headers(self):
+        self.headers = {
+            'anthropic-version': '2023-06-01',
+            'content-type': 'application/json',
+            'x-api-key': self.valves.ANTHROPIC_API_KEY
+        }
 
     def get_anthropic_models(self):
         return [
@@ -51,8 +58,7 @@ class Pipeline:
         pass
 
     async def on_valves_updated(self):
-        self.client = Anthropic(api_key=self.valves.ANTHROPIC_API_KEY)
-        pass
+        self.update_headers()
 
     def pipelines(self) -> List[dict]:
         return self.get_anthropic_models()
@@ -131,21 +137,38 @@ class Pipeline:
             }
 
             if body.get("stream", False):
-                return self.stream_response(model_id, payload)
+                return self.stream_response(payload)
             else:
-                return self.get_completion(model_id, payload)
-        except (RateLimitError, APIStatusError, APIConnectionError) as e:
+                return self.get_completion(payload)
+        except Exception as e:
             return f"Error: {e}"
 
-    def stream_response(self, model_id: str, payload: dict) -> Generator:
-        stream = self.client.messages.create(**payload)
-
-        for chunk in stream:
-            if chunk.type == "content_block_start":
-                yield chunk.content_block.text
-            elif chunk.type == "content_block_delta":
-                yield chunk.delta.text
+    def stream_response(self, payload: dict) -> Generator:
+        response = requests.post(self.url, headers=self.headers, json=payload, stream=True)
+
+        if response.status_code == 200:
+            client = sseclient.SSEClient(response)
+            for event in client.events():
+                try:
+                    data = json.loads(event.data)
+                    if data["type"] == "content_block_start":
+                        yield data["content_block"]["text"]
+                    elif data["type"] == "content_block_delta":
+                        yield data["delta"]["text"]
+                    elif data["type"] == "message_stop":
+                        break
+                except json.JSONDecodeError:
+                    print(f"Failed to parse JSON: {event.data}")
+                except KeyError as e:
+                    print(f"Unexpected data structure: {e}")
+                    print(f"Full data: {data}")
+        else:
+            raise Exception(f"Error: {response.status_code} - {response.text}")
 
-    def get_completion(self, model_id: str, payload: dict) -> str:
-        response = self.client.messages.create(**payload)
-        return response.content[0].text
+    def get_completion(self, payload: dict) -> str:
+        response = requests.post(self.url, headers=self.headers, json=payload)
+        if response.status_code == 200:
+            res = response.json()
+            return res["content"][0]["text"] if "content" in res and res["content"] else ""
+        else:
+            raise Exception(f"Error: {response.status_code} - {response.text}")
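The rewritten Anthropic pipeline replaces the anthropic SDK with a plain POST to the Messages API and reads the response as server-sent events via sseclient-py. A minimal standalone sketch of that streaming pattern follows; the model id, prompt, and max_tokens are illustrative placeholders, not values taken from the pipeline.

# Standalone sketch of the requests + sseclient-py streaming pattern the
# updated stream_response() uses. Model id and prompt are illustrative.
import json
import os

import requests
import sseclient

headers = {
    "anthropic-version": "2023-06-01",
    "content-type": "application/json",
    "x-api-key": os.getenv("ANTHROPIC_API_KEY", ""),
}
payload = {
    "model": "claude-3-haiku-20240307",  # illustrative model id
    "max_tokens": 256,
    "messages": [{"role": "user", "content": "Hello!"}],
    "stream": True,
}

response = requests.post(
    "https://api.anthropic.com/v1/messages",
    headers=headers,
    json=payload,
    stream=True,
)
response.raise_for_status()

# Each SSE event carries a JSON body; generated text arrives in
# content_block_delta events until message_stop is received.
for event in sseclient.SSEClient(response).events():
    data = json.loads(event.data)
    if data["type"] == "content_block_delta":
        print(data["delta"]["text"], end="", flush=True)
    elif data["type"] == "message_stop":
        break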

File: Google GenAI Manifold Pipeline

@@ -2,7 +2,7 @@
 title: Google GenAI Manifold Pipeline
 author: Marc Lopez (refactor by justinh-rahb)
 date: 2024-06-06
-version: 1.1
+version: 1.2
 license: MIT
 description: A pipeline for generating text using Google's GenAI models in Open-WebUI.
 requirements: google-generativeai
@@ -12,7 +12,7 @@ environment_variables: GOOGLE_API_KEY
 from typing import List, Union, Iterator
 import os
 
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
 import google.generativeai as genai
 from google.generativeai.types import GenerationConfig
@@ -25,13 +25,17 @@ class Pipeline:
         """Options to change from the WebUI"""
 
         GOOGLE_API_KEY: str = ""
+        USE_PERMISSIVE_SAFETY: bool = Field(default=False)
 
     def __init__(self):
        self.type = "manifold"
        self.id = "google_genai"
        self.name = "Google: "
-        self.valves = self.Valves(**{"GOOGLE_API_KEY": os.getenv("GOOGLE_API_KEY", "")})
+        self.valves = self.Valves(**{
+            "GOOGLE_API_KEY": os.getenv("GOOGLE_API_KEY", ""),
+            "USE_PERMISSIVE_SAFETY": False
+        })
         self.pipelines = []
 
         genai.configure(api_key=self.valves.GOOGLE_API_KEY)
@@ -41,6 +45,8 @@ class Pipeline:
         """This function is called when the server is started."""
         print(f"on_startup:{__name__}")
+        genai.configure(api_key=self.valves.GOOGLE_API_KEY)
+        self.update_pipelines()
 
     async def on_shutdown(self) -> None:
         """This function is called when the server is stopped."""
@@ -135,7 +141,15 @@ class Pipeline:
                 stop_sequences=body.get("stop", []),
             )
 
-            safety_settings = body.get("safety_settings")
+            if self.valves.USE_PERMISSIVE_SAFETY:
+                safety_settings = {
+                    genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                    genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                    genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                    genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                }
+            else:
+                safety_settings = body.get("safety_settings")
 
             response = model.generate_content(
                 contents,
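When the new USE_PERMISSIVE_SAFETY valve is enabled, the pipeline ignores any request-supplied safety_settings and maps all four harm categories to BLOCK_NONE. A standalone sketch of the equivalent google-generativeai call follows; the model id and prompt are illustrative placeholders, not values taken from the pipeline.

# Standalone sketch of the permissive safety settings the valve enables.
# Model id and prompt are illustrative; the pipeline builds its request
# from the incoming body instead.
import os

import google.generativeai as genai

genai.configure(api_key=os.getenv("GOOGLE_API_KEY", ""))

permissive_safety = {
    genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
    genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
    genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
    genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
}

model = genai.GenerativeModel("gemini-1.5-flash")  # illustrative model id
response = model.generate_content(
    "Hello!",
    safety_settings=permissive_safety,
)
print(response.text)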