feat: applescript pipeline example

This commit is contained in:
Timothy J. Baek 2024-05-22 10:33:16 -07:00
parent 1720143805
commit 168ab96449
2 changed files with 86 additions and 28 deletions

View File

@@ -0,0 +1,86 @@
from typing import List, Union, Generator, Iterator
from schemas import OpenAIChatMessage
import requests
from subprocess import call
class Pipeline:
    """Pipeline that lets a chat turn adjust the macOS output volume via
    AppleScript (`osascript`) before proxying the conversation to a local
    Ollama server."""

    def __init__(self):
        # Optionally, you can set the id and name of the pipeline.
        self.id = "applescript_pipeline"
        self.name = "AppleScript Pipeline"

    async def on_startup(self):
        # This function is called when the server is started.
        print(f"on_startup:{__name__}")

    async def on_shutdown(self):
        # This function is called when the server is stopped.
        print(f"on_shutdown:{__name__}")

    def get_response(
        self, user_message: str, messages: "List[OpenAIChatMessage]", body: dict
    ) -> Union[str, Generator, Iterator]:
        """Handle one chat turn.

        If the user message starts with ``volume N`` (0-100), set the system
        output volume via AppleScript (best-effort). Then forward the message
        to the local Ollama OpenAI-compatible endpoint and return either the
        streamed line iterator or the parsed JSON body.

        :param user_message: raw text of the latest user message
        :param messages: prior conversation (OpenAIChatMessage objects)
        :param body: request body; ``title``, ``stream`` and ``user`` keys are read
        :return: title string, streamed lines, JSON dict, or an error string
        """
        print(f"get_response:{__name__}")

        OLLAMA_BASE_URL = "http://localhost:11434"
        MODEL = "llama3"

        if body.get("title", False):
            # Title-generation request: return this pipeline's own name.
            # (Previously returned "PyAutoGUI Pipeline", a copy/paste leftover
            # inconsistent with self.name.)
            print("Title Generation")
            return self.name

        if "user" in body:
            print("######################################")
            print(f'# User: {body["user"]["name"]} ({body["user"]["id"]})')
            print(f"# Message: {user_message}")
            print("######################################")

        command_parts = user_message.split(" ")
        if command_parts[0] == "volume":
            # Best-effort: silently ignore a missing or non-numeric argument.
            try:
                volume = int(command_parts[1])
            except (IndexError, ValueError):
                volume = None
            if volume is not None and 0 <= volume <= 100:
                # Pass argv as a list (no shell) so user input cannot inject
                # shell syntax; osascript receives the script as one argument.
                call(["osascript", "-e", f"set volume output volume {volume}"])

        payload = {
            "model": MODEL,
            "messages": [
                {
                    "role": "system",
                    "content": "You are an agent of the AppleScript Pipeline. You have the power to control the volume of the system.",
                },
                {"role": "user", "content": user_message},
            ],
            # Default to non-streaming if the caller omitted "stream"
            # (previously a KeyError).
            "stream": body.get("stream", False),
        }

        try:
            r = requests.post(
                url=f"{OLLAMA_BASE_URL}/v1/chat/completions",
                json=payload,
                stream=True,
            )
            r.raise_for_status()

            if payload["stream"]:
                return r.iter_lines()
            else:
                return r.json()
        except Exception as e:
            # Surface transport/HTTP failures to the caller as text.
            return f"Error: {e}"

View File

@@ -1,28 +0,0 @@
from typing import List, Union, Generator
from schemas import OpenAIChatMessage
class Pipeline:
    """Minimal example pipeline: logs the incoming turn and echoes it back."""

    def __init__(self):
        pass

    async def on_startup(self):
        # Runs once when the server comes up.
        print(f"on_startup:{__name__}")

    async def on_shutdown(self):
        # Runs once when the server goes down.
        print(f"on_shutdown:{__name__}")

    def get_response(
        self, user_message: str, messages: List[OpenAIChatMessage], body: dict
    ) -> Union[str, Generator]:
        """Log the conversation state and return a canned echo reply."""
        print(f"get_response:{__name__}")
        print(messages)
        print(user_message)
        reply = f"{__name__} response to: {user_message}"
        return reply