Merge pull request #4621 from nthe/main

feat: Set content-type header in Ollama backend
This commit is contained in:
Timothy Jaeryang Baek 2024-08-15 15:45:51 +02:00 committed by GitHub
commit 723caf2a09
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 13 additions and 3 deletions

View File

@@ -147,13 +147,17 @@ async def cleanup_response(
         await session.close()


-async def post_streaming_url(url: str, payload: str, stream: bool = True):
+async def post_streaming_url(url: str, payload: Union[str, bytes], stream: bool = True):
     r = None
     try:
         session = aiohttp.ClientSession(
             trust_env=True, timeout=aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT)
         )
-        r = await session.post(url, data=payload)
+        r = await session.post(
+            url,
+            data=payload,
+            headers={"Content-Type": "application/json"},
+        )
         r.raise_for_status()

         if stream:
@@ -422,6 +426,7 @@ async def copy_model(
         r = requests.request(
             method="POST",
             url=f"{url}/api/copy",
+            headers={"Content-Type": "application/json"},
             data=form_data.model_dump_json(exclude_none=True).encode(),
         )
@@ -470,6 +475,7 @@ async def delete_model(
         r = requests.request(
             method="DELETE",
             url=f"{url}/api/delete",
+            headers={"Content-Type": "application/json"},
            data=form_data.model_dump_json(exclude_none=True).encode(),
         )
         try:
@@ -510,6 +516,7 @@ async def show_model_info(form_data: ModelNameForm, user=Depends(get_verified_us
         r = requests.request(
             method="POST",
             url=f"{url}/api/show",
+            headers={"Content-Type": "application/json"},
             data=form_data.model_dump_json(exclude_none=True).encode(),
         )
         try:
@@ -567,6 +574,7 @@ async def generate_embeddings(
         r = requests.request(
             method="POST",
             url=f"{url}/api/embeddings",
+            headers={"Content-Type": "application/json"},
             data=form_data.model_dump_json(exclude_none=True).encode(),
         )
         try:
@@ -616,6 +624,7 @@ def generate_ollama_embeddings(
         r = requests.request(
             method="POST",
             url=f"{url}/api/embeddings",
+            headers={"Content-Type": "application/json"},
             data=form_data.model_dump_json(exclude_none=True).encode(),
         )
         try:

View File

@@ -59,7 +59,8 @@
 				return _item;
 			}),
 			{
-				keys: ['value', 'label', 'tags', 'desc', 'modelName']
+				keys: ['value', 'tags', 'modelName'],
+				threshold: 0.3
 			}
 		);