fix: ollama streaming cancellation using aiohttp

Jun Siang Cheah
2024-06-02 18:06:12 +01:00
parent 24c35c308d
commit 4dd51badfe
6 changed files with 154 additions and 490 deletions
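
The backend half of this change (streaming the Ollama response through aiohttp so that dropping the client connection also cancels the upstream request) is not in the excerpts below; what the frontend hunks show is the removal of the dedicated GET /cancel/{requestId} endpoint in favour of a per-request AbortController. A minimal TypeScript sketch of the pattern the changed helpers converge on follows; the helper name streamPull and the request body shape are illustrative, while OLLAMA_API_BASE_URL and the [res, controller] return value come from the diff itself.

import { OLLAMA_API_BASE_URL } from '$lib/constants';

// Sketch only (not part of the commit): a streaming helper that hands back
// both the Response and the AbortController that can cancel it.
const streamPull = async (
	token: string,
	name: string
): Promise<[Response, AbortController]> => {
	const controller = new AbortController();
	const res = await fetch(`${OLLAMA_API_BASE_URL}/api/pull`, {
		signal: controller.signal,
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify({ name }) // body shape assumed; not shown in this diff
	});
	return [res, controller];
};

// Cancellation is now local: controller.abort() tears down the fetch, and per the
// commit title it is the aiohttp-based streaming on the server that propagates
// the disconnect to Ollama.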

View File

@@ -369,27 +369,6 @@ export const generateChatCompletion = async (token: string = '', body: object) =
return [res, controller];
};
export const cancelOllamaRequest = async (token: string = '', requestId: string) => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/cancel/${requestId}`, {
method: 'GET',
headers: {
'Content-Type': 'text/event-stream',
Authorization: `Bearer ${token}`
}
}).catch((err) => {
error = err;
return null;
});
if (error) {
throw error;
}
return res;
};
export const createModel = async (token: string, tagName: string, content: string) => {
let error = null;
@@ -461,8 +440,10 @@ export const deleteModel = async (token: string, tagName: string, urlIdx: string
export const pullModel = async (token: string, tagName: string, urlIdx: string | null = null) => {
let error = null;
const controller = new AbortController();
const res = await fetch(`${OLLAMA_API_BASE_URL}/api/pull${urlIdx !== null ? `/${urlIdx}` : ''}`, {
signal: controller.signal,
method: 'POST',
headers: {
Accept: 'application/json',
@@ -485,7 +466,7 @@ export const pullModel = async (token: string, tagName: string, urlIdx: string |
if (error) {
throw error;
}
return res;
return [res, controller];
};
export const downloadModel = async (

View File

@@ -26,7 +26,7 @@
splitStream
} from '$lib/utils';
import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
import { generateChatCompletion } from '$lib/apis/ollama';
import {
addTagById,
createNewChat,
@@ -65,7 +65,6 @@
let autoScroll = true;
let processing = '';
let messagesContainerElement: HTMLDivElement;
let currentRequestId = null;
let showModelSelector = true;
@@ -130,10 +129,6 @@
//////////////////////////
const initNewChat = async () => {
if (currentRequestId !== null) {
await cancelOllamaRequest(localStorage.token, currentRequestId);
currentRequestId = null;
}
window.history.replaceState(history.state, '', `/`);
await chatId.set('');
@@ -616,7 +611,6 @@
if (stopResponseFlag) {
controller.abort('User: Stop Response');
await cancelOllamaRequest(localStorage.token, currentRequestId);
} else {
const messages = createMessagesList(responseMessageId);
const res = await chatCompleted(localStorage.token, {
@@ -647,8 +641,6 @@
}
}
currentRequestId = null;
break;
}
@@ -669,63 +661,58 @@
throw data;
}
if ('id' in data) {
console.log(data);
currentRequestId = data.id;
} else {
if (data.done == false) {
if (responseMessage.content == '' && data.message.content == '\n') {
continue;
} else {
responseMessage.content += data.message.content;
messages = messages;
}
if (data.done == false) {
if (responseMessage.content == '' && data.message.content == '\n') {
continue;
} else {
responseMessage.done = true;
if (responseMessage.content == '') {
responseMessage.error = {
code: 400,
content: `Oops! No text generated from Ollama, Please try again.`
};
}
responseMessage.context = data.context ?? null;
responseMessage.info = {
total_duration: data.total_duration,
load_duration: data.load_duration,
sample_count: data.sample_count,
sample_duration: data.sample_duration,
prompt_eval_count: data.prompt_eval_count,
prompt_eval_duration: data.prompt_eval_duration,
eval_count: data.eval_count,
eval_duration: data.eval_duration
};
responseMessage.content += data.message.content;
messages = messages;
}
} else {
responseMessage.done = true;
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(
selectedModelfile
? `${
selectedModelfile.title.charAt(0).toUpperCase() +
selectedModelfile.title.slice(1)
}`
: `${model}`,
{
body: responseMessage.content,
icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
}
);
}
if (responseMessage.content == '') {
responseMessage.error = {
code: 400,
content: `Oops! No text generated from Ollama, Please try again.`
};
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
responseMessage.context = data.context ?? null;
responseMessage.info = {
total_duration: data.total_duration,
load_duration: data.load_duration,
sample_count: data.sample_count,
sample_duration: data.sample_duration,
prompt_eval_count: data.prompt_eval_count,
prompt_eval_duration: data.prompt_eval_duration,
eval_count: data.eval_count,
eval_duration: data.eval_duration
};
messages = messages;
if ($settings.responseAutoPlayback) {
await tick();
document.getElementById(`speak-button-${responseMessage.id}`)?.click();
}
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(
selectedModelfile
? `${
selectedModelfile.title.charAt(0).toUpperCase() +
selectedModelfile.title.slice(1)
}`
: `${model}`,
{
body: responseMessage.content,
icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
}
);
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
if ($settings.responseAutoPlayback) {
await tick();
document.getElementById(`speak-button-${responseMessage.id}`)?.click();
}
}
}
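
With currentRequestId and cancelOllamaRequest gone from the chat flow, stopping a response is just an abort on the controller that generateChatCompletion already returns. Below is a condensed sketch of the resulting read loop, not the full component: body and stopResponseFlag stand in for existing component state, the message bookkeeping, chatCompleted call, notifications and auto-copy/playback are elided, and the TextDecoderStream stage is assumed since the hunks only show the splitStream('\n') stage.

import { generateChatCompletion } from '$lib/apis/ollama';
import { splitStream } from '$lib/utils';

// Condensed sketch of the post-change stop path.
const [res, controller] = await generateChatCompletion(localStorage.token, body);
const reader = res.body
	.pipeThrough(new TextDecoderStream()) // assumed: splitStream('\n') splits text
	.pipeThrough(splitStream('\n'))
	.getReader();

while (true) {
	const { value, done } = await reader.read();
	if (done || stopResponseFlag) {
		if (stopResponseFlag) {
			// Aborting the fetch is the entire cancellation path now; there is no
			// cancelOllamaRequest(localStorage.token, currentRequestId) round trip.
			controller.abort('User: Stop Response');
		}
		break;
	}
	// ...parse each NDJSON line in `value` and append data.message.content...
}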

View File

@@ -8,7 +8,7 @@
import Check from '$lib/components/icons/Check.svelte';
import Search from '$lib/components/icons/Search.svelte';
import { cancelOllamaRequest, deleteModel, getOllamaVersion, pullModel } from '$lib/apis/ollama';
import { deleteModel, getOllamaVersion, pullModel } from '$lib/apis/ollama';
import { user, MODEL_DOWNLOAD_POOL, models, mobile } from '$lib/stores';
import { toast } from 'svelte-sonner';
@@ -72,10 +72,12 @@
return;
}
const res = await pullModel(localStorage.token, sanitizedModelTag, '0').catch((error) => {
toast.error(error);
return null;
});
const [res, controller] = await pullModel(localStorage.token, sanitizedModelTag, '0').catch(
(error) => {
toast.error(error);
return null;
}
);
if (res) {
const reader = res.body
@@ -83,6 +85,16 @@
.pipeThrough(splitStream('\n'))
.getReader();
MODEL_DOWNLOAD_POOL.set({
...$MODEL_DOWNLOAD_POOL,
[sanitizedModelTag]: {
...$MODEL_DOWNLOAD_POOL[sanitizedModelTag],
abortController: controller,
reader,
done: false
}
});
while (true) {
try {
const { value, done } = await reader.read();
@@ -101,19 +113,6 @@
throw data.detail;
}
if (data.id) {
MODEL_DOWNLOAD_POOL.set({
...$MODEL_DOWNLOAD_POOL,
[sanitizedModelTag]: {
...$MODEL_DOWNLOAD_POOL[sanitizedModelTag],
requestId: data.id,
reader,
done: false
}
});
console.log(data);
}
if (data.status) {
if (data.digest) {
let downloadProgress = 0;
@@ -181,11 +180,12 @@
});
const cancelModelPullHandler = async (model: string) => {
const { reader, requestId } = $MODEL_DOWNLOAD_POOL[model];
const { reader, abortController } = $MODEL_DOWNLOAD_POOL[model];
if (abortController) {
abortController.abort();
}
if (reader) {
await reader.cancel();
await cancelOllamaRequest(localStorage.token, requestId);
delete $MODEL_DOWNLOAD_POOL[model];
MODEL_DOWNLOAD_POOL.set({
...$MODEL_DOWNLOAD_POOL
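
The pool entry now carries the AbortController from the moment the stream is obtained, instead of waiting for a request id to arrive as the first streamed object. The sketch below just stitches together the added lines from this hunk so the two ends of the pattern read in one piece; the surrounding toast and progress handling is omitted.

// After pullModel resolves: remember the controller and reader per model tag.
MODEL_DOWNLOAD_POOL.set({
	...$MODEL_DOWNLOAD_POOL,
	[sanitizedModelTag]: {
		...$MODEL_DOWNLOAD_POOL[sanitizedModelTag],
		abortController: controller,
		reader,
		done: false
	}
});

// Cancelling a pull: abort the fetch, release the reader, drop the pool entry.
const cancelModelPullHandler = async (model: string) => {
	const { reader, abortController } = $MODEL_DOWNLOAD_POOL[model];
	if (abortController) {
		abortController.abort();
	}
	if (reader) {
		await reader.cancel();
	}
	delete $MODEL_DOWNLOAD_POOL[model];
	MODEL_DOWNLOAD_POOL.set({ ...$MODEL_DOWNLOAD_POOL });
};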

View File

@@ -8,7 +8,6 @@
getOllamaUrls,
getOllamaVersion,
pullModel,
cancelOllamaRequest,
uploadModel
} from '$lib/apis/ollama';
@@ -67,12 +66,14 @@
console.log(model);
updateModelId = model.id;
const res = await pullModel(localStorage.token, model.id, selectedOllamaUrlIdx).catch(
(error) => {
toast.error(error);
return null;
}
);
const [res, controller] = await pullModel(
localStorage.token,
model.id,
selectedOllamaUrlIdx
).catch((error) => {
toast.error(error);
return null;
});
if (res) {
const reader = res.body
@@ -141,10 +142,12 @@
return;
}
const res = await pullModel(localStorage.token, sanitizedModelTag, '0').catch((error) => {
toast.error(error);
return null;
});
const [res, controller] = await pullModel(localStorage.token, sanitizedModelTag, '0').catch(
(error) => {
toast.error(error);
return null;
}
);
if (res) {
const reader = res.body
@@ -152,6 +155,16 @@
.pipeThrough(splitStream('\n'))
.getReader();
MODEL_DOWNLOAD_POOL.set({
...$MODEL_DOWNLOAD_POOL,
[sanitizedModelTag]: {
...$MODEL_DOWNLOAD_POOL[sanitizedModelTag],
abortController: controller,
reader,
done: false
}
});
while (true) {
try {
const { value, done } = await reader.read();
@@ -170,19 +183,6 @@
throw data.detail;
}
if (data.id) {
MODEL_DOWNLOAD_POOL.set({
...$MODEL_DOWNLOAD_POOL,
[sanitizedModelTag]: {
...$MODEL_DOWNLOAD_POOL[sanitizedModelTag],
requestId: data.id,
reader,
done: false
}
});
console.log(data);
}
if (data.status) {
if (data.digest) {
let downloadProgress = 0;
@@ -416,11 +416,12 @@
};
const cancelModelPullHandler = async (model: string) => {
const { reader, requestId } = $MODEL_DOWNLOAD_POOL[model];
const { reader, abortController } = $MODEL_DOWNLOAD_POOL[model];
if (abortController) {
abortController.abort();
}
if (reader) {
await reader.cancel();
await cancelOllamaRequest(localStorage.token, requestId);
delete $MODEL_DOWNLOAD_POOL[model];
MODEL_DOWNLOAD_POOL.set({
...$MODEL_DOWNLOAD_POOL

View File

@@ -8,7 +8,7 @@
import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_API_BASE_URL } from '$lib/constants';
import { WEBUI_NAME, config, user, models, settings } from '$lib/stores';
import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
import { generateChatCompletion } from '$lib/apis/ollama';
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
import { splitStream } from '$lib/utils';
@@ -24,7 +24,6 @@
let selectedModelId = '';
let loading = false;
let currentRequestId = null;
let stopResponseFlag = false;
let messagesContainerElement: HTMLDivElement;
@@ -46,14 +45,6 @@
}
};
// const cancelHandler = async () => {
// if (currentRequestId) {
// const res = await cancelOllamaRequest(localStorage.token, currentRequestId);
// currentRequestId = null;
// loading = false;
// }
// };
const stopResponse = () => {
stopResponseFlag = true;
console.log('stopResponse');
@@ -171,8 +162,6 @@
if (stopResponseFlag) {
controller.abort('User: Stop Response');
}
currentRequestId = null;
break;
}
@@ -229,7 +218,6 @@
loading = false;
stopResponseFlag = false;
currentRequestId = null;
}
};