// open-webui/src/lib/apis/ollama/index.ts
import { OLLAMA_API_BASE_URL } from '$lib/constants';
2024-01-04 21:06:31 +00:00
export const getOllamaAPIUrl = async (token: string = '') => {
2023-12-26 11:28:30 +00:00
let error = null;
2024-01-04 21:06:31 +00:00
const res = await fetch(`${OLLAMA_API_BASE_URL}/url`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OLLAMA_API_BASE_URL;
};
/**
 * Persist a new Ollama API base URL on the backend and return the value the
 * server reports back. Throws the backend's `detail` message (or a generic
 * connection-failure string) when the request fails.
 */
export const updateOllamaAPIUrl = async (token: string = '', url: string) => {
	let error = null;
	let res = null;

	try {
		const response = await fetch(`${OLLAMA_API_BASE_URL}/url/update`, {
			method: 'POST',
			headers: {
				Accept: 'application/json',
				'Content-Type': 'application/json',
				...(token && { authorization: `Bearer ${token}` })
			},
			body: JSON.stringify({
				url: url
			})
		});
		if (!response.ok) throw await response.json();
		res = await response.json();
	} catch (err) {
		console.log(err);
		error = 'detail' in err ? err.detail : 'Server connection failed';
	}

	if (error) {
		throw error;
	}

	return res.OLLAMA_API_BASE_URL;
};
export const getOllamaVersion = async (token: string = '') => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/version`, {
2023-12-26 11:28:30 +00:00
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
2023-12-26 19:32:22 +00:00
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
2023-12-26 11:28:30 +00:00
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
2023-12-26 21:10:50 +00:00
return res?.version ?? '';
2023-12-26 11:28:30 +00:00
};
2024-01-04 21:06:31 +00:00
export const getOllamaModels = async (token: string = '') => {
2023-12-26 11:28:30 +00:00
let error = null;
2024-01-04 21:06:31 +00:00
const res = await fetch(`${OLLAMA_API_BASE_URL}/tags`, {
2023-12-26 11:28:30 +00:00
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
2023-12-26 19:32:22 +00:00
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
2023-12-26 11:28:30 +00:00
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
2024-02-22 12:12:26 +00:00
return (res?.models ?? [])
.map((model) => ({ id: model.model, name: model.name ?? model.model, ...model }))
.sort((a, b) => {
return a.name.localeCompare(b.name);
});
2023-12-26 11:28:30 +00:00
};
2023-12-26 20:50:52 +00:00
// TODO: migrate to backend
export const generateTitle = async (
token: string = '',
template: string,
model: string,
prompt: string
) => {
2023-12-26 20:50:52 +00:00
let error = null;
template = template.replace(/{{prompt}}/g, prompt);
console.log(template);
2024-01-04 21:06:31 +00:00
const res = await fetch(`${OLLAMA_API_BASE_URL}/generate`, {
2023-12-26 20:50:52 +00:00
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
model: model,
prompt: template,
2023-12-26 20:50:52 +00:00
stream: false
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
}
return null;
});
if (error) {
throw error;
}
return res?.response ?? 'New Chat';
};
2024-01-10 06:47:31 +00:00
export const generatePrompt = async (token: string = '', model: string, conversation: string) => {
let error = null;
if (conversation === '') {
2024-01-10 07:06:33 +00:00
conversation = '[no existing conversation]';
2024-01-10 06:47:31 +00:00
}
const res = await fetch(`${OLLAMA_API_BASE_URL}/generate`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
model: model,
2024-01-10 07:06:33 +00:00
prompt: `Conversation:
2024-01-10 06:47:31 +00:00
${conversation}
2024-01-10 07:06:33 +00:00
As USER in the conversation above, your task is to continue the conversation. Remember, Your responses should be crafted as if you're a human conversing in a natural, realistic manner, keeping in mind the context and flow of the dialogue. Please generate a fitting response to the last message in the conversation, or if there is no existing conversation, initiate one as a normal person would.
2024-01-10 06:47:31 +00:00
Response:
`
})
}).catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
}
return null;
});
if (error) {
throw error;
}
return res;
};
2024-01-04 21:06:31 +00:00
export const generateChatCompletion = async (token: string = '', body: object) => {
2024-01-18 03:19:44 +00:00
let controller = new AbortController();
2023-12-26 20:50:52 +00:00
let error = null;
2024-01-04 21:06:31 +00:00
const res = await fetch(`${OLLAMA_API_BASE_URL}/chat`, {
2024-01-18 03:19:44 +00:00
signal: controller.signal,
2023-12-26 20:50:52 +00:00
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
Authorization: `Bearer ${token}`
},
body: JSON.stringify(body)
}).catch((err) => {
error = err;
return null;
});
if (error) {
throw error;
}
2024-01-18 03:19:44 +00:00
return [res, controller];
};
export const cancelChatCompletion = async (token: string = '', requestId: string) => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/cancel/${requestId}`, {
method: 'GET',
headers: {
'Content-Type': 'text/event-stream',
Authorization: `Bearer ${token}`
}
}).catch((err) => {
error = err;
return null;
});
if (error) {
throw error;
}
2023-12-26 20:50:52 +00:00
return res;
};
2024-01-04 21:06:31 +00:00
export const createModel = async (token: string, tagName: string, content: string) => {
let error = null;
2024-01-04 21:06:31 +00:00
const res = await fetch(`${OLLAMA_API_BASE_URL}/create`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
name: tagName,
modelfile: content
})
}).catch((err) => {
error = err;
return null;
});
if (error) {
throw error;
}
return res;
};
2024-01-04 21:06:31 +00:00
export const deleteModel = async (token: string, tagName: string) => {
let error = null;
2024-01-04 21:06:31 +00:00
const res = await fetch(`${OLLAMA_API_BASE_URL}/delete`, {
method: 'DELETE',
headers: {
'Content-Type': 'text/event-stream',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
name: tagName
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.then((json) => {
console.log(json);
return true;
})
.catch((err) => {
console.log(err);
error = err.error;
return null;
});
if (error) {
throw error;
}
return res;
};
2024-01-04 21:06:31 +00:00
export const pullModel = async (token: string, tagName: string) => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/pull`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
name: tagName
})
}).catch((err) => {
2024-01-06 21:02:09 +00:00
console.log(err);
2024-01-04 21:06:31 +00:00
error = err;
2024-01-06 21:02:09 +00:00
if ('detail' in err) {
error = err.detail;
}
2024-01-04 21:06:31 +00:00
return null;
});
if (error) {
throw error;
}
return res;
};
// export const pullModel = async (token: string, tagName: string) => {
// return await fetch(`${OLLAMA_API_BASE_URL}/pull`, {
// method: 'POST',
// headers: {
// 'Content-Type': 'text/event-stream',
// Authorization: `Bearer ${token}`
// },
// body: JSON.stringify({
// name: tagName
// })
// });
// };