feat: init support for deepseek

Fred 2024-05-15 14:47:43 +08:00
parent 3513c6801e
commit 27ac18d9d7
No known key found for this signature in database
GPG Key ID: 4DABDA85EF70EC71
8 changed files with 54 additions and 25 deletions

View File

@@ -54,10 +54,11 @@ ANTHROPIC_API_KEY=
### anthropic claude Api version. (optional)
ANTHROPIC_API_VERSION=
### deepseek api key (optional)
DEEPSEEK_API_KEY=
### anthropic claude Api url (optional)
ANTHROPIC_URL=
### (optional)
WHITE_WEBDEV_ENDPOINTS=
WEBDEV_ENDPOINTS_WHITELIST=

View File

@@ -212,6 +212,10 @@ anthropic claude Api version.
anthropic claude Api Url.
### `DEEPSEEK_API_KEY` (optional)
deepseek Api Key.
### `HIDE_USER_API_KEY` (optional)
> Default: Empty
@@ -245,11 +249,12 @@ To control custom models, use `+` to add a custom model, use `-` to hide a model
Use `-all` to disable all default models, `+all` to enable all default models.
### `WHITE_WEBDEV_ENDPOINTS` (optional)
### `WEBDEV_ENDPOINTS_WHITELIST` (optional)
Use this option if you want to add more WebDAV service endpoints that are allowed to be accessed. Format requirements (an example follows the list):
- Each address must be a complete endpoint
> `https://xxxx/yyy`
- Multiple addresses are separated by `,`
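For example, a value that whitelists two endpoints (the hostnames below are placeholders, not real services) would look like this:

```
WEBDEV_ENDPOINTS_WHITELIST=https://dav.example.com/remote.php/webdav,https://backup.example.org/dav
```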
## Requirements

View File

@@ -126,6 +126,10 @@ anthropic claude Api version.
anthropic claude Api Url.
### `DEEPSEEK_API_KEY` (optional)
deepseek Api Key.
### `HIDE_USER_API_KEY` (optional)
If you do not want users to fill in their own API Key, set this environment variable to 1.
@@ -142,11 +146,12 @@ anthropic claude Api Url.
If you want to disable parsing of preset settings from links, set this environment variable to 1.
### `WHITE_WEBDEV_ENDPOINTS` (optional)
### `WEBDEV_ENDPOINTS_WHITELIST` (optional)
Use this option if you want to add more WebDAV service endpoints that are allowed to be accessed. Format requirements:
- Each address must be a complete endpoint
> `https://xxxx/xxx`
- Multiple addresses are separated by `,`
### `CUSTOM_MODELS` (optional)

View File

@@ -73,6 +73,10 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
case ModelProvider.Claude:
systemApiKey = serverConfig.anthropicApiKey;
break;
case ModelProvider.Deepseek:
systemApiKey = serverConfig.deepseekApiKey;
break;
case ModelProvider.GPT:
default:
if (serverConfig.isAzure) {
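For context, a minimal sketch of what typically happens with the key resolved by this switch, assuming DeepSeek is handled the same way `auth()` already handles the other providers; the helper name below is hypothetical and the surrounding logic is not part of this hunk:

```ts
import { NextRequest } from "next/server";

// Hypothetical helper, for illustration only: inject the server-side key
// (e.g. serverConfig.deepseekApiKey) when the client sent no key of its own.
function applySystemApiKey(req: NextRequest, systemApiKey?: string) {
  const userKey = req.headers.get("Authorization");
  if (userKey) {
    // The user supplied their own key; forward the request unchanged.
    return;
  }
  if (systemApiKey) {
    req.headers.set("Authorization", `Bearer ${systemApiKey}`);
  } else {
    console.log("[Auth] no system api key configured for this provider");
  }
}
```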

View File

@@ -87,6 +87,8 @@ export async function requestOpenai(req: NextRequest) {
DEFAULT_MODELS,
serverConfig.customModels,
);
// check if deepseek model
const clonedBody = await req.text();
fetchOptions.body = clonedBody;
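The body of that "check if deepseek model" step is not visible in this hunk; purely to illustrate the idea (the function names and logic below are hypothetical, not the commit's code), such a guard typically inspects the model name in the request body and branches on it:

```ts
// Hypothetical illustration only — not the code added by this commit.
function isDeepSeekModel(modelName: string): boolean {
  return modelName.startsWith("deepseek-"); // e.g. "deepseek-chat"
}

// Decide which upstream a chat request should go to, based on the cloned body.
function pickUpstream(clonedBody: string): "deepseek" | "openai" {
  const { model } = JSON.parse(clonedBody) as { model?: string };
  return model && isDeepSeekModel(model) ? "deepseek" : "openai";
}
```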
@@ -112,16 +114,16 @@ export async function requestOpenai(req: NextRequest) {
try {
const res = await fetch(fetchUrl, fetchOptions);
// Extract the OpenAI-Organization header from the response
const openaiOrganizationHeader = res.headers.get("OpenAI-Organization");
// Check if serverConfig.openaiOrgId is defined and not an empty string
if (serverConfig.openaiOrgId && serverConfig.openaiOrgId.trim() !== "") {
// If openaiOrganizationHeader is present, log it; otherwise, log that the header is not present
console.log("[Org ID]", openaiOrganizationHeader);
} else {
console.log("[Org ID] is not set up.");
}
// to prevent browser prompt for credentials
const newHeaders = new Headers(res.headers);
@@ -129,7 +131,6 @@ export async function requestOpenai(req: NextRequest) {
// to disable nginx buffering
newHeaders.set("X-Accel-Buffering", "no");
// Conditionally delete the OpenAI-Organization header from the response if [Org ID] is undefined or empty (not setup in ENV)
// Also, this is to prevent the header from being sent to the client
if (!serverConfig.openaiOrgId || serverConfig.openaiOrgId.trim() === "") {
@@ -142,7 +143,6 @@ export async function requestOpenai(req: NextRequest) {
// The browser will try to decode the response with brotli and fail
newHeaders.delete("content-encoding");
return new Response(res.body, {
status: res.status,
statusText: res.statusText,

View File

@@ -70,7 +70,7 @@ export abstract class LLMApi {
abstract models(): Promise<LLMModel[]>;
}
type ProviderName = "openai" | "azure" | "claude" | "palm";
type ProviderName = "openai" | "azure" | "claude" | "palm" | "deepseek";
interface Model {
name: string;
@@ -162,6 +162,7 @@ export function getHeaders() {
const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
const isGoogle = modelConfig.model.startsWith("gemini");
const isAzure = accessStore.provider === ServiceProvider.Azure;
const isDeepSeek = accessStore.provider === ServiceProvider.DeepSeek;
const authHeader = isAzure ? "api-key" : "Authorization";
const apiKey = isGoogle
? accessStore.googleApiKey
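The rest of that ternary chain is cut off in this hunk; a hypothetical sketch of how the new `isDeepSeek` flag can slot into the key selection (the `deepseekApiKey` field on the access store is an assumption here, not shown in this commit):

```ts
// Sketch only — continuation of the visible expression, with assumed store fields.
const apiKey = isGoogle
  ? accessStore.googleApiKey
  : isAzure
  ? accessStore.azureApiKey
  : isDeepSeek
  ? accessStore.deepseekApiKey // assumed field; the store change is not part of this diff
  : accessStore.openaiApiKey;
```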

View File

@@ -89,6 +89,7 @@ export const getServerSideConfig = () => {
const isAzure = !!process.env.AZURE_URL;
const isGoogle = !!process.env.GOOGLE_API_KEY;
const isAnthropic = !!process.env.ANTHROPIC_API_KEY;
const isDeepSeek = !!process.env.DEEPSEEK_API_KEY;
// const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
// const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
@@ -99,7 +100,7 @@ export const getServerSideConfig = () => {
// );
const allowedWebDevEndpoints = (
process.env.WHITE_WEBDEV_ENDPOINTS ?? ""
process.env.WEBDEV_ENDPOINTS_WHITELIST ?? ""
).split(",");
return {
@@ -121,6 +122,8 @@ export const getServerSideConfig = () => {
anthropicApiVersion: process.env.ANTHROPIC_API_VERSION,
anthropicUrl: process.env.ANTHROPIC_URL,
deepseekApiKey: getApiKey(process.env.DEEPSEEK_API_KEY),
gtmId: process.env.GTM_ID,
needCode: ACCESS_CODES.size > 0,
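Only the environment variable name changes here; the parsing stays a plain `split(",")`. A minimal illustration of what the whitelist array ends up looking like (the URLs are made up):

```ts
// Same parsing as above, shown standalone.
const raw =
  process.env.WEBDEV_ENDPOINTS_WHITELIST ?? ""; // e.g. "https://dav.example.com/dav,https://backup.example.org/webdav"
const allowedWebDevEndpoints = raw.split(",");
// -> ["https://dav.example.com/dav", "https://backup.example.org/webdav"]
// Note: an unset variable yields [""] because "".split(",") returns [""].
```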

View File

@@ -1,3 +1,5 @@
import { Chat } from "./components/chat";
export const OWNER = "Yidadaa";
export const REPO = "ChatGPT-Next-Web";
export const REPO_URL = `https://github.com/${OWNER}/${REPO}`;
@@ -70,12 +72,14 @@ export enum ServiceProvider {
Azure = "Azure",
Google = "Google",
Anthropic = "Anthropic",
DeepSeek = "DeepSeek",
}
export enum ModelProvider {
GPT = "GPT",
GeminiPro = "GeminiPro",
Claude = "Claude",
Deepseek = "DeepSeek",
}
export const Anthropic = {
@@ -136,16 +140,11 @@ export const KnowledgeCutOffDate: Record<string, string> = {
const openaiModels = [
"gpt-3.5-turbo",
"gpt-3.5-turbo-1106",
"gpt-3.5-turbo-0125",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-turbo",
"gpt-4-turbo-preview",
"gpt-4-vision-preview",
"gpt-4-turbo-2024-04-09",
];
const googleModels = [
@@ -163,6 +162,8 @@ const anthropicModels = [
"claude-3-haiku-20240307",
];
const deepseekModels = ["deepseek-chat"];
export const DEFAULT_MODELS = [
...openaiModels.map((name) => ({
name,
@@ -191,6 +192,15 @@ export const DEFAULT_MODELS = [
providerType: "anthropic",
},
})),
...deepseekModels.map((name) => ({
name,
available: true,
provider: {
id: "deepseek",
providerName: "DeepSeek",
providerType: "deepseek",
},
})),
] as const;
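Expanding the `deepseekModels.map(...)` above by hand, the single entry it contributes to `DEFAULT_MODELS` has this shape:

```ts
// Result of the mapping for the one entry in deepseekModels.
const deepseekEntry = {
  name: "deepseek-chat",
  available: true,
  provider: {
    id: "deepseek",
    providerName: "DeepSeek",
    providerType: "deepseek",
  },
} as const;
```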
export const CHAT_PAGE_SIZE = 15;