diff --git a/app/client/api.ts b/app/client/api.ts
index abff459c5..4b706434a 100644
--- a/app/client/api.ts
+++ b/app/client/api.ts
@@ -64,12 +64,14 @@ export interface LLMModel {
   displayName?: string;
   available: boolean;
   provider: LLMModelProvider;
+  sorted: number;
 }
 
 export interface LLMModelProvider {
   id: string;
   providerName: string;
   providerType: string;
+  sorted: number;
 }
 
 export abstract class LLMApi {
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 680125fe6..d95aebe87 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -411,13 +411,17 @@ export class ChatGPTApi implements LLMApi {
       return [];
     }
 
+    // Since OpenAI's disableListModels currently defaults to true, this code path is not actually reached at the moment
+    let seq = 1000; // keep consistent with the sort order in constant.ts
     return chatModels.map((m) => ({
       name: m.id,
       available: true,
+      sorted: seq++,
       provider: {
         id: "openai",
         providerName: "OpenAI",
         providerType: "openai",
+        sorted: 1,
       },
     }));
   }
diff --git a/app/constant.ts b/app/constant.ts
index 5251b5b4f..8ca17c4b3 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -320,86 +320,105 @@ const tencentModels = [
 
 const moonshotModes = ["moonshot-v1-8k", "moonshot-v1-32k", "moonshot-v1-128k"];
 
+let seq = 1000; // the sequence number generator for built-in models starts from 1000
 export const DEFAULT_MODELS = [
   ...openaiModels.map((name) => ({
     name,
     available: true,
+    sorted: seq++, // global sequence sort index
     provider: {
       id: "openai",
       providerName: "OpenAI",
       providerType: "openai",
+      sorted: 1, // fixed, keeps the order consistent with the previous built-in version
     },
   })),
   ...openaiModels.map((name) => ({
     name,
     available: true,
+    sorted: seq++,
     provider: {
       id: "azure",
       providerName: "Azure",
       providerType: "azure",
+      sorted: 2,
     },
   })),
   ...googleModels.map((name) => ({
     name,
     available: true,
+    sorted: seq++,
     provider: {
       id: "google",
       providerName: "Google",
       providerType: "google",
+      sorted: 3,
     },
   })),
   ...anthropicModels.map((name) => ({
     name,
     available: true,
+    sorted: seq++,
     provider: {
       id: "anthropic",
       providerName: "Anthropic",
       providerType: "anthropic",
+      sorted: 4,
     },
   })),
   ...baiduModels.map((name) => ({
     name,
     available: true,
+    sorted: seq++,
     provider: {
       id: "baidu",
       providerName: "Baidu",
       providerType: "baidu",
+      sorted: 5,
     },
   })),
   ...bytedanceModels.map((name) => ({
     name,
     available: true,
+    sorted: seq++,
     provider: {
       id: "bytedance",
       providerName: "ByteDance",
       providerType: "bytedance",
+      sorted: 6,
     },
   })),
   ...alibabaModes.map((name) => ({
     name,
     available: true,
+    sorted: seq++,
     provider: {
       id: "alibaba",
       providerName: "Alibaba",
       providerType: "alibaba",
+      sorted: 7,
     },
   })),
   ...tencentModels.map((name) => ({
     name,
     available: true,
+    sorted: seq++,
     provider: {
       id: "tencent",
       providerName: "Tencent",
       providerType: "tencent",
+      sorted: 8,
     },
   })),
   ...moonshotModes.map((name) => ({
     name,
     available: true,
+    sorted: seq++,
     provider: {
       id: "moonshot",
       providerName: "Moonshot",
       providerType: "moonshot",
+      sorted: 9,
     },
   })),
 ] as const;
diff --git a/app/utils/model.ts b/app/utils/model.ts
index 4de0eb8d9..0b62b53be 100644
--- a/app/utils/model.ts
+++ b/app/utils/model.ts
@@ -1,12 +1,42 @@
 import { DEFAULT_MODELS } from "../constant";
 import { LLMModel } from "../client/api";
 
+const CustomSeq = {
+  val: -1000, // To ensure custom models are placed at the front, start from -1000 (refer to constant.ts)
+  cache: new Map(),
+  next: (id: string) => {
+    if (CustomSeq.cache.has(id)) {
+      return CustomSeq.cache.get(id) as number;
+    } else {
+      let seq = CustomSeq.val++;
+      CustomSeq.cache.set(id, seq);
+      return seq;
+    }
+  },
+};
+
 const customProvider = (providerName: string) => ({
   id: providerName.toLowerCase(),
   providerName: providerName,
   providerType: "custom",
+  sorted: CustomSeq.next(providerName),
 });
 
+/**
+ * Sorts an array of models based on specified rules.
+ *
+ * Sorted first by provider; when providers are equal, sorted by model.
+ */
+const sortModelTable = (models: ReturnType<typeof collectModels>) =>
+  models.sort((a, b) => {
+    if (a.provider && b.provider) {
+      let cmp = a.provider.sorted - b.provider.sorted;
+      return cmp === 0 ? a.sorted - b.sorted : cmp;
+    } else {
+      return a.sorted - b.sorted;
+    }
+  });
+
 export function collectModelTable(
   models: readonly LLMModel[],
   customModels: string,
@@ -17,6 +47,7 @@ export function collectModelTable(
       available: boolean;
       name: string;
       displayName: string;
+      sorted: number;
       provider?: LLMModel["provider"]; // Marked as optional
       isDefault?: boolean;
     }
@@ -84,6 +115,7 @@ export function collectModelTable(
         displayName: displayName || customModelName,
         available,
         provider, // Use optional chaining
+        sorted: CustomSeq.next(`${customModelName}@${provider?.id}`),
       };
     }
   }
@@ -99,13 +131,16 @@ export function collectModelTableWithDefaultModel(
 ) {
   let modelTable = collectModelTable(models, customModels);
   if (defaultModel && defaultModel !== "") {
-    if (defaultModel.includes('@')) {
+    if (defaultModel.includes("@")) {
       if (defaultModel in modelTable) {
         modelTable[defaultModel].isDefault = true;
       }
     } else {
       for (const key of Object.keys(modelTable)) {
-        if (modelTable[key].available && key.split('@').shift() == defaultModel) {
+        if (
+          modelTable[key].available &&
+          key.split("@").shift() == defaultModel
+        ) {
           modelTable[key].isDefault = true;
           break;
         }
@@ -123,7 +158,9 @@ export function collectModels(
   customModels: string,
 ) {
   const modelTable = collectModelTable(models, customModels);
-  const allModels = Object.values(modelTable);
+  let allModels = Object.values(modelTable);
+
+  allModels = sortModelTable(allModels);
 
   return allModels;
 }
@@ -138,7 +175,10 @@ export function collectModelsWithDefaultModel(
     customModels,
     defaultModel,
   );
-  const allModels = Object.values(modelTable);
+  let allModels = Object.values(modelTable);
+
+  allModels = sortModelTable(allModels);
+
   return allModels;
 }
 
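
For reference, a minimal standalone sketch of the ordering this patch introduces: built-in models receive ascending sorted values starting at 1000, custom entries count upward from -1000 so they land in front, and the comparator orders by provider.sorted first and by the model's own sorted second, mirroring sortModelTable. The SortableModel, sortModels, and demo names below are illustrative only and not part of the patch.

// Illustrative sketch of the comparator behavior (assumed shapes, not repo code).
interface SortableModel {
  name: string;
  sorted: number;
  provider?: { id: string; sorted: number };
}

// Provider order first; if providers tie, fall back to the model's own sequence.
function sortModels(models: SortableModel[]): SortableModel[] {
  return models.sort((a, b) => {
    if (a.provider && b.provider) {
      const cmp = a.provider.sorted - b.provider.sorted;
      return cmp === 0 ? a.sorted - b.sorted : cmp;
    }
    return a.sorted - b.sorted;
  });
}

// Custom entries get negative sequence numbers (counting up from -1000),
// built-ins start at 1000, so custom providers always sort to the front.
const demo: SortableModel[] = [
  { name: "gpt-4o", sorted: 1001, provider: { id: "openai", sorted: 1 } },
  { name: "gpt-3.5-turbo", sorted: 1000, provider: { id: "openai", sorted: 1 } },
  { name: "my-local-model", sorted: -999, provider: { id: "my-llm", sorted: -1000 } },
];

console.log(sortModels(demo).map((m) => m.name));
// => [ "my-local-model", "gpt-3.5-turbo", "gpt-4o" ]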