Mirror of https://github.com/stackblitz/bolt.new (synced 2025-02-06 04:48:04 +00:00)
Merge remote-tracking branch 'upstream/main' into linting

Commit 4589014bda
@@ -10,9 +10,12 @@ import { ollama } from 'ollama-ai-provider';
 import { createOpenRouter } from '@openrouter/ai-sdk-provider';
 import { createMistral } from '@ai-sdk/mistral';
 import { createCohere } from '@ai-sdk/cohere';
+import type { LanguageModelV1 } from 'ai';
 
 export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ? parseInt(process.env.DEFAULT_NUM_CTX, 10) : 32768;
 
+type OptionalApiKey = string | undefined;
+
 export function getAnthropicModel(apiKey: OptionalApiKey, model: string) {
   const anthropic = createAnthropic({
     apiKey,
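
Note on the hunk above: the DEFAULT_NUM_CTX fallback only guards against a missing or empty variable, so a non-numeric value would still reach parseInt and produce NaN. A minimal sketch of a stricter variant, assuming Node-style process.env access; the helper name envInt is illustrative and not part of the commit:

// Hypothetical helper: parse a positive integer from the environment,
// falling back to a default when the value is missing or not a number.
function envInt(value: string | undefined, fallback: number): number {
  const parsed = value ? parseInt(value, 10) : NaN;
  return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback;
}

// Same default as the exported constant in the hunk, but NaN-safe.
export const DEFAULT_NUM_CTX = envInt(process.env.DEFAULT_NUM_CTX, 32768);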
@@ -20,9 +23,6 @@ export function getAnthropicModel(apiKey: OptionalApiKey, model: string) {
 
   return anthropic(model);
 }
-
-type OptionalApiKey = string | undefined;
-
 export function getOpenAILikeModel(baseURL: string, apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL,
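
The hunk above is cut off inside getOpenAILikeModel. For orientation, a sketch of how such an OpenAI-compatible factory typically completes; the createOpenAI import path, the apiKey field, and the return line are assumptions, since the hunk ends at baseURL:

import { createOpenAI } from '@ai-sdk/openai'; // assumed import, not shown in the hunk

type OptionalApiKey = string | undefined;

export function getOpenAILikeModel(baseURL: string, apiKey: OptionalApiKey, model: string) {
  const openai = createOpenAI({
    baseURL,
    apiKey, // assumed: the hunk is truncated after baseURL
  });

  return openai(model); // assumed, by analogy with getAnthropicModel above
}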
@@ -85,7 +85,7 @@ export function getHuggingFaceModel(apiKey: OptionalApiKey, model: string) {
 export function getOllamaModel(baseURL: string, model: string) {
   const ollamaInstance = ollama(model, {
     numCtx: DEFAULT_NUM_CTX,
-  });
+  }) as LanguageModelV1 & { config: any };
 
   ollamaInstance.config.baseURL = `${baseURL}/api`;
 
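
The change in this hunk widens the return type of ollama(...) so the next line can patch config.baseURL: LanguageModelV1 does not declare a config property, hence the intersection with { config: any }. A sketch of the resulting function under that reading; the trailing return is assumed, since the hunk ends before it:

import { ollama } from 'ollama-ai-provider';
import type { LanguageModelV1 } from 'ai';

const DEFAULT_NUM_CTX = 32768; // stands in for the exported constant above

export function getOllamaModel(baseURL: string, model: string) {
  // The cast is an escape hatch: the code assigns to `config` at runtime,
  // but `config` is not part of the LanguageModelV1 interface.
  const ollamaInstance = ollama(model, {
    numCtx: DEFAULT_NUM_CTX,
  }) as LanguageModelV1 & { config: any };

  // Point the provider at the user-supplied Ollama host; its API lives under /api.
  ollamaInstance.config.baseURL = `${baseURL}/api`;

  return ollamaInstance; // assumed
}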
@@ -1,7 +1,6 @@
-/*
- * @ts-nocheck
- * Preventing TS checks with files presented in the video for a better presentation.
- */
+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-nocheck – TODO: Provider proper types
+
 import { streamText as _streamText, convertToCoreMessages } from 'ai';
 import { getModel } from '~/lib/.server/llm/model';
 import { MAX_TOKENS } from './constants';
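
Context for the comment rewrite above: TypeScript only honors @ts-nocheck when it appears in a // line comment at the top of a file, so the old block-comment form never actually disabled checking, and the new eslint-disable line keeps @typescript-eslint/ban-ts-comment from flagging the directive. A small sketch of the effect, with an illustrative variable:

// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-nocheck

// With the directive active, type checking is skipped for the whole file,
// so even this deliberate mismatch compiles (illustrative only):
const port: number = 'not-a-number';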
@@ -1,7 +1,6 @@
-/*
- * @ts-nocheck
- * Preventing TS checks with files presented in the video for a better presentation.
- */
+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-nocheck – TODO: Provider proper types
+
 import { type ActionFunctionArgs } from '@remix-run/cloudflare';
 import { MAX_RESPONSE_SEGMENTS, MAX_TOKENS } from '~/lib/.server/llm/constants';
 import { CONTINUE_PROMPT } from '~/lib/.server/llm/prompts';
@@ -167,6 +167,48 @@ const PROVIDER_LIST: ProviderInfo[] = [
         provider: 'HuggingFace',
         maxTokenAllowed: 8000,
       },
+      {
+        name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
+        label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'Qwen/Qwen2.5-72B-Instruct',
+        label: 'Qwen2.5-72B-Instruct (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'meta-llama/Llama-3.1-70B-Instruct',
+        label: 'Llama-3.1-70B-Instruct (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'meta-llama/Llama-3.1-405B',
+        label: 'Llama-3.1-405B (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: '01-ai/Yi-1.5-34B-Chat',
+        label: 'Yi-1.5-34B-Chat (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'codellama/CodeLlama-34b-Instruct-hf',
+        label: 'CodeLlama-34b-Instruct (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'NousResearch/Hermes-3-Llama-3.1-8B',
+        label: 'Hermes-3-Llama-3.1-8B (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
     ],
     getApiKeyLink: 'https://huggingface.co/settings/tokens',
   },
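
All seven additions above follow the same shape. A sketch of the interface these literals appear to satisfy; the field names come from the hunk itself, but the interface name ModelInfo is an assumption:

// Assumed shape, inferred from the object literals added in the hunk above.
interface ModelInfo {
  name: string; // provider-side model id, e.g. 'Qwen/Qwen2.5-Coder-32B-Instruct'
  label: string; // display name shown in the UI
  provider: string; // 'HuggingFace' for every entry added here
  maxTokenAllowed: number; // 8000 across all of the new entries
}

const example: ModelInfo = {
  name: 'NousResearch/Hermes-3-Llama-3.1-8B',
  label: 'Hermes-3-Llama-3.1-8B (HuggingFace)',
  provider: 'HuggingFace',
  maxTokenAllowed: 8000,
};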
@@ -11,7 +11,7 @@ interface Logger {
   setLevel: (level: DebugLevel) => void;
 }
 
-let currentLevel: DebugLevel = (import.meta.env.VITE_LOG_LEVEL ?? import.meta.env.DEV) ? 'debug' : 'info';
+let currentLevel: DebugLevel = import.meta.env.VITE_LOG_LEVEL ?? import.meta.env.DEV ? 'debug' : 'info';
 
 const isWorker = 'HTMLRewriter' in globalThis;
 const supportsColor = !isWorker;
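
The only change in this hunk is the removal of the parentheses. Because ?? binds more tightly than the conditional operator, both spellings parse as (VITE_LOG_LEVEL ?? DEV) ? 'debug' : 'info', so behavior is unchanged. A small equivalence check with illustrative stand-in variables:

// Stand-ins for import.meta.env.VITE_LOG_LEVEL and import.meta.env.DEV.
const logLevel: string | undefined = undefined;
const dev = true;

// Both expressions parse identically: ?? has higher precedence than ?:
const withParens = (logLevel ?? dev) ? 'debug' : 'info';
const withoutParens = logLevel ?? dev ? 'debug' : 'info';

console.log(withParens === withoutParens); // true for any combination of inputs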
@@ -12,7 +12,7 @@
     "test": "vitest --run",
     "test:watch": "vitest",
     "lint": "eslint --cache --cache-location ./node_modules/.cache/eslint app",
-    "lint:fix": "pnpm run lint -- --fix",
+    "lint:fix": "npm run lint -- --fix && prettier app --write",
     "start": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings",
    "dockerstart": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings --ip 0.0.0.0 --port 5173 --no-show-interactive-dev-session",
     "dockerrun": "docker run -it -d --name bolt-ai-live -p 5173:5173 --env-file .env.local bolt-ai",