💄 Limit the size and number of uploaded files
This commit is contained in:
parent 7fa9f381e1
commit b6da4baa97
@@ -95,6 +95,8 @@ from config import (
     TIKA_SERVER_URL,
     RAG_TOP_K,
     RAG_RELEVANCE_THRESHOLD,
+    RAG_MAX_FILE_SIZE,
+    RAG_MAX_FILE_COUNT,
     RAG_EMBEDDING_ENGINE,
     RAG_EMBEDDING_MODEL,
     RAG_EMBEDDING_MODEL_AUTO_UPDATE,
@@ -143,6 +145,8 @@ app.state.config = AppConfig()

 app.state.config.TOP_K = RAG_TOP_K
 app.state.config.RELEVANCE_THRESHOLD = RAG_RELEVANCE_THRESHOLD
+app.state.config.MAX_FILE_SIZE = RAG_MAX_FILE_SIZE
+app.state.config.MAX_FILE_COUNT = RAG_MAX_FILE_COUNT

 app.state.config.ENABLE_RAG_HYBRID_SEARCH = ENABLE_RAG_HYBRID_SEARCH
 app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = (
@@ -567,6 +571,8 @@ async def get_query_settings(user=Depends(get_admin_user)):
         "template": app.state.config.RAG_TEMPLATE,
         "k": app.state.config.TOP_K,
         "r": app.state.config.RELEVANCE_THRESHOLD,
+        "max_file_size": app.state.config.MAX_FILE_SIZE,
+        "max_file_count": app.state.config.MAX_FILE_COUNT,
         "hybrid": app.state.config.ENABLE_RAG_HYBRID_SEARCH,
     }

@@ -574,6 +580,8 @@ async def get_query_settings(user=Depends(get_admin_user)):
 class QuerySettingsForm(BaseModel):
     k: Optional[int] = None
     r: Optional[float] = None
+    max_file_size: Optional[int] = None
+    max_file_count: Optional[int] = None
     template: Optional[str] = None
     hybrid: Optional[bool] = None

@@ -590,11 +598,20 @@ async def update_query_settings(
     app.state.config.ENABLE_RAG_HYBRID_SEARCH = (
         form_data.hybrid if form_data.hybrid else False
     )
+    app.state.config.MAX_FILE_SIZE = (
+        form_data.max_file_size if form_data.max_file_size else 10
+    )
+    app.state.config.MAX_FILE_COUNT = (
+        form_data.max_file_count if form_data.max_file_count else 5
+    )
+
     return {
         "status": True,
         "template": app.state.config.RAG_TEMPLATE,
         "k": app.state.config.TOP_K,
         "r": app.state.config.RELEVANCE_THRESHOLD,
+        "max_file_size": app.state.config.MAX_FILE_SIZE,
+        "max_file_count": app.state.config.MAX_FILE_COUNT,
         "hybrid": app.state.config.ENABLE_RAG_HYBRID_SEARCH,
     }

@@ -1005,6 +1005,18 @@ ENABLE_RAG_HYBRID_SEARCH = PersistentConfig(
     os.environ.get("ENABLE_RAG_HYBRID_SEARCH", "").lower() == "true",
 )

+RAG_MAX_FILE_COUNT = PersistentConfig(
+    "RAG_MAX_FILE_COUNT",
+    "rag.max_file_count",
+    int(os.environ.get("RAG_MAX_FILE_COUNT", "5")),
+)
+
+RAG_MAX_FILE_SIZE = PersistentConfig(
+    "RAG_MAX_FILE_SIZE",
+    "rag.max_file_size",
+    int(os.environ.get("RAG_MAX_FILE_SIZE", "10")),
+)
+
 ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = PersistentConfig(
     "ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION",
     "rag.enable_web_loader_ssl_verification",
@@ -137,6 +137,8 @@ export const getQuerySettings = async (token: string) => {
 type QuerySettings = {
 	k: number | null;
 	r: number | null;
+	max_file_size: number | null;
+	max_file_count: number | null;
 	template: string | null;
 };

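For orientation, a minimal client-side sketch of how the extended QuerySettings shape can be consumed. It only relies on getQuerySettings and the two fields added above; loadUploadLimits and the fallback constants are illustrative (they mirror the 10 MB / 5-file backend defaults in this commit) and are not part of the change itself.

import { getQuerySettings } from '$lib/apis/rag';

// Illustrative fallbacks; 10 MB / 5 files are the backend defaults used by this commit.
const FALLBACK_MAX_FILE_SIZE_MB = 10;
const FALLBACK_MAX_FILE_COUNT = 5;

export const loadUploadLimits = async (token: string) => {
	// Fetch the settings object built by the backend's get_query_settings handler.
	const settings = await getQuerySettings(token);
	return {
		maxFileSizeMb: settings?.max_file_size ?? FALLBACK_MAX_FILE_SIZE_MB,
		maxFileCount: settings?.max_file_count ?? FALLBACK_MAX_FILE_COUNT
	};
};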
@@ -53,6 +53,8 @@
 	template: '',
 	r: 0.0,
 	k: 4,
+	max_file_size: 10,
+	max_file_count: 5,
 	hybrid: false
 };

@@ -386,6 +388,41 @@
 				</div>
 			{/if}

+			<div class=" my-2 flex gap-1.5">
+				<div class=" w-full justify-between">
+					<div class="self-center text-xs font-medium min-w-fit mb-1">
+						{$i18n.t('Max File Count')}
+					</div>
+					<div class="self-center">
+						<input
+							class=" w-full rounded-lg py-1.5 px-4 text-sm bg-gray-50 dark:text-gray-300 dark:bg-gray-850 outline-none"
+							type="number"
+							placeholder={$i18n.t('Enter Max File Count')}
+							bind:value={querySettings.max_file_count}
+							autocomplete="off"
+							min="0"
+						/>
+					</div>
+				</div>
+
+				<div class="w-full">
+					<div class=" self-center text-xs font-medium min-w-fit mb-1">
+						{$i18n.t('Max File Size(MB)')}
+					</div>
+
+					<div class="self-center">
+						<input
+							class="w-full rounded-lg py-1.5 px-4 text-sm bg-gray-50 dark:text-gray-300 dark:bg-gray-850 outline-none"
+							type="number"
+							placeholder={$i18n.t('Enter Max File Size(MB)')}
+							bind:value={querySettings.max_file_size}
+							autocomplete="off"
+							min="0"
+						/>
+					</div>
+				</div>
+			</div>
+
 			<div class=" flex w-full justify-between">
 				<div class=" self-center text-xs font-medium">{$i18n.t('Hybrid Search')}</div>

@@ -15,8 +15,16 @@
 		user as _user
 	} from '$lib/stores';
 	import { blobToFile, findWordIndices } from '$lib/utils';
-	import { processDocToVectorDB } from '$lib/apis/rag';
 	import { transcribeAudio } from '$lib/apis/audio';
+
+	import {
+		getQuerySettings,
+		processDocToVectorDB,
+		uploadDocToVectorDB,
+		uploadWebToVectorDB,
+		uploadYoutubeTranscriptionToVectorDB
+	} from '$lib/apis/rag';
+
 	import { uploadFile } from '$lib/apis/files';
 	import {
 		SUPPORTED_FILE_TYPE,
@@ -54,6 +62,7 @@
 	let commandsElement;

 	let inputFiles;
+	let querySettings;
 	let dragged = false;

 	let user = null;
@@ -169,7 +178,67 @@
 		}
 	};

+	const processFileCountLimit = async (querySettings, inputFiles) => {
+		const maxFiles = querySettings.max_file_count;
+		const currentFilesCount = files.length;
+		const inputFilesCount = inputFiles.length;
+		const totalFilesCount = currentFilesCount + inputFilesCount;
+
+		if (currentFilesCount >= maxFiles || totalFilesCount > maxFiles) {
+			toast.error(
+				$i18n.t('File count exceeds the limit of {{size}}', {
+					count: maxFiles
+				})
+			);
+			if (currentFilesCount >= maxFiles) {
+				return [false, null];
+			}
+			if (totalFilesCount > maxFiles) {
+				inputFiles = inputFiles.slice(0, maxFiles - currentFilesCount);
+			}
+		}
+		return [true, inputFiles];
+	};
+
+	const processFileSizeLimit = async (querySettings, file) => {
+		if (file.size <= querySettings.max_file_size * 1024 * 1024) {
+			if (['image/gif', 'image/webp', 'image/jpeg', 'image/png'].includes(file['type'])) {
+				if (visionCapableModels.length === 0) {
+					toast.error($i18n.t('Selected model(s) do not support image inputs'));
+					return;
+				}
+				let reader = new FileReader();
+				reader.onload = (event) => {
+					files = [
+						...files,
+						{
+							type: 'image',
+							url: `${event.target.result}`
+						}
+					];
+				};
+				reader.readAsDataURL(file);
+			} else {
+				uploadFileHandler(file);
+			}
+		} else {
+			toast.error(
+				$i18n.t('File size exceeds the limit of {{size}}MB', {
+					size: querySettings.max_file_size
+				})
+			);
+		}
+	};
+
 	onMount(() => {
+		const initializeSettings = async () => {
+			try {
+				querySettings = await getQuerySettings(localStorage.token);
+			} catch (error) {
+				console.error('Error fetching query settings:', error);
+			}
+		};
+		initializeSettings();
 		window.setTimeout(() => chatTextAreaElement?.focus(), 0);

 		const dropZone = document.querySelector('body');
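Read together, the two helpers added above form a two-stage gate that the drop and file-picker handlers in the hunks below reuse: the count check runs once per batch and may truncate it, then the size check runs per file. A condensed sketch of that flow, assuming the helpers and querySettings are in scope as in this component; handleIncomingFiles is an illustrative name, not part of the commit.

	// Illustrative composition of the helpers above; not part of this commit.
	const handleIncomingFiles = async (incoming) => {
		// Stage 1: batch-level count limit; may abort or return a truncated list.
		const [canProcess, filesToProcess] = await processFileCountLimit(querySettings, Array.from(incoming));
		if (!canProcess) {
			return;
		}
		// Stage 2: per-file size limit (max_file_size is in MB, file.size is in bytes).
		filesToProcess.forEach((file) => {
			processFileSizeLimit(querySettings, file);
		});
	};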
@@ -198,27 +267,19 @@
 				const inputFiles = Array.from(e.dataTransfer?.files);

 				if (inputFiles && inputFiles.length > 0) {
-					inputFiles.forEach((file) => {
+					console.log(inputFiles);
+					const [canProcess, filesToProcess] = await processFileCountLimit(
+						querySettings,
+						inputFiles
+					);
+					if (!canProcess) {
+						dragged = false;
+						return;
+					}
+					console.log(filesToProcess);
+					filesToProcess.forEach((file) => {
 						console.log(file, file.name.split('.').at(-1));
-						if (['image/gif', 'image/webp', 'image/jpeg', 'image/png'].includes(file['type'])) {
-							if (visionCapableModels.length === 0) {
-								toast.error($i18n.t('Selected model(s) do not support image inputs'));
-								return;
-							}
-							let reader = new FileReader();
-							reader.onload = (event) => {
-								files = [
-									...files,
-									{
-										type: 'image',
-										url: `${event.target.result}`
-									}
-								];
-							};
-							reader.readAsDataURL(file);
-						} else {
-							uploadFileHandler(file);
-						}
+						processFileSizeLimit(querySettings, file);
 					});
 				} else {
 					toast.error($i18n.t(`File not found.`));
@@ -341,26 +402,19 @@
 					on:change={async () => {
 						if (inputFiles && inputFiles.length > 0) {
 							const _inputFiles = Array.from(inputFiles);
-							_inputFiles.forEach((file) => {
-								if (['image/gif', 'image/webp', 'image/jpeg', 'image/png'].includes(file['type'])) {
-									if (visionCapableModels.length === 0) {
-										toast.error($i18n.t('Selected model(s) do not support image inputs'));
-										return;
-									}
-									let reader = new FileReader();
-									reader.onload = (event) => {
-										files = [
-											...files,
-											{
-												type: 'image',
-												url: `${event.target.result}`
-											}
-										];
-									};
-									reader.readAsDataURL(file);
-								} else {
-									uploadFileHandler(file);
-								}
+							console.log(_inputFiles);
+							const [canProcess, filesToProcess] = await processFileCountLimit(
+								querySettings,
+								_inputFiles
+							);
+							if (!canProcess) {
+								filesInputElement.value = '';
+								return;
+							}
+							console.log(filesToProcess);
+							filesToProcess.forEach((file) => {
+								console.log(file, file.name.split('.').at(-1));
+								processFileSizeLimit(querySettings, file);
 							});
 						} else {
 							toast.error($i18n.t(`File not found.`));
@@ -8,7 +8,7 @@
 	import { createNewDoc, deleteDocByName, getDocs } from '$lib/apis/documents';

 	import { SUPPORTED_FILE_TYPE, SUPPORTED_FILE_EXTENSIONS } from '$lib/constants';
-	import { processDocToVectorDB, uploadDocToVectorDB } from '$lib/apis/rag';
+	import { getQuerySettings, processDocToVectorDB, uploadDocToVectorDB } from '$lib/apis/rag';
 	import { blobToFile, transformFileName } from '$lib/utils';

 	import Checkbox from '$lib/components/common/Checkbox.svelte';
@@ -24,6 +24,7 @@
 	let importFiles = '';

 	let inputFiles = '';
+	let querySettings;
 	let query = '';
 	let documentsImportInputElement: HTMLInputElement;
 	let tags = [];
@@ -98,7 +99,17 @@
 		}
 	};

+	const initializeSettings = async () => {
+		try {
+			querySettings = await getQuerySettings(localStorage.token);
+		} catch (error) {
+			console.error('Error fetching query settings:', error);
+		}
+	};
+
 	onMount(() => {
+		initializeSettings();
+
 		documents.subscribe((docs) => {
 			tags = docs.reduce((a, e, i, arr) => {
 				return [...new Set([...a, ...(e?.content?.tags ?? []).map((tag) => tag.name)])];
@@ -136,16 +147,24 @@
 			if (inputFiles && inputFiles.length > 0) {
 				for (const file of inputFiles) {
 					console.log(file, file.name.split('.').at(-1));
-					if (
-						SUPPORTED_FILE_TYPE.includes(file['type']) ||
-						SUPPORTED_FILE_EXTENSIONS.includes(file.name.split('.').at(-1))
-					) {
-						uploadDoc(file);
+					if (file.size <= querySettings.max_file_size * 1024 * 1024) {
+						if (
+							SUPPORTED_FILE_TYPE.includes(file['type']) ||
+							SUPPORTED_FILE_EXTENSIONS.includes(file.name.split('.').at(-1))
+						) {
+							uploadDoc(file);
+						} else {
+							toast.error(
+								`Unknown File Type '${file['type']}', but accepting and treating as plain text`
+							);
+							uploadDoc(file);
+						}
 					} else {
 						toast.error(
-							`Unknown File Type '${file['type']}', but accepting and treating as plain text`
+							$i18n.t('File size exceeds the limit of {{size}}MB', {
+								size: querySettings.max_file_size
+							})
 						);
-						uploadDoc(file);
 					}
 				}
 			} else {
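As a side note on the check above: the configured limit is in megabytes while the browser reports File.size in bytes, so the comparison multiplies by 1024 twice before comparing. A tiny illustrative calculation (the values are examples, not taken from the commit):

// max_file_size is configured in MB; the browser reports File.size in bytes.
const maxFileSizeMb = 10;                        // e.g. the backend default in this commit
const limitBytes = maxFileSizeMb * 1024 * 1024;  // 10 MB -> 10485760 bytes
const withinLimit = (file) => file.size <= limitBytes;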
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "وضع الملف",
|
"File Mode": "وضع الملف",
|
||||||
"File not found.": "لم يتم العثور على الملف.",
|
"File not found.": "لم يتم العثور على الملف.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Ollama إدارة موديلات ",
|
"Manage Ollama Models": "Ollama إدارة موديلات ",
|
||||||
"Manage Pipelines": "إدارة خطوط الأنابيب",
|
"Manage Pipelines": "إدارة خطوط الأنابيب",
|
||||||
"March": "مارس",
|
"March": "مارس",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "ماكس توكنز (num_predict)",
|
"Max Tokens (num_predict)": "ماكس توكنز (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "يمكن تنزيل 3 نماذج كحد أقصى في وقت واحد. الرجاء معاودة المحاولة في وقت لاحق.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "يمكن تنزيل 3 نماذج كحد أقصى في وقت واحد. الرجاء معاودة المحاولة في وقت لاحق.",
|
||||||
"May": "مايو",
|
"May": "مايو",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "Файл Мод",
|
"File Mode": "Файл Мод",
|
||||||
"File not found.": "Файл не е намерен.",
|
"File not found.": "Файл не е намерен.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Управление на Ollama Моделите",
|
"Manage Ollama Models": "Управление на Ollama Моделите",
|
||||||
"Manage Pipelines": "Управление на тръбопроводи",
|
"Manage Pipelines": "Управление на тръбопроводи",
|
||||||
"March": "Март",
|
"March": "Март",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Макс токени (num_predict)",
|
"Max Tokens (num_predict)": "Макс токени (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Максимум 3 модели могат да бъдат сваляни едновременно. Моля, опитайте отново по-късно.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Максимум 3 модели могат да бъдат сваляни едновременно. Моля, опитайте отново по-късно.",
|
||||||
"May": "Май",
|
"May": "Май",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "ফাইল মোড",
|
"File Mode": "ফাইল মোড",
|
||||||
"File not found.": "ফাইল পাওয়া যায়নি",
|
"File not found.": "ফাইল পাওয়া যায়নি",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Ollama মডেলসূহ ব্যবস্থাপনা করুন",
|
"Manage Ollama Models": "Ollama মডেলসূহ ব্যবস্থাপনা করুন",
|
||||||
"Manage Pipelines": "পাইপলাইন পরিচালনা করুন",
|
"Manage Pipelines": "পাইপলাইন পরিচালনা করুন",
|
||||||
"March": "মার্চ",
|
"March": "মার্চ",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "সর্বোচ্চ টোকেন (num_predict)",
|
"Max Tokens (num_predict)": "সর্বোচ্চ টোকেন (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "একসঙ্গে সর্বোচ্চ তিনটি মডেল ডাউনলোড করা যায়। দয়া করে পরে আবার চেষ্টা করুন।",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "একসঙ্গে সর্বোচ্চ তিনটি মডেল ডাউনলোড করা যায়। দয়া করে পরে আবার চেষ্টা করুন।",
|
||||||
"May": "মে",
|
"May": "মে",
|
||||||
|
@ -374,6 +374,8 @@
|
|||||||
"Manage Ollama Models": "Gestionar els models Ollama",
|
"Manage Ollama Models": "Gestionar els models Ollama",
|
||||||
"Manage Pipelines": "Gestionar les Pipelines",
|
"Manage Pipelines": "Gestionar les Pipelines",
|
||||||
"March": "Març",
|
"March": "Març",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Nombre màxim de Tokens (num_predict)",
|
"Max Tokens (num_predict)": "Nombre màxim de Tokens (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Es poden descarregar un màxim de 3 models simultàniament. Si us plau, prova-ho més tard.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Es poden descarregar un màxim de 3 models simultàniament. Si us plau, prova-ho més tard.",
|
||||||
"May": "Maig",
|
"May": "Maig",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "File mode",
|
"File Mode": "File mode",
|
||||||
"File not found.": "Wala makit-an ang file.",
|
"File not found.": "Wala makit-an ang file.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Pagdumala sa mga modelo sa Ollama",
|
"Manage Ollama Models": "Pagdumala sa mga modelo sa Ollama",
|
||||||
"Manage Pipelines": "",
|
"Manage Pipelines": "",
|
||||||
"March": "",
|
"March": "",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "",
|
"Max Tokens (num_predict)": "",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Ang labing taas nga 3 nga mga disenyo mahimong ma-download nga dungan. ",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Ang labing taas nga 3 nga mga disenyo mahimong ma-download nga dungan. ",
|
||||||
"May": "",
|
"May": "",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "Datei",
|
"File": "Datei",
|
||||||
"File Mode": "Datei-Modus",
|
"File Mode": "Datei-Modus",
|
||||||
"File not found.": "Datei nicht gefunden.",
|
"File not found.": "Datei nicht gefunden.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "Filter ist jetzt global deaktiviert",
|
"Filter is now globally disabled": "Filter ist jetzt global deaktiviert",
|
||||||
"Filter is now globally enabled": "Filter ist jetzt global aktiviert",
|
"Filter is now globally enabled": "Filter ist jetzt global aktiviert",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Ollama-Modelle verwalten",
|
"Manage Ollama Models": "Ollama-Modelle verwalten",
|
||||||
"Manage Pipelines": "Pipelines verwalten",
|
"Manage Pipelines": "Pipelines verwalten",
|
||||||
"March": "März",
|
"March": "März",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Maximale Tokenanzahl (num_predict)",
|
"Max Tokens (num_predict)": "Maximale Tokenanzahl (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Es können maximal 3 Modelle gleichzeitig heruntergeladen werden. Bitte versuchen Sie es später erneut.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Es können maximal 3 Modelle gleichzeitig heruntergeladen werden. Bitte versuchen Sie es später erneut.",
|
||||||
"May": "Mai",
|
"May": "Mai",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "Bark Mode",
|
"File Mode": "Bark Mode",
|
||||||
"File not found.": "Bark not found.",
|
"File not found.": "Bark not found.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Manage Ollama Wowdels",
|
"Manage Ollama Models": "Manage Ollama Wowdels",
|
||||||
"Manage Pipelines": "",
|
"Manage Pipelines": "",
|
||||||
"March": "",
|
"March": "",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "",
|
"Max Tokens (num_predict)": "",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Maximum of 3 models can be downloaded simultaneously. Please try again later.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Maximum of 3 models can be downloaded simultaneously. Please try again later.",
|
||||||
"May": "",
|
"May": "",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "",
|
"File Mode": "",
|
||||||
"File not found.": "",
|
"File not found.": "",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "",
|
"Manage Ollama Models": "",
|
||||||
"Manage Pipelines": "",
|
"Manage Pipelines": "",
|
||||||
"March": "",
|
"March": "",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "",
|
"Max Tokens (num_predict)": "",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "",
|
||||||
"May": "",
|
"May": "",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "",
|
"File Mode": "",
|
||||||
"File not found.": "",
|
"File not found.": "",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "",
|
"Manage Ollama Models": "",
|
||||||
"Manage Pipelines": "",
|
"Manage Pipelines": "",
|
||||||
"March": "",
|
"March": "",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "",
|
"Max Tokens (num_predict)": "",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "",
|
||||||
"May": "",
|
"May": "",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "Archivo",
|
"File": "Archivo",
|
||||||
"File Mode": "Modo de archivo",
|
"File Mode": "Modo de archivo",
|
||||||
"File not found.": "Archivo no encontrado.",
|
"File not found.": "Archivo no encontrado.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Administrar Modelos Ollama",
|
"Manage Ollama Models": "Administrar Modelos Ollama",
|
||||||
"Manage Pipelines": "Administrar Pipelines",
|
"Manage Pipelines": "Administrar Pipelines",
|
||||||
"March": "Marzo",
|
"March": "Marzo",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Máximo de fichas (num_predict)",
|
"Max Tokens (num_predict)": "Máximo de fichas (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Se pueden descargar un máximo de 3 modelos simultáneamente. Por favor, inténtelo de nuevo más tarde.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Se pueden descargar un máximo de 3 modelos simultáneamente. Por favor, inténtelo de nuevo más tarde.",
|
||||||
"May": "Mayo",
|
"May": "Mayo",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "حالت فایل",
|
"File Mode": "حالت فایل",
|
||||||
"File not found.": "فایل یافت نشد.",
|
"File not found.": "فایل یافت نشد.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "مدیریت مدل\u200cهای اولاما",
|
"Manage Ollama Models": "مدیریت مدل\u200cهای اولاما",
|
||||||
"Manage Pipelines": "مدیریت خطوط لوله",
|
"Manage Pipelines": "مدیریت خطوط لوله",
|
||||||
"March": "مارچ",
|
"March": "مارچ",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "توکنهای بیشینه (num_predict)",
|
"Max Tokens (num_predict)": "توکنهای بیشینه (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "حداکثر 3 مدل را می توان به طور همزمان دانلود کرد. لطفاً بعداً دوباره امتحان کنید.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "حداکثر 3 مدل را می توان به طور همزمان دانلود کرد. لطفاً بعداً دوباره امتحان کنید.",
|
||||||
"May": "ماهی",
|
"May": "ماهی",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "Tiedostotila",
|
"File Mode": "Tiedostotila",
|
||||||
"File not found.": "Tiedostoa ei löytynyt.",
|
"File not found.": "Tiedostoa ei löytynyt.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Hallitse Ollama-malleja",
|
"Manage Ollama Models": "Hallitse Ollama-malleja",
|
||||||
"Manage Pipelines": "Hallitse putkia",
|
"Manage Pipelines": "Hallitse putkia",
|
||||||
"March": "maaliskuu",
|
"March": "maaliskuu",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Tokenien enimmäismäärä (num_predict)",
|
"Max Tokens (num_predict)": "Tokenien enimmäismäärä (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Enintään 3 mallia voidaan ladata samanaikaisesti. Yritä myöhemmin uudelleen.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Enintään 3 mallia voidaan ladata samanaikaisesti. Yritä myöhemmin uudelleen.",
|
||||||
"May": "toukokuu",
|
"May": "toukokuu",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "Fichier",
|
"File": "Fichier",
|
||||||
"File Mode": "Mode fichier",
|
"File Mode": "Mode fichier",
|
||||||
"File not found.": "Fichier introuvable.",
|
"File not found.": "Fichier introuvable.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "Le filtre est maintenant désactivé globalement",
|
"Filter is now globally disabled": "Le filtre est maintenant désactivé globalement",
|
||||||
"Filter is now globally enabled": "Le filtre est désormais activé globalement",
|
"Filter is now globally enabled": "Le filtre est désormais activé globalement",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Gérer les modèles Ollama",
|
"Manage Ollama Models": "Gérer les modèles Ollama",
|
||||||
"Manage Pipelines": "Gérer les pipelines",
|
"Manage Pipelines": "Gérer les pipelines",
|
||||||
"March": "Mars",
|
"March": "Mars",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Tokens maximaux (num_predict)",
|
"Max Tokens (num_predict)": "Tokens maximaux (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Un maximum de 3 modèles peut être téléchargé en même temps. Veuillez réessayer ultérieurement.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Un maximum de 3 modèles peut être téléchargé en même temps. Veuillez réessayer ultérieurement.",
|
||||||
"May": "Mai",
|
"May": "Mai",
|
||||||
|
@ -374,6 +374,8 @@
|
|||||||
"Manage Ollama Models": "Gérer les modèles Ollama",
|
"Manage Ollama Models": "Gérer les modèles Ollama",
|
||||||
"Manage Pipelines": "Gérer les pipelines",
|
"Manage Pipelines": "Gérer les pipelines",
|
||||||
"March": "Mars",
|
"March": "Mars",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Tokens maximaux (num_predict)",
|
"Max Tokens (num_predict)": "Tokens maximaux (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Un maximum de 3 modèles peut être téléchargé en même temps. Veuillez réessayer ultérieurement.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Un maximum de 3 modèles peut être téléchargé en même temps. Veuillez réessayer ultérieurement.",
|
||||||
"May": "Mai",
|
"May": "Mai",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "מצב קובץ",
|
"File Mode": "מצב קובץ",
|
||||||
"File not found.": "הקובץ לא נמצא.",
|
"File not found.": "הקובץ לא נמצא.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "נהל מודלים של Ollama",
|
"Manage Ollama Models": "נהל מודלים של Ollama",
|
||||||
"Manage Pipelines": "ניהול צינורות",
|
"Manage Pipelines": "ניהול צינורות",
|
||||||
"March": "מרץ",
|
"March": "מרץ",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "מקסימום אסימונים (num_predict)",
|
"Max Tokens (num_predict)": "מקסימום אסימונים (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "ניתן להוריד מקסימום 3 מודלים בו זמנית. אנא נסה שוב מאוחר יותר.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "ניתן להוריד מקסימום 3 מודלים בו זמנית. אנא נסה שוב מאוחר יותר.",
|
||||||
"May": "מאי",
|
"May": "מאי",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "फ़ाइल मोड",
|
"File Mode": "फ़ाइल मोड",
|
||||||
"File not found.": "फ़ाइल प्राप्त नहीं हुई।",
|
"File not found.": "फ़ाइल प्राप्त नहीं हुई।",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Ollama मॉडल प्रबंधित करें",
|
"Manage Ollama Models": "Ollama मॉडल प्रबंधित करें",
|
||||||
"Manage Pipelines": "पाइपलाइनों का प्रबंधन करें",
|
"Manage Pipelines": "पाइपलाइनों का प्रबंधन करें",
|
||||||
"March": "मार्च",
|
"March": "मार्च",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "अधिकतम टोकन (num_predict)",
|
"Max Tokens (num_predict)": "अधिकतम टोकन (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "अधिकतम 3 मॉडल एक साथ डाउनलोड किये जा सकते हैं। कृपया बाद में पुन: प्रयास करें।",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "अधिकतम 3 मॉडल एक साथ डाउनलोड किये जा सकते हैं। कृपया बाद में पुन: प्रयास करें।",
|
||||||
"May": "मेई",
|
"May": "मेई",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "Način datoteke",
|
"File Mode": "Način datoteke",
|
||||||
"File not found.": "Datoteka nije pronađena.",
|
"File not found.": "Datoteka nije pronađena.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Upravljanje Ollama modelima",
|
"Manage Ollama Models": "Upravljanje Ollama modelima",
|
||||||
"Manage Pipelines": "Upravljanje cjevovodima",
|
"Manage Pipelines": "Upravljanje cjevovodima",
|
||||||
"March": "Ožujak",
|
"March": "Ožujak",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Maksimalan broj tokena (num_predict)",
|
"Max Tokens (num_predict)": "Maksimalan broj tokena (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Maksimalno 3 modela se mogu preuzeti istovremeno. Pokušajte ponovo kasnije.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Maksimalno 3 modela se mogu preuzeti istovremeno. Pokušajte ponovo kasnije.",
|
||||||
"May": "Svibanj",
|
"May": "Svibanj",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "Berkas",
|
"File": "Berkas",
|
||||||
"File Mode": "Mode File",
|
"File Mode": "Mode File",
|
||||||
"File not found.": "File tidak ditemukan.",
|
"File not found.": "File tidak ditemukan.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "Filter sekarang dinonaktifkan secara global",
|
"Filter is now globally disabled": "Filter sekarang dinonaktifkan secara global",
|
||||||
"Filter is now globally enabled": "Filter sekarang diaktifkan secara global",
|
"Filter is now globally enabled": "Filter sekarang diaktifkan secara global",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Mengelola Model Ollama",
|
"Manage Ollama Models": "Mengelola Model Ollama",
|
||||||
"Manage Pipelines": "Mengelola Saluran Pipa",
|
"Manage Pipelines": "Mengelola Saluran Pipa",
|
||||||
"March": "Maret",
|
"March": "Maret",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Token Maksimal (num_prediksi)",
|
"Max Tokens (num_predict)": "Token Maksimal (num_prediksi)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Maksimal 3 model dapat diunduh secara bersamaan. Silakan coba lagi nanti.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Maksimal 3 model dapat diunduh secara bersamaan. Silakan coba lagi nanti.",
|
||||||
"May": "Mei",
|
"May": "Mei",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "Modalità file",
|
"File Mode": "Modalità file",
|
||||||
"File not found.": "File non trovato.",
|
"File not found.": "File non trovato.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Gestisci modelli Ollama",
|
"Manage Ollama Models": "Gestisci modelli Ollama",
|
||||||
"Manage Pipelines": "Gestire le pipeline",
|
"Manage Pipelines": "Gestire le pipeline",
|
||||||
"March": "Marzo",
|
"March": "Marzo",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Numero massimo di gettoni (num_predict)",
|
"Max Tokens (num_predict)": "Numero massimo di gettoni (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "È possibile scaricare un massimo di 3 modelli contemporaneamente. Riprova più tardi.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "È possibile scaricare un massimo di 3 modelli contemporaneamente. Riprova più tardi.",
|
||||||
"May": "Maggio",
|
"May": "Maggio",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "ファイルモード",
|
"File Mode": "ファイルモード",
|
||||||
"File not found.": "ファイルが見つかりません。",
|
"File not found.": "ファイルが見つかりません。",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Ollama モデルを管理",
|
"Manage Ollama Models": "Ollama モデルを管理",
|
||||||
"Manage Pipelines": "パイプラインの管理",
|
"Manage Pipelines": "パイプラインの管理",
|
||||||
"March": "3月",
|
"March": "3月",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "最大トークン数 (num_predict)",
|
"Max Tokens (num_predict)": "最大トークン数 (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "同時にダウンロードできるモデルは最大 3 つです。後でもう一度お試しください。",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "同時にダウンロードできるモデルは最大 3 つです。後でもう一度お試しください。",
|
||||||
"May": "5月",
|
"May": "5月",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "ფაილური რეჟიმი",
|
"File Mode": "ფაილური რეჟიმი",
|
||||||
"File not found.": "ფაილი ვერ მოიძებნა",
|
"File not found.": "ფაილი ვერ მოიძებნა",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Ollama მოდელების მართვა",
|
"Manage Ollama Models": "Ollama მოდელების მართვა",
|
||||||
"Manage Pipelines": "მილსადენების მართვა",
|
"Manage Pipelines": "მილსადენების მართვა",
|
||||||
"March": "მარტივი",
|
"March": "მარტივი",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "მაქს ტოკენსი (num_predict)",
|
"Max Tokens (num_predict)": "მაქს ტოკენსი (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "მაქსიმუმ 3 მოდელის ჩამოტვირთვა შესაძლებელია ერთდროულად. Გთხოვთ სცადოთ მოგვიანებით.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "მაქსიმუმ 3 მოდელის ჩამოტვირთვა შესაძლებელია ერთდროულად. Გთხოვთ სცადოთ მოგვიანებით.",
|
||||||
"May": "მაი",
|
"May": "მაი",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "파일 모드",
|
"File Mode": "파일 모드",
|
||||||
"File not found.": "파일을 찾을 수 없습니다.",
|
"File not found.": "파일을 찾을 수 없습니다.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Ollama 모델 관리",
|
"Manage Ollama Models": "Ollama 모델 관리",
|
||||||
"Manage Pipelines": "파이프라인 관리",
|
"Manage Pipelines": "파이프라인 관리",
|
||||||
"March": "3월",
|
"March": "3월",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "최대 토큰(num_predict)",
|
"Max Tokens (num_predict)": "최대 토큰(num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "최대 3개의 모델을 동시에 다운로드할 수 있습니다. 나중에 다시 시도하세요.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "최대 3개의 모델을 동시에 다운로드할 수 있습니다. 나중에 다시 시도하세요.",
|
||||||
"May": "5월",
|
"May": "5월",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "Fil",
|
"File": "Fil",
|
||||||
"File Mode": "Filmodus",
|
"File Mode": "Filmodus",
|
||||||
"File not found.": "Fil ikke funnet.",
|
"File not found.": "Fil ikke funnet.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "Filer",
|
"Files": "Filer",
|
||||||
"Filter is now globally disabled": "Filteret er nå deaktivert på systemnivå",
|
"Filter is now globally disabled": "Filteret er nå deaktivert på systemnivå",
|
||||||
"Filter is now globally enabled": "Filteret er nå aktivert på systemnivå",
|
"Filter is now globally enabled": "Filteret er nå aktivert på systemnivå",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Administrer Ollama-modeller",
|
"Manage Ollama Models": "Administrer Ollama-modeller",
|
||||||
"Manage Pipelines": "Administrer pipelines",
|
"Manage Pipelines": "Administrer pipelines",
|
||||||
"March": "mars",
|
"March": "mars",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Maks antall tokens (num_predict)",
|
"Max Tokens (num_predict)": "Maks antall tokens (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Maksimalt 3 modeller kan lastes ned samtidig. Vennligst prøv igjen senere.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Maksimalt 3 modeller kan lastes ned samtidig. Vennligst prøv igjen senere.",
|
||||||
"May": "mai",
|
"May": "mai",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "Bestandsmodus",
|
"File Mode": "Bestandsmodus",
|
||||||
"File not found.": "Bestand niet gevonden.",
|
"File not found.": "Bestand niet gevonden.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Beheer Ollama Modellen",
|
"Manage Ollama Models": "Beheer Ollama Modellen",
|
||||||
"Manage Pipelines": "Pijplijnen beheren",
|
"Manage Pipelines": "Pijplijnen beheren",
|
||||||
"March": "Maart",
|
"March": "Maart",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Max Tokens (num_predict)",
|
"Max Tokens (num_predict)": "Max Tokens (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Maximaal 3 modellen kunnen tegelijkertijd worden gedownload. Probeer het later opnieuw.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Maximaal 3 modellen kunnen tegelijkertijd worden gedownload. Probeer het later opnieuw.",
|
||||||
"May": "Mei",
|
"May": "Mei",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "ਫਾਈਲ ਮੋਡ",
|
"File Mode": "ਫਾਈਲ ਮੋਡ",
|
||||||
"File not found.": "ਫਾਈਲ ਨਹੀਂ ਮਿਲੀ।",
|
"File not found.": "ਫਾਈਲ ਨਹੀਂ ਮਿਲੀ।",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "ਓਲਾਮਾ ਮਾਡਲਾਂ ਦਾ ਪ੍ਰਬੰਧਨ ਕਰੋ",
|
"Manage Ollama Models": "ਓਲਾਮਾ ਮਾਡਲਾਂ ਦਾ ਪ੍ਰਬੰਧਨ ਕਰੋ",
|
||||||
"Manage Pipelines": "ਪਾਈਪਲਾਈਨਾਂ ਦਾ ਪ੍ਰਬੰਧਨ ਕਰੋ",
|
"Manage Pipelines": "ਪਾਈਪਲਾਈਨਾਂ ਦਾ ਪ੍ਰਬੰਧਨ ਕਰੋ",
|
||||||
"March": "ਮਾਰਚ",
|
"March": "ਮਾਰਚ",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "ਮੈਕਸ ਟੋਕਨ (num_predict)",
|
"Max Tokens (num_predict)": "ਮੈਕਸ ਟੋਕਨ (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "ਇੱਕ ਸਮੇਂ ਵਿੱਚ ਵੱਧ ਤੋਂ ਵੱਧ 3 ਮਾਡਲ ਡਾਊਨਲੋਡ ਕੀਤੇ ਜਾ ਸਕਦੇ ਹਨ। ਕਿਰਪਾ ਕਰਕੇ ਬਾਅਦ ਵਿੱਚ ਦੁਬਾਰਾ ਕੋਸ਼ਿਸ਼ ਕਰੋ।",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "ਇੱਕ ਸਮੇਂ ਵਿੱਚ ਵੱਧ ਤੋਂ ਵੱਧ 3 ਮਾਡਲ ਡਾਊਨਲੋਡ ਕੀਤੇ ਜਾ ਸਕਦੇ ਹਨ। ਕਿਰਪਾ ਕਰਕੇ ਬਾਅਦ ਵਿੱਚ ਦੁਬਾਰਾ ਕੋਸ਼ਿਸ਼ ਕਰੋ।",
|
||||||
"May": "ਮਈ",
|
"May": "ਮਈ",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "Tryb pliku",
|
"File Mode": "Tryb pliku",
|
||||||
"File not found.": "Plik nie został znaleziony.",
|
"File not found.": "Plik nie został znaleziony.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Zarządzaj modelami Ollama",
|
"Manage Ollama Models": "Zarządzaj modelami Ollama",
|
||||||
"Manage Pipelines": "Zarządzanie potokami",
|
"Manage Pipelines": "Zarządzanie potokami",
|
||||||
"March": "Marzec",
|
"March": "Marzec",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Maksymalna liczba żetonów (num_predict)",
|
"Max Tokens (num_predict)": "Maksymalna liczba żetonów (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Maksymalnie 3 modele można pobierać jednocześnie. Spróbuj ponownie później.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Maksymalnie 3 modele można pobierać jednocześnie. Spróbuj ponownie później.",
|
||||||
"May": "Maj",
|
"May": "Maj",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "Arquivo",
|
"File": "Arquivo",
|
||||||
"File Mode": "Modo de Arquivo",
|
"File Mode": "Modo de Arquivo",
|
||||||
"File not found.": "Arquivo não encontrado.",
|
"File not found.": "Arquivo não encontrado.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "Arquivos",
|
"Files": "Arquivos",
|
||||||
"Filter is now globally disabled": "O filtro está agora desativado globalmente",
|
"Filter is now globally disabled": "O filtro está agora desativado globalmente",
|
||||||
"Filter is now globally enabled": "O filtro está agora ativado globalmente",
|
"Filter is now globally enabled": "O filtro está agora ativado globalmente",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Gerenciar Modelos Ollama",
|
"Manage Ollama Models": "Gerenciar Modelos Ollama",
|
||||||
"Manage Pipelines": "Gerenciar Pipelines",
|
"Manage Pipelines": "Gerenciar Pipelines",
|
||||||
"March": "Março",
|
"March": "Março",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Máximo de Tokens (num_predict)",
|
"Max Tokens (num_predict)": "Máximo de Tokens (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Máximo de 3 modelos podem ser baixados simultaneamente. Por favor, tente novamente mais tarde.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Máximo de 3 modelos podem ser baixados simultaneamente. Por favor, tente novamente mais tarde.",
|
||||||
"May": "Maio",
|
"May": "Maio",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "Modo de Ficheiro",
|
"File Mode": "Modo de Ficheiro",
|
||||||
"File not found.": "Ficheiro não encontrado.",
|
"File not found.": "Ficheiro não encontrado.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Gerir Modelos Ollama",
|
"Manage Ollama Models": "Gerir Modelos Ollama",
|
||||||
"Manage Pipelines": "Gerir pipelines",
|
"Manage Pipelines": "Gerir pipelines",
|
||||||
"March": "Março",
|
"March": "Março",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Máx Tokens (num_predict)",
|
"Max Tokens (num_predict)": "Máx Tokens (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "O máximo de 3 modelos podem ser descarregados simultaneamente. Tente novamente mais tarde.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "O máximo de 3 modelos podem ser descarregados simultaneamente. Tente novamente mais tarde.",
|
||||||
"May": "Maio",
|
"May": "Maio",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "Режим датотеке",
|
"File Mode": "Режим датотеке",
|
||||||
"File not found.": "Датотека није пронађена.",
|
"File not found.": "Датотека није пронађена.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Управљај Ollama моделима",
|
"Manage Ollama Models": "Управљај Ollama моделима",
|
||||||
"Manage Pipelines": "Управљање цевоводима",
|
"Manage Pipelines": "Управљање цевоводима",
|
||||||
"March": "Март",
|
"March": "Март",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Маx Токенс (нум_предицт)",
|
"Max Tokens (num_predict)": "Маx Токенс (нум_предицт)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Највише 3 модела могу бити преузета истовремено. Покушајте поново касније.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Највише 3 модела могу бити преузета истовремено. Покушајте поново касније.",
|
||||||
"May": "Мај",
|
"May": "Мај",
|
||||||
|
@ -285,6 +285,7 @@
|
|||||||
"File": "",
|
"File": "",
|
||||||
"File Mode": "Fil-läge",
|
"File Mode": "Fil-läge",
|
||||||
"File not found.": "Fil hittades inte.",
|
"File not found.": "Fil hittades inte.",
|
||||||
|
"File size exceeds the limit of {{size}}MB": "",
|
||||||
"Files": "",
|
"Files": "",
|
||||||
"Filter is now globally disabled": "",
|
"Filter is now globally disabled": "",
|
||||||
"Filter is now globally enabled": "",
|
"Filter is now globally enabled": "",
|
||||||
@ -374,6 +375,8 @@
|
|||||||
"Manage Ollama Models": "Hantera Ollama-modeller",
|
"Manage Ollama Models": "Hantera Ollama-modeller",
|
||||||
"Manage Pipelines": "Hantera rörledningar",
|
"Manage Pipelines": "Hantera rörledningar",
|
||||||
"March": "mars",
|
"March": "mars",
|
||||||
|
"Max File Count": "",
|
||||||
|
"Max File Size(MB)": "",
|
||||||
"Max Tokens (num_predict)": "Maximalt antal tokens (num_predict)",
|
"Max Tokens (num_predict)": "Maximalt antal tokens (num_predict)",
|
||||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Högst 3 modeller kan laddas ner samtidigt. Vänligen försök igen senare.",
|
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Högst 3 modeller kan laddas ner samtidigt. Vänligen försök igen senare.",
|
||||||
"May": "maj",
|
"May": "maj",
|
||||||
|
@@ -285,6 +285,7 @@
 "File": "ไฟล์",
 "File Mode": "โหมดไฟล์",
 "File not found.": "ไม่พบไฟล์",
+"File size exceeds the limit of {{size}}MB": "",
 "Files": "ไฟล์",
 "Filter is now globally disabled": "การกรองถูกปิดใช้งานทั่วโลกแล้ว",
 "Filter is now globally enabled": "การกรองถูกเปิดใช้งานทั่วโลกแล้ว",
@@ -374,6 +375,8 @@
 "Manage Ollama Models": "จัดการโมเดล Ollama",
 "Manage Pipelines": "จัดการไปป์ไลน์",
 "March": "มีนาคม",
+"Max File Count": "",
+"Max File Size(MB)": "",
 "Max Tokens (num_predict)": "โทเค็นสูงสุด (num_predict)",
 "Maximum of 3 models can be downloaded simultaneously. Please try again later.": "สามารถดาวน์โหลดโมเดลได้สูงสุด 3 โมเดลในเวลาเดียวกัน โปรดลองอีกครั้งในภายหลัง",
 "May": "พฤษภาคม",
@@ -324,6 +324,8 @@
 "Management": "Dolandyryş",
 "Manual Input": "El bilen Girdi",
 "March": "Mart",
+"Max File Count": "",
+"Max File Size(MB)": "",
 "Mark as Read": "Okalan hökmünde belläň",
 "Match": "Gab",
 "May": "Maý",
@@ -285,6 +285,7 @@
 "File": "",
 "File Mode": "",
 "File not found.": "",
+"File size exceeds the limit of {{size}}MB": "",
 "Files": "",
 "Filter is now globally disabled": "",
 "Filter is now globally enabled": "",
@@ -374,6 +375,8 @@
 "Manage Ollama Models": "",
 "Manage Pipelines": "",
 "March": "",
+"Max File Count": "",
+"Max File Size(MB)": "",
 "Max Tokens (num_predict)": "",
 "Maximum of 3 models can be downloaded simultaneously. Please try again later.": "",
 "May": "",
@@ -285,6 +285,7 @@
 "File": "Dosya",
 "File Mode": "Dosya Modu",
 "File not found.": "Dosya bulunamadı.",
+"File size exceeds the limit of {{size}}MB": "",
 "Files": "",
 "Filter is now globally disabled": "Filtre artık global olarak devre dışı",
 "Filter is now globally enabled": "Filtre artık global olarak devrede",
@@ -374,6 +375,8 @@
 "Manage Ollama Models": "Ollama Modellerini Yönet",
 "Manage Pipelines": "Pipelineları Yönet",
 "March": "Mart",
+"Max File Count": "",
+"Max File Size(MB)": "",
 "Max Tokens (num_predict)": "Maksimum Token (num_predict)",
 "Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Aynı anda en fazla 3 model indirilebilir. Lütfen daha sonra tekrar deneyin.",
 "May": "Mayıs",
@@ -285,6 +285,7 @@
 "File": "Файл",
 "File Mode": "Файловий режим",
 "File not found.": "Файл не знайдено.",
+"File size exceeds the limit of {{size}}MB": "",
 "Files": "Файли",
 "Filter is now globally disabled": "Фільтр глобально вимкнено",
 "Filter is now globally enabled": "Фільтр увімкнено глобально",
@@ -374,6 +375,8 @@
 "Manage Ollama Models": "Керування моделями Ollama",
 "Manage Pipelines": "Керування конвеєрами",
 "March": "Березень",
+"Max File Count": "",
+"Max File Size(MB)": "",
 "Max Tokens (num_predict)": "Макс токенів (num_predict)",
 "Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Максимум 3 моделі можна завантажити одночасно. Будь ласка, спробуйте пізніше.",
 "May": "Травень",
@@ -285,6 +285,7 @@
 "File": "Tệp",
 "File Mode": "Chế độ Tệp văn bản",
 "File not found.": "Không tìm thấy tệp.",
+"File size exceeds the limit of {{size}}MB": "",
 "Files": "Tệp",
 "Filter is now globally disabled": "Bộ lọc hiện đã bị vô hiệu hóa trên toàn hệ thống",
 "Filter is now globally enabled": "Bộ lọc hiện được kích hoạt trên toàn hệ thống",
@@ -374,6 +375,8 @@
 "Manage Ollama Models": "Quản lý mô hình với Ollama",
 "Manage Pipelines": "Quản lý Pipelines",
 "March": "Tháng 3",
+"Max File Count": "",
+"Max File Size(MB)": "",
 "Max Tokens (num_predict)": "Tokens tối đa (num_predict)",
 "Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Tối đa 3 mô hình có thể được tải xuống cùng lúc. Vui lòng thử lại sau.",
 "May": "Tháng 5",
@@ -285,6 +285,7 @@
 "File": "文件",
 "File Mode": "文件模式",
 "File not found.": "文件未找到。",
+"File size exceeds the limit of {{size}}MB": "",
 "Files": "文件",
 "Filter is now globally disabled": "过滤器已全局禁用",
 "Filter is now globally enabled": "过滤器已全局启用",
@@ -374,6 +375,8 @@
 "Manage Ollama Models": "管理 Ollama 模型",
 "Manage Pipelines": "管理 Pipeline",
 "March": "三月",
+"Max File Count": "",
+"Max File Size(MB)": "",
 "Max Tokens (num_predict)": "最多 Token (num_predict)",
 "Maximum of 3 models can be downloaded simultaneously. Please try again later.": "最多可以同时下载 3 个模型,请稍后重试。",
 "May": "五月",
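The three keys added to every locale above, "Max File Count", "Max File Size(MB)", and "File size exceeds the limit of {{size}}MB", back the upload limits introduced on the backend in this commit; the {{size}} placeholder indicates i18next-style interpolation. The following is a minimal TypeScript sketch, not code from this commit, of how a client-side check might surface the new error string once the admin-configured limits are known. The names UploadLimits, validateFiles, and translate are illustrative assumptions, not identifiers from open-webui.

// Minimal sketch, not code from this commit: shows how the new
// "File size exceeds the limit of {{size}}MB" key could be rendered by an
// i18next-style translate function.
type TranslateFn = (key: string, values?: Record<string, unknown>) => string;

interface UploadLimits {
  maxFileSizeMB: number; // mirrors the backend MAX_FILE_SIZE setting
  maxFileCount: number;  // mirrors the backend MAX_FILE_COUNT setting
}

// Returns a translated error message, or null when the selection is allowed.
function validateFiles(
  files: File[],
  limits: UploadLimits,
  translate: TranslateFn
): string | null {
  if (files.length > limits.maxFileCount) {
    // No dedicated count-exceeded message is added in these hunks, so the
    // plain "Max File Count" label is reused here purely for illustration.
    return `${translate("Max File Count")}: ${limits.maxFileCount}`;
  }
  for (const file of files) {
    if (file.size > limits.maxFileSizeMB * 1024 * 1024) {
      // The configured limit is interpolated into the {{size}} placeholder.
      return translate("File size exceeds the limit of {{size}}MB", {
        size: limits.maxFileSizeMB,
      });
    }
  }
  return null;
}

A check like this would run in the browser before any upload request is made; the backend settings shown earlier in this commit remain the source of truth.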