mirror of https://github.com/open-webui/open-webui (synced 2025-06-26 18:26:48 +00:00)
	fix: chat general
This commit is contained in:
parent cb93038abf
commit 092884fec5
@@ -298,7 +298,7 @@
 							id="chat-textarea"
 							class=" dark:bg-gray-800 dark:text-gray-100 outline-none w-full py-3 px-2 {fileUploadEnabled
 								? ''
-								: ' pl-4'} rounded-xl resize-none"
+								: ' pl-4'} rounded-xl resize-none h-[48px]"
 							placeholder={speechRecognitionListening ? 'Listening...' : 'Send a message'}
 							bind:value={prompt}
 							on:keypress={(e) => {
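The only functional change in this hunk appends `h-[48px]` (a Tailwind height utility) to the textarea's class list, so the chat input renders at a fixed 48px single-line height before any resizing happens. For context, a minimal sketch of the auto-grow pattern such a fixed base height is typically paired with; only the element id comes from the diff, the handler itself is illustrative and not part of this commit:

// Illustrative auto-grow handler for a textarea with a fixed base height.
const textarea = document.getElementById('chat-textarea') as HTMLTextAreaElement | null;

textarea?.addEventListener('input', () => {
	// Reset to the CSS base height first so the box can also shrink,
	// then grow to fit the current content.
	textarea.style.height = '';
	textarea.style.height = `${textarea.scrollHeight}px`;
});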
@@ -1,5 +1,6 @@
 <script lang="ts">
 	import { models, showSettings, settings } from '$lib/stores';
+	import { onMount, tick } from 'svelte';
 	import toast from 'svelte-french-toast';
 
 	export let selectedModels = [''];
@@ -15,6 +16,12 @@
 		localStorage.setItem('settings', JSON.stringify($settings));
 		toast.success('Default model updated');
 	};
+
+	$: if (selectedModels.length > 0 && $models.length > 0) {
+		selectedModels = selectedModels.map((model) =>
+			$models.map((m) => m.name).includes(model) ? model : ''
+		);
+	}
 </script>
 
 <div class="flex flex-col my-2">
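The new reactive `$:` block keeps `selectedModels` in sync with the `$models` store: whenever either changes, any selected name that no longer corresponds to a loaded model is reset to the empty-string placeholder. Written as a plain function outside Svelte's reactive syntax, the same check looks roughly like the sketch below; the function name, `Model` type, and sample data are illustrative, not taken from the codebase:

// Hypothetical standalone version of the reactive guard above;
// `available` stands in for the value of the $models store.
type Model = { name: string; external?: boolean };

const sanitizeSelection = (selected: string[], available: Model[]): string[] => {
	const known = new Set(available.map((m) => m.name));
	// Keep a selection only if a model with that name still exists,
	// otherwise fall back to '' (the "select a model" placeholder).
	return selected.map((name) => (known.has(name) ? name : ''));
};

// Example: 'mistral' is no longer served, so it is cleared.
console.log(sanitizeSelection(['llama2', 'mistral'], [{ name: 'llama2' }]));
// -> ['llama2', '']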
@@ -109,10 +109,14 @@
 		await Promise.all(
 			selectedModels.map(async (model) => {
 				console.log(model);
-				if ($models.filter((m) => m.name === model)[0].external) {
+				const modelTag = $models.filter((m) => m.name === model).at(0);
+
+				if (modelTag?.external) {
 					await sendPromptOpenAI(model, prompt, parentId, _chatId);
-				} else {
+				} else if (modelTag) {
 					await sendPromptOllama(model, prompt, parentId, _chatId);
+				} else {
+					toast.error(`Model ${model} not found`);
 				}
 			})
 		);
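Before this change, `$models.filter((m) => m.name === model)[0].external` threw a TypeError whenever no model matched, because indexing an empty array yields `undefined`. The rewritten branch uses `.at(0)` with optional chaining, so an unknown model now falls through to a `toast.error` message instead of an uncaught exception; the same fix is applied to the second call site in the next hunk. A minimal TypeScript sketch of the resulting control flow, with illustrative types and sample data:

type Model = { name: string; external?: boolean };

const models: Model[] = [{ name: 'llama2' }];

// Illustrative dispatcher mirroring the branch structure above;
// the return values stand in for the real sendPrompt* calls.
const dispatch = (name: string): string => {
	// .at(0) yields Model | undefined, so a missing model no longer
	// throws when .external is accessed.
	const modelTag = models.filter((m) => m.name === name).at(0);

	if (modelTag?.external) {
		return 'openai'; // sendPromptOpenAI(...)
	} else if (modelTag) {
		return 'ollama'; // sendPromptOllama(...)
	} else {
		return 'not found'; // toast.error(`Model ${name} not found`)
	}
};

console.log(dispatch('llama2')); // -> 'ollama'
console.log(dispatch('gpt-4'));  // -> 'not found' (no TypeError)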
@@ -136,17 +136,20 @@
 		await Promise.all(
 			selectedModels.map(async (model) => {
 				console.log(model);
-				if ($models.filter((m) => m.name === model)[0].external) {
+				const modelTag = $models.filter((m) => m.name === model).at(0);
+				if (modelTag?.external) {
 					await sendPromptOpenAI(model, prompt, parentId, _chatId);
-				} else {
+				} else if (modelTag) {
 					await sendPromptOllama(model, prompt, parentId, _chatId);
+				} else {
+					toast.error(`Model ${model} not found`);
 				}
 			})
 		);
 
 		await chats.set(await getChatList(localStorage.token));
 	};
 
 	const sendPromptOllama = async (model, userPrompt, parentId, _chatId) => {
 		// Create response message
 		let responseMessageId = uuidv4();