Merge pull request #2453 from AlyMobarak/main

[fix] for issue #2447 (OpenAI-compatible API requests fail when a redundant, empty assistant message is included in the outgoing payload)
Merge commit b3de61247b by Timothy Jaeryang Baek, 2024-05-21 07:05:07 -10:00 (committed via GitHub)
2 changed files with 32 additions and 31 deletions
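
The functional change in both files is one extra .filter() in the chain that assembles the messages array for the OpenAI-compatible request: entries whose content is an empty string (the "redundant, empty assistant message" from issue #2447) are now dropped before the request body is built. The sketch below is a minimal illustration of that step, not the project's actual code; the ChatMessage type and the buildOpenAIMessages helper are hypothetical names used only to keep the example self-contained.

// Hypothetical helper illustrating the fix: strip null/undefined entries and
// entries with empty content before they reach the OpenAI-compatible backend.
type ChatMessage = {
    role: 'system' | 'user' | 'assistant';
    content: string;
};

function buildOpenAIMessages(history: (ChatMessage | null | undefined)[]): ChatMessage[] {
    return history
        .filter((message): message is ChatMessage => Boolean(message)) // existing filter: drop missing entries
        .filter((message) => message.content != '') // the added filter: drop empty-content messages
        .map((message) => ({
            role: message.role,
            content: message.content
        }));
}

// The empty assistant placeholder that triggered issue #2447 is removed:
const payload = buildOpenAIMessages([
    { role: 'user', content: 'Hello!' },
    { role: 'assistant', content: '' }
]);
console.log(payload); // [ { role: 'user', content: 'Hello!' } ]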

Changed file 1 of 2:

@@ -1,26 +1,25 @@
<script lang="ts">
import { v4 as uuidv4 } from 'uuid';
import { toast } from 'svelte-sonner';
import { v4 as uuidv4 } from 'uuid';
import { onMount, tick, getContext } from 'svelte';
import { goto } from '$app/navigation';
import { page } from '$app/stores';
import { getContext, onMount, tick } from 'svelte';
import {
models,
modelfiles,
user,
settings,
chats,
chatId,
config,
WEBUI_NAME,
tags as _tags,
showSidebar
chatId,
chats,
config,
modelfiles,
models,
settings,
showSidebar,
user
} from '$lib/stores';
import { copyToClipboard, splitStream } from '$lib/utils';
import { generateChatCompletion, cancelOllamaRequest } from '$lib/apis/ollama';
import {
addTagById,
createNewChat,
@@ -30,18 +29,18 @@
getTagsById,
updateChatById
} from '$lib/apis/chats';
import { queryCollection, queryDoc } from '$lib/apis/rag';
import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
import { generateOpenAIChatCompletion, generateTitle } from '$lib/apis/openai';
import { queryCollection, queryDoc } from '$lib/apis/rag';
import { queryMemory } from '$lib/apis/memories';
import { createOpenAITextStream } from '$lib/apis/streaming';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
import Navbar from '$lib/components/layout/Navbar.svelte';
import { LITELLM_API_BASE_URL, OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
import { RAGTemplate } from '$lib/utils/rag';
import { LITELLM_API_BASE_URL, OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';
import { WEBUI_BASE_URL } from '$lib/constants';
import { createOpenAITextStream } from '$lib/apis/streaming';
import { queryMemory } from '$lib/apis/memories';
const i18n = getContext('i18n');
@@ -614,6 +613,7 @@
 ...messages
 ]
 .filter((message) => message)
+.filter((message) => message.content != "")
 .map((message, idx, arr) => ({
 role: message.role,
 ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
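
As the issue title describes, the failure mode is an OpenAI-compatible backend rejecting a request whose messages array contains an assistant turn with empty content. A before/after sketch of the request body (illustrative literals only, not captured traffic; the model name is a placeholder):

// Before the fix: the empty assistant placeholder is serialized into the request
// and some OpenAI-compatible servers reject it.
const before = {
    model: 'some-model',
    messages: [
        { role: 'user', content: 'Hello!' },
        { role: 'assistant', content: '' } // redundant, empty assistant message
    ]
};

// After the fix: the empty turn is filtered out on the client before sending.
const after = {
    model: 'some-model',
    messages: [{ role: 'user', content: 'Hello!' }]
};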

Changed file 2 of 2:

@@ -1,25 +1,24 @@
<script lang="ts">
import { v4 as uuidv4 } from 'uuid';
import { toast } from 'svelte-sonner';
import { v4 as uuidv4 } from 'uuid';
import { onMount, tick, getContext } from 'svelte';
import { goto } from '$app/navigation';
import { page } from '$app/stores';
import {
models,
modelfiles,
user,
settings,
chats,
chatId,
config,
WEBUI_NAME,
tags as _tags,
showSidebar
chatId,
chats,
config,
modelfiles,
models,
settings,
showSidebar,
user
} from '$lib/stores';
import { copyToClipboard, splitStream, convertMessagesToHistory } from '$lib/utils';
import { convertMessagesToHistory, copyToClipboard, splitStream } from '$lib/utils';
import { getContext, onMount, tick } from 'svelte';
import { generateChatCompletion, cancelOllamaRequest } from '$lib/apis/ollama';
import {
addTagById,
createNewChat,
@@ -30,20 +29,21 @@
getTagsById,
updateChatById
} from '$lib/apis/chats';
import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
import { generateOpenAIChatCompletion, generateTitle } from '$lib/apis/openai';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
import Navbar from '$lib/components/layout/Navbar.svelte';
import { queryMemory } from '$lib/apis/memories';
import { createOpenAITextStream } from '$lib/apis/streaming';
import {
LITELLM_API_BASE_URL,
OPENAI_API_BASE_URL,
OLLAMA_API_BASE_URL,
OPENAI_API_BASE_URL,
WEBUI_BASE_URL
} from '$lib/constants';
import { createOpenAITextStream } from '$lib/apis/streaming';
import { queryMemory } from '$lib/apis/memories';
const i18n = getContext('i18n');
@@ -620,6 +620,7 @@
 ...messages
 ]
 .filter((message) => message)
+.filter((message) => message.content != "")
 .map((message, idx, arr) => ({
 role: message.role,
 ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&