@@ -1020,21 +1020,6 @@
 					scrollToBottom();
 				}
 			}
-
-			if ($chatId == _chatId) {
-				if ($settings.saveChatHistory ?? true) {
-					chat = await updateChatById(localStorage.token, _chatId, {
-						messages: messages,
-						history: history,
-						models: selectedModels,
-						params: params,
-						files: chatFiles
-					});
-
-					currentChatPage.set(1);
-					await chats.set(await getChatList(localStorage.token, $currentChatPage));
-				}
-			}
 		} else {
 			if (res !== null) {
 				const error = await res.json();
@@ -1066,6 +1051,7 @@
 			messages = messages;
 		}
 
+		await saveChatHandler(_chatId);
 
 		stopResponseFlag = false;
 		await tick();
@@ -1335,18 +1321,7 @@
 			await handleOpenAIError(error, null, model, responseMessage);
 		}
 
-		if ($chatId == _chatId) {
-			chat = await updateChatById(localStorage.token, _chatId, {
-				models: selectedModels,
-				messages: messages,
-				history: history,
-				params: params,
-				files: chatFiles
-			});
-
-			currentChatPage.set(1);
-			await chats.set(await getChatList(localStorage.token, $currentChatPage));
-		}
+		await saveChatHandler(_chatId);
 
 		messages = messages;
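For reference, the deleted blocks above are what the new saveChatHandler(_chatId) calls replace. The helper's actual definition in Chat.svelte is not part of these hunks; a minimal sketch, reconstructed from the removed lines (whether it keeps the $settings.saveChatHistory guard that only the first hunk had is an assumption), might look like:

const saveChatHandler = async (_chatId) => {
	// Only persist if the user is still viewing the chat that produced the response.
	if ($chatId == _chatId) {
		// Assumption: the shared helper keeps the saveChatHistory guard from the Ollama-path hunk.
		if ($settings.saveChatHistory ?? true) {
			chat = await updateChatById(localStorage.token, _chatId, {
				models: selectedModels,
				messages: messages,
				history: history,
				params: params,
				files: chatFiles
			});

			// Refresh the sidebar chat list so the updated chat shows up on page 1.
			currentChatPage.set(1);
			await chats.set(await getChatList(localStorage.token, $currentChatPage));
		}
	}
};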