@@ -238,7 +238,6 @@
 			await sendPrompt(userPrompt, userMessageId);
 		}
 	};
-
 	const sendPrompt = async (prompt, parentId) => {
 		const _chatId = JSON.parse(JSON.stringify($chatId));
@@ -292,40 +291,42 @@
 		}
 
 		await Promise.all(
-			selectedModels.map(async (model) => {
-				console.log(model);
-				const modelTag = $models.filter((m) => m.name === model).at(0);
-
-				// Create response message
-				let responseMessageId = uuidv4();
-				let responseMessage = {
-					parentId: parentId,
-					id: responseMessageId,
-					childrenIds: [],
-					role: 'assistant',
-					content: '',
-					model: model,
-					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
-				};
-
-				// Add message to history and Set currentId to messageId
-				history.messages[responseMessageId] = responseMessage;
-				history.currentId = responseMessageId;
-
-				// Append messageId to childrenIds of parent message
-				if (parentId !== null) {
-					history.messages[parentId].childrenIds = [
-						...history.messages[parentId].childrenIds,
-						responseMessageId
-					];
-				}
+			selectedModels.map(async (modelId) => {
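+				// Resolve the selected id to its model object; models are now looked up by id, not name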
+				const model = $models.filter((m) => m.id === modelId).at(0);
+
+				if (model) {
+					// Create response message
+					let responseMessageId = uuidv4();
+					let responseMessage = {
+						parentId: parentId,
+						id: responseMessageId,
+						childrenIds: [],
+						role: 'assistant',
+						content: '',
+						model: model.id,
+						timestamp: Math.floor(Date.now() / 1000) // Unix epoch
+					};
+
+					// Add message to history and Set currentId to messageId
+					history.messages[responseMessageId] = responseMessage;
+					history.currentId = responseMessageId;
+
+					// Append messageId to childrenIds of parent message
+					if (parentId !== null) {
+						history.messages[parentId].childrenIds = [
+							...history.messages[parentId].childrenIds,
+							responseMessageId
+						];
+					}
 
-				if (modelTag?.external) {
-					await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
-				} else if (modelTag) {
-					await sendPromptOllama(model, prompt, responseMessageId, _chatId);
+					if (model?.external) {
+						await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
+					} else if (model) {
+						await sendPromptOllama(model, prompt, responseMessageId, _chatId);
+					}
 				} else {
-					toast.error(`Model ${model} not found`);
+					toast.error(`Model ${modelId} not found`);
 				}
 			})
 		);
@@ -334,6 +334,8 @@
 	};
 
 	const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
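+		// Callers now pass the full model object; the rest of this function only needs its id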
+		model = model.id;
 		const responseMessage = history.messages[responseMessageId];
 
 		// Wait until history/message have been updated
@@ -543,57 +544,61 @@
 
 	const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
 		const responseMessage = history.messages[responseMessageId];
-
 		scrollToBottom();
 
-		const res = await generateOpenAIChatCompletion(localStorage.token, {
-			model: model,
-			stream: true,
-			messages: [
-				$settings.system
-					? {
-							role: 'system',
-							content: $settings.system
-					  }
-					: undefined,
-				...messages.filter((message) => !message.deleted)
-			]
-				.filter((message) => message)
-				.map((message, idx, arr) => ({
-					role: message.role,
-					...(message.files?.filter((file) => file.type === 'image').length > 0 ?? false
-						? {
-								content: [
-									{
-										type: 'text',
-										text:
-											arr.length - 1 !== idx
-												? message.content
-												: message?.raContent ?? message.content
-									},
-									...message.files
-										.filter((file) => file.type === 'image')
-										.map((file) => ({
-											type: 'image_url',
-											image_url: {
-												url: file.url
-											}
-										}))
-								]
-						  }
-						: {
-								content:
-									arr.length - 1 !== idx ? message.content : message?.raContent ?? message.content
-						  })
-				})),
-			seed: $settings?.options?.seed ?? undefined,
-			stop: $settings?.options?.stop ?? undefined,
-			temperature: $settings?.options?.temperature ?? undefined,
-			top_p: $settings?.options?.top_p ?? undefined,
-			num_ctx: $settings?.options?.num_ctx ?? undefined,
-			frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
-			max_tokens: $settings?.options?.num_predict ?? undefined
-		});
+		const res = await generateOpenAIChatCompletion(
+			localStorage.token,
+			{
+				model: model.id,
+				stream: true,
+				messages: [
+					$settings.system
+						? {
+								role: 'system',
+								content: $settings.system
+						  }
+						: undefined,
+					...messages.filter((message) => !message.deleted)
+				]
+					.filter((message) => message)
+					.map((message, idx, arr) => ({
+						role: message.role,
+						...(message.files?.filter((file) => file.type === 'image').length > 0 ?? false
+							? {
+									content: [
+										{
+											type: 'text',
+											text:
+												arr.length - 1 !== idx
+													? message.content
+													: message?.raContent ?? message.content
+										},
+										...message.files
+											.filter((file) => file.type === 'image')
+											.map((file) => ({
+												type: 'image_url',
+												image_url: {
+													url: file.url
+												}
+											}))
+									]
+							  }
+							: {
+									content:
+										arr.length - 1 !== idx ? message.content : message?.raContent ?? message.content
+							  })
+					})),
+				seed: $settings?.options?.seed ?? undefined,
+				stop: $settings?.options?.stop ?? undefined,
+				temperature: $settings?.options?.temperature ?? undefined,
+				top_p: $settings?.options?.top_p ?? undefined,
+				num_ctx: $settings?.options?.num_ctx ?? undefined,
+				frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
+				max_tokens: $settings?.options?.num_predict ?? undefined
+			},
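+			// Route LiteLLM-sourced models through the LiteLLM proxy; otherwise use the OpenAI-compatible base URL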
+			model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
+		);
 
 		if (res && res.ok) {
 			const reader = res.body
@@ -704,7 +708,6 @@
 			await setChatTitle(_chatId, userPrompt);
 		}
 	};
-
 	const stopResponse = () => {
 		stopResponseFlag = true;
 		console.log('stopResponse');
@@ -719,25 +722,28 @@
 			responseMessage.done = false;
 			await tick();
 
-			const modelTag = $models.filter((m) => m.name === responseMessage.model).at(0);
-
-			if (modelTag?.external) {
-				await sendPromptOpenAI(
-					responseMessage.model,
-					history.messages[responseMessage.parentId].content,
-					responseMessage.id,
-					_chatId
-				);
-			} else if (modelTag) {
-				await sendPromptOllama(
-					responseMessage.model,
-					history.messages[responseMessage.parentId].content,
-					responseMessage.id,
-					_chatId
-				);
-			} else {
-				toast.error(`Model ${model} not found`);
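+			// Resolve the message's model id to its model object before re-dispatching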
+			const model = $models.filter((m) => m.id === responseMessage.model).at(0);
+
+			if (model) {
+				if (model?.external) {
+					await sendPromptOpenAI(
+						model,
+						history.messages[responseMessage.parentId].content,
+						responseMessage.id,
+						_chatId
+					);
+				} else {
+					await sendPromptOllama(
+						model,
+						history.messages[responseMessage.parentId].content,
+						responseMessage.id,
+						_chatId
+					);
+				}
+			} else {
+				toast.error(`Model ${responseMessage.model} not found`);
 			}
 		}
 	};