diff --git a/.env.example b/.env.example index a42ed5d683e..7800677e253 100644 --- a/.env.example +++ b/.env.example @@ -164,6 +164,16 @@ ASSISTANTS_API_KEY=user_provided # ASSISTANTS_BASE_URL= # ASSISTANTS_MODELS=gpt-4o,gpt-3.5-turbo-0125,gpt-3.5-turbo-16k-0613,gpt-3.5-turbo-16k,gpt-3.5-turbo,gpt-4,gpt-4-0314,gpt-4-32k-0314,gpt-4-0613,gpt-3.5-turbo-0613,gpt-3.5-turbo-1106,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview +#==========================# +# Azure Assistants API # +#==========================# + +# Note: You should map your credentials with custom variables according to your Azure OpenAI Configuration +# The models for Azure Assistants are also determined by your Azure OpenAI configuration. + +# More info, including how to enable use of Assistants with Azure here: +# https://www.librechat.ai/docs/configuration/librechat_yaml/ai_endpoints/azure#using-assistants-with-azure + #============# # OpenRouter # #============# diff --git a/api/app/clients/OpenAIClient.js b/api/app/clients/OpenAIClient.js index b4a50bc05c6..f2290875754 100644 --- a/api/app/clients/OpenAIClient.js +++ b/api/app/clients/OpenAIClient.js @@ -756,6 +756,8 @@ class OpenAIClient extends BaseClient { * In case of failure, it will return the default title, "New Chat". */ async titleConvo({ text, conversationId, responseText = '' }) { + this.conversationId = conversationId; + if (this.options.attachments) { delete this.options.attachments; } @@ -838,13 +840,17 @@ ${convo} try { let useChatCompletion = true; + if (this.options.reverseProxyUrl === CohereConstants.API_URL) { useChatCompletion = false; } + title = ( await this.sendPayload(instructionsPayload, { modelOptions, useChatCompletion }) ).replaceAll('"', ''); + const completionTokens = this.getTokenCount(title); + this.recordTokenUsage({ promptTokens, completionTokens, context: 'title' }); } catch (e) { logger.error( @@ -868,6 +874,7 @@ ${convo} context: 'title', tokenBuffer: 150, }); + title = await runTitleChain({ llm, text, convo, signal: this.abortController.signal }); } catch (e) { if (e?.message?.toLowerCase()?.includes('abort')) { @@ -1005,9 +1012,9 @@ ${convo} await spendTokens( { context, - user: this.user, model: this.modelOptions.model, conversationId: this.conversationId, + user: this.user ?? this.options.req.user?.id, endpointTokenConfig: this.options.endpointTokenConfig, }, { promptTokens, completionTokens }, diff --git a/api/app/clients/specs/OpenAIClient.test.js b/api/app/clients/specs/OpenAIClient.test.js index 7ef4fdcae51..45903984193 100644 --- a/api/app/clients/specs/OpenAIClient.test.js +++ b/api/app/clients/specs/OpenAIClient.test.js @@ -144,6 +144,7 @@ describe('OpenAIClient', () => { const defaultOptions = { // debug: true, + req: {}, openaiApiKey: 'new-api-key', modelOptions: { model, diff --git a/api/models/Action.js b/api/models/Action.js index 9acac078b9f..86bd5d85948 100644 --- a/api/models/Action.js +++ b/api/models/Action.js @@ -62,8 +62,24 @@ const deleteAction = async (searchParams, session = null) => { return await Action.findOneAndDelete(searchParams, options).lean(); }; +/** + * Deletes actions by params, within a transaction session if provided. + * + * @param {Object} searchParams - The search parameters to find the actions to delete. + * @param {string} searchParams.action_id - The ID of the action(s) to delete. + * @param {string} searchParams.user - The user ID of the action's author. + * @param {mongoose.ClientSession} [session] - The transaction session to use (optional). 
+ * @returns {Promise} A promise that resolves to the number of deleted action documents. + */ +const deleteActions = async (searchParams, session = null) => { + const options = session ? { session } : {}; + const result = await Action.deleteMany(searchParams, options); + return result.deletedCount; +}; + module.exports = { - updateAction, getActions, + updateAction, deleteAction, + deleteActions, }; diff --git a/api/models/Assistant.js b/api/models/Assistant.js index 17e40772209..bf9382d0ee7 100644 --- a/api/models/Assistant.js +++ b/api/models/Assistant.js @@ -39,8 +39,21 @@ const getAssistants = async (searchParams) => { return await Assistant.find(searchParams).lean(); }; +/** + * Deletes an assistant based on the provided ID. + * + * @param {Object} searchParams - The search parameters to find the assistant to delete. + * @param {string} searchParams.assistant_id - The ID of the assistant to delete. + * @param {string} searchParams.user - The user ID of the assistant's author. + * @returns {Promise} Resolves when the assistant has been successfully deleted. + */ +const deleteAssistant = async (searchParams) => { + return await Assistant.findOneAndDelete(searchParams); +}; + module.exports = { updateAssistant, + deleteAssistant, getAssistants, getAssistant, }; diff --git a/api/models/plugins/mongoMeili.js b/api/models/plugins/mongoMeili.js index 261b5c50c78..df96338302b 100644 --- a/api/models/plugins/mongoMeili.js +++ b/api/models/plugins/mongoMeili.js @@ -155,7 +155,7 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) { function (results, value, key) { return { ...results, [key]: 1 }; }, - { _id: 1 }, + { _id: 1, __v: 1 }, ), ).lean(); diff --git a/api/models/spendTokens.js b/api/models/spendTokens.js index 830cda20758..917d0c93db3 100644 --- a/api/models/spendTokens.js +++ b/api/models/spendTokens.js @@ -40,7 +40,7 @@ const spendTokens = async (txData, tokenUsage) => { }); } - if (!completionTokens) { + if (!completionTokens && isNaN(completionTokens)) { logger.debug('[spendTokens] !completionTokens', { prompt, completion }); return; } diff --git a/api/package.json b/api/package.json index d91b6031eff..d4e0132ddaa 100644 --- a/api/package.json +++ b/api/package.json @@ -76,7 +76,7 @@ "nodejs-gpt": "^1.37.4", "nodemailer": "^6.9.4", "ollama": "^0.5.0", - "openai": "4.36.0", + "openai": "^4.47.1", "openai-chat-tokens": "^0.2.8", "openid-client": "^5.4.2", "passport": "^0.6.0", diff --git a/api/server/controllers/EndpointController.js b/api/server/controllers/EndpointController.js index b99dd5eda9c..d80ea6b14f9 100644 --- a/api/server/controllers/EndpointController.js +++ b/api/server/controllers/EndpointController.js @@ -16,10 +16,28 @@ async function endpointController(req, res) { /** @type {TEndpointsConfig} */ const mergedConfig = { ...defaultEndpointsConfig, ...customConfigEndpoints }; if (mergedConfig[EModelEndpoint.assistants] && req.app.locals?.[EModelEndpoint.assistants]) { - const { disableBuilder, retrievalModels, capabilities, ..._rest } = + const { disableBuilder, retrievalModels, capabilities, version, ..._rest } = req.app.locals[EModelEndpoint.assistants]; + mergedConfig[EModelEndpoint.assistants] = { ...mergedConfig[EModelEndpoint.assistants], + version, + retrievalModels, + disableBuilder, + capabilities, + }; + } + + if ( + mergedConfig[EModelEndpoint.azureAssistants] && + req.app.locals?.[EModelEndpoint.azureAssistants] + ) { + const { disableBuilder, retrievalModels, capabilities, version, ..._rest } = + 
req.app.locals[EModelEndpoint.azureAssistants]; + + mergedConfig[EModelEndpoint.azureAssistants] = { + ...mergedConfig[EModelEndpoint.azureAssistants], + version, retrievalModels, disableBuilder, capabilities, diff --git a/api/server/routes/assistants/chat.js b/api/server/controllers/assistants/chatV1.js similarity index 93% rename from api/server/routes/assistants/chat.js rename to api/server/controllers/assistants/chatV1.js index 96a09d02dd8..34f9e9203c5 100644 --- a/api/server/routes/assistants/chat.js +++ b/api/server/controllers/assistants/chatV1.js @@ -1,14 +1,13 @@ const { v4 } = require('uuid'); -const express = require('express'); const { Constants, RunStatus, CacheKeys, - FileSources, ContentTypes, EModelEndpoint, ViolationTypes, ImageVisionTool, + checkOpenAIStorage, AssistantStreamEvents, } = require('librechat-data-provider'); const { @@ -21,27 +20,18 @@ const { } = require('~/server/services/Threads'); const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils'); const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService'); -const { addTitle, initializeClient } = require('~/server/services/Endpoints/assistants'); const { formatMessage, createVisionPrompt } = require('~/app/clients/prompts'); const { createRun, StreamRunManager } = require('~/server/services/Runs'); +const { addTitle } = require('~/server/services/Endpoints/assistants'); const { getTransactions } = require('~/models/Transaction'); const checkBalance = require('~/models/checkBalance'); const { getConvo } = require('~/models/Conversation'); const getLogStores = require('~/cache/getLogStores'); const { getModelMaxTokens } = require('~/utils'); +const { getOpenAIClient } = require('./helpers'); const { logger } = require('~/config'); -const router = express.Router(); -const { - setHeaders, - handleAbort, - validateModel, - handleAbortError, - // validateEndpoint, - buildEndpointOption, -} = require('~/server/middleware'); - -router.post('/abort', handleAbort()); +const { handleAbortError } = require('~/server/middleware'); const ten_minutes = 1000 * 60 * 10; @@ -49,16 +39,17 @@ const ten_minutes = 1000 * 60 * 10; * @route POST / * @desc Chat with an assistant * @access Public - * @param {express.Request} req - The request object, containing the request data. - * @param {express.Response} res - The response object, used to send back a response. + * @param {Express.Request} req - The request object, containing the request data. + * @param {Express.Response} res - The response object, used to send back a response. 
* @returns {void} */ -router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res) => { +const chatV1 = async (req, res) => { logger.debug('[/assistants/chat/] req.body', req.body); const { text, model, + endpoint, files = [], promptPrefix, assistant_id, @@ -70,7 +61,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res } = req.body; /** @type {Partial} */ - const assistantsConfig = req.app.locals?.[EModelEndpoint.assistants]; + const assistantsConfig = req.app.locals?.[endpoint]; if (assistantsConfig) { const { supportedIds, excludedIds } = assistantsConfig; @@ -138,7 +129,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res user: req.user.id, shouldSaveMessage: false, messageId: responseMessageId, - endpoint: EModelEndpoint.assistants, + endpoint, }; if (error.message === 'Run cancelled') { @@ -149,7 +140,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res logger.debug('[/assistants/chat/] Request aborted on close'); } else if (/Files.*are invalid/.test(error.message)) { const errorMessage = `Files are invalid, or may not have uploaded yet.${ - req.app.locals?.[EModelEndpoint.azureOpenAI].assistants + endpoint === EModelEndpoint.azureAssistants ? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.' : '' }`; @@ -205,6 +196,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res const runMessages = await checkMessageGaps({ openai, run_id, + endpoint, thread_id, conversationId, latestMessageId: responseMessageId, @@ -311,8 +303,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res }); }; - /** @type {{ openai: OpenAIClient }} */ - const { openai: _openai, client } = await initializeClient({ + const { openai: _openai, client } = await getOpenAIClient({ req, res, endpointOption: req.body.endpointOption, @@ -370,10 +361,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res /** @type {MongoFile[]} */ const attachments = await req.body.endpointOption.attachments; - if ( - attachments && - attachments.every((attachment) => attachment.source === FileSources.openai) - ) { + if (attachments && attachments.every((attachment) => checkOpenAIStorage(attachment.source))) { return; } @@ -431,7 +419,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res if (processedFiles) { for (const file of processedFiles) { - if (file.source !== FileSources.openai) { + if (!checkOpenAIStorage(file.source)) { attachedFileIds.delete(file.file_id); const index = file_ids.indexOf(file.file_id); if (index > -1) { @@ -467,6 +455,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res assistant_id, thread_id, model: assistant_id, + endpoint, }; previousMessages.push(requestMessage); @@ -476,7 +465,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res conversation = { conversationId, - endpoint: EModelEndpoint.assistants, + endpoint, promptPrefix: promptPrefix, instructions: instructions, assistant_id, @@ -513,7 +502,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res let response; const processRun = async (retry = false) => { - if (req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) { + if (endpoint === EModelEndpoint.azureAssistants) { body.model = openai._options.model; openai.attachedFileIds = attachedFileIds; 
openai.visionPromise = visionPromise; @@ -603,6 +592,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res assistant_id, thread_id, model: assistant_id, + endpoint, }; sendMessage(res, { @@ -655,6 +645,6 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res } catch (error) { await handleError(error); } -}); +}; -module.exports = router; +module.exports = chatV1; diff --git a/api/server/controllers/assistants/chatV2.js b/api/server/controllers/assistants/chatV2.js new file mode 100644 index 00000000000..c72d5fc9b43 --- /dev/null +++ b/api/server/controllers/assistants/chatV2.js @@ -0,0 +1,618 @@ +const { v4 } = require('uuid'); +const { + Constants, + RunStatus, + CacheKeys, + ContentTypes, + ToolCallTypes, + EModelEndpoint, + ViolationTypes, + retrievalMimeTypes, + AssistantStreamEvents, +} = require('librechat-data-provider'); +const { + initThread, + recordUsage, + saveUserMessage, + checkMessageGaps, + addThreadMetadata, + saveAssistantMessage, +} = require('~/server/services/Threads'); +const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils'); +const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService'); +const { createRun, StreamRunManager } = require('~/server/services/Runs'); +const { addTitle } = require('~/server/services/Endpoints/assistants'); +const { getTransactions } = require('~/models/Transaction'); +const checkBalance = require('~/models/checkBalance'); +const { getConvo } = require('~/models/Conversation'); +const getLogStores = require('~/cache/getLogStores'); +const { getModelMaxTokens } = require('~/utils'); +const { getOpenAIClient } = require('./helpers'); +const { logger } = require('~/config'); + +const { handleAbortError } = require('~/server/middleware'); + +const ten_minutes = 1000 * 60 * 10; + +/** + * @route POST / + * @desc Chat with an assistant + * @access Public + * @param {Express.Request} req - The request object, containing the request data. + * @param {Express.Response} res - The response object, used to send back a response. 
+ * @returns {void} + */ +const chatV2 = async (req, res) => { + logger.debug('[/assistants/chat/] req.body', req.body); + + /** @type {{ files: MongoFile[]}} */ + const { + text, + model, + endpoint, + files = [], + promptPrefix, + assistant_id, + instructions, + thread_id: _thread_id, + messageId: _messageId, + conversationId: convoId, + parentMessageId: _parentId = Constants.NO_PARENT, + } = req.body; + + /** @type {Partial} */ + const assistantsConfig = req.app.locals?.[endpoint]; + + if (assistantsConfig) { + const { supportedIds, excludedIds } = assistantsConfig; + const error = { message: 'Assistant not supported' }; + if (supportedIds?.length && !supportedIds.includes(assistant_id)) { + return await handleAbortError(res, req, error, { + sender: 'System', + conversationId: convoId, + messageId: v4(), + parentMessageId: _messageId, + error, + }); + } else if (excludedIds?.length && excludedIds.includes(assistant_id)) { + return await handleAbortError(res, req, error, { + sender: 'System', + conversationId: convoId, + messageId: v4(), + parentMessageId: _messageId, + }); + } + } + + /** @type {OpenAIClient} */ + let openai; + /** @type {string|undefined} - the current thread id */ + let thread_id = _thread_id; + /** @type {string|undefined} - the current run id */ + let run_id; + /** @type {string|undefined} - the parent messageId */ + let parentMessageId = _parentId; + /** @type {TMessage[]} */ + let previousMessages = []; + /** @type {import('librechat-data-provider').TConversation | null} */ + let conversation = null; + /** @type {string[]} */ + let file_ids = []; + /** @type {Set} */ + let attachedFileIds = new Set(); + /** @type {TMessage | null} */ + let requestMessage = null; + + const userMessageId = v4(); + const responseMessageId = v4(); + + /** @type {string} - The conversation UUID - created if undefined */ + const conversationId = convoId ?? v4(); + + const cache = getLogStores(CacheKeys.ABORT_KEYS); + const cacheKey = `${req.user.id}:${conversationId}`; + + /** @type {Run | undefined} - The completed run, undefined if incomplete */ + let completedRun; + + const handleError = async (error) => { + const defaultErrorMessage = + 'The Assistant run failed to initialize. Try sending a message in a new conversation.'; + const messageData = { + thread_id, + assistant_id, + conversationId, + parentMessageId, + sender: 'System', + user: req.user.id, + shouldSaveMessage: false, + messageId: responseMessageId, + endpoint, + }; + + if (error.message === 'Run cancelled') { + return res.end(); + } else if (error.message === 'Request closed' && completedRun) { + return; + } else if (error.message === 'Request closed') { + logger.debug('[/assistants/chat/] Request aborted on close'); + } else if (/Files.*are invalid/.test(error.message)) { + const errorMessage = `Files are invalid, or may not have uploaded yet.${ + endpoint === EModelEndpoint.azureAssistants + ? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.' + : '' + }`; + return sendResponse(res, messageData, errorMessage); + } else if (error?.message?.includes('string too long')) { + return sendResponse( + res, + messageData, + 'Message too long. The Assistants API has a limit of 32,768 characters per message. 
Please shorten it and try again.', + ); + } else if (error?.message?.includes(ViolationTypes.TOKEN_BALANCE)) { + return sendResponse(res, messageData, error.message); + } else { + logger.error('[/assistants/chat/]', error); + } + + if (!openai || !thread_id || !run_id) { + return sendResponse(res, messageData, defaultErrorMessage); + } + + await sleep(2000); + + try { + const status = await cache.get(cacheKey); + if (status === 'cancelled') { + logger.debug('[/assistants/chat/] Run already cancelled'); + return res.end(); + } + await cache.delete(cacheKey); + const cancelledRun = await openai.beta.threads.runs.cancel(thread_id, run_id); + logger.debug('[/assistants/chat/] Cancelled run:', cancelledRun); + } catch (error) { + logger.error('[/assistants/chat/] Error cancelling run', error); + } + + await sleep(2000); + + let run; + try { + run = await openai.beta.threads.runs.retrieve(thread_id, run_id); + await recordUsage({ + ...run.usage, + model: run.model, + user: req.user.id, + conversationId, + }); + } catch (error) { + logger.error('[/assistants/chat/] Error fetching or processing run', error); + } + + let finalEvent; + try { + const runMessages = await checkMessageGaps({ + openai, + run_id, + endpoint, + thread_id, + conversationId, + latestMessageId: responseMessageId, + }); + + const errorContentPart = { + text: { + value: + error?.message ?? 'There was an error processing your request. Please try again later.', + }, + type: ContentTypes.ERROR, + }; + + if (!Array.isArray(runMessages[runMessages.length - 1]?.content)) { + runMessages[runMessages.length - 1].content = [errorContentPart]; + } else { + const contentParts = runMessages[runMessages.length - 1].content; + for (let i = 0; i < contentParts.length; i++) { + const currentPart = contentParts[i]; + /** @type {CodeToolCall | RetrievalToolCall | FunctionToolCall | undefined} */ + const toolCall = currentPart?.[ContentTypes.TOOL_CALL]; + if ( + toolCall && + toolCall?.function && + !(toolCall?.function?.output || toolCall?.function?.output?.length) + ) { + contentParts[i] = { + ...currentPart, + [ContentTypes.TOOL_CALL]: { + ...toolCall, + function: { + ...toolCall.function, + output: 'error processing tool', + }, + }, + }; + } + } + runMessages[runMessages.length - 1].content.push(errorContentPart); + } + + finalEvent = { + final: true, + conversation: await getConvo(req.user.id, conversationId), + runMessages, + }; + } catch (error) { + logger.error('[/assistants/chat/] Error finalizing error process', error); + return sendResponse(res, messageData, 'The Assistant run failed'); + } + + return sendResponse(res, finalEvent); + }; + + try { + res.on('close', async () => { + if (!completedRun) { + await handleError(new Error('Request closed')); + } + }); + + if (convoId && !_thread_id) { + completedRun = true; + throw new Error('Missing thread_id for existing conversation'); + } + + if (!assistant_id) { + completedRun = true; + throw new Error('Missing assistant_id'); + } + + const checkBalanceBeforeRun = async () => { + if (!isEnabled(process.env.CHECK_BALANCE)) { + return; + } + const transactions = + (await getTransactions({ + user: req.user.id, + context: 'message', + conversationId, + })) ?? []; + + const totalPreviousTokens = Math.abs( + transactions.reduce((acc, curr) => acc + curr.rawAmount, 0), + ); + + // TODO: make promptBuffer a config option; buffer for titles, needs buffer for system instructions + const promptBuffer = parentMessageId === Constants.NO_PARENT && !_thread_id ? 
200 : 0; + // 5 is added for labels + let promptTokens = (await countTokens(text + (promptPrefix ?? ''))) + 5; + promptTokens += totalPreviousTokens + promptBuffer; + // Count tokens up to the current context window + promptTokens = Math.min(promptTokens, getModelMaxTokens(model)); + + await checkBalance({ + req, + res, + txData: { + model, + user: req.user.id, + tokenType: 'prompt', + amount: promptTokens, + }, + }); + }; + + const { openai: _openai, client } = await getOpenAIClient({ + req, + res, + endpointOption: req.body.endpointOption, + initAppClient: true, + }); + + openai = _openai; + + if (previousMessages.length) { + parentMessageId = previousMessages[previousMessages.length - 1].messageId; + } + + let userMessage = { + role: 'user', + content: [ + { + type: ContentTypes.TEXT, + text, + }, + ], + metadata: { + messageId: userMessageId, + }, + }; + + /** @type {CreateRunBody | undefined} */ + const body = { + assistant_id, + model, + }; + + if (promptPrefix) { + body.additional_instructions = promptPrefix; + } + + if (instructions) { + body.instructions = instructions; + } + + const getRequestFileIds = async () => { + let thread_file_ids = []; + if (convoId) { + const convo = await getConvo(req.user.id, convoId); + if (convo && convo.file_ids) { + thread_file_ids = convo.file_ids; + } + } + + if (files.length || thread_file_ids.length) { + attachedFileIds = new Set([...file_ids, ...thread_file_ids]); + + let attachmentIndex = 0; + for (const file of files) { + file_ids.push(file.file_id); + if (file.type.startsWith('image')) { + userMessage.content.push({ + type: ContentTypes.IMAGE_FILE, + [ContentTypes.IMAGE_FILE]: { file_id: file.file_id }, + }); + } + + if (!userMessage.attachments) { + userMessage.attachments = []; + } + + userMessage.attachments.push({ + file_id: file.file_id, + tools: [{ type: ToolCallTypes.CODE_INTERPRETER }], + }); + + if (file.type.startsWith('image')) { + continue; + } + + const mimeType = file.type; + const isSupportedByRetrieval = retrievalMimeTypes.some((regex) => regex.test(mimeType)); + if (isSupportedByRetrieval) { + userMessage.attachments[attachmentIndex].tools.push({ + type: ToolCallTypes.FILE_SEARCH, + }); + } + + attachmentIndex++; + } + } + }; + + const initializeThread = async () => { + await getRequestFileIds(); + + // TODO: may allow multiple messages to be created beforehand in a future update + const initThreadBody = { + messages: [userMessage], + metadata: { + user: req.user.id, + conversationId, + }, + }; + + const result = await initThread({ openai, body: initThreadBody, thread_id }); + thread_id = result.thread_id; + + createOnTextProgress({ + openai, + conversationId, + userMessageId, + messageId: responseMessageId, + thread_id, + }); + + requestMessage = { + user: req.user.id, + text, + messageId: userMessageId, + parentMessageId, + // TODO: make sure client sends correct format for `files`, use zod + files, + file_ids, + conversationId, + isCreatedByUser: true, + assistant_id, + thread_id, + model: assistant_id, + endpoint, + }; + + previousMessages.push(requestMessage); + + /* asynchronous */ + saveUserMessage({ ...requestMessage, model }); + + conversation = { + conversationId, + endpoint, + promptPrefix: promptPrefix, + instructions: instructions, + assistant_id, + // model, + }; + + if (file_ids.length) { + conversation.file_ids = file_ids; + } + }; + + const promises = [initializeThread(), checkBalanceBeforeRun()]; + await Promise.all(promises); + + const sendInitialResponse = () => { + sendMessage(res, { + sync: true, + 
conversationId, + // messages: previousMessages, + requestMessage, + responseMessage: { + user: req.user.id, + messageId: openai.responseMessage.messageId, + parentMessageId: userMessageId, + conversationId, + assistant_id, + thread_id, + model: assistant_id, + }, + }); + }; + + /** @type {RunResponse | typeof StreamRunManager | undefined} */ + let response; + + const processRun = async (retry = false) => { + if (endpoint === EModelEndpoint.azureAssistants) { + body.model = openai._options.model; + openai.attachedFileIds = attachedFileIds; + if (retry) { + response = await runAssistant({ + openai, + thread_id, + run_id, + in_progress: openai.in_progress, + }); + return; + } + + /* NOTE: + * By default, a Run will use the model and tools configuration specified in Assistant object, + * but you can override most of these when creating the Run for added flexibility: + */ + const run = await createRun({ + openai, + thread_id, + body, + }); + + run_id = run.id; + await cache.set(cacheKey, `${thread_id}:${run_id}`, ten_minutes); + sendInitialResponse(); + + // todo: retry logic + response = await runAssistant({ openai, thread_id, run_id }); + return; + } + + /** @type {{[AssistantStreamEvents.ThreadRunCreated]: (event: ThreadRunCreated) => Promise}} */ + const handlers = { + [AssistantStreamEvents.ThreadRunCreated]: async (event) => { + await cache.set(cacheKey, `${thread_id}:${event.data.id}`, ten_minutes); + run_id = event.data.id; + sendInitialResponse(); + }, + }; + + const streamRunManager = new StreamRunManager({ + req, + res, + openai, + handlers, + thread_id, + attachedFileIds, + responseMessage: openai.responseMessage, + // streamOptions: { + + // }, + }); + + await streamRunManager.runAssistant({ + thread_id, + body, + }); + + response = streamRunManager; + }; + + await processRun(); + logger.debug('[/assistants/chat/] response', { + run: response.run, + steps: response.steps, + }); + + if (response.run.status === RunStatus.CANCELLED) { + logger.debug('[/assistants/chat/] Run cancelled, handled by `abortRun`'); + return res.end(); + } + + if (response.run.status === RunStatus.IN_PROGRESS) { + processRun(true); + } + + completedRun = response.run; + + /** @type {ResponseMessage} */ + const responseMessage = { + ...(response.responseMessage ?? response.finalMessage), + parentMessageId: userMessageId, + conversationId, + user: req.user.id, + assistant_id, + thread_id, + model: assistant_id, + endpoint, + }; + + sendMessage(res, { + final: true, + conversation, + requestMessage: { + parentMessageId, + thread_id, + }, + }); + res.end(); + + await saveAssistantMessage({ ...responseMessage, model }); + + if (parentMessageId === Constants.NO_PARENT && !_thread_id) { + addTitle(req, { + text, + responseText: response.text, + conversationId, + client, + }); + } + + await addThreadMetadata({ + openai, + thread_id, + messageId: responseMessage.messageId, + messages: response.messages, + }); + + if (!response.run.usage) { + await sleep(3000); + completedRun = await openai.beta.threads.runs.retrieve(thread_id, response.run.id); + if (completedRun.usage) { + await recordUsage({ + ...completedRun.usage, + user: req.user.id, + model: completedRun.model ?? model, + conversationId, + }); + } + } else { + await recordUsage({ + ...response.run.usage, + user: req.user.id, + model: response.run.model ?? 
model, + conversationId, + }); + } + } catch (error) { + await handleError(error); + } +}; + +module.exports = chatV2; diff --git a/api/server/controllers/assistants/helpers.js b/api/server/controllers/assistants/helpers.js new file mode 100644 index 00000000000..f8c9efde47d --- /dev/null +++ b/api/server/controllers/assistants/helpers.js @@ -0,0 +1,158 @@ +const { EModelEndpoint, CacheKeys, defaultAssistantsVersion } = require('librechat-data-provider'); +const { + initializeClient: initAzureClient, +} = require('~/server/services/Endpoints/azureAssistants'); +const { initializeClient } = require('~/server/services/Endpoints/assistants'); +const { getLogStores } = require('~/cache'); + +/** + * @param {Express.Request} req + * @param {string} [endpoint] + * @returns {Promise} + */ +const getCurrentVersion = async (req, endpoint) => { + const index = req.baseUrl.lastIndexOf('/v'); + let version = index !== -1 ? req.baseUrl.substring(index + 1, index + 3) : null; + if (!version && req.body.version) { + version = `v${req.body.version}`; + } + if (!version && endpoint) { + const cache = getLogStores(CacheKeys.CONFIG_STORE); + const cachedEndpointsConfig = await cache.get(CacheKeys.ENDPOINT_CONFIG); + version = `v${ + cachedEndpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint] + }`; + } + if (!version?.startsWith('v') && version.length !== 2) { + throw new Error(`[${req.baseUrl}] Invalid version: ${version}`); + } + return version; +}; + +/** + * Asynchronously lists assistants based on provided query parameters. + * + * Initializes the client with the current request and response objects and lists assistants + * according to the query parameters. This function abstracts the logic for non-Azure paths. + * + * @async + * @param {object} params - The parameters object. + * @param {object} params.req - The request object, used for initializing the client. + * @param {object} params.res - The response object, used for initializing the client. + * @param {string} params.version - The API version to use. + * @param {object} params.query - The query parameters to list assistants (e.g., limit, order). + * @returns {Promise} A promise that resolves to the response from the `openai.beta.assistants.list` method call. + */ +const listAssistants = async ({ req, res, version, query }) => { + const { openai } = await getOpenAIClient({ req, res, version }); + return openai.beta.assistants.list(query); +}; + +/** + * Asynchronously lists assistants for Azure configured groups. + * + * Iterates through Azure configured assistant groups, initializes the client with the current request and response objects, + * lists assistants based on the provided query parameters, and merges their data alongside the model information into a single array. + * + * @async + * @param {object} params - The parameters object. + * @param {object} params.req - The request object, used for initializing the client and manipulating the request body. + * @param {object} params.res - The response object, used for initializing the client. + * @param {string} params.version - The API version to use. + * @param {TAzureConfig} params.azureConfig - The Azure configuration object containing assistantGroups and groupMap. + * @param {object} params.query - The query parameters to list assistants (e.g., limit, order). + * @returns {Promise} A promise that resolves to an array of assistant data merged with their respective model information. 
+ */ +const listAssistantsForAzure = async ({ req, res, version, azureConfig = {}, query }) => { + /** @type {Array<[string, TAzureModelConfig]>} */ + const groupModelTuples = []; + const promises = []; + /** @type {Array} */ + const groups = []; + + const { groupMap, assistantGroups } = azureConfig; + + for (const groupName of assistantGroups) { + const group = groupMap[groupName]; + groups.push(group); + + const currentModelTuples = Object.entries(group?.models); + groupModelTuples.push(currentModelTuples); + + /* The specified model is only necessary to + fetch assistants for the shared instance */ + req.body.model = currentModelTuples[0][0]; + promises.push(listAssistants({ req, res, version, query })); + } + + const resolvedQueries = await Promise.all(promises); + const data = resolvedQueries.flatMap((res, i) => + res.data.map((assistant) => { + const deploymentName = assistant.model; + const currentGroup = groups[i]; + const currentModelTuples = groupModelTuples[i]; + const firstModel = currentModelTuples[0][0]; + + if (currentGroup.deploymentName === deploymentName) { + return { ...assistant, model: firstModel }; + } + + for (const [model, modelConfig] of currentModelTuples) { + if (modelConfig.deploymentName === deploymentName) { + return { ...assistant, model }; + } + } + + return { ...assistant, model: firstModel }; + }), + ); + + return { + first_id: data[0]?.id, + last_id: data[data.length - 1]?.id, + object: 'list', + has_more: false, + data, + }; +}; + +async function getOpenAIClient({ req, res, endpointOption, initAppClient, overrideEndpoint }) { + let endpoint = overrideEndpoint ?? req.body.endpoint ?? req.query.endpoint; + const version = await getCurrentVersion(req, endpoint); + if (!endpoint) { + throw new Error(`[${req.baseUrl}] Endpoint is required`); + } + + let result; + if (endpoint === EModelEndpoint.assistants) { + result = await initializeClient({ req, res, version, endpointOption, initAppClient }); + } else if (endpoint === EModelEndpoint.azureAssistants) { + result = await initAzureClient({ req, res, version, endpointOption, initAppClient }); + } + + return result; +} + +const fetchAssistants = async (req, res) => { + const { limit = 100, order = 'desc', after, before, endpoint } = req.query; + const version = await getCurrentVersion(req, endpoint); + const query = { limit, order, after, before }; + + /** @type {AssistantListResponse} */ + let body; + + if (endpoint === EModelEndpoint.assistants) { + ({ body } = await listAssistants({ req, res, version, query })); + } else if (endpoint === EModelEndpoint.azureAssistants) { + const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI]; + body = await listAssistantsForAzure({ req, res, version, azureConfig, query }); + } + + return body; +}; + +module.exports = { + getOpenAIClient, + fetchAssistants, + getCurrentVersion, +}; diff --git a/api/server/routes/assistants/assistants.js b/api/server/controllers/assistants/v1.js similarity index 75% rename from api/server/routes/assistants/assistants.js rename to api/server/controllers/assistants/v1.js index 67f200f6b4d..3bbd6b63dd0 100644 --- a/api/server/routes/assistants/assistants.js +++ b/api/server/controllers/assistants/v1.js @@ -1,34 +1,11 @@ -const multer = require('multer'); -const express = require('express'); -const { FileContext, EModelEndpoint } = require('librechat-data-provider'); -const { - initializeClient, - listAssistantsForAzure, - listAssistants, -} = require('~/server/services/Endpoints/assistants'); +const { FileContext } = 
require('librechat-data-provider'); const { getStrategyFunctions } = require('~/server/services/Files/strategies'); +const { deleteAssistantActions } = require('~/server/services/ActionService'); const { uploadImageBuffer } = require('~/server/services/Files/process'); const { updateAssistant, getAssistants } = require('~/models/Assistant'); +const { getOpenAIClient, fetchAssistants } = require('./helpers'); const { deleteFileByFilter } = require('~/models/File'); const { logger } = require('~/config'); -const actions = require('./actions'); -const tools = require('./tools'); - -const upload = multer(); -const router = express.Router(); - -/** - * Assistant actions route. - * @route GET|POST /assistants/actions - */ -router.use('/actions', actions); - -/** - * Create an assistant. - * @route GET /assistants/tools - * @returns {TPlugin[]} 200 - application/json - */ -router.use('/tools', tools); /** * Create an assistant. @@ -36,12 +13,11 @@ router.use('/tools', tools); * @param {AssistantCreateParams} req.body - The assistant creation parameters. * @returns {Assistant} 201 - success response - application/json */ -router.post('/', async (req, res) => { +const createAssistant = async (req, res) => { try { - /** @type {{ openai: OpenAI }} */ - const { openai } = await initializeClient({ req, res }); + const { openai } = await getOpenAIClient({ req, res }); - const { tools = [], ...assistantData } = req.body; + const { tools = [], endpoint, ...assistantData } = req.body; assistantData.tools = tools .map((tool) => { if (typeof tool !== 'string') { @@ -52,18 +28,28 @@ router.post('/', async (req, res) => { }) .filter((tool) => tool); + let azureModelIdentifier = null; if (openai.locals?.azureOptions) { + azureModelIdentifier = assistantData.model; assistantData.model = openai.locals.azureOptions.azureOpenAIApiDeploymentName; } + assistantData.metadata = { + author: req.user.id, + endpoint, + }; + const assistant = await openai.beta.assistants.create(assistantData); + if (azureModelIdentifier) { + assistant.model = azureModelIdentifier; + } logger.debug('/assistants/', assistant); res.status(201).json(assistant); } catch (error) { logger.error('[/assistants] Error creating assistant', error); res.status(500).json({ error: error.message }); } -}); +}; /** * Retrieves an assistant. @@ -71,10 +57,10 @@ router.post('/', async (req, res) => { * @param {string} req.params.id - Assistant identifier. * @returns {Assistant} 200 - success response - application/json */ -router.get('/:id', async (req, res) => { +const retrieveAssistant = async (req, res) => { try { - /** @type {{ openai: OpenAI }} */ - const { openai } = await initializeClient({ req, res }); + /* NOTE: not actually being used right now */ + const { openai } = await getOpenAIClient({ req, res }); const assistant_id = req.params.id; const assistant = await openai.beta.assistants.retrieve(assistant_id); @@ -83,22 +69,23 @@ router.get('/:id', async (req, res) => { logger.error('[/assistants/:id] Error retrieving assistant', error); res.status(500).json({ error: error.message }); } -}); +}; /** * Modifies an assistant. * @route PATCH /assistants/:id + * @param {object} req - Express Request + * @param {object} req.params - Request params * @param {string} req.params.id - Assistant identifier. * @param {AssistantUpdateParams} req.body - The assistant update parameters. 
* @returns {Assistant} 200 - success response - application/json */ -router.patch('/:id', async (req, res) => { +const patchAssistant = async (req, res) => { try { - /** @type {{ openai: OpenAI }} */ - const { openai } = await initializeClient({ req, res }); + const { openai } = await getOpenAIClient({ req, res }); const assistant_id = req.params.id; - const updateData = req.body; + const { endpoint: _e, ...updateData } = req.body; updateData.tools = (updateData.tools ?? []) .map((tool) => { if (typeof tool !== 'string') { @@ -119,52 +106,46 @@ router.patch('/:id', async (req, res) => { logger.error('[/assistants/:id] Error updating assistant', error); res.status(500).json({ error: error.message }); } -}); +}; /** * Deletes an assistant. * @route DELETE /assistants/:id + * @param {object} req - Express Request + * @param {object} req.params - Request params * @param {string} req.params.id - Assistant identifier. * @returns {Assistant} 200 - success response - application/json */ -router.delete('/:id', async (req, res) => { +const deleteAssistant = async (req, res) => { try { - /** @type {{ openai: OpenAI }} */ - const { openai } = await initializeClient({ req, res }); + const { openai } = await getOpenAIClient({ req, res }); const assistant_id = req.params.id; const deletionStatus = await openai.beta.assistants.del(assistant_id); + if (deletionStatus?.deleted) { + await deleteAssistantActions({ req, assistant_id }); + } res.json(deletionStatus); } catch (error) { logger.error('[/assistants/:id] Error deleting assistant', error); res.status(500).json({ error: 'Error deleting assistant' }); } -}); +}; /** * Returns a list of assistants. * @route GET /assistants + * @param {object} req - Express Request * @param {AssistantListParams} req.query - The assistant list parameters for pagination and sorting. * @returns {AssistantListResponse} 200 - success response - application/json */ -router.get('/', async (req, res) => { +const listAssistants = async (req, res) => { try { - const { limit = 100, order = 'desc', after, before } = req.query; - const query = { limit, order, after, before }; + const body = await fetchAssistants(req, res); - const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI]; - /** @type {AssistantListResponse} */ - let body; - - if (azureConfig?.assistants) { - body = await listAssistantsForAzure({ req, res, azureConfig, query }); - } else { - ({ body } = await listAssistants({ req, res, query })); - } - - if (req.app.locals?.[EModelEndpoint.assistants]) { + if (req.app.locals?.[req.query.endpoint]) { /** @type {Partial} */ - const assistantsConfig = req.app.locals[EModelEndpoint.assistants]; + const assistantsConfig = req.app.locals[req.query.endpoint]; const { supportedIds, excludedIds } = assistantsConfig; if (supportedIds?.length) { body.data = body.data.filter((assistant) => supportedIds.includes(assistant.id)); @@ -178,31 +159,34 @@ router.get('/', async (req, res) => { logger.error('[/assistants] Error listing assistants', error); res.status(500).json({ message: 'Error listing assistants' }); } -}); +}; /** * Returns a list of the user's assistant documents (metadata saved to database). 
* @route GET /assistants/documents * @returns {AssistantDocument[]} 200 - success response - application/json */ -router.get('/documents', async (req, res) => { +const getAssistantDocuments = async (req, res) => { try { res.json(await getAssistants({ user: req.user.id })); } catch (error) { logger.error('[/assistants/documents] Error listing assistant documents', error); res.status(500).json({ error: error.message }); } -}); +}; /** * Uploads and updates an avatar for a specific assistant. * @route POST /avatar/:assistant_id + * @param {object} req - Express Request + * @param {object} req.params - Request params * @param {string} req.params.assistant_id - The ID of the assistant. * @param {Express.Multer.File} req.file - The avatar image file. + * @param {object} req.body - Request body * @param {string} [req.body.metadata] - Optional metadata for the assistant's avatar. * @returns {Object} 200 - success response - application/json */ -router.post('/avatar/:assistant_id', upload.single('file'), async (req, res) => { +const uploadAssistantAvatar = async (req, res) => { try { const { assistant_id } = req.params; if (!assistant_id) { @@ -210,8 +194,7 @@ router.post('/avatar/:assistant_id', upload.single('file'), async (req, res) => } let { metadata: _metadata = '{}' } = req.body; - /** @type {{ openai: OpenAI }} */ - const { openai } = await initializeClient({ req, res }); + const { openai } = await getOpenAIClient({ req, res }); const image = await uploadImageBuffer({ req, @@ -266,6 +249,14 @@ router.post('/avatar/:assistant_id', upload.single('file'), async (req, res) => logger.error(message, error); res.status(500).json({ message }); } -}); +}; -module.exports = router; +module.exports = { + createAssistant, + retrieveAssistant, + patchAssistant, + deleteAssistant, + listAssistants, + getAssistantDocuments, + uploadAssistantAvatar, +}; diff --git a/api/server/controllers/assistants/v2.js b/api/server/controllers/assistants/v2.js new file mode 100644 index 00000000000..81f55607a72 --- /dev/null +++ b/api/server/controllers/assistants/v2.js @@ -0,0 +1,208 @@ +const { ToolCallTypes } = require('librechat-data-provider'); +const { validateAndUpdateTool } = require('~/server/services/ActionService'); +const { getOpenAIClient } = require('./helpers'); +const { logger } = require('~/config'); + +/** + * Create an assistant. + * @route POST /assistants + * @param {AssistantCreateParams} req.body - The assistant creation parameters. 
+ * @returns {Assistant} 201 - success response - application/json + */ +const createAssistant = async (req, res) => { + try { + /** @type {{ openai: OpenAIClient }} */ + const { openai } = await getOpenAIClient({ req, res }); + + const { tools = [], endpoint, ...assistantData } = req.body; + assistantData.tools = tools + .map((tool) => { + if (typeof tool !== 'string') { + return tool; + } + + return req.app.locals.availableTools[tool]; + }) + .filter((tool) => tool); + + let azureModelIdentifier = null; + if (openai.locals?.azureOptions) { + azureModelIdentifier = assistantData.model; + assistantData.model = openai.locals.azureOptions.azureOpenAIApiDeploymentName; + } + + assistantData.metadata = { + author: req.user.id, + endpoint, + }; + + const assistant = await openai.beta.assistants.create(assistantData); + if (azureModelIdentifier) { + assistant.model = azureModelIdentifier; + } + logger.debug('/assistants/', assistant); + res.status(201).json(assistant); + } catch (error) { + logger.error('[/assistants] Error creating assistant', error); + res.status(500).json({ error: error.message }); + } +}; + +/** + * Modifies an assistant. + * @param {object} params + * @param {Express.Request} params.req + * @param {OpenAIClient} params.openai + * @param {string} params.assistant_id + * @param {AssistantUpdateParams} params.updateData + * @returns {Promise} The updated assistant. + */ +const updateAssistant = async ({ req, openai, assistant_id, updateData }) => { + const tools = []; + + let hasFileSearch = false; + for (const tool of updateData.tools ?? []) { + let actualTool = typeof tool === 'string' ? req.app.locals.availableTools[tool] : tool; + + if (!actualTool) { + continue; + } + + if (actualTool.type === ToolCallTypes.FILE_SEARCH) { + hasFileSearch = true; + } + + if (!actualTool.function) { + tools.push(actualTool); + continue; + } + + const updatedTool = await validateAndUpdateTool({ req, tool: actualTool, assistant_id }); + if (updatedTool) { + tools.push(updatedTool); + } + } + + if (hasFileSearch && !updateData.tool_resources) { + const assistant = await openai.beta.assistants.retrieve(assistant_id); + updateData.tool_resources = assistant.tool_resources ?? null; + } + + if (hasFileSearch && !updateData.tool_resources?.file_search) { + updateData.tool_resources = { + ...(updateData.tool_resources ?? {}), + file_search: { + vector_store_ids: [], + }, + }; + } + + updateData.tools = tools; + + if (openai.locals?.azureOptions && updateData.model) { + updateData.model = openai.locals.azureOptions.azureOpenAIApiDeploymentName; + } + + return await openai.beta.assistants.update(assistant_id, updateData); +}; + +/** + * Modifies an assistant with the resource file id. + * @param {object} params + * @param {Express.Request} params.req + * @param {OpenAIClient} params.openai + * @param {string} params.assistant_id + * @param {string} params.tool_resource + * @param {string} params.file_id + * @param {AssistantUpdateParams} params.updateData + * @returns {Promise} The updated assistant. 
+ */ +const addResourceFileId = async ({ req, openai, assistant_id, tool_resource, file_id }) => { + const assistant = await openai.beta.assistants.retrieve(assistant_id); + const { tool_resources = {} } = assistant; + if (tool_resources[tool_resource]) { + tool_resources[tool_resource].file_ids.push(file_id); + } else { + tool_resources[tool_resource] = { file_ids: [file_id] }; + } + + delete assistant.id; + return await updateAssistant({ + req, + openai, + assistant_id, + updateData: { tools: assistant.tools, tool_resources }, + }); +}; + +/** + * Deletes a file ID from an assistant's resource. + * @param {object} params + * @param {Express.Request} params.req + * @param {OpenAIClient} params.openai + * @param {string} params.assistant_id + * @param {string} [params.tool_resource] + * @param {string} params.file_id + * @param {AssistantUpdateParams} params.updateData + * @returns {Promise} The updated assistant. + */ +const deleteResourceFileId = async ({ req, openai, assistant_id, tool_resource, file_id }) => { + const assistant = await openai.beta.assistants.retrieve(assistant_id); + const { tool_resources = {} } = assistant; + + if (tool_resource && tool_resources[tool_resource]) { + const resource = tool_resources[tool_resource]; + const index = resource.file_ids.indexOf(file_id); + if (index !== -1) { + resource.file_ids.splice(index, 1); + } + } else { + for (const resourceKey in tool_resources) { + const resource = tool_resources[resourceKey]; + const index = resource.file_ids.indexOf(file_id); + if (index !== -1) { + resource.file_ids.splice(index, 1); + break; + } + } + } + + delete assistant.id; + return await updateAssistant({ + req, + openai, + assistant_id, + updateData: { tools: assistant.tools, tool_resources }, + }); +}; + +/** + * Modifies an assistant. + * @route PATCH /assistants/:id + * @param {object} req - Express Request + * @param {object} req.params - Request params + * @param {string} req.params.id - Assistant identifier. + * @param {AssistantUpdateParams} req.body - The assistant update parameters. + * @returns {Assistant} 200 - success response - application/json + */ +const patchAssistant = async (req, res) => { + try { + const { openai } = await getOpenAIClient({ req, res }); + const assistant_id = req.params.id; + const { endpoint: _e, ...updateData } = req.body; + updateData.tools = updateData.tools ?? 
[]; + const updatedAssistant = await updateAssistant({ req, openai, assistant_id, updateData }); + res.json(updatedAssistant); + } catch (error) { + logger.error('[/assistants/:id] Error updating assistant', error); + res.status(500).json({ error: error.message }); + } +}; + +module.exports = { + patchAssistant, + createAssistant, + updateAssistant, + addResourceFileId, + deleteResourceFileId, +}; diff --git a/api/server/middleware/abortMiddleware.js b/api/server/middleware/abortMiddleware.js index a868b107b34..69df9619ccd 100644 --- a/api/server/middleware/abortMiddleware.js +++ b/api/server/middleware/abortMiddleware.js @@ -1,4 +1,4 @@ -const { EModelEndpoint } = require('librechat-data-provider'); +const { isAssistantsEndpoint } = require('librechat-data-provider'); const { sendMessage, sendError, countTokens, isEnabled } = require('~/server/utils'); const { truncateText, smartTruncateText } = require('~/app/clients/prompts'); const { saveMessage, getConvo, getConvoTitle } = require('~/models'); @@ -15,7 +15,7 @@ async function abortMessage(req, res) { abortKey = conversationId; } - if (endpoint === EModelEndpoint.assistants) { + if (isAssistantsEndpoint(endpoint)) { return await abortRun(req, res); } diff --git a/api/server/middleware/abortRun.js b/api/server/middleware/abortRun.js index 6db6329d44c..6522d6746de 100644 --- a/api/server/middleware/abortRun.js +++ b/api/server/middleware/abortRun.js @@ -10,7 +10,7 @@ const three_minutes = 1000 * 60 * 3; async function abortRun(req, res) { res.setHeader('Content-Type', 'application/json'); - const { abortKey } = req.body; + const { abortKey, endpoint } = req.body; const [conversationId, latestMessageId] = abortKey.split(':'); const conversation = await getConvo(req.user.id, conversationId); @@ -68,9 +68,10 @@ async function abortRun(req, res) { runMessages = await checkMessageGaps({ openai, - latestMessageId, + endpoint, thread_id, run_id, + latestMessageId, conversationId, }); diff --git a/api/server/middleware/buildEndpointOption.js b/api/server/middleware/buildEndpointOption.js index 3de13ed2e67..ddaaa35a326 100644 --- a/api/server/middleware/buildEndpointOption.js +++ b/api/server/middleware/buildEndpointOption.js @@ -1,5 +1,6 @@ const { parseConvo, EModelEndpoint } = require('librechat-data-provider'); const { getModelsConfig } = require('~/server/controllers/ModelController'); +const azureAssistants = require('~/server/services/Endpoints/azureAssistants'); const assistants = require('~/server/services/Endpoints/assistants'); const gptPlugins = require('~/server/services/Endpoints/gptPlugins'); const { processFiles } = require('~/server/services/Files/process'); @@ -18,6 +19,7 @@ const buildFunction = { [EModelEndpoint.anthropic]: anthropic.buildOptions, [EModelEndpoint.gptPlugins]: gptPlugins.buildOptions, [EModelEndpoint.assistants]: assistants.buildOptions, + [EModelEndpoint.azureAssistants]: azureAssistants.buildOptions, }; async function buildEndpointOption(req, res, next) { diff --git a/api/server/routes/assistants/actions.js b/api/server/routes/assistants/actions.js index 9cf47c86995..51503576128 100644 --- a/api/server/routes/assistants/actions.js +++ b/api/server/routes/assistants/actions.js @@ -2,7 +2,7 @@ const { v4 } = require('uuid'); const express = require('express'); const { encryptMetadata, domainParser } = require('~/server/services/ActionService'); const { actionDelimiter, EModelEndpoint } = require('librechat-data-provider'); -const { initializeClient } = require('~/server/services/Endpoints/assistants'); +const { 
getOpenAIClient } = require('~/server/controllers/assistants/helpers'); const { updateAction, getActions, deleteAction } = require('~/models/Action'); const { updateAssistant, getAssistant } = require('~/models/Assistant'); const { logger } = require('~/config'); @@ -45,7 +45,6 @@ router.post('/:assistant_id', async (req, res) => { let metadata = encryptMetadata(_metadata); let { domain } = metadata; - /* Azure doesn't support periods in function names */ domain = await domainParser(req, domain, true); if (!domain) { @@ -55,8 +54,7 @@ router.post('/:assistant_id', async (req, res) => { const action_id = _action_id ?? v4(); const initialPromises = []; - /** @type {{ openai: OpenAI }} */ - const { openai } = await initializeClient({ req, res }); + const { openai } = await getOpenAIClient({ req, res }); initialPromises.push(getAssistant({ assistant_id })); initialPromises.push(openai.beta.assistants.retrieve(assistant_id)); @@ -157,9 +155,7 @@ router.delete('/:assistant_id/:action_id/:model', async (req, res) => { try { const { assistant_id, action_id, model } = req.params; req.body.model = model; - - /** @type {{ openai: OpenAI }} */ - const { openai } = await initializeClient({ req, res }); + const { openai } = await getOpenAIClient({ req, res }); const initialPromises = []; initialPromises.push(getAssistant({ assistant_id })); diff --git a/api/server/routes/assistants/chatV1.js b/api/server/routes/assistants/chatV1.js new file mode 100644 index 00000000000..99de23c2014 --- /dev/null +++ b/api/server/routes/assistants/chatV1.js @@ -0,0 +1,25 @@ +const express = require('express'); + +const router = express.Router(); +const { + setHeaders, + handleAbort, + validateModel, + // validateEndpoint, + buildEndpointOption, +} = require('~/server/middleware'); +const chatController = require('~/server/controllers/assistants/chatV1'); + +router.post('/abort', handleAbort()); + +/** + * @route POST / + * @desc Chat with an assistant + * @access Public + * @param {express.Request} req - The request object, containing the request data. + * @param {express.Response} res - The response object, used to send back a response. + * @returns {void} + */ +router.post('/', validateModel, buildEndpointOption, setHeaders, chatController); + +module.exports = router; diff --git a/api/server/routes/assistants/chatV2.js b/api/server/routes/assistants/chatV2.js new file mode 100644 index 00000000000..e0ef2e0b200 --- /dev/null +++ b/api/server/routes/assistants/chatV2.js @@ -0,0 +1,25 @@ +const express = require('express'); + +const router = express.Router(); +const { + setHeaders, + handleAbort, + validateModel, + // validateEndpoint, + buildEndpointOption, +} = require('~/server/middleware'); +const chatController = require('~/server/controllers/assistants/chatV2'); + +router.post('/abort', handleAbort()); + +/** + * @route POST / + * @desc Chat with an assistant + * @access Public + * @param {express.Request} req - The request object, containing the request data. + * @param {express.Response} res - The response object, used to send back a response. 
+ * @returns {void} + */ +router.post('/', validateModel, buildEndpointOption, setHeaders, chatController); + +module.exports = router; diff --git a/api/server/routes/assistants/index.js b/api/server/routes/assistants/index.js index a47a768f9d5..6613177e7ba 100644 --- a/api/server/routes/assistants/index.js +++ b/api/server/routes/assistants/index.js @@ -7,16 +7,19 @@ const { // concurrentLimiter, // messageIpLimiter, // messageUserLimiter, -} = require('../../middleware'); +} = require('~/server/middleware'); -const assistants = require('./assistants'); -const chat = require('./chat'); +const v1 = require('./v1'); +const chatV1 = require('./chatV1'); +const v2 = require('./v2'); +const chatV2 = require('./chatV2'); router.use(requireJwtAuth); router.use(checkBan); router.use(uaParser); - -router.use('/', assistants); -router.use('/chat', chat); +router.use('/v1/', v1); +router.use('/v1/chat', chatV1); +router.use('/v2/', v2); +router.use('/v2/chat', chatV2); module.exports = router; diff --git a/api/server/routes/assistants/v1.js b/api/server/routes/assistants/v1.js new file mode 100644 index 00000000000..184450887ec --- /dev/null +++ b/api/server/routes/assistants/v1.js @@ -0,0 +1,81 @@ +const multer = require('multer'); +const express = require('express'); +const controllers = require('~/server/controllers/assistants/v1'); +const actions = require('./actions'); +const tools = require('./tools'); + +const upload = multer(); +const router = express.Router(); + +/** + * Assistant actions route. + * @route GET|POST /assistants/actions + */ +router.use('/actions', actions); + +/** + * Create an assistant. + * @route GET /assistants/tools + * @returns {TPlugin[]} 200 - application/json + */ +router.use('/tools', tools); + +/** + * Create an assistant. + * @route POST /assistants + * @param {AssistantCreateParams} req.body - The assistant creation parameters. + * @returns {Assistant} 201 - success response - application/json + */ +router.post('/', controllers.createAssistant); + +/** + * Retrieves an assistant. + * @route GET /assistants/:id + * @param {string} req.params.id - Assistant identifier. + * @returns {Assistant} 200 - success response - application/json + */ +router.get('/:id', controllers.retrieveAssistant); + +/** + * Modifies an assistant. + * @route PATCH /assistants/:id + * @param {string} req.params.id - Assistant identifier. + * @param {AssistantUpdateParams} req.body - The assistant update parameters. + * @returns {Assistant} 200 - success response - application/json + */ +router.patch('/:id', controllers.patchAssistant); + +/** + * Deletes an assistant. + * @route DELETE /assistants/:id + * @param {string} req.params.id - Assistant identifier. + * @returns {Assistant} 200 - success response - application/json + */ +router.delete('/:id', controllers.deleteAssistant); + +/** + * Returns a list of assistants. + * @route GET /assistants + * @param {AssistantListParams} req.query - The assistant list parameters for pagination and sorting. + * @returns {AssistantListResponse} 200 - success response - application/json + */ +router.get('/', controllers.listAssistants); + +/** + * Returns a list of the user's assistant documents (metadata saved to database). + * @route GET /assistants/documents + * @returns {AssistantDocument[]} 200 - success response - application/json + */ +router.get('/documents', controllers.getAssistantDocuments); + +/** + * Uploads and updates an avatar for a specific assistant. 
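// Illustrative sketch (not part of the patch): the index router above now namespaces the
// assistants endpoints by Assistants API version. Assuming the parent app mounts it at
// `/api/assistants` (that mount point is not shown in this patch), the resulting surface looks
// roughly like this:
const express = require('express');
const assistantsRouter = require('~/server/routes/assistants');

const app = express();
app.use('/api/assistants', assistantsRouter);
// POST /api/assistants/v1        -> v1 createAssistant
// POST /api/assistants/v1/chat   -> chatV1 controller
// POST /api/assistants/v2        -> v2 createAssistant
// POST /api/assistants/v2/chat   -> chatV2 controller
// plus the retrieve/patch/delete/list, documents, tools, actions, and avatar routes under each version prefix.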
+ * @route POST /avatar/:assistant_id + * @param {string} req.params.assistant_id - The ID of the assistant. + * @param {Express.Multer.File} req.file - The avatar image file. + * @param {string} [req.body.metadata] - Optional metadata for the assistant's avatar. + * @returns {Object} 200 - success response - application/json + */ +router.post('/avatar/:assistant_id', upload.single('file'), controllers.uploadAssistantAvatar); + +module.exports = router; diff --git a/api/server/routes/assistants/v2.js b/api/server/routes/assistants/v2.js new file mode 100644 index 00000000000..3c70c623a0a --- /dev/null +++ b/api/server/routes/assistants/v2.js @@ -0,0 +1,82 @@ +const multer = require('multer'); +const express = require('express'); +const v1 = require('~/server/controllers/assistants/v1'); +const v2 = require('~/server/controllers/assistants/v2'); +const actions = require('./actions'); +const tools = require('./tools'); + +const upload = multer(); +const router = express.Router(); + +/** + * Assistant actions route. + * @route GET|POST /assistants/actions + */ +router.use('/actions', actions); + +/** + * Create an assistant. + * @route GET /assistants/tools + * @returns {TPlugin[]} 200 - application/json + */ +router.use('/tools', tools); + +/** + * Create an assistant. + * @route POST /assistants + * @param {AssistantCreateParams} req.body - The assistant creation parameters. + * @returns {Assistant} 201 - success response - application/json + */ +router.post('/', v2.createAssistant); + +/** + * Retrieves an assistant. + * @route GET /assistants/:id + * @param {string} req.params.id - Assistant identifier. + * @returns {Assistant} 200 - success response - application/json + */ +router.get('/:id', v1.retrieveAssistant); + +/** + * Modifies an assistant. + * @route PATCH /assistants/:id + * @param {string} req.params.id - Assistant identifier. + * @param {AssistantUpdateParams} req.body - The assistant update parameters. + * @returns {Assistant} 200 - success response - application/json + */ +router.patch('/:id', v2.patchAssistant); + +/** + * Deletes an assistant. + * @route DELETE /assistants/:id + * @param {string} req.params.id - Assistant identifier. + * @returns {Assistant} 200 - success response - application/json + */ +router.delete('/:id', v1.deleteAssistant); + +/** + * Returns a list of assistants. + * @route GET /assistants + * @param {AssistantListParams} req.query - The assistant list parameters for pagination and sorting. + * @returns {AssistantListResponse} 200 - success response - application/json + */ +router.get('/', v1.listAssistants); + +/** + * Returns a list of the user's assistant documents (metadata saved to database). + * @route GET /assistants/documents + * @returns {AssistantDocument[]} 200 - success response - application/json + */ +router.get('/documents', v1.getAssistantDocuments); + +/** + * Uploads and updates an avatar for a specific assistant. + * @route POST /avatar/:assistant_id + * @param {string} req.params.assistant_id - The ID of the assistant. + * @param {Express.Multer.File} req.file - The avatar image file. + * @param {string} [req.body.metadata] - Optional metadata for the assistant's avatar. 
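// Illustrative usage (not part of the patch): the avatar routes above accept a single multipart
// file under the `file` field plus an optional `metadata` string. A browser-side call might look
// like the sketch below; the `/api/assistants/v2` prefix, the auth header, and the metadata
// payload are assumptions, not shown in this patch.
async function uploadAssistantAvatarExample(assistant_id, fileBlob, token) {
  const formData = new FormData();
  formData.append('file', fileBlob);
  formData.append('metadata', JSON.stringify({ source: 'user-upload' }));
  const res = await fetch(`/api/assistants/v2/avatar/${assistant_id}`, {
    method: 'POST',
    // requireJwtAuth is applied to every route in this router
    headers: { Authorization: `Bearer ${token}` },
    body: formData,
  });
  return res.json();
}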
+ * @returns {Object} 200 - success response - application/json + */ +router.post('/avatar/:assistant_id', upload.single('file'), v1.uploadAssistantAvatar); + +module.exports = router; diff --git a/api/server/routes/files/files.js b/api/server/routes/files/files.js index 812d4bd33d7..565893af3dc 100644 --- a/api/server/routes/files/files.js +++ b/api/server/routes/files/files.js @@ -1,6 +1,6 @@ const fs = require('fs').promises; const express = require('express'); -const { isUUID, FileSources } = require('librechat-data-provider'); +const { isUUID, checkOpenAIStorage } = require('librechat-data-provider'); const { filterFile, processFileUpload, @@ -89,7 +89,7 @@ router.get('/download/:userId/:file_id', async (req, res) => { return res.status(403).send('Forbidden'); } - if (file.source === FileSources.openai && !file.model) { + if (checkOpenAIStorage(file.source) && !file.model) { logger.warn(`${errorPrefix} has no associated model: ${file_id}`); return res.status(400).send('The model used when creating this file is not available'); } @@ -110,7 +110,8 @@ router.get('/download/:userId/:file_id', async (req, res) => { let passThrough; /** @type {ReadableStream | undefined} */ let fileStream; - if (file.source === FileSources.openai) { + + if (checkOpenAIStorage(file.source)) { req.body = { model: file.model }; const { openai } = await initializeClient({ req, res }); logger.debug(`Downloading file ${file_id} from OpenAI`); diff --git a/api/server/routes/search.js b/api/server/routes/search.js index 2197b38ce49..68cff7532b8 100644 --- a/api/server/routes/search.js +++ b/api/server/routes/search.js @@ -41,29 +41,10 @@ router.get('/', async function (req, res) { return; } - const messages = ( - await Message.meiliSearch( - q, - { - attributesToHighlight: ['text'], - highlightPreTag: '**', - highlightPostTag: '**', - }, - true, - ) - ).hits.map((message) => { - const { _formatted, ...rest } = message; - return { - ...rest, - searchResult: true, - text: _formatted.text, - }; - }); + const messages = (await Message.meiliSearch(q, undefined, true)).hits; const titles = (await Conversation.meiliSearch(q)).hits; + const sortedHits = reduceHits(messages, titles); - // debugging: - // logger.debug('user:', user, 'message hits:', messages.length, 'convo hits:', titles.length); - // logger.debug('sorted hits:', sortedHits.length); const result = await getConvosQueried(user, sortedHits, pageNumber); const activeMessages = []; @@ -86,8 +67,7 @@ router.get('/', async function (req, res) { delete result.cache; } delete result.convoMap; - // for debugging - // logger.debug(result, messages.length); + res.status(200).send(result); } catch (error) { logger.error('[/search] Error while searching messages & conversations', error); diff --git a/api/server/services/ActionService.js b/api/server/services/ActionService.js index 344a6570bab..6f832bce13c 100644 --- a/api/server/services/ActionService.js +++ b/api/server/services/ActionService.js @@ -1,20 +1,59 @@ const { - AuthTypeEnum, - EModelEndpoint, - actionDomainSeparator, CacheKeys, Constants, + AuthTypeEnum, + actionDelimiter, + isImageVisionTool, + actionDomainSeparator, } = require('librechat-data-provider'); const { encryptV2, decryptV2 } = require('~/server/utils/crypto'); -const { getActions } = require('~/models/Action'); +const { getActions, deleteActions } = require('~/models/Action'); +const { deleteAssistant } = require('~/models/Assistant'); const { getLogStores } = require('~/cache'); const { logger } = require('~/config'); +const toolNameRegex = 
/^[a-zA-Z0-9_-]+$/; + +/** + * Validates tool name against regex pattern and updates if necessary. + * @param {object} params - The parameters for the function. + * @param {object} params.req - Express Request. + * @param {FunctionTool} params.tool - The tool object. + * @param {string} params.assistant_id - The assistant ID. + * @returns {object|null} - Updated tool object or null if invalid and not an action. + */ +const validateAndUpdateTool = async ({ req, tool, assistant_id }) => { + let actions; + if (isImageVisionTool(tool)) { + return null; + } + if (!toolNameRegex.test(tool.function.name)) { + const [functionName, domain] = tool.function.name.split(actionDelimiter); + actions = await getActions({ assistant_id, user: req.user.id }, true); + const matchingActions = actions.filter((action) => { + const metadata = action.metadata; + return metadata && metadata.domain === domain; + }); + const action = matchingActions[0]; + if (!action) { + return null; + } + + const parsedDomain = await domainParser(req, domain, true); + + if (!parsedDomain) { + return null; + } + + tool.function.name = `${functionName}${actionDelimiter}${parsedDomain}`; + } + return tool; +}; + /** * Encodes or decodes a domain name to/from base64, or replacing periods with a custom separator. * - * Necessary because Azure OpenAI Assistants API doesn't support periods in function - * names due to `[a-zA-Z0-9_-]*` Regex Validation, limited to a 64-character maximum. + * Necessary due to `[a-zA-Z0-9_-]*` Regex Validation, limited to a 64-character maximum. * * @param {Express.Request} req - The Express Request object. * @param {string} domain - The domain name to encode/decode. @@ -26,10 +65,6 @@ async function domainParser(req, domain, inverse = false) { return; } - if (!req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) { - return domain; - } - const domainsCache = getLogStores(CacheKeys.ENCODED_DOMAINS); const cachedDomain = await domainsCache.get(domain); if (inverse && cachedDomain) { @@ -170,10 +205,29 @@ function decryptMetadata(metadata) { return decryptedMetadata; } +/** + * Deletes an action and its corresponding assistant. + * @param {Object} params - The parameters for the function. + * @param {Express.Request} params.req - The Express Request object. + * @param {string} params.assistant_id - The ID of the assistant.
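// Illustrative usage (not part of the patch): `validateAndUpdateTool` above returns `null` for
// the built-in image-vision tool and for action tools whose domain metadata can no longer be
// resolved; otherwise it returns the tool with its `function.name` re-encoded through
// `domainParser`. A caller reconciling an assistant's tools could therefore do something like
// this sketch — the surrounding `req`, `assistant_id`, and `tools` values are assumed.
const { validateAndUpdateTool } = require('~/server/services/ActionService');

async function reconcileActionTools(req, assistant_id, tools) {
  const checked = await Promise.all(
    tools.map((tool) => validateAndUpdateTool({ req, tool, assistant_id })),
  );
  // Keep only tools that validated (or were successfully renamed); `null` entries are dropped.
  return checked.filter(Boolean);
}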
+ */ +const deleteAssistantActions = async ({ req, assistant_id }) => { + try { + await deleteActions({ assistant_id, user: req.user.id }); + await deleteAssistant({ assistant_id, user: req.user.id }); + } catch (error) { + const message = 'Trouble deleting Assistant Actions for Assistant ID: ' + assistant_id; + logger.error(message, error); + throw new Error(message); + } +}; + module.exports = { - loadActionSets, + deleteAssistantActions, + validateAndUpdateTool, createActionTool, encryptMetadata, decryptMetadata, + loadActionSets, domainParser, }; diff --git a/api/server/services/ActionService.spec.js b/api/server/services/ActionService.spec.js index 57f99889613..a9650d60302 100644 --- a/api/server/services/ActionService.spec.js +++ b/api/server/services/ActionService.spec.js @@ -73,12 +73,12 @@ describe('domainParser', () => { const TLD = '.com'; // Non-azure request - it('returns domain as is if not azure', async () => { + it('does not return domain as is if not azure', async () => { const domain = `example.com${actionDomainSeparator}test${actionDomainSeparator}`; const result1 = await domainParser(reqNoAzure, domain, false); const result2 = await domainParser(reqNoAzure, domain, true); - expect(result1).toEqual(domain); - expect(result2).toEqual(domain); + expect(result1).not.toEqual(domain); + expect(result2).not.toEqual(domain); }); // Test for Empty or Null Inputs diff --git a/api/server/services/AppService.js b/api/server/services/AppService.js index 4163a3df87b..b4d35f1366c 100644 --- a/api/server/services/AppService.js +++ b/api/server/services/AppService.js @@ -72,7 +72,14 @@ const AppService = async (app) => { } if (config?.endpoints?.[EModelEndpoint.azureOpenAI]?.assistants) { - endpointLocals[EModelEndpoint.assistants] = azureAssistantsDefaults(); + endpointLocals[EModelEndpoint.azureAssistants] = azureAssistantsDefaults(); + } + + if (config?.endpoints?.[EModelEndpoint.azureAssistants]) { + endpointLocals[EModelEndpoint.azureAssistants] = assistantsConfigSetup( + config, + endpointLocals[EModelEndpoint.azureAssistants], + ); } if (config?.endpoints?.[EModelEndpoint.assistants]) { diff --git a/api/server/services/AppService.spec.js b/api/server/services/AppService.spec.js index e55bff99469..602ef43f837 100644 --- a/api/server/services/AppService.spec.js +++ b/api/server/services/AppService.spec.js @@ -253,8 +253,8 @@ describe('AppService', () => { process.env.EASTUS_API_KEY = 'eastus-key'; await AppService(app); - expect(app.locals).toHaveProperty(EModelEndpoint.assistants); - expect(app.locals[EModelEndpoint.assistants].capabilities.length).toEqual(3); + expect(app.locals).toHaveProperty(EModelEndpoint.azureAssistants); + expect(app.locals[EModelEndpoint.azureAssistants].capabilities.length).toEqual(3); }); it('should correctly configure Azure OpenAI endpoint based on custom config', async () => { diff --git a/api/server/services/AssistantService.js b/api/server/services/AssistantService.js index 41e88dc8bdb..2db0a56b6be 100644 --- a/api/server/services/AssistantService.js +++ b/api/server/services/AssistantService.js @@ -78,7 +78,7 @@ async function createOnTextProgress({ * @return {Promise} */ async function getResponse({ openai, run_id, thread_id }) { - const run = await waitForRun({ openai, run_id, thread_id, pollIntervalMs: 500 }); + const run = await waitForRun({ openai, run_id, thread_id, pollIntervalMs: 2000 }); if (run.status === RunStatus.COMPLETED) { const messages = await openai.beta.threads.messages.list(thread_id, defaultOrderQuery); @@ -393,8 +393,9 @@ async 
function runAssistant({ }, }); + const { endpoint = EModelEndpoint.azureAssistants } = openai.req.body; /** @type {TCustomConfig.endpoints.assistants} */ - const assistantsEndpointConfig = openai.req.app.locals?.[EModelEndpoint.assistants] ?? {}; + const assistantsEndpointConfig = openai.req.app.locals?.[endpoint] ?? {}; const { pollIntervalMs, timeoutMs } = assistantsEndpointConfig; const run = await waitForRun({ diff --git a/api/server/services/Config/EndpointService.js b/api/server/services/Config/EndpointService.js index 987fbb88517..438cb81e80a 100644 --- a/api/server/services/Config/EndpointService.js +++ b/api/server/services/Config/EndpointService.js @@ -3,6 +3,7 @@ const { isUserProvided, generateConfig } = require('~/server/utils'); const { OPENAI_API_KEY: openAIApiKey, + AZURE_ASSISTANTS_API_KEY: azureAssistantsApiKey, ASSISTANTS_API_KEY: assistantsApiKey, AZURE_API_KEY: azureOpenAIApiKey, ANTHROPIC_API_KEY: anthropicApiKey, @@ -13,6 +14,7 @@ const { OPENAI_REVERSE_PROXY, AZURE_OPENAI_BASEURL, ASSISTANTS_BASE_URL, + AZURE_ASSISTANTS_BASE_URL, } = process.env ?? {}; const useAzurePlugins = !!PLUGINS_USE_AZURE; @@ -28,11 +30,20 @@ module.exports = { useAzurePlugins, userProvidedOpenAI, googleKey, + [EModelEndpoint.bingAI]: generateConfig(bingToken), + [EModelEndpoint.anthropic]: generateConfig(anthropicApiKey), + [EModelEndpoint.chatGPTBrowser]: generateConfig(chatGPTToken), [EModelEndpoint.openAI]: generateConfig(openAIApiKey, OPENAI_REVERSE_PROXY), - [EModelEndpoint.assistants]: generateConfig(assistantsApiKey, ASSISTANTS_BASE_URL, true), [EModelEndpoint.azureOpenAI]: generateConfig(azureOpenAIApiKey, AZURE_OPENAI_BASEURL), - [EModelEndpoint.chatGPTBrowser]: generateConfig(chatGPTToken), - [EModelEndpoint.anthropic]: generateConfig(anthropicApiKey), - [EModelEndpoint.bingAI]: generateConfig(bingToken), + [EModelEndpoint.assistants]: generateConfig( + assistantsApiKey, + ASSISTANTS_BASE_URL, + EModelEndpoint.assistants, + ), + [EModelEndpoint.azureAssistants]: generateConfig( + azureAssistantsApiKey, + AZURE_ASSISTANTS_BASE_URL, + EModelEndpoint.azureAssistants, + ), }, }; diff --git a/api/server/services/Config/loadConfigEndpoints.js b/api/server/services/Config/loadConfigEndpoints.js index cd05cb9acb4..203a461b00e 100644 --- a/api/server/services/Config/loadConfigEndpoints.js +++ b/api/server/services/Config/loadConfigEndpoints.js @@ -53,7 +53,7 @@ async function loadConfigEndpoints(req) { if (req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) { /** @type {Omit} */ - endpointsConfig[EModelEndpoint.assistants] = { + endpointsConfig[EModelEndpoint.azureAssistants] = { userProvide: false, }; } diff --git a/api/server/services/Config/loadConfigModels.js b/api/server/services/Config/loadConfigModels.js index b3997a2ada0..cb0b800d740 100644 --- a/api/server/services/Config/loadConfigModels.js +++ b/api/server/services/Config/loadConfigModels.js @@ -30,7 +30,7 @@ async function loadConfigModels(req) { } if (azureEndpoint?.assistants && azureConfig.assistantModels) { - modelsConfig[EModelEndpoint.assistants] = azureConfig.assistantModels; + modelsConfig[EModelEndpoint.azureAssistants] = azureConfig.assistantModels; } if (!Array.isArray(endpoints[EModelEndpoint.custom])) { diff --git a/api/server/services/Config/loadDefaultEConfig.js b/api/server/services/Config/loadDefaultEConfig.js index 960dfb4c77a..379bd425015 100644 --- a/api/server/services/Config/loadDefaultEConfig.js +++ b/api/server/services/Config/loadDefaultEConfig.js @@ -9,13 +9,15 @@ const { config } = 
require('./EndpointService'); */ async function loadDefaultEndpointsConfig(req) { const { google, gptPlugins } = await loadAsyncEndpoints(req); - const { openAI, assistants, bingAI, anthropic, azureOpenAI, chatGPTBrowser } = config; + const { openAI, assistants, azureAssistants, bingAI, anthropic, azureOpenAI, chatGPTBrowser } = + config; const enabledEndpoints = getEnabledEndpoints(); const endpointConfig = { [EModelEndpoint.openAI]: openAI, [EModelEndpoint.assistants]: assistants, + [EModelEndpoint.azureAssistants]: azureAssistants, [EModelEndpoint.azureOpenAI]: azureOpenAI, [EModelEndpoint.google]: google, [EModelEndpoint.bingAI]: bingAI, diff --git a/api/server/services/Config/loadDefaultModels.js b/api/server/services/Config/loadDefaultModels.js index e0b2ca0e4f9..c550fbebbdd 100644 --- a/api/server/services/Config/loadDefaultModels.js +++ b/api/server/services/Config/loadDefaultModels.js @@ -25,6 +25,7 @@ async function loadDefaultModels(req) { plugins: true, }); const assistants = await getOpenAIModels({ assistants: true }); + const azureAssistants = await getOpenAIModels({ azureAssistants: true }); return { [EModelEndpoint.openAI]: openAI, @@ -35,6 +36,7 @@ async function loadDefaultModels(req) { [EModelEndpoint.bingAI]: ['BingAI', 'Sydney'], [EModelEndpoint.chatGPTBrowser]: chatGPTBrowser, [EModelEndpoint.assistants]: assistants, + [EModelEndpoint.azureAssistants]: azureAssistants, }; } diff --git a/api/server/services/Endpoints/assistants/index.js b/api/server/services/Endpoints/assistants/index.js index 10e94f2cd4f..772b1efb118 100644 --- a/api/server/services/Endpoints/assistants/index.js +++ b/api/server/services/Endpoints/assistants/index.js @@ -2,95 +2,8 @@ const addTitle = require('./addTitle'); const buildOptions = require('./buildOptions'); const initializeClient = require('./initializeClient'); -/** - * Asynchronously lists assistants based on provided query parameters. - * - * Initializes the client with the current request and response objects and lists assistants - * according to the query parameters. This function abstracts the logic for non-Azure paths. - * - * @async - * @param {object} params - The parameters object. - * @param {object} params.req - The request object, used for initializing the client. - * @param {object} params.res - The response object, used for initializing the client. - * @param {object} params.query - The query parameters to list assistants (e.g., limit, order). - * @returns {Promise} A promise that resolves to the response from the `openai.beta.assistants.list` method call. - */ -const listAssistants = async ({ req, res, query }) => { - const { openai } = await initializeClient({ req, res }); - return openai.beta.assistants.list(query); -}; - -/** - * Asynchronously lists assistants for Azure configured groups. - * - * Iterates through Azure configured assistant groups, initializes the client with the current request and response objects, - * lists assistants based on the provided query parameters, and merges their data alongside the model information into a single array. - * - * @async - * @param {object} params - The parameters object. - * @param {object} params.req - The request object, used for initializing the client and manipulating the request body. - * @param {object} params.res - The response object, used for initializing the client. - * @param {TAzureConfig} params.azureConfig - The Azure configuration object containing assistantGroups and groupMap. 
- * @param {object} params.query - The query parameters to list assistants (e.g., limit, order). - * @returns {Promise} A promise that resolves to an array of assistant data merged with their respective model information. - */ -const listAssistantsForAzure = async ({ req, res, azureConfig = {}, query }) => { - /** @type {Array<[string, TAzureModelConfig]>} */ - const groupModelTuples = []; - const promises = []; - /** @type {Array} */ - const groups = []; - - const { groupMap, assistantGroups } = azureConfig; - - for (const groupName of assistantGroups) { - const group = groupMap[groupName]; - groups.push(group); - - const currentModelTuples = Object.entries(group?.models); - groupModelTuples.push(currentModelTuples); - - /* The specified model is only necessary to - fetch assistants for the shared instance */ - req.body.model = currentModelTuples[0][0]; - promises.push(listAssistants({ req, res, query })); - } - - const resolvedQueries = await Promise.all(promises); - const data = resolvedQueries.flatMap((res, i) => - res.data.map((assistant) => { - const deploymentName = assistant.model; - const currentGroup = groups[i]; - const currentModelTuples = groupModelTuples[i]; - const firstModel = currentModelTuples[0][0]; - - if (currentGroup.deploymentName === deploymentName) { - return { ...assistant, model: firstModel }; - } - - for (const [model, modelConfig] of currentModelTuples) { - if (modelConfig.deploymentName === deploymentName) { - return { ...assistant, model }; - } - } - - return { ...assistant, model: firstModel }; - }), - ); - - return { - first_id: data[0]?.id, - last_id: data[data.length - 1]?.id, - object: 'list', - has_more: false, - data, - }; -}; - module.exports = { addTitle, buildOptions, initializeClient, - listAssistants, - listAssistantsForAzure, }; diff --git a/api/server/services/Endpoints/assistants/initializeClient.js b/api/server/services/Endpoints/assistants/initializeClient.js index c44bc66f310..5dadd54d118 100644 --- a/api/server/services/Endpoints/assistants/initializeClient.js +++ b/api/server/services/Endpoints/assistants/initializeClient.js @@ -1,11 +1,6 @@ const OpenAI = require('openai'); const { HttpsProxyAgent } = require('https-proxy-agent'); -const { - ErrorTypes, - EModelEndpoint, - resolveHeaders, - mapModelToAzureConfig, -} = require('librechat-data-provider'); +const { ErrorTypes, EModelEndpoint } = require('librechat-data-provider'); const { getUserKeyValues, getUserKeyExpiry, @@ -13,9 +8,8 @@ const { } = require('~/server/services/UserService'); const OpenAIClient = require('~/app/clients/OpenAIClient'); const { isUserProvided } = require('~/server/utils'); -const { constructAzureURL } = require('~/utils'); -const initializeClient = async ({ req, res, endpointOption, initAppClient = false }) => { +const initializeClient = async ({ req, res, endpointOption, version, initAppClient = false }) => { const { PROXY, OPENAI_ORGANIZATION, ASSISTANTS_API_KEY, ASSISTANTS_BASE_URL } = process.env; const userProvidesKey = isUserProvided(ASSISTANTS_API_KEY); @@ -34,7 +28,11 @@ const initializeClient = async ({ req, res, endpointOption, initAppClient = fals let apiKey = userProvidesKey ? userValues.apiKey : ASSISTANTS_API_KEY; let baseURL = userProvidesURL ? userValues.baseURL : ASSISTANTS_BASE_URL; - const opts = {}; + const opts = { + defaultHeaders: { + 'OpenAI-Beta': `assistants=${version}`, + }, + }; const clientOptions = { reverseProxyUrl: baseURL ?? 
null, @@ -44,54 +42,6 @@ const initializeClient = async ({ req, res, endpointOption, initAppClient = fals ...endpointOption, }; - /** @type {TAzureConfig | undefined} */ - const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI]; - - /** @type {AzureOptions | undefined} */ - let azureOptions; - - if (azureConfig && azureConfig.assistants) { - const { modelGroupMap, groupMap, assistantModels } = azureConfig; - const modelName = req.body.model ?? req.query.model ?? assistantModels[0]; - const { - azureOptions: currentOptions, - baseURL: azureBaseURL, - headers = {}, - serverless, - } = mapModelToAzureConfig({ - modelName, - modelGroupMap, - groupMap, - }); - - azureOptions = currentOptions; - - baseURL = constructAzureURL({ - baseURL: azureBaseURL ?? 'https://${INSTANCE_NAME}.openai.azure.com/openai', - azureOptions, - }); - - apiKey = azureOptions.azureOpenAIApiKey; - opts.defaultQuery = { 'api-version': azureOptions.azureOpenAIApiVersion }; - opts.defaultHeaders = resolveHeaders({ ...headers, 'api-key': apiKey }); - opts.model = azureOptions.azureOpenAIApiDeploymentName; - - if (initAppClient) { - clientOptions.titleConvo = azureConfig.titleConvo; - clientOptions.titleModel = azureConfig.titleModel; - clientOptions.titleMethod = azureConfig.titleMethod ?? 'completion'; - - const groupName = modelGroupMap[modelName].group; - clientOptions.addParams = azureConfig.groupMap[groupName].addParams; - clientOptions.dropParams = azureConfig.groupMap[groupName].dropParams; - clientOptions.forcePrompt = azureConfig.groupMap[groupName].forcePrompt; - - clientOptions.reverseProxyUrl = baseURL ?? clientOptions.reverseProxyUrl; - clientOptions.headers = opts.defaultHeaders; - clientOptions.azure = !serverless && azureOptions; - } - } - if (userProvidesKey & !apiKey) { throw new Error( JSON.stringify({ @@ -125,10 +75,6 @@ const initializeClient = async ({ req, res, endpointOption, initAppClient = fals openai.req = req; openai.res = res; - if (azureOptions) { - openai.locals = { ...(openai.locals ?? 
{}), azureOptions }; - } - if (endpointOption && initAppClient) { const client = new OpenAIClient(apiKey, clientOptions); return { diff --git a/api/server/services/Endpoints/azureAssistants/buildOptions.js b/api/server/services/Endpoints/azureAssistants/buildOptions.js new file mode 100644 index 00000000000..047663c4e53 --- /dev/null +++ b/api/server/services/Endpoints/azureAssistants/buildOptions.js @@ -0,0 +1,19 @@ +const buildOptions = (endpoint, parsedBody) => { + // eslint-disable-next-line no-unused-vars + const { promptPrefix, assistant_id, iconURL, greeting, spec, ...rest } = parsedBody; + const endpointOption = { + endpoint, + promptPrefix, + assistant_id, + iconURL, + greeting, + spec, + modelOptions: { + ...rest, + }, + }; + + return endpointOption; +}; + +module.exports = buildOptions; diff --git a/api/server/services/Endpoints/azureAssistants/index.js b/api/server/services/Endpoints/azureAssistants/index.js new file mode 100644 index 00000000000..39944683067 --- /dev/null +++ b/api/server/services/Endpoints/azureAssistants/index.js @@ -0,0 +1,7 @@ +const buildOptions = require('./buildOptions'); +const initializeClient = require('./initializeClient'); + +module.exports = { + buildOptions, + initializeClient, +}; diff --git a/api/server/services/Endpoints/azureAssistants/initializeClient.js b/api/server/services/Endpoints/azureAssistants/initializeClient.js new file mode 100644 index 00000000000..69a55c74bbb --- /dev/null +++ b/api/server/services/Endpoints/azureAssistants/initializeClient.js @@ -0,0 +1,195 @@ +const OpenAI = require('openai'); +const { HttpsProxyAgent } = require('https-proxy-agent'); +const { + ErrorTypes, + EModelEndpoint, + resolveHeaders, + mapModelToAzureConfig, +} = require('librechat-data-provider'); +const { + getUserKeyValues, + getUserKeyExpiry, + checkUserKeyExpiry, +} = require('~/server/services/UserService'); +const OpenAIClient = require('~/app/clients/OpenAIClient'); +const { isUserProvided } = require('~/server/utils'); +const { constructAzureURL } = require('~/utils'); + +class Files { + constructor(client) { + this._client = client; + } + /** + * Create an assistant file by attaching a + * [File](https://platform.openai.com/docs/api-reference/files) to an + * [assistant](https://platform.openai.com/docs/api-reference/assistants). + */ + create(assistantId, body, options) { + return this._client.post(`/assistants/${assistantId}/files`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers }, + }); + } + + /** + * Retrieves an AssistantFile. + */ + retrieve(assistantId, fileId, options) { + return this._client.get(`/assistants/${assistantId}/files/${fileId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers }, + }); + } + + /** + * Delete an assistant file. 
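// Illustrative usage (not part of the patch): the `Files` helper being added here restores the
// `openai.beta.assistants.files` sub-resource, which the file-upload and file-deletion code later
// in this patch relies on. Once the azureAssistants `initializeClient` (continued below) attaches
// it, a hedged sketch of its use looks like this; `assistant_id` and `file_id` are placeholders.
const initializeClient = require('~/server/services/Endpoints/azureAssistants/initializeClient');

async function attachAndDetachFile(req, res, assistant_id, file_id) {
  const { openai } = await initializeClient({ req, res, version: 'v1' });
  // POST /assistants/{assistant_id}/files with the `OpenAI-Beta: assistants=v1` header
  await openai.beta.assistants.files.create(assistant_id, { file_id });
  // DELETE /assistants/{assistant_id}/files/{file_id}
  await openai.beta.assistants.files.del(assistant_id, file_id);
}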
+ */ + del(assistantId, fileId, options) { + return this._client.delete(`/assistants/${assistantId}/files/${fileId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers }, + }); + } +} + +const initializeClient = async ({ req, res, version, endpointOption, initAppClient = false }) => { + const { PROXY, OPENAI_ORGANIZATION, AZURE_ASSISTANTS_API_KEY, AZURE_ASSISTANTS_BASE_URL } = + process.env; + + const userProvidesKey = isUserProvided(AZURE_ASSISTANTS_API_KEY); + const userProvidesURL = isUserProvided(AZURE_ASSISTANTS_BASE_URL); + + let userValues = null; + if (userProvidesKey || userProvidesURL) { + const expiresAt = await getUserKeyExpiry({ + userId: req.user.id, + name: EModelEndpoint.azureAssistants, + }); + checkUserKeyExpiry(expiresAt, EModelEndpoint.azureAssistants); + userValues = await getUserKeyValues({ + userId: req.user.id, + name: EModelEndpoint.azureAssistants, + }); + } + + let apiKey = userProvidesKey ? userValues.apiKey : AZURE_ASSISTANTS_API_KEY; + let baseURL = userProvidesURL ? userValues.baseURL : AZURE_ASSISTANTS_BASE_URL; + + const opts = {}; + + const clientOptions = { + reverseProxyUrl: baseURL ?? null, + proxy: PROXY ?? null, + req, + res, + ...endpointOption, + }; + + /** @type {TAzureConfig | undefined} */ + const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI]; + + /** @type {AzureOptions | undefined} */ + let azureOptions; + + if (azureConfig && azureConfig.assistants) { + const { modelGroupMap, groupMap, assistantModels } = azureConfig; + const modelName = req.body.model ?? req.query.model ?? assistantModels[0]; + const { + azureOptions: currentOptions, + baseURL: azureBaseURL, + headers = {}, + serverless, + } = mapModelToAzureConfig({ + modelName, + modelGroupMap, + groupMap, + }); + + azureOptions = currentOptions; + + baseURL = constructAzureURL({ + baseURL: azureBaseURL ?? 'https://${INSTANCE_NAME}.openai.azure.com/openai', + azureOptions, + }); + + apiKey = azureOptions.azureOpenAIApiKey; + opts.defaultQuery = { 'api-version': azureOptions.azureOpenAIApiVersion }; + opts.defaultHeaders = resolveHeaders({ + ...headers, + 'api-key': apiKey, + 'OpenAI-Beta': `assistants=${version}`, + }); + opts.model = azureOptions.azureOpenAIApiDeploymentName; + + if (initAppClient) { + clientOptions.titleConvo = azureConfig.titleConvo; + clientOptions.titleModel = azureConfig.titleModel; + clientOptions.titleMethod = azureConfig.titleMethod ?? 'completion'; + + const groupName = modelGroupMap[modelName].group; + clientOptions.addParams = azureConfig.groupMap[groupName].addParams; + clientOptions.dropParams = azureConfig.groupMap[groupName].dropParams; + clientOptions.forcePrompt = azureConfig.groupMap[groupName].forcePrompt; + + clientOptions.reverseProxyUrl = baseURL ?? clientOptions.reverseProxyUrl; + clientOptions.headers = opts.defaultHeaders; + clientOptions.azure = !serverless && azureOptions; + } + } + + if (userProvidesKey & !apiKey) { + throw new Error( + JSON.stringify({ + type: ErrorTypes.NO_USER_KEY, + }), + ); + } + + if (!apiKey) { + throw new Error('Assistants API key not provided. 
Please provide it again.'); + } + + if (baseURL) { + opts.baseURL = baseURL; + } + + if (PROXY) { + opts.httpAgent = new HttpsProxyAgent(PROXY); + } + + if (OPENAI_ORGANIZATION) { + opts.organization = OPENAI_ORGANIZATION; + } + + /** @type {OpenAIClient} */ + const openai = new OpenAI({ + apiKey, + ...opts, + }); + + openai.beta.assistants.files = new Files(openai); + + openai.req = req; + openai.res = res; + + if (azureOptions) { + openai.locals = { ...(openai.locals ?? {}), azureOptions }; + } + + if (endpointOption && initAppClient) { + const client = new OpenAIClient(apiKey, clientOptions); + return { + client, + openai, + openAIApiKey: apiKey, + }; + } + + return { + openai, + openAIApiKey: apiKey, + }; +}; + +module.exports = initializeClient; diff --git a/api/server/services/Endpoints/azureAssistants/initializeClient.spec.js b/api/server/services/Endpoints/azureAssistants/initializeClient.spec.js new file mode 100644 index 00000000000..6dc4a6d47a3 --- /dev/null +++ b/api/server/services/Endpoints/azureAssistants/initializeClient.spec.js @@ -0,0 +1,112 @@ +// const OpenAI = require('openai'); +const { HttpsProxyAgent } = require('https-proxy-agent'); +const { ErrorTypes } = require('librechat-data-provider'); +const { getUserKey, getUserKeyExpiry, getUserKeyValues } = require('~/server/services/UserService'); +const initializeClient = require('./initializeClient'); +// const { OpenAIClient } = require('~/app'); + +jest.mock('~/server/services/UserService', () => ({ + getUserKey: jest.fn(), + getUserKeyExpiry: jest.fn(), + getUserKeyValues: jest.fn(), + checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry, +})); + +const today = new Date(); +const tenDaysFromToday = new Date(today.setDate(today.getDate() + 10)); +const isoString = tenDaysFromToday.toISOString(); + +describe('initializeClient', () => { + // Set up environment variables + const originalEnvironment = process.env; + const app = { + locals: {}, + }; + + beforeEach(() => { + jest.resetModules(); // Clears the cache + process.env = { ...originalEnvironment }; // Make a copy + }); + + afterAll(() => { + process.env = originalEnvironment; // Restore original env vars + }); + + test('initializes OpenAI client with default API key and URL', async () => { + process.env.AZURE_ASSISTANTS_API_KEY = 'default-api-key'; + process.env.AZURE_ASSISTANTS_BASE_URL = 'https://default.api.url'; + + // Assuming 'isUserProvided' to return false for this test case + jest.mock('~/server/utils', () => ({ + isUserProvided: jest.fn().mockReturnValueOnce(false), + })); + + const req = { user: { id: 'user123' }, app }; + const res = {}; + + const { openai, openAIApiKey } = await initializeClient({ req, res }); + expect(openai.apiKey).toBe('default-api-key'); + expect(openAIApiKey).toBe('default-api-key'); + expect(openai.baseURL).toBe('https://default.api.url'); + }); + + test('initializes OpenAI client with user-provided API key and URL', async () => { + process.env.AZURE_ASSISTANTS_API_KEY = 'user_provided'; + process.env.AZURE_ASSISTANTS_BASE_URL = 'user_provided'; + + getUserKeyValues.mockResolvedValue({ apiKey: 'user-api-key', baseURL: 'https://user.api.url' }); + getUserKeyExpiry.mockResolvedValue(isoString); + + const req = { user: { id: 'user123' }, app }; + const res = {}; + + const { openai, openAIApiKey } = await initializeClient({ req, res }); + expect(openAIApiKey).toBe('user-api-key'); + expect(openai.apiKey).toBe('user-api-key'); + expect(openai.baseURL).toBe('https://user.api.url'); + }); + + 
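// Illustrative note (not part of the patch): as these specs exercise, user-provided credentials
// for the azureAssistants endpoint are stored as a JSON string and parsed back into
// `{ apiKey, baseURL }` by getUserKeyValues. A minimal sketch of the expected shape, with
// placeholder values:
const storedUserKey = JSON.stringify({
  apiKey: 'azure-api-key', // used when AZURE_ASSISTANTS_API_KEY=user_provided
  baseURL: 'https://example-resource.openai.azure.com/openai', // used when AZURE_ASSISTANTS_BASE_URL=user_provided
});
// Malformed JSON here is what surfaces as ErrorTypes.INVALID_USER_KEY in the next test.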
test('throws error for invalid JSON in user-provided values', async () => { + process.env.AZURE_ASSISTANTS_API_KEY = 'user_provided'; + getUserKey.mockResolvedValue('invalid-json'); + getUserKeyExpiry.mockResolvedValue(isoString); + getUserKeyValues.mockImplementation(() => { + let userValues = getUserKey(); + try { + userValues = JSON.parse(userValues); + } catch (e) { + throw new Error( + JSON.stringify({ + type: ErrorTypes.INVALID_USER_KEY, + }), + ); + } + return userValues; + }); + + const req = { user: { id: 'user123' } }; + const res = {}; + + await expect(initializeClient({ req, res })).rejects.toThrow(/invalid_user_key/); + }); + + test('throws error if API key is not provided', async () => { + delete process.env.AZURE_ASSISTANTS_API_KEY; // Simulate missing API key + + const req = { user: { id: 'user123' }, app }; + const res = {}; + + await expect(initializeClient({ req, res })).rejects.toThrow(/Assistants API key not/); + }); + + test('initializes OpenAI client with proxy configuration', async () => { + process.env.AZURE_ASSISTANTS_API_KEY = 'test-key'; + process.env.PROXY = 'http://proxy.server'; + + const req = { user: { id: 'user123' }, app }; + const res = {}; + + const { openai } = await initializeClient({ req, res }); + expect(openai.httpAgent).toBeInstanceOf(HttpsProxyAgent); + }); +}); diff --git a/api/server/services/Files/Firebase/crud.js b/api/server/services/Files/Firebase/crud.js index 43b5ec9b252..c4d1d05bf6b 100644 --- a/api/server/services/Files/Firebase/crud.js +++ b/api/server/services/Files/Firebase/crud.js @@ -180,7 +180,15 @@ const deleteFirebaseFile = async (req, file) => { if (!fileName.includes(req.user.id)) { throw new Error('Invalid file path'); } - await deleteFile('', fileName); + try { + await deleteFile('', fileName); + } catch (error) { + logger.error('Error deleting file from Firebase:', error); + if (error.code === 'storage/object-not-found') { + return; + } + throw error; + } }; /** diff --git a/api/server/services/Files/OpenAI/crud.js b/api/server/services/Files/OpenAI/crud.js index 346259e8215..881b2063b4a 100644 --- a/api/server/services/Files/OpenAI/crud.js +++ b/api/server/services/Files/OpenAI/crud.js @@ -14,9 +14,11 @@ const { logger } = require('~/config'); * @returns {Promise} */ async function uploadOpenAIFile({ req, file, openai }) { + const { height, width } = req.body; + const isImage = height && width; const uploadedFile = await openai.files.create({ file: fs.createReadStream(file.path), - purpose: FilePurpose.Assistants, + purpose: isImage ? FilePurpose.Vision : FilePurpose.Assistants, }); logger.debug( @@ -34,7 +36,7 @@ async function uploadOpenAIFile({ req, file, openai }) { await sleep(sleepTime); } - return uploadedFile; + return isImage ? 
{ ...uploadedFile, height, width } : uploadedFile; } /** diff --git a/api/server/services/Files/process.js b/api/server/services/Files/process.js index 7f91d481ae4..197fd160cfe 100644 --- a/api/server/services/Files/process.js +++ b/api/server/services/Files/process.js @@ -10,10 +10,13 @@ const { EModelEndpoint, mergeFileConfig, hostImageIdSuffix, + checkOpenAIStorage, hostImageNamePrefix, + isAssistantsEndpoint, } = require('librechat-data-provider'); +const { addResourceFileId, deleteResourceFileId } = require('~/server/controllers/assistants/v2'); const { convertImage, resizeAndConvert } = require('~/server/services/Files/images'); -const { initializeClient } = require('~/server/services/Endpoints/assistants'); +const { getOpenAIClient } = require('~/server/controllers/assistants/helpers'); const { createFile, updateFileUsage, deleteFiles } = require('~/models/File'); const { LB_QueueAsyncCall } = require('~/server/utils/queue'); const { getStrategyFunctions } = require('./strategies'); @@ -34,14 +37,16 @@ const processFiles = async (files) => { /** * Enqueues the delete operation to the leaky bucket queue if necessary, or adds it directly to promises. * - * @param {Express.Request} req - The express request object. - * @param {MongoFile} file - The file object to delete. - * @param {Function} deleteFile - The delete file function. - * @param {Promise[]} promises - The array of promises to await. - * @param {OpenAI | undefined} [openai] - If an OpenAI file, the initialized OpenAI client. + * @param {object} params - The passed parameters. + * @param {Express.Request} params.req - The express request object. + * @param {MongoFile} params.file - The file object to delete. + * @param {Function} params.deleteFile - The delete file function. + * @param {Promise[]} params.promises - The array of promises to await. + * @param {string[]} params.resolvedFileIds - The array of promises to await. + * @param {OpenAI | undefined} [params.openai] - If an OpenAI file, the initialized OpenAI client. */ -function enqueueDeleteOperation(req, file, deleteFile, promises, openai) { - if (file.source === FileSources.openai) { +function enqueueDeleteOperation({ req, file, deleteFile, promises, resolvedFileIds, openai }) { + if (checkOpenAIStorage(file.source)) { // Enqueue to leaky bucket promises.push( new Promise((resolve, reject) => { @@ -53,6 +58,7 @@ function enqueueDeleteOperation(req, file, deleteFile, promises, openai) { logger.error('Error deleting file from OpenAI source', err); reject(err); } else { + resolvedFileIds.push(file.file_id); resolve(result); } }, @@ -62,10 +68,12 @@ function enqueueDeleteOperation(req, file, deleteFile, promises, openai) { } else { // Add directly to promises promises.push( - deleteFile(req, file).catch((err) => { - logger.error('Error deleting file', err); - return Promise.reject(err); - }), + deleteFile(req, file) + .then(() => resolvedFileIds.push(file.file_id)) + .catch((err) => { + logger.error('Error deleting file', err); + return Promise.reject(err); + }), ); } } @@ -80,35 +88,71 @@ function enqueueDeleteOperation(req, file, deleteFile, promises, openai) { * @param {Express.Request} params.req - The express request object. * @param {DeleteFilesBody} params.req.body - The request body. * @param {string} [params.req.body.assistant_id] - The assistant ID if file uploaded is associated to an assistant. + * @param {string} [params.req.body.tool_resource] - The tool resource if assistant file uploaded is associated to a tool resource. 
* * @returns {Promise} */ const processDeleteRequest = async ({ req, files }) => { - const file_ids = files.map((file) => file.file_id); - + const resolvedFileIds = []; const deletionMethods = {}; const promises = []; - promises.push(deleteFiles(file_ids)); - /** @type {OpenAI | undefined} */ - let openai; - if (req.body.assistant_id) { - ({ openai } = await initializeClient({ req })); + /** @type {Record} */ + const client = { [FileSources.openai]: undefined, [FileSources.azure]: undefined }; + const initializeClients = async () => { + const openAIClient = await getOpenAIClient({ + req, + overrideEndpoint: EModelEndpoint.assistants, + }); + client[FileSources.openai] = openAIClient.openai; + + if (!req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) { + return; + } + + const azureClient = await getOpenAIClient({ + req, + overrideEndpoint: EModelEndpoint.azureAssistants, + }); + client[FileSources.azure] = azureClient.openai; + }; + + if (req.body.assistant_id !== undefined) { + await initializeClients(); } for (const file of files) { const source = file.source ?? FileSources.local; - if (source === FileSources.openai && !openai) { - ({ openai } = await initializeClient({ req })); + if (checkOpenAIStorage(source) && !client[source]) { + await initializeClients(); } - if (req.body.assistant_id) { + const openai = client[source]; + + if (req.body.assistant_id && req.body.tool_resource) { + promises.push( + deleteResourceFileId({ + req, + openai, + file_id: file.file_id, + assistant_id: req.body.assistant_id, + tool_resource: req.body.tool_resource, + }), + ); + } else if (req.body.assistant_id) { promises.push(openai.beta.assistants.files.del(req.body.assistant_id, file.file_id)); } if (deletionMethods[source]) { - enqueueDeleteOperation(req, file, deletionMethods[source], promises, openai); + enqueueDeleteOperation({ + req, + file, + deleteFile: deletionMethods[source], + promises, + resolvedFileIds, + openai, + }); continue; } @@ -118,10 +162,11 @@ const processDeleteRequest = async ({ req, files }) => { } deletionMethods[source] = deleteFile; - enqueueDeleteOperation(req, file, deleteFile, promises, openai); + enqueueDeleteOperation({ req, file, deleteFile, promises, resolvedFileIds, openai }); } await Promise.allSettled(promises); + await deleteFiles(resolvedFileIds); }; /** @@ -180,12 +225,13 @@ const processFileURL = async ({ fileStrategy, userId, URL, fileName, basePath, c * * @param {Object} params - The parameters object. * @param {Express.Request} params.req - The Express request object. - * @param {Express.Response} params.res - The Express response object. + * @param {Express.Response} [params.res] - The Express response object. * @param {Express.Multer.File} params.file - The uploaded file. * @param {ImageMetadata} params.metadata - Additional metadata for the file. + * @param {boolean} params.returnFile - Whether to return the file metadata or return response as normal. 
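// Illustrative note (not part of the patch): `processDeleteRequest` above now keeps one client per
// OpenAI-compatible storage source (FileSources.openai and FileSources.azure) and only deletes the
// database records for files whose remote deletion actually resolved — `deleteFiles(resolvedFileIds)`
// runs after `Promise.allSettled(promises)`. A hedged sketch of the inputs this flow expects;
// the field values (including the `code_interpreter` tool resource) are placeholders.
const exampleDeleteRequest = {
  body: {
    assistant_id: 'asst_abc123', // triggers initialization of both assistants clients
    tool_resource: 'code_interpreter', // optional: detach from a tool resource instead of the assistant's file list
  },
};
const exampleFiles = [
  { file_id: 'file_1', source: 'openai' },
  { file_id: 'file_2', source: 'azure' },
  { file_id: 'file_3', source: 'local' },
];
// await processDeleteRequest({ req: exampleDeleteRequest, files: exampleFiles });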
* @returns {Promise} */ -const processImageFile = async ({ req, res, file, metadata }) => { +const processImageFile = async ({ req, res, file, metadata, returnFile = false }) => { const source = req.app.locals.fileStrategy; const { handleImageUpload } = getStrategyFunctions(source); const { file_id, temp_file_id, endpoint } = metadata; @@ -213,6 +259,10 @@ const processImageFile = async ({ req, res, file, metadata }) => { }, true, ); + + if (returnFile) { + return result; + } res.status(200).json({ message: 'File uploaded and processed successfully', ...result }); }; @@ -274,28 +324,57 @@ const uploadImageBuffer = async ({ req, context, metadata = {}, resize = true }) * @returns {Promise} */ const processFileUpload = async ({ req, res, file, metadata }) => { - const isAssistantUpload = metadata.endpoint === EModelEndpoint.assistants; - const source = isAssistantUpload ? FileSources.openai : FileSources.vectordb; + const isAssistantUpload = isAssistantsEndpoint(metadata.endpoint); + const assistantSource = + metadata.endpoint === EModelEndpoint.azureAssistants ? FileSources.azure : FileSources.openai; + const source = isAssistantUpload ? assistantSource : FileSources.vectordb; const { handleFileUpload } = getStrategyFunctions(source); const { file_id, temp_file_id } = metadata; /** @type {OpenAI | undefined} */ let openai; - if (source === FileSources.openai) { - ({ openai } = await initializeClient({ req })); + if (checkOpenAIStorage(source)) { + ({ openai } = await getOpenAIClient({ req })); } - const { id, bytes, filename, filepath, embedded } = await handleFileUpload({ + const { + id, + bytes, + filename, + filepath: _filepath, + embedded, + height, + width, + } = await handleFileUpload({ req, file, file_id, openai, }); - if (isAssistantUpload && !metadata.message_file) { + if (isAssistantUpload && !metadata.message_file && !metadata.tool_resource) { await openai.beta.assistants.files.create(metadata.assistant_id, { file_id: id, }); + } else if (isAssistantUpload && !metadata.message_file) { + await addResourceFileId({ + req, + openai, + file_id: id, + assistant_id: metadata.assistant_id, + tool_resource: metadata.tool_resource, + }); + } + + let filepath = isAssistantUpload ? `${openai.baseURL}/files/${id}` : _filepath; + if (isAssistantUpload && file.mimetype.startsWith('image')) { + const result = await processImageFile({ + req, + file, + metadata: { file_id: v4() }, + returnFile: true, + }); + filepath = result.filepath; } const result = await createFile( @@ -304,13 +383,15 @@ const processFileUpload = async ({ req, res, file, metadata }) => { file_id: id ?? file_id, temp_file_id, bytes, + filepath, filename: filename ?? file.originalname, - filepath: isAssistantUpload ? `${openai.baseURL}/files/${id}` : filepath, context: isAssistantUpload ? FileContext.assistants : FileContext.message_attachment, model: isAssistantUpload ? req.body.model : undefined, type: file.mimetype, embedded, source, + height, + width, }, true, ); @@ -340,7 +421,10 @@ const processOpenAIFile = async ({ originalName ? `/${originalName}` : '' }`; const type = mime.getType(originalName ?? file_id); - + const source = + openai.req.body.endpoint === EModelEndpoint.azureAssistants + ? FileSources.azure + : FileSources.openai; const file = { ..._file, type, @@ -349,7 +433,7 @@ const processOpenAIFile = async ({ usage: 1, user: userId, context: _file.purpose, - source: FileSources.openai, + source, model: openai.req.body.model, filename: originalName ?? 
file_id, }; @@ -394,12 +478,14 @@ const processOpenAIImageOutput = async ({ req, buffer, file_id, filename, fileEx filename: `${hostImageNamePrefix}${filename}`, }; createFile(file, true); + const source = + req.body.endpoint === EModelEndpoint.azureAssistants ? FileSources.azure : FileSources.openai; createFile( { ...file, file_id, filename, - source: FileSources.openai, + source, type: mime.getType(fileExt), }, true, @@ -500,7 +586,12 @@ async function retrieveAndProcessFile({ * Filters a file based on its size and the endpoint origin. * * @param {Object} params - The parameters for the function. - * @param {Express.Request} params.req - The request object from Express. + * @param {object} params.req - The request object from Express. + * @param {string} [params.req.endpoint] + * @param {string} [params.req.file_id] + * @param {number} [params.req.width] + * @param {number} [params.req.height] + * @param {number} [params.req.version] * @param {Express.Multer.File} params.file - The file uploaded to the server via multer. * @param {boolean} [params.image] - Whether the file expected is an image. * @returns {void} diff --git a/api/server/services/Files/strategies.js b/api/server/services/Files/strategies.js index 96733e4037f..fa4e456fc9f 100644 --- a/api/server/services/Files/strategies.js +++ b/api/server/services/Files/strategies.js @@ -111,6 +111,8 @@ const getStrategyFunctions = (fileSource) => { return localStrategy(); } else if (fileSource === FileSources.openai) { return openAIStrategy(); + } else if (fileSource === FileSources.azure) { + return openAIStrategy(); } else if (fileSource === FileSources.vectordb) { return vectorStrategy(); } else { diff --git a/api/server/services/ModelService.js b/api/server/services/ModelService.js index 3c560b297b2..b6ca6e4f4bb 100644 --- a/api/server/services/ModelService.js +++ b/api/server/services/ModelService.js @@ -167,6 +167,8 @@ const getOpenAIModels = async (opts) => { if (opts.assistants) { models = defaultModels[EModelEndpoint.assistants]; + } else if (opts.azure) { + models = defaultModels[EModelEndpoint.azureAssistants]; } if (opts.plugins) { diff --git a/api/server/services/Runs/handle.js b/api/server/services/Runs/handle.js index 8b73b099eec..dd048219bb0 100644 --- a/api/server/services/Runs/handle.js +++ b/api/server/services/Runs/handle.js @@ -55,7 +55,7 @@ async function createRun({ openai, thread_id, body }) { * @param {string} params.run_id - The ID of the run to wait for. * @param {string} params.thread_id - The ID of the thread associated with the run. * @param {RunManager} params.runManager - The RunManager instance to manage run steps. - * @param {number} [params.pollIntervalMs=750] - The interval for polling the run status; default is 750 milliseconds. + * @param {number} [params.pollIntervalMs=2000] - The interval for polling the run status; default is 2000 milliseconds. * @param {number} [params.timeout=180000] - The period to wait until timing out polling; default is 3 minutes (in ms). * @return {Promise} A promise that resolves to the last fetched run object. 
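// Illustrative sketch (not part of the patch): the hunks above raise the default polling interval
// for run status checks from 750ms to 2000ms. Stripped of RunManager step handling and error
// handling, the polling loop behaves roughly like this sketch; with the 3-minute timeout that now
// means at most ~90 `runs.retrieve` calls per run instead of ~240.
async function waitForRunSketch({ openai, run_id, thread_id, pollIntervalMs = 2000, timeout = 180000 }) {
  let run;
  let timeElapsed = 0;
  while (timeElapsed < timeout) {
    run = await openai.beta.threads.runs.retrieve(thread_id, run_id);
    // Keep polling only while the run is still queued or in progress.
    if (run.status !== 'queued' && run.status !== 'in_progress') {
      break;
    }
    await new Promise((resolve) => setTimeout(resolve, pollIntervalMs));
    timeElapsed += pollIntervalMs;
  }
  return run;
}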
*/ @@ -64,7 +64,7 @@ async function waitForRun({ run_id, thread_id, runManager, - pollIntervalMs = 750, + pollIntervalMs = 2000, timeout = 60000 * 3, }) { let timeElapsed = 0; @@ -233,7 +233,7 @@ async function _handleRun({ openai, run_id, thread_id }) { run_id, thread_id, runManager, - pollIntervalMs: 750, + pollIntervalMs: 2000, timeout: 60000, }); const actions = []; diff --git a/api/server/services/Threads/manage.js b/api/server/services/Threads/manage.js index f875b108412..fb151cee92a 100644 --- a/api/server/services/Threads/manage.js +++ b/api/server/services/Threads/manage.js @@ -3,7 +3,6 @@ const { v4 } = require('uuid'); const { Constants, ContentTypes, - EModelEndpoint, AnnotationTypes, defaultOrderQuery, } = require('librechat-data-provider'); @@ -50,6 +49,7 @@ async function initThread({ openai, body, thread_id: _thread_id }) { * @param {string} params.assistant_id - The current assistant Id. * @param {string} params.thread_id - The thread Id. * @param {string} params.conversationId - The message's conversationId + * @param {string} params.endpoint - The conversation endpoint * @param {string} [params.parentMessageId] - Optional if initial message. * Defaults to Constants.NO_PARENT. * @param {string} [params.instructions] - Optional: from preset for `instructions` field. @@ -82,7 +82,7 @@ async function saveUserMessage(params) { const userMessage = { user: params.user, - endpoint: EModelEndpoint.assistants, + endpoint: params.endpoint, messageId: params.messageId, conversationId: params.conversationId, parentMessageId: params.parentMessageId ?? Constants.NO_PARENT, @@ -96,7 +96,7 @@ async function saveUserMessage(params) { }; const convo = { - endpoint: EModelEndpoint.assistants, + endpoint: params.endpoint, conversationId: params.conversationId, promptPrefix: params.promptPrefix, instructions: params.instructions, @@ -126,6 +126,7 @@ async function saveUserMessage(params) { * @param {string} params.model - The model used by the assistant. * @param {ContentPart[]} params.content - The message content parts. * @param {string} params.conversationId - The message's conversationId + * @param {string} params.endpoint - The conversation endpoint * @param {string} params.parentMessageId - The latest user message that triggered this response. * @param {string} [params.instructions] - Optional: from preset for `instructions` field. * Overrides the instructions of the assistant. @@ -145,7 +146,7 @@ async function saveAssistantMessage(params) { const message = await recordMessage({ user: params.user, - endpoint: EModelEndpoint.assistants, + endpoint: params.endpoint, messageId: params.messageId, conversationId: params.conversationId, parentMessageId: params.parentMessageId, @@ -160,7 +161,7 @@ async function saveAssistantMessage(params) { }); await saveConvo(params.user, { - endpoint: EModelEndpoint.assistants, + endpoint: params.endpoint, conversationId: params.conversationId, promptPrefix: params.promptPrefix, instructions: params.instructions, @@ -205,20 +206,22 @@ async function addThreadMetadata({ openai, thread_id, messageId, messages }) { * * @param {Object} params - The parameters for synchronizing messages. * @param {OpenAIClient} params.openai - The OpenAI client instance. + * @param {string} params.endpoint - The current endpoint. + * @param {string} params.thread_id - The current thread ID. * @param {TMessage[]} params.dbMessages - The LibreChat DB messages. * @param {ThreadMessage[]} params.apiMessages - The thread messages from the API. 
- * @param {string} params.conversationId - The current conversation ID. - * @param {string} params.thread_id - The current thread ID. * @param {string} [params.assistant_id] - The current assistant ID. + * @param {string} params.conversationId - The current conversation ID. * @return {Promise} A promise that resolves to the updated messages */ async function syncMessages({ openai, - apiMessages, - dbMessages, - conversationId, + endpoint, thread_id, + dbMessages, + apiMessages, assistant_id, + conversationId, }) { let result = []; let dbMessageMap = new Map(dbMessages.map((msg) => [msg.messageId, msg])); @@ -290,7 +293,7 @@ async function syncMessages({ thread_id, conversationId, messageId: v4(), - endpoint: EModelEndpoint.assistants, + endpoint, parentMessageId: lastMessage ? lastMessage.messageId : Constants.NO_PARENT, role: apiMessage.role, isCreatedByUser: apiMessage.role === 'user', @@ -382,13 +385,21 @@ function mapMessagesToSteps(steps, messages) { * * @param {Object} params - The parameters for initializing a thread. * @param {OpenAIClient} params.openai - The OpenAI client instance. + * @param {string} params.endpoint - The current endpoint. * @param {string} [params.latestMessageId] - Optional: The latest message ID from LibreChat. * @param {string} params.thread_id - Response thread ID. * @param {string} params.run_id - Response Run ID. * @param {string} params.conversationId - LibreChat conversation ID. * @return {Promise} A promise that resolves to the updated messages */ -async function checkMessageGaps({ openai, latestMessageId, thread_id, run_id, conversationId }) { +async function checkMessageGaps({ + openai, + endpoint, + latestMessageId, + thread_id, + run_id, + conversationId, +}) { const promises = []; promises.push(openai.beta.threads.messages.list(thread_id, defaultOrderQuery)); promises.push(openai.beta.threads.runs.steps.list(thread_id, run_id)); @@ -406,6 +417,7 @@ async function checkMessageGaps({ openai, latestMessageId, thread_id, run_id, co role: 'assistant', run_id, thread_id, + endpoint, metadata: { messageId: latestMessageId, }, @@ -452,11 +464,12 @@ async function checkMessageGaps({ openai, latestMessageId, thread_id, run_id, co const syncedMessages = await syncMessages({ openai, + endpoint, + thread_id, dbMessages, apiMessages, - thread_id, - conversationId, assistant_id, + conversationId, }); return Object.values( @@ -498,41 +511,62 @@ const recordUsage = async ({ }; /** - * Safely replaces the annotated text within the specified range denoted by start_index and end_index, - * after verifying that the text within that range matches the given annotation text. - * Proceeds with the replacement even if a mismatch is found, but logs a warning. + * Creates a replaceAnnotation function with internal state for tracking the index offset. * - * @param {string} originalText The original text content. - * @param {number} start_index The starting index where replacement should begin. - * @param {number} end_index The ending index where replacement should end. - * @param {string} expectedText The text expected to be found in the specified range. - * @param {string} replacementText The text to insert in place of the existing content. - * @returns {string} The text with the replacement applied, regardless of text match. + * @returns {function} The replaceAnnotation function with closure for index offset. 
*/ -function replaceAnnotation(originalText, start_index, end_index, expectedText, replacementText) { - if (start_index < 0 || end_index > originalText.length || start_index > end_index) { - logger.warn(`Invalid range specified for annotation replacement. - Attempting replacement with \`replace\` method instead... - length: ${originalText.length} - start_index: ${start_index} - end_index: ${end_index}`); - return originalText.replace(originalText, replacementText); - } +function createReplaceAnnotation() { + let indexOffset = 0; - const actualTextInRange = originalText.substring(start_index, end_index); + /** + * Safely replaces the annotated text within the specified range denoted by start_index and end_index, + * after verifying that the text within that range matches the given annotation text. + * Proceeds with the replacement even if a mismatch is found, but logs a warning. + * + * @param {object} params The original text content. + * @param {string} params.currentText The current text content, with/without replacements. + * @param {number} params.start_index The starting index where replacement should begin. + * @param {number} params.end_index The ending index where replacement should end. + * @param {string} params.expectedText The text expected to be found in the specified range. + * @param {string} params.replacementText The text to insert in place of the existing content. + * @returns {string} The text with the replacement applied, regardless of text match. + */ + function replaceAnnotation({ + currentText, + start_index, + end_index, + expectedText, + replacementText, + }) { + const adjustedStartIndex = start_index + indexOffset; + const adjustedEndIndex = end_index + indexOffset; + + if ( + adjustedStartIndex < 0 || + adjustedEndIndex > currentText.length || + adjustedStartIndex > adjustedEndIndex + ) { + logger.warn(`Invalid range specified for annotation replacement. + Attempting replacement with \`replace\` method instead... + length: ${currentText.length} + start_index: ${adjustedStartIndex} + end_index: ${adjustedEndIndex}`); + return currentText.replace(expectedText, replacementText); + } - if (actualTextInRange !== expectedText) { - logger.warn(`The text within the specified range does not match the expected annotation text. - Attempting replacement with \`replace\` method instead... 
- Expected: ${expectedText} - Actual: ${actualTextInRange}`); + if (currentText.substring(adjustedStartIndex, adjustedEndIndex) !== expectedText) { + return currentText.replace(expectedText, replacementText); + } - return originalText.replace(originalText, replacementText); + indexOffset += replacementText.length - (adjustedEndIndex - adjustedStartIndex); + return ( + currentText.slice(0, adjustedStartIndex) + + replacementText + + currentText.slice(adjustedEndIndex) + ); } - const beforeText = originalText.substring(0, start_index); - const afterText = originalText.substring(end_index); - return beforeText + replacementText + afterText; + return replaceAnnotation; } /** @@ -581,6 +615,11 @@ async function processMessages({ openai, client, messages = [] }) { continue; } + const originalText = currentText; + text += originalText; + + const replaceAnnotation = createReplaceAnnotation(); + logger.debug('[processMessages] Processing annotations:', annotations); for (const annotation of annotations) { let file; @@ -589,14 +628,16 @@ async function processMessages({ openai, client, messages = [] }) { const file_id = annotationType?.file_id; const alreadyProcessed = client.processedFileIds.has(file_id); - const replaceCurrentAnnotation = (replacement = '') => { - currentText = replaceAnnotation( + const replaceCurrentAnnotation = (replacementText = '') => { + const { start_index, end_index, text: expectedText } = annotation; + currentText = replaceAnnotation({ + originalText, currentText, - annotation.start_index, - annotation.end_index, - annotation.text, - replacement, - ); + start_index, + end_index, + expectedText, + replacementText, + }); edited = true; }; @@ -623,7 +664,7 @@ async function processMessages({ openai, client, messages = [] }) { replaceCurrentAnnotation(`^${sources.length}^`); } - text += currentText + ' '; + text = currentText; if (!file) { continue; diff --git a/api/server/services/start/assistants.js b/api/server/services/start/assistants.js index dfef99e5914..394d7d1a3e9 100644 --- a/api/server/services/start/assistants.js +++ b/api/server/services/start/assistants.js @@ -2,6 +2,7 @@ const { Capabilities, EModelEndpoint, assistantEndpointSchema, + defaultAssistantsVersion, } = require('librechat-data-provider'); const { logger } = require('~/config'); @@ -12,6 +13,7 @@ const { logger } = require('~/config'); function azureAssistantsDefaults() { return { capabilities: [Capabilities.tools, Capabilities.actions, Capabilities.code_interpreter], + version: defaultAssistantsVersion.azureAssistants, }; } diff --git a/api/server/services/start/azureOpenAI.js b/api/server/services/start/azureOpenAI.js index 3b5c446204f..565c8f691bf 100644 --- a/api/server/services/start/azureOpenAI.js +++ b/api/server/services/start/azureOpenAI.js @@ -41,6 +41,17 @@ function azureConfigSetup(config) { ); } + if ( + azureConfiguration.assistants && + process.env.ENDPOINTS && + !process.env.ENDPOINTS.includes(EModelEndpoint.azureAssistants) + ) { + logger.warn( + `Azure Assistants are configured, but the endpoint will not be accessible as it's not included in the ENDPOINTS environment variable. 
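// --- Illustrative sketch (not part of the patch above): why `createReplaceAnnotation` keeps an
// internal `indexOffset`. Annotation start/end indices refer to the original message text, so
// once one annotation is swapped for a citation marker of a different length, every later range
// must shift by the accumulated difference. Minimal standalone version of the same idea, using
// made-up placeholder markers:
function createOffsetReplacer() {
  let indexOffset = 0;
  return function replace({ currentText, start_index, end_index, expectedText, replacementText }) {
    const start = start_index + indexOffset;
    const end = end_index + indexOffset;
    const rangeIsValid = start >= 0 && end <= currentText.length && start <= end;
    if (!rangeIsValid || currentText.substring(start, end) !== expectedText) {
      // fall back to a plain string replace when the adjusted range no longer lines up
      return currentText.replace(expectedText, replacementText);
    }
    // record how much the text grew or shrank so the next annotation's indices stay aligned
    indexOffset += replacementText.length - (end - start);
    return currentText.slice(0, start) + replacementText + currentText.slice(end);
  };
}

const replaceExample = createOffsetReplacer();
let sampleText = 'See [cite-A] and [cite-B].';
// '[cite-A]' spans indices 4-12 and '[cite-B]' spans 17-25 in the ORIGINAL string
sampleText = replaceExample({
  currentText: sampleText,
  start_index: 4,
  end_index: 12,
  expectedText: '[cite-A]',
  replacementText: '^1^',
});
sampleText = replaceExample({
  currentText: sampleText,
  start_index: 17,
  end_index: 25,
  expectedText: '[cite-B]',
  replacementText: '^2^',
});
// Without the -5 offset from the first replacement, the second range would no longer match;
// with it, sampleText ends up as 'See ^1^ and ^2^.'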
+ Please add the value "${EModelEndpoint.azureAssistants}" to the ENDPOINTS list if expected.`, + ); + } + return { modelNames, modelGroupMap, diff --git a/api/server/utils/handleText.js b/api/server/utils/handleText.js index bfa37e279f9..70dc16b9382 100644 --- a/api/server/utils/handleText.js +++ b/api/server/utils/handleText.js @@ -1,4 +1,10 @@ -const { Capabilities, defaultRetrievalModels } = require('librechat-data-provider'); +const { + Capabilities, + EModelEndpoint, + isAssistantsEndpoint, + defaultRetrievalModels, + defaultAssistantsVersion, +} = require('librechat-data-provider'); const { getCitations, citeText } = require('./citations'); const partialRight = require('lodash/partialRight'); const { sendMessage } = require('./streamResponse'); @@ -154,9 +160,10 @@ const isUserProvided = (value) => value === 'user_provided'; * Generate the configuration for a given key and base URL. * @param {string} key * @param {string} baseURL + * @param {string} endpoint * @returns {boolean | { userProvide: boolean, userProvideURL?: boolean }} */ -function generateConfig(key, baseURL, assistants = false) { +function generateConfig(key, baseURL, endpoint) { if (!key) { return false; } @@ -168,6 +175,8 @@ function generateConfig(key, baseURL, assistants = false) { config.userProvideURL = isUserProvided(baseURL); } + const assistants = isAssistantsEndpoint(endpoint); + if (assistants) { config.retrievalModels = defaultRetrievalModels; config.capabilities = [ @@ -179,6 +188,12 @@ function generateConfig(key, baseURL, assistants = false) { ]; } + if (assistants && endpoint === EModelEndpoint.azureAssistants) { + config.version = defaultAssistantsVersion.azureAssistants; + } else if (assistants) { + config.version = defaultAssistantsVersion.assistants; + } + return config; } diff --git a/api/typedefs.js b/api/typedefs.js index f7970be4f3b..5c83cab1598 100644 --- a/api/typedefs.js +++ b/api/typedefs.js @@ -657,6 +657,12 @@ * @memberof typedefs */ +/** + * @exports OpenAISpecClient + * @typedef {import('./app/clients/OpenAIClient')} OpenAISpecClient + * @memberof typedefs + */ + /** * @exports ImportBatchBuilder * @typedef {import('./server/utils/import/importBatchBuilder.js').ImportBatchBuilder} ImportBatchBuilder diff --git a/client/src/common/assistants-types.ts b/client/src/common/assistants-types.ts index 3b9ad27da36..e4edf025e03 100644 --- a/client/src/common/assistants-types.ts +++ b/client/src/common/assistants-types.ts @@ -4,7 +4,11 @@ import type { Option, ExtendedFile } from './types'; export type TAssistantOption = | string - | (Option & Assistant & { files?: Array<[string, ExtendedFile]> }); + | (Option & + Assistant & { + files?: Array<[string, ExtendedFile]>; + code_files?: Array<[string, ExtendedFile]>; + }); export type Actions = { [Capabilities.code_interpreter]: boolean; diff --git a/client/src/common/types.ts b/client/src/common/types.ts index e574e90d89f..62aae7f14be 100644 --- a/client/src/common/types.ts +++ b/client/src/common/types.ts @@ -8,10 +8,12 @@ import type { TPreset, TPlugin, TMessage, + Assistant, TLoginUser, AuthTypeEnum, TConversation, EModelEndpoint, + AssistantsEndpoint, AuthorizationTypeEnum, TSetOption as SetOption, TokenExchangeMethodEnum, @@ -19,6 +21,13 @@ import type { import type { UseMutationResult } from '@tanstack/react-query'; import type { LucideIcon } from 'lucide-react'; +export type AssistantListItem = { + id: string; + name: string; + metadata: Assistant['metadata']; + model: string; +}; + export type TPluginMap = Record; export type GenericSetter = 
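// --- Illustrative sketch (not part of the patch above): with the endpoint now passed through,
// generateConfig tags assistants-capable endpoints with the API version they should use, falling
// back to the defaults shipped in librechat-data-provider. Rough shape of the outcome, assuming a
// key is configured (object contents abbreviated):
//
//   generateConfig(key, baseURL, EModelEndpoint.assistants)
//     -> { ..., retrievalModels, capabilities, version: defaultAssistantsVersion.assistants }
//   generateConfig(key, baseURL, EModelEndpoint.azureAssistants)
//     -> { ..., retrievalModels, capabilities, version: defaultAssistantsVersion.azureAssistants }
//
// And, as the warning above notes, the Azure flavor only becomes reachable when it is opted into
// the ENDPOINTS environment variable, e.g. (list values are just an example):
//
//   ENDPOINTS=openAI,azureOpenAI,assistants,azureAssistants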
(value: T | ((currentValue: T) => T)) => void; @@ -101,6 +110,8 @@ export type AssistantPanelProps = { actions?: Action[]; assistant_id?: string; activePanel?: string; + endpoint: AssistantsEndpoint; + version: number | string; setAction: React.Dispatch>; setCurrentAssistantId: React.Dispatch>; setActivePanel: React.Dispatch>; @@ -315,6 +326,7 @@ export type IconProps = Pick & iconURL?: string; message?: boolean; className?: string; + iconClassName?: string; endpoint?: EModelEndpoint | string | null; endpointType?: EModelEndpoint | null; assistantName?: string; @@ -327,7 +339,11 @@ export type Option = Record & { }; export type OptionWithIcon = Option & { icon?: React.ReactNode }; -export type MentionOption = OptionWithIcon & { type: string; value: string; description?: string }; +export type MentionOption = OptionWithIcon & { + type: string; + value: string; + description?: string; +}; export type TOptionSettings = { showExamples?: boolean; diff --git a/client/src/components/Chat/Input/ChatForm.tsx b/client/src/components/Chat/Input/ChatForm.tsx index f05fd72793e..f12284cc7fd 100644 --- a/client/src/components/Chat/Input/ChatForm.tsx +++ b/client/src/components/Chat/Input/ChatForm.tsx @@ -3,8 +3,8 @@ import { useForm } from 'react-hook-form'; import { memo, useCallback, useRef, useMemo } from 'react'; import { supportsFiles, - EModelEndpoint, mergeFileConfig, + isAssistantsEndpoint, fileConfig as defaultFileConfig, } from 'librechat-data-provider'; import { useChatContext, useAssistantsMapContext } from '~/Providers'; @@ -74,8 +74,9 @@ const ChatForm = ({ index = 0 }) => { const endpointFileConfig = fileConfig.endpoints[endpoint ?? '']; const invalidAssistant = useMemo( () => - conversation?.endpoint === EModelEndpoint.assistants && - (!conversation?.assistant_id || !assistantMap?.[conversation?.assistant_id ?? '']), + isAssistantsEndpoint(conversation?.endpoint) && + (!conversation?.assistant_id || + !assistantMap?.[conversation?.endpoint ?? '']?.[conversation?.assistant_id ?? '']), [conversation?.assistant_id, conversation?.endpoint, assistantMap], ); const disableInputs = useMemo( diff --git a/client/src/components/Chat/Input/Files/FilePreview.tsx b/client/src/components/Chat/Input/Files/FilePreview.tsx index 55c66b9a826..e1060e89785 100644 --- a/client/src/components/Chat/Input/Files/FilePreview.tsx +++ b/client/src/components/Chat/Input/Files/FilePreview.tsx @@ -2,6 +2,7 @@ import type { TFile } from 'librechat-data-provider'; import type { ExtendedFile } from '~/common'; import FileIcon from '~/components/svg/Files/FileIcon'; import ProgressCircle from './ProgressCircle'; +import SourceIcon from './SourceIcon'; import { useProgress } from '~/hooks'; import { cn } from '~/utils'; @@ -20,8 +21,7 @@ const FilePreview = ({ }) => { const radius = 55; // Radius of the SVG circle const circumference = 2 * Math.PI * radius; - const progress = useProgress(file?.['progress'] ?? 1, 0.001, file?.size ?? 1); - console.log(progress); + const progress = useProgress(file?.['progress'] ?? 1, 0.001, (file as ExtendedFile)?.size ?? 1); // Calculate the offset based on the loading progress const offset = circumference - progress * circumference; @@ -32,6 +32,7 @@ const FilePreview = ({ return (
+ {progress < 1 && ( >; fileFilter?: (file: ExtendedFile) => boolean; assistant_id?: string; + tool_resource?: EToolResources; Wrapper?: React.FC<{ children: React.ReactNode }>; }) { const files = Array.from(_files.values()).filter((file) => @@ -25,7 +28,8 @@ export default function FileRow({ ); const { mutateAsync } = useDeleteFilesMutation({ - onMutate: async () => console.log('Deleting files: assistant_id', assistant_id), + onMutate: async () => + console.log('Deleting files: assistant_id, tool_resource', assistant_id, tool_resource), onSuccess: () => { console.log('Files deleted'); }, @@ -34,7 +38,7 @@ export default function FileRow({ }, }); - const { deleteFile } = useFileDeletion({ mutateAsync, assistant_id }); + const { deleteFile } = useFileDeletion({ mutateAsync, assistant_id, tool_resource }); useEffect(() => { if (!files) { @@ -82,6 +86,7 @@ export default function FileRow({ url={file.preview} onDelete={handleDelete} progress={file.progress} + source={file.source} /> ); } diff --git a/client/src/components/Chat/Input/Files/FilesView.tsx b/client/src/components/Chat/Input/Files/FilesView.tsx index efd9ec2a824..8791e6c9155 100644 --- a/client/src/components/Chat/Input/Files/FilesView.tsx +++ b/client/src/components/Chat/Input/Files/FilesView.tsx @@ -12,16 +12,9 @@ export default function Files({ open, onOpenChange }) { const { data: files = [] } = useGetFiles({ select: (files) => files.map((file) => { - if (file.source === FileSources.local || file.source === FileSources.openai) { - file.context = file.context ?? FileContext.unknown; - return file; - } else { - return { - ...file, - context: file.context ?? FileContext.unknown, - source: FileSources.local, - }; - } + file.context = file.context ?? FileContext.unknown; + file.filterSource = file.source === FileSources.firebase ? FileSources.local : file.source; + return file; }), }); diff --git a/client/src/components/Chat/Input/Files/Image.tsx b/client/src/components/Chat/Input/Files/Image.tsx index 1cd13c83324..22c03b5373e 100644 --- a/client/src/components/Chat/Input/Files/Image.tsx +++ b/client/src/components/Chat/Input/Files/Image.tsx @@ -1,3 +1,4 @@ +import { FileSources } from 'librechat-data-provider'; import ImagePreview from './ImagePreview'; import RemoveFile from './RemoveFile'; @@ -6,16 +7,18 @@ const Image = ({ url, onDelete, progress = 1, + source = FileSources.local, }: { imageBase64?: string; url?: string; onDelete: () => void; progress: number; // between 0 and 1 + source?: FileSources; }) => { return (
-        <ImagePreview imageBase64={imageBase64} url={url} progress={progress} />
+        <ImagePreview imageBase64={imageBase64} url={url} progress={progress} source={source} />
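// --- Illustrative usage sketch (not part of the patch above): the attachment preview components
// now take an optional `source`, defaulting to local storage, so remotely stored files get a
// provider badge via SourceIcon. The import path and URL below are assumptions for illustration.
import { FileSources } from 'librechat-data-provider';
import Image from '~/components/Chat/Input/Files/Image';

export const AzureAttachmentPreview = () => (
  <Image
    url="https://example.com/preview.png" // placeholder preview URL
    progress={1}
    source={FileSources.azure} // omit to fall back to FileSources.local (no badge rendered)
    onDelete={() => console.log('remove attachment')}
  />
);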
diff --git a/client/src/components/Chat/Input/Files/ImagePreview.tsx b/client/src/components/Chat/Input/Files/ImagePreview.tsx index 47948123585..2876c2aef7b 100644 --- a/client/src/components/Chat/Input/Files/ImagePreview.tsx +++ b/client/src/components/Chat/Input/Files/ImagePreview.tsx @@ -1,4 +1,6 @@ +import { FileSources } from 'librechat-data-provider'; import ProgressCircle from './ProgressCircle'; +import SourceIcon from './SourceIcon'; import { cn } from '~/utils'; type styleProps = { @@ -13,11 +15,13 @@ const ImagePreview = ({ url, progress = 1, className = '', + source, }: { imageBase64?: string; url?: string; progress?: number; // between 0 and 1 className?: string; + source?: FileSources; }) => { let style: styleProps = { backgroundSize: 'cover', @@ -65,6 +69,7 @@ const ImagePreview = ({ circleCSSProperties={circleCSSProperties} /> )} +
); }; diff --git a/client/src/components/Chat/Input/Files/SourceIcon.tsx b/client/src/components/Chat/Input/Files/SourceIcon.tsx new file mode 100644 index 00000000000..23cc4d81663 --- /dev/null +++ b/client/src/components/Chat/Input/Files/SourceIcon.tsx @@ -0,0 +1,45 @@ +import { EModelEndpoint, FileSources } from 'librechat-data-provider'; +import { MinimalIcon } from '~/components/Endpoints'; +import { cn } from '~/utils'; + +const sourceToEndpoint = { + [FileSources.openai]: EModelEndpoint.openAI, + [FileSources.azure]: EModelEndpoint.azureOpenAI, +}; +const sourceToClassname = { + [FileSources.openai]: 'bg-black/65', + [FileSources.azure]: 'azure-bg-color opacity-85', +}; + +const defaultClassName = + 'absolute right-0 bottom-0 rounded-full p-[0.15rem] text-gray-600 transition-colors'; + +export default function SourceIcon({ + source, + className = defaultClassName, +}: { + source?: FileSources; + className?: string; +}) { + if (source === FileSources.local || source === FileSources.firebase) { + return null; + } + + const endpoint = sourceToEndpoint[source ?? '']; + + if (!endpoint) { + return null; + } + return ( + + ); +} diff --git a/client/src/components/Chat/Input/Files/Table/Columns.tsx b/client/src/components/Chat/Input/Files/Table/Columns.tsx index 5b53a06f46a..7284f293105 100644 --- a/client/src/components/Chat/Input/Files/Table/Columns.tsx +++ b/client/src/components/Chat/Input/Files/Table/Columns.tsx @@ -7,6 +7,7 @@ import ImagePreview from '~/components/Chat/Input/Files/ImagePreview'; import FilePreview from '~/components/Chat/Input/Files/FilePreview'; import { SortFilterHeader } from './SortFilterHeader'; import { OpenAIMinimalIcon } from '~/components/svg'; +import { AzureMinimalIcon } from '~/components/svg'; import { Button, Checkbox } from '~/components/ui'; import { formatDate, getFileType } from '~/utils'; import useLocalize from '~/hooks/useLocalize'; @@ -71,10 +72,11 @@ export const columns: ColumnDef[] = [ const file = row.original; if (file.type?.startsWith('image')) { return ( -
+
{file.filename}
@@ -84,7 +86,7 @@ export const columns: ColumnDef[] = [ const fileType = getFileType(file.type); return (
- {fileType && } + {fileType && } {file.filename}
); @@ -108,7 +110,7 @@ export const columns: ColumnDef[] = [ cell: ({ row }) => formatDate(row.original.updatedAt), }, { - accessorKey: 'source', + accessorKey: 'filterSource', header: ({ column }) => { const localize = useLocalize(); return ( @@ -117,10 +119,14 @@ export const columns: ColumnDef[] = [ title={localize('com_ui_storage')} filters={{ Storage: Object.values(FileSources).filter( - (value) => value === FileSources.local || value === FileSources.openai, + (value) => + value === FileSources.local || + value === FileSources.openai || + value === FileSources.azure, ), }} valueMap={{ + [FileSources.azure]: 'Azure', [FileSources.openai]: 'OpenAI', [FileSources.local]: 'com_ui_host', }} @@ -137,6 +143,13 @@ export const columns: ColumnDef[] = [ {'OpenAI'}
); + } else if (source === FileSources.azure) { + return ( +
+ + {'Azure'} +
+ ); } return (
diff --git a/client/src/components/Chat/Input/Files/Table/DataTable.tsx b/client/src/components/Chat/Input/Files/Table/DataTable.tsx index 347006b4846..1886ffc8750 100644 --- a/client/src/components/Chat/Input/Files/Table/DataTable.tsx +++ b/client/src/components/Chat/Input/Files/Table/DataTable.tsx @@ -48,7 +48,12 @@ const contextMap = { [FileContext.bytes]: 'com_ui_size', }; -type Style = { width?: number | string; maxWidth?: number | string; minWidth?: number | string }; +type Style = { + width?: number | string; + maxWidth?: number | string; + minWidth?: number | string; + zIndex?: number; +}; export default function DataTable({ columns, data }: DataTableProps) { const localize = useLocalize(); @@ -142,7 +147,7 @@ export default function DataTable({ columns, data }: DataTablePro {table.getHeaderGroups().map((headerGroup) => ( {headerGroup.headers.map((header, index) => { - const style: Style = { maxWidth: '32px', minWidth: '125px' }; + const style: Style = { maxWidth: '32px', minWidth: '125px', zIndex: 50 }; if (header.id === 'filename') { style.maxWidth = '50%'; style.width = '50%'; diff --git a/client/src/components/Chat/Input/Mention.tsx b/client/src/components/Chat/Input/Mention.tsx index 229dd5a5451..93fec743054 100644 --- a/client/src/components/Chat/Input/Mention.tsx +++ b/client/src/components/Chat/Input/Mention.tsx @@ -17,7 +17,9 @@ export default function Mention({ }) { const localize = useLocalize(); const assistantMap = useAssistantsMapContext(); - const { options, modelsConfig, assistants, onSelectMention } = useMentions({ assistantMap }); + const { options, modelsConfig, assistantListMap, onSelectMention } = useMentions({ + assistantMap, + }); const [activeIndex, setActiveIndex] = useState(0); const timeoutRef = useRef(null); @@ -47,7 +49,12 @@ export default function Mention({ if (mention.type === 'endpoint' && mention.value === EModelEndpoint.assistants) { setSearchValue(''); - setInputOptions(assistants); + setInputOptions(assistantListMap[EModelEndpoint.assistants]); + setActiveIndex(0); + inputRef.current?.focus(); + } else if (mention.type === 'endpoint' && mention.value === EModelEndpoint.azureAssistants) { + setSearchValue(''); + setInputOptions(assistantListMap[EModelEndpoint.azureAssistants]); setActiveIndex(0); inputRef.current?.focus(); } else if (mention.type === 'endpoint') { diff --git a/client/src/components/Chat/Landing.tsx b/client/src/components/Chat/Landing.tsx index 5e2392bfcfb..2202a28adf8 100644 --- a/client/src/components/Chat/Landing.tsx +++ b/client/src/components/Chat/Landing.tsx @@ -1,4 +1,4 @@ -import { EModelEndpoint } from 'librechat-data-provider'; +import { EModelEndpoint, isAssistantsEndpoint } from 'librechat-data-provider'; import { useGetEndpointsQuery, useGetStartupConfig } from 'librechat-data-provider/react-query'; import type { ReactNode } from 'react'; import { TooltipProvider, Tooltip, TooltipTrigger, TooltipContent } from '~/components/ui'; @@ -30,7 +30,8 @@ export default function Landing({ Header }: { Header?: ReactNode }) { const iconURL = conversation?.iconURL; endpoint = getIconEndpoint({ endpointsConfig, iconURL, endpoint }); - const assistant = endpoint === EModelEndpoint.assistants && assistantMap?.[assistant_id ?? '']; + const isAssistant = isAssistantsEndpoint(endpoint); + const assistant = isAssistant && assistantMap?.[endpoint]?.[assistant_id ?? 
'']; const assistantName = (assistant && assistant?.name) || ''; const assistantDesc = (assistant && assistant?.description) || ''; const avatar = (assistant && (assistant?.metadata?.avatar as string)) || ''; @@ -77,7 +78,7 @@ export default function Landing({ Header }: { Header?: ReactNode }) {
) : (
- {endpoint === EModelEndpoint.assistants + {isAssistant ? conversation?.greeting ?? localize('com_nav_welcome_assistant') : conversation?.greeting ?? localize('com_nav_welcome_message')}
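// --- Illustrative sketch (not part of the patch above): the assistants map consumed in ChatForm
// and Landing is now keyed by endpoint first and assistant_id second, so the `assistants` and
// `azureAssistants` endpoints keep separate entries. The map type below is an assumed shape
// inferred from those lookups:
import type { Assistant } from 'librechat-data-provider';

type AssistantsByEndpoint = Record<string, Record<string, Assistant | undefined> | undefined>;

export function findAssistant(
  map: AssistantsByEndpoint,
  endpoint?: string | null,
  assistant_id?: string | null,
): Assistant | undefined {
  if (!endpoint || !assistant_id) {
    return undefined;
  }
  // two-level lookup: first the endpoint bucket, then the assistant within it
  return map[endpoint]?.[assistant_id];
}

// e.g. findAssistant(assistantMap, 'azureAssistants', 'asst_123') — both arguments hypothetical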
diff --git a/client/src/components/Chat/Menus/Endpoints/Icons.tsx b/client/src/components/Chat/Menus/Endpoints/Icons.tsx index 4a700bd9777..4e88cceef47 100644 --- a/client/src/components/Chat/Menus/Endpoints/Icons.tsx +++ b/client/src/components/Chat/Menus/Endpoints/Icons.tsx @@ -15,6 +15,24 @@ import { import UnknownIcon from './UnknownIcon'; import { cn } from '~/utils'; +const AssistantAvatar = ({ className = '', assistantName, avatar, size }: IconMapProps) => { + if (assistantName && avatar) { + return ( + {assistantName} + ); + } else if (assistantName) { + return ; + } + + return ; +}; + export const icons = { [EModelEndpoint.azureOpenAI]: AzureMinimalIcon, [EModelEndpoint.openAI]: GPTIcon, @@ -24,22 +42,7 @@ export const icons = { [EModelEndpoint.google]: GoogleMinimalIcon, [EModelEndpoint.bingAI]: BingAIMinimalIcon, [EModelEndpoint.custom]: CustomMinimalIcon, - [EModelEndpoint.assistants]: ({ className = '', assistantName, avatar, size }: IconMapProps) => { - if (assistantName && avatar) { - return ( - {assistantName} - ); - } else if (assistantName) { - return ; - } - - return ; - }, + [EModelEndpoint.assistants]: AssistantAvatar, + [EModelEndpoint.azureAssistants]: AssistantAvatar, unknown: UnknownIcon, }; diff --git a/client/src/components/Chat/Menus/EndpointsMenu.tsx b/client/src/components/Chat/Menus/EndpointsMenu.tsx index 6d73c80e721..ab5eb4633ea 100644 --- a/client/src/components/Chat/Menus/EndpointsMenu.tsx +++ b/client/src/components/Chat/Menus/EndpointsMenu.tsx @@ -1,5 +1,5 @@ import { Content, Portal, Root } from '@radix-ui/react-popover'; -import { alternateName, EModelEndpoint } from 'librechat-data-provider'; +import { alternateName, isAssistantsEndpoint } from 'librechat-data-provider'; import { useGetEndpointsQuery } from 'librechat-data-provider/react-query'; import type { FC } from 'react'; import { useChatContext, useAssistantsMapContext } from '~/Providers'; @@ -16,7 +16,8 @@ const EndpointsMenu: FC = () => { const { endpoint = '', assistant_id = null } = conversation ?? {}; const assistantMap = useAssistantsMapContext(); - const assistant = endpoint === EModelEndpoint.assistants && assistantMap?.[assistant_id ?? '']; + const assistant = + isAssistantsEndpoint(endpoint) && assistantMap?.[endpoint ?? '']?.[assistant_id ?? '']; const assistantName = (assistant && assistant?.name) || 'Assistant'; if (!endpoint) { diff --git a/client/src/components/Chat/Messages/Content/CodeAnalyze.tsx b/client/src/components/Chat/Messages/Content/CodeAnalyze.tsx index c0ab640dd43..492d86d6248 100644 --- a/client/src/components/Chat/Messages/Content/CodeAnalyze.tsx +++ b/client/src/components/Chat/Messages/Content/CodeAnalyze.tsx @@ -1,6 +1,7 @@ import { useState } from 'react'; import { useRecoilValue } from 'recoil'; import ProgressCircle from './ProgressCircle'; +import CancelledIcon from './CancelledIcon'; import ProgressText from './ProgressText'; import FinishedIcon from './FinishedIcon'; import MarkdownLite from './MarkdownLite'; @@ -11,10 +12,12 @@ export default function CodeAnalyze({ initialProgress = 0.1, code, outputs = [], + isSubmitting, }: { initialProgress: number; code: string; outputs: Record[]; + isSubmitting: boolean; }) { const showCodeDefault = useRecoilValue(store.showCode); const [showCode, setShowCode] = useState(showCodeDefault); @@ -35,7 +38,13 @@ export default function CodeAnalyze({
{progress < 1 ? ( - + ) : ( )} @@ -74,18 +83,25 @@ const CodeInProgress = ({ offset, circumference, radius, + isSubmitting, + progress, }: { + progress: number; offset: number; circumference: number; radius: number; + isSubmitting: boolean; }) => { + if (progress < 1 && !isSubmitting) { + return ; + } return (
-
+
); } else if ( part.type === ContentTypes.TOOL_CALL && - part[ContentTypes.TOOL_CALL].type === ToolCallTypes.RETRIEVAL + (part[ContentTypes.TOOL_CALL].type === ToolCallTypes.RETRIEVAL || + part[ContentTypes.TOOL_CALL].type === ToolCallTypes.FILE_SEARCH) ) { const toolCall = part[ContentTypes.TOOL_CALL]; return ; diff --git a/client/src/components/Chat/Messages/HoverButtons.tsx b/client/src/components/Chat/Messages/HoverButtons.tsx index 7a593202b42..35fa10df783 100644 --- a/client/src/components/Chat/Messages/HoverButtons.tsx +++ b/client/src/components/Chat/Messages/HoverButtons.tsx @@ -1,5 +1,4 @@ import { useState } from 'react'; -import { EModelEndpoint } from 'librechat-data-provider'; import type { TConversation, TMessage } from 'librechat-data-provider'; import { Clipboard, CheckMark, EditIcon, RegenerateIcon, ContinueIcon } from '~/components/svg'; import { useGenerationsByLatest, useLocalize } from '~/hooks'; @@ -35,14 +34,19 @@ export default function HoverButtons({ const { endpoint: _endpoint, endpointType } = conversation ?? {}; const endpoint = endpointType ?? _endpoint; const [isCopied, setIsCopied] = useState(false); - const { hideEditButton, regenerateEnabled, continueSupported, forkingSupported } = - useGenerationsByLatest({ - isEditing, - isSubmitting, - message, - endpoint: endpoint ?? '', - latestMessage, - }); + const { + hideEditButton, + regenerateEnabled, + continueSupported, + forkingSupported, + isEditableEndpoint, + } = useGenerationsByLatest({ + isEditing, + isSubmitting, + message, + endpoint: endpoint ?? '', + latestMessage, + }); if (!conversation) { return null; } @@ -58,7 +62,7 @@ export default function HoverButtons({ return (
- {endpoint !== EModelEndpoint.assistants && ( + {isEditableEndpoint && ( +
+
+
+ ); +} diff --git a/client/src/components/SidePanel/Builder/ContextButton.tsx b/client/src/components/SidePanel/Builder/ContextButton.tsx index 9e6102dd995..7e7ba4d0ed0 100644 --- a/client/src/components/SidePanel/Builder/ContextButton.tsx +++ b/client/src/components/SidePanel/Builder/ContextButton.tsx @@ -1,26 +1,29 @@ import * as Popover from '@radix-ui/react-popover'; -import type { Assistant, AssistantCreateParams } from 'librechat-data-provider'; +import type { Assistant, AssistantCreateParams, AssistantsEndpoint } from 'librechat-data-provider'; import type { UseMutationResult } from '@tanstack/react-query'; import { Dialog, DialogTrigger, Label } from '~/components/ui'; -import DialogTemplate from '~/components/ui/DialogTemplate'; +import { useChatContext, useToastContext } from '~/Providers'; import { useDeleteAssistantMutation } from '~/data-provider'; +import DialogTemplate from '~/components/ui/DialogTemplate'; import { useLocalize, useSetIndexOptions } from '~/hooks'; import { cn, removeFocusOutlines } from '~/utils/'; import { NewTrashIcon } from '~/components/svg'; -import { useChatContext } from '~/Providers'; export default function ContextButton({ activeModel, assistant_id, setCurrentAssistantId, createMutation, + endpoint, }: { activeModel: string; assistant_id: string; setCurrentAssistantId: React.Dispatch>; createMutation: UseMutationResult; + endpoint: AssistantsEndpoint; }) { const localize = useLocalize(); + const { showToast } = useToastContext(); const { conversation } = useChatContext(); const { setOption } = useSetIndexOptions(); @@ -31,6 +34,11 @@ export default function ContextButton({ return; } + showToast({ + message: localize('com_ui_assistant_deleted'), + status: 'success', + }); + if (createMutation.data?.id) { console.log('[deleteAssistant] resetting createMutation'); createMutation.reset(); @@ -55,6 +63,13 @@ export default function ContextButton({ setCurrentAssistantId(firstAssistant.id); }, + onError: (error) => { + console.error(error); + showToast({ + message: localize('com_ui_assistant_delete_error'), + status: 'error', + }); + }, }); if (!assistant_id) { @@ -138,7 +153,8 @@ export default function ContextButton({ } selection={{ - selectHandler: () => deleteAssistant.mutate({ assistant_id, model: activeModel }), + selectHandler: () => + deleteAssistant.mutate({ assistant_id, model: activeModel, endpoint }), selectClasses: 'bg-red-600 hover:bg-red-700 dark:hover:bg-red-800 text-white', selectText: localize('com_ui_delete'), }} diff --git a/client/src/components/SidePanel/Builder/ImageVision.tsx b/client/src/components/SidePanel/Builder/ImageVision.tsx new file mode 100644 index 00000000000..865743cbb99 --- /dev/null +++ b/client/src/components/SidePanel/Builder/ImageVision.tsx @@ -0,0 +1,43 @@ +import { useFormContext, Controller } from 'react-hook-form'; +import { Capabilities } from 'librechat-data-provider'; +import type { AssistantForm } from '~/common'; +import { Checkbox, QuestionMark } from '~/components/ui'; +import { useLocalize } from '~/hooks'; + +export default function ImageVision() { + const localize = useLocalize(); + const methods = useFormContext(); + const { control, setValue, getValues } = methods; + + return ( +
+ ( + + )} + /> + +
+ ); +} diff --git a/client/src/components/SidePanel/Builder/Images.tsx b/client/src/components/SidePanel/Builder/Images.tsx index 8201ed0eeb5..502ec2f907d 100644 --- a/client/src/components/SidePanel/Builder/Images.tsx +++ b/client/src/components/SidePanel/Builder/Images.tsx @@ -41,10 +41,10 @@ export const AssistantAvatar = ({ return (
-
+
GPT { }; export default function Knowledge({ + endpoint, assistant_id, files: _files, }: { + endpoint: AssistantsEndpoint; assistant_id: string; files?: [string, ExtendedFile][]; }) { @@ -40,7 +42,7 @@ export default function Knowledge({ select: (data) => mergeFileConfig(data), }); const { handleFileChange } = useFileHandling({ - overrideEndpoint: EModelEndpoint.assistants, + overrideEndpoint: endpoint, additionalMetadata: { assistant_id }, fileSetter: setFiles, }); @@ -51,7 +53,7 @@ export default function Knowledge({ } }, [_files]); - const endpointFileConfig = fileConfig.endpoints[EModelEndpoint.assistants]; + const endpointFileConfig = fileConfig.endpoints[endpoint]; if (endpointFileConfig?.disabled) { return null; diff --git a/client/src/components/SidePanel/Builder/PanelSwitch.tsx b/client/src/components/SidePanel/Builder/PanelSwitch.tsx index ad7b9a48498..1dddb4a9a21 100644 --- a/client/src/components/SidePanel/Builder/PanelSwitch.tsx +++ b/client/src/components/SidePanel/Builder/PanelSwitch.tsx @@ -1,5 +1,7 @@ -import { useState, useEffect } from 'react'; -import type { Action } from 'librechat-data-provider'; +import { useState, useEffect, useMemo } from 'react'; +import { defaultAssistantsVersion } from 'librechat-data-provider'; +import { useGetEndpointsQuery } from 'librechat-data-provider/react-query'; +import type { Action, AssistantsEndpoint, TEndpointsConfig } from 'librechat-data-provider'; import { useGetActionsQuery } from '~/data-provider'; import AssistantPanel from './AssistantPanel'; import { useChatContext } from '~/Providers'; @@ -9,11 +11,18 @@ import { Panel } from '~/common'; export default function PanelSwitch() { const { conversation, index } = useChatContext(); const [activePanel, setActivePanel] = useState(Panel.builder); + const [action, setAction] = useState(undefined); const [currentAssistantId, setCurrentAssistantId] = useState( conversation?.assistant_id, ); - const [action, setAction] = useState(undefined); - const { data: actions = [] } = useGetActionsQuery(); + + const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery(); + const { data: actions = [] } = useGetActionsQuery(conversation?.endpoint as AssistantsEndpoint); + + const assistantsConfig = useMemo( + () => endpointsConfig?.[conversation?.endpoint ?? ''], + [conversation?.endpoint, endpointsConfig], + ); useEffect(() => { if (conversation?.assistant_id) { @@ -21,6 +30,12 @@ export default function PanelSwitch() { } }, [conversation?.assistant_id]); + if (!conversation?.endpoint) { + return null; + } + + const version = assistantsConfig?.version ?? 
defaultAssistantsVersion[conversation.endpoint]; + if (activePanel === Panel.actions || action) { return ( ); } else if (activePanel === Panel.builder) { @@ -45,6 +62,9 @@ export default function PanelSwitch() { setActivePanel={setActivePanel} assistant_id={currentAssistantId} setCurrentAssistantId={setCurrentAssistantId} + endpoint={conversation.endpoint as AssistantsEndpoint} + assistantsConfig={assistantsConfig} + version={version} /> ); } diff --git a/client/src/components/SidePanel/Builder/Retrieval.tsx b/client/src/components/SidePanel/Builder/Retrieval.tsx new file mode 100644 index 00000000000..dd51bf78fd2 --- /dev/null +++ b/client/src/components/SidePanel/Builder/Retrieval.tsx @@ -0,0 +1,94 @@ +import { useEffect, useMemo } from 'react'; +import { useFormContext, Controller, useWatch } from 'react-hook-form'; +import { Capabilities } from 'librechat-data-provider'; +import type { AssistantsEndpoint } from 'librechat-data-provider'; +import type { AssistantForm } from '~/common'; +import OptionHover from '~/components/SidePanel/Parameters/OptionHover'; +import { Checkbox, HoverCard, HoverCardTrigger } from '~/components/ui'; +import { useLocalize } from '~/hooks'; +import { ESide } from '~/common'; +import { cn } from '~/utils/'; + +export default function Retrieval({ + version, + retrievalModels, +}: { + version: number | string; + retrievalModels: Set; + endpoint: AssistantsEndpoint; +}) { + const localize = useLocalize(); + const methods = useFormContext(); + const { control, setValue, getValues } = methods; + const model = useWatch({ control, name: 'model' }); + const assistant = useWatch({ control, name: 'assistant' }); + + const vectorStores = useMemo(() => { + if (typeof assistant === 'string') { + return []; + } + return assistant.tool_resources?.file_search; + }, [assistant]); + + const isDisabled = useMemo(() => !retrievalModels.has(model), [model, retrievalModels]); + + useEffect(() => { + if (model && isDisabled) { + setValue(Capabilities.retrieval, false); + } + }, [model, setValue, isDisabled]); + + return ( + <> +
+ ( + + )} + /> + + + + + + +
+ {version == 2 && ( +
+ {localize('com_assistants_file_search_info')} +
+ )} + + ); +} diff --git a/client/src/components/SidePanel/Files/PanelFileCell.tsx b/client/src/components/SidePanel/Files/PanelFileCell.tsx index 4d5d02979c9..979d45f609b 100644 --- a/client/src/components/SidePanel/Files/PanelFileCell.tsx +++ b/client/src/components/SidePanel/Files/PanelFileCell.tsx @@ -1,8 +1,10 @@ import { useCallback } from 'react'; import { fileConfig as defaultFileConfig, + checkOpenAIStorage, mergeFileConfig, megabyte, + isAssistantsEndpoint, } from 'librechat-data-provider'; import type { Row } from '@tanstack/react-table'; import type { TFile } from 'librechat-data-provider'; @@ -36,6 +38,18 @@ export default function PanelFileCell({ row }: { row: Row }) { return showToast({ message: localize('com_ui_attach_error'), status: 'error' }); } + if (checkOpenAIStorage(fileData?.source ?? '') && !isAssistantsEndpoint(endpoint)) { + return showToast({ + message: localize('com_ui_attach_error_openai'), + status: 'error', + }); + } else if (!checkOpenAIStorage(fileData?.source ?? '') && isAssistantsEndpoint(endpoint)) { + showToast({ + message: localize('com_ui_attach_warn_endpoint'), + status: 'warning', + }); + } + const { fileSizeLimit, supportedMimeTypes } = fileConfig.endpoints[endpoint] ?? fileConfig.endpoints.default; @@ -81,7 +95,8 @@ export default function PanelFileCell({ row }: { row: Row }) { > {file.filename}
@@ -94,7 +109,7 @@ export default function PanelFileCell({ row }: { row: Row }) { onClick={handleFileClick} className="flex cursor-pointer gap-2 rounded-md dark:hover:bg-gray-700" > - {fileType && } + {fileType && } {file.filename}
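// --- Illustrative sketch (not part of the patch above): the intent of the storage checks added
// to PanelFileCell. Files held in OpenAI/Azure storage can only be attached when the active
// endpoint is an assistants endpoint, while attaching locally stored files to an assistants
// conversation only warns. checkOpenAIStorage and isAssistantsEndpoint come from
// librechat-data-provider, as imported in the diff above.
import { checkOpenAIStorage, isAssistantsEndpoint } from 'librechat-data-provider';

type AttachVerdict = 'ok' | 'error' | 'warning';

export function classifyAttachment(source: string, endpoint: string): AttachVerdict {
  const storedWithProvider = checkOpenAIStorage(source);
  const assistants = isAssistantsEndpoint(endpoint);
  if (storedWithProvider && !assistants) {
    return 'error'; // surfaced as the com_ui_attach_error_openai toast
  }
  if (!storedWithProvider && assistants) {
    return 'warning'; // surfaced as the com_ui_attach_warn_endpoint toast
  }
  return 'ok';
}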
); diff --git a/client/src/components/SidePanel/Parameters/OptionHover.tsx b/client/src/components/SidePanel/Parameters/OptionHover.tsx index 1a3714a0dc4..16e33fff92f 100644 --- a/client/src/components/SidePanel/Parameters/OptionHover.tsx +++ b/client/src/components/SidePanel/Parameters/OptionHover.tsx @@ -7,11 +7,21 @@ type TOptionHoverProps = { description: string; langCode?: boolean; sideOffset?: number; + disabled?: boolean; side: ESide; }; -function OptionHover({ side, description, langCode, sideOffset = 30 }: TOptionHoverProps) { +function OptionHover({ + side, + description, + disabled, + langCode, + sideOffset = 30, +}: TOptionHoverProps) { const localize = useLocalize(); + if (disabled) { + return null; + } const text = langCode ? localize(description) : description; return ( diff --git a/client/src/components/SidePanel/SidePanel.tsx b/client/src/components/SidePanel/SidePanel.tsx index 13ac875975c..d2592e34958 100644 --- a/client/src/components/SidePanel/SidePanel.tsx +++ b/client/src/components/SidePanel/SidePanel.tsx @@ -1,5 +1,5 @@ import throttle from 'lodash/throttle'; -import { EModelEndpoint, getConfigDefaults } from 'librechat-data-provider'; +import { getConfigDefaults } from 'librechat-data-provider'; import { useState, useRef, useCallback, useEffect, useMemo, memo } from 'react'; import { useGetEndpointsQuery, @@ -61,7 +61,7 @@ const SidePanel = ({ return activePanel ? activePanel : undefined; }, []); - const assistants = useMemo(() => endpointsConfig?.[EModelEndpoint.assistants], [endpointsConfig]); + const assistants = useMemo(() => endpointsConfig?.[endpoint ?? ''], [endpoint, endpointsConfig]); const userProvidesKey = useMemo( () => !!endpointsConfig?.[endpoint ?? '']?.userProvide, [endpointsConfig, endpoint], diff --git a/client/src/components/SidePanel/Switcher.tsx b/client/src/components/SidePanel/Switcher.tsx index 4fdaea21b87..cf31b889320 100644 --- a/client/src/components/SidePanel/Switcher.tsx +++ b/client/src/components/SidePanel/Switcher.tsx @@ -1,18 +1,18 @@ -import { EModelEndpoint } from 'librechat-data-provider'; +import { isAssistantsEndpoint } from 'librechat-data-provider'; import type { SwitcherProps } from '~/common'; import { Separator } from '~/components/ui/Separator'; import AssistantSwitcher from './AssistantSwitcher'; import ModelSwitcher from './ModelSwitcher'; export default function Switcher(props: SwitcherProps) { - if (props.endpoint === EModelEndpoint.assistants && props.endpointKeyProvided) { + if (isAssistantsEndpoint(props.endpoint) && props.endpointKeyProvided) { return ( <> ); - } else if (props.endpoint === EModelEndpoint.assistants) { + } else if (isAssistantsEndpoint(props.endpoint)) { return null; } diff --git a/client/src/components/Tools/ToolSelectDialog.tsx b/client/src/components/Tools/ToolSelectDialog.tsx index b40ca7ea77c..c6427630a1a 100644 --- a/client/src/components/Tools/ToolSelectDialog.tsx +++ b/client/src/components/Tools/ToolSelectDialog.tsx @@ -3,7 +3,7 @@ import { Search, X } from 'lucide-react'; import { Dialog } from '@headlessui/react'; import { useFormContext } from 'react-hook-form'; import { useUpdateUserPluginsMutation } from 'librechat-data-provider/react-query'; -import type { TError, TPluginAction } from 'librechat-data-provider'; +import type { AssistantsEndpoint, TError, TPluginAction } from 'librechat-data-provider'; import type { TPluginStoreDialogProps } from '~/common/types'; import { PluginPagination, PluginAuthForm } from '~/components/Plugins/Store'; import { useLocalize, 
usePluginDialogHelpers } from '~/hooks'; @@ -13,10 +13,11 @@ import ToolItem from './ToolItem'; function ToolSelectDialog({ isOpen, setIsOpen, -}: TPluginStoreDialogProps & { assistant_id?: string }) { + endpoint, +}: TPluginStoreDialogProps & { assistant_id?: string; endpoint: AssistantsEndpoint }) { const localize = useLocalize(); const { getValues, setValue } = useFormContext(); - const { data: tools = [] } = useAvailableToolsQuery(); + const { data: tools = [] } = useAvailableToolsQuery(endpoint); const { maxPage, diff --git a/client/src/data-provider/mutations.ts b/client/src/data-provider/mutations.ts index 436914706f0..01e3a45ecb1 100644 --- a/client/src/data-provider/mutations.ts +++ b/client/src/data-provider/mutations.ts @@ -1,4 +1,8 @@ -import { LocalStorageKeys } from 'librechat-data-provider'; +import { + EToolResources, + LocalStorageKeys, + defaultAssistantsVersion, +} from 'librechat-data-provider'; import { useMutation, useQueryClient } from '@tanstack/react-query'; import type { UseMutationResult } from '@tanstack/react-query'; import type t from 'librechat-data-provider'; @@ -376,9 +380,10 @@ export const useUploadFileMutation = ( const { onSuccess, ...options } = _options || {}; return useMutation([MutationKeys.fileUpload], { mutationFn: (body: FormData) => { - const height = body.get('height'); const width = body.get('width'); - if (height && width) { + const height = body.get('height'); + const version = body.get('version') as number | string; + if (height && width && (!version || version != 2)) { return dataService.uploadImage(body); } @@ -391,8 +396,10 @@ export const useUploadFileMutation = ( ...(_files ?? []), ]); + const endpoint = formData.get('endpoint'); const assistant_id = formData.get('assistant_id'); const message_file = formData.get('message_file'); + const tool_resource = formData.get('tool_resource'); if (!assistant_id || message_file === 'true') { onSuccess?.(data, formData, context); @@ -400,7 +407,7 @@ export const useUploadFileMutation = ( } queryClient.setQueryData( - [QueryKeys.assistants, defaultOrderQuery], + [QueryKeys.assistants, endpoint, defaultOrderQuery], (prev) => { if (!prev) { return prev; @@ -409,13 +416,29 @@ export const useUploadFileMutation = ( return { ...prev, data: prev?.data.map((assistant) => { - if (assistant.id === assistant_id) { - return { - ...assistant, - file_ids: [...assistant.file_ids, data.file_id], + if (assistant.id !== assistant_id) { + return assistant; + } + + const update = {}; + if (!tool_resource) { + update['file_ids'] = [...assistant.file_ids, data.file_id]; + } + if (tool_resource === EToolResources.code_interpreter) { + const prevResources = assistant.tool_resources ?? {}; + const prevResource = assistant.tool_resources?.[tool_resource as string] ?? 
{ + file_ids: [], + }; + prevResource.file_ids.push(data.file_id); + update['tool_resources'] = { + ...prevResources, + [tool_resource as string]: prevResource, }; } - return assistant; + return { + ...assistant, + ...update, + }; }), }; }, @@ -436,7 +459,8 @@ export const useDeleteFilesMutation = ( const queryClient = useQueryClient(); const { onSuccess, ...options } = _options || {}; return useMutation([MutationKeys.fileDelete], { - mutationFn: (body: t.DeleteFilesBody) => dataService.deleteFiles(body.files, body.assistant_id), + mutationFn: (body: t.DeleteFilesBody) => + dataService.deleteFiles(body.files, body.assistant_id, body.tool_resource), ...(options || {}), onSuccess: (data, ...args) => { queryClient.setQueryData([QueryKeys.files], (cachefiles) => { @@ -542,6 +566,7 @@ export const useCreateAssistantMutation = ( onSuccess: (newAssistant, variables, context) => { const listRes = queryClient.getQueryData([ QueryKeys.assistants, + variables.endpoint, defaultOrderQuery, ]); @@ -552,7 +577,7 @@ export const useCreateAssistantMutation = ( const currentAssistants = [newAssistant, ...JSON.parse(JSON.stringify(listRes.data))]; queryClient.setQueryData( - [QueryKeys.assistants, defaultOrderQuery], + [QueryKeys.assistants, variables.endpoint, defaultOrderQuery], { ...listRes, data: currentAssistants, @@ -576,14 +601,23 @@ export const useUpdateAssistantMutation = ( > => { const queryClient = useQueryClient(); return useMutation( - ({ assistant_id, data }: { assistant_id: string; data: t.AssistantUpdateParams }) => - dataService.updateAssistant(assistant_id, data), + ({ assistant_id, data }: { assistant_id: string; data: t.AssistantUpdateParams }) => { + const { endpoint } = data; + const endpointsConfig = queryClient.getQueryData([QueryKeys.endpoints]); + const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint]; + return dataService.updateAssistant({ + data, + version, + assistant_id, + }); + }, { onMutate: (variables) => options?.onMutate?.(variables), onError: (error, variables, context) => options?.onError?.(error, variables, context), onSuccess: (updatedAssistant, variables, context) => { const listRes = queryClient.getQueryData([ QueryKeys.assistants, + variables.data.endpoint, defaultOrderQuery, ]); @@ -592,7 +626,7 @@ export const useUpdateAssistantMutation = ( } queryClient.setQueryData( - [QueryKeys.assistants, defaultOrderQuery], + [QueryKeys.assistants, variables.data.endpoint, defaultOrderQuery], { ...listRes, data: listRes.data.map((assistant) => { @@ -617,14 +651,18 @@ export const useDeleteAssistantMutation = ( ): UseMutationResult => { const queryClient = useQueryClient(); return useMutation( - ({ assistant_id, model }: t.DeleteAssistantBody) => - dataService.deleteAssistant(assistant_id, model), + ({ assistant_id, model, endpoint }: t.DeleteAssistantBody) => { + const endpointsConfig = queryClient.getQueryData([QueryKeys.endpoints]); + const version = endpointsConfig?.[endpoint]?.version ?? 
defaultAssistantsVersion[endpoint]; + return dataService.deleteAssistant({ assistant_id, model, version, endpoint }); + }, { onMutate: (variables) => options?.onMutate?.(variables), onError: (error, variables, context) => options?.onError?.(error, variables, context), onSuccess: (_data, variables, context) => { const listRes = queryClient.getQueryData([ QueryKeys.assistants, + variables.endpoint, defaultOrderQuery, ]); @@ -635,7 +673,7 @@ export const useDeleteAssistantMutation = ( const data = listRes.data.filter((assistant) => assistant.id !== variables.assistant_id); queryClient.setQueryData( - [QueryKeys.assistants, defaultOrderQuery], + [QueryKeys.assistants, variables.endpoint, defaultOrderQuery], { ...listRes, data, @@ -687,6 +725,7 @@ export const useUpdateAction = ( onSuccess: (updateActionResponse, variables, context) => { const listRes = queryClient.getQueryData([ QueryKeys.assistants, + variables.endpoint, defaultOrderQuery, ]); @@ -696,15 +735,18 @@ export const useUpdateAction = ( const updatedAssistant = updateActionResponse[1]; - queryClient.setQueryData([QueryKeys.assistants, defaultOrderQuery], { - ...listRes, - data: listRes.data.map((assistant) => { - if (assistant.id === variables.assistant_id) { - return updatedAssistant; - } - return assistant; - }), - }); + queryClient.setQueryData( + [QueryKeys.assistants, variables.endpoint, defaultOrderQuery], + { + ...listRes, + data: listRes.data.map((assistant) => { + if (assistant.id === variables.assistant_id) { + return updatedAssistant; + } + return assistant; + }), + }, + ); queryClient.setQueryData([QueryKeys.actions], (prev) => { return prev @@ -735,8 +777,15 @@ export const useDeleteAction = ( > => { const queryClient = useQueryClient(); return useMutation([MutationKeys.deleteAction], { - mutationFn: (variables: t.DeleteActionVariables) => - dataService.deleteAction(variables.assistant_id, variables.action_id, variables.model), + mutationFn: (variables: t.DeleteActionVariables) => { + const { endpoint } = variables; + const endpointsConfig = queryClient.getQueryData([QueryKeys.endpoints]); + const version = endpointsConfig?.[endpoint]?.version ?? 
defaultAssistantsVersion[endpoint];
+      return dataService.deleteAction({
+        ...variables,
+        version,
+      });
+    },
     onMutate: (variables) => options?.onMutate?.(variables),
     onError: (error, variables, context) => options?.onError?.(error, variables, context),
@@ -750,7 +799,7 @@ export const useDeleteAction = (
       });

       queryClient.setQueryData(
-        [QueryKeys.assistants, defaultOrderQuery],
+        [QueryKeys.assistants, variables.endpoint, defaultOrderQuery],
         (prev) => {
           if (!prev) {
             return prev;
diff --git a/client/src/data-provider/queries.ts b/client/src/data-provider/queries.ts
index 50302ad7edc..5358e39bb4b 100644
--- a/client/src/data-provider/queries.ts
+++ b/client/src/data-provider/queries.ts
@@ -1,4 +1,9 @@
-import { EModelEndpoint, QueryKeys, dataService, defaultOrderQuery } from 'librechat-data-provider';
+import {
+  QueryKeys,
+  dataService,
+  defaultOrderQuery,
+  defaultAssistantsVersion,
+} from 'librechat-data-provider';
 import { useQuery, useInfiniteQuery, useQueryClient } from '@tanstack/react-query';
 import type {
   UseInfiniteQueryOptions,
@@ -194,43 +199,46 @@ export const useSharedLinksInfiniteQuery = (
 /**
  * Hook for getting all available tools for Assistants
  */
-export const useAvailableToolsQuery = (): QueryObserverResult => {
+export const useAvailableToolsQuery = (
+  endpoint: t.AssistantsEndpoint,
+): QueryObserverResult => {
   const queryClient = useQueryClient();
   const endpointsConfig = queryClient.getQueryData([QueryKeys.endpoints]);
-  const keyExpiry = queryClient.getQueryData([
-    QueryKeys.name,
-    EModelEndpoint.assistants,
-  ]);
-  const userProvidesKey = !!endpointsConfig?.[EModelEndpoint.assistants]?.userProvide;
+  const keyExpiry = queryClient.getQueryData([QueryKeys.name, endpoint]);
+  const userProvidesKey = !!endpointsConfig?.[endpoint]?.userProvide;
   const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
-  const enabled = !!endpointsConfig?.[EModelEndpoint.assistants] && keyProvided;
-  return useQuery([QueryKeys.tools], () => dataService.getAvailableTools(), {
-    refetchOnWindowFocus: false,
-    refetchOnReconnect: false,
-    refetchOnMount: false,
-    enabled,
-  });
+  const enabled = !!endpointsConfig?.[endpoint] && keyProvided;
+  const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
+  return useQuery(
+    [QueryKeys.tools],
+    () => dataService.getAvailableTools(version, endpoint),
+    {
+      refetchOnWindowFocus: false,
+      refetchOnReconnect: false,
+      refetchOnMount: false,
+      enabled,
+    },
+  );
 };

 /**
  * Hook for listing all assistants, with optional parameters provided for pagination and sorting
  */
 export const useListAssistantsQuery = (
-  params: AssistantListParams = defaultOrderQuery,
+  endpoint: t.AssistantsEndpoint,
+  params: Omit = defaultOrderQuery,
   config?: UseQueryOptions,
 ): QueryObserverResult => {
   const queryClient = useQueryClient();
   const endpointsConfig = queryClient.getQueryData([QueryKeys.endpoints]);
-  const keyExpiry = queryClient.getQueryData([
-    QueryKeys.name,
-    EModelEndpoint.assistants,
-  ]);
-  const userProvidesKey = !!endpointsConfig?.[EModelEndpoint.assistants]?.userProvide;
+  const keyExpiry = queryClient.getQueryData([QueryKeys.name, endpoint]);
+  const userProvidesKey = !!endpointsConfig?.[endpoint]?.userProvide;
   const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
-  const enabled = !!endpointsConfig?.[EModelEndpoint.assistants] && keyProvided;
+  const enabled = !!endpointsConfig?.[endpoint] && keyProvided;
+  const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
   return useQuery(
-    [QueryKeys.assistants, params],
-    () => dataService.listAssistants(params),
+    [QueryKeys.assistants, endpoint, params],
+    () => dataService.listAssistants({ ...params, endpoint }, version),
     {
       // Example selector to sort them by created_at
       // select: (res) => {
@@ -246,6 +254,7 @@ export const useListAssistantsQuery = (
   );
 };

+/*
 export const useListAssistantsInfiniteQuery = (
   params?: AssistantListParams,
   config?: UseInfiniteQueryOptions,
@@ -275,26 +284,31 @@ export const useListAssistantsInfiniteQuery = (
     },
   );
 };
+*/

 /**
  * Hook for retrieving details about a single assistant
  */
 export const useGetAssistantByIdQuery = (
+  endpoint: t.AssistantsEndpoint,
   assistant_id: string,
   config?: UseQueryOptions,
 ): QueryObserverResult => {
   const queryClient = useQueryClient();
   const endpointsConfig = queryClient.getQueryData([QueryKeys.endpoints]);
-  const keyExpiry = queryClient.getQueryData([
-    QueryKeys.name,
-    EModelEndpoint.assistants,
-  ]);
-  const userProvidesKey = !!endpointsConfig?.[EModelEndpoint.assistants]?.userProvide;
+  const keyExpiry = queryClient.getQueryData([QueryKeys.name, endpoint]);
+  const userProvidesKey = !!endpointsConfig?.[endpoint]?.userProvide;
   const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
-  const enabled = !!endpointsConfig?.[EModelEndpoint.assistants] && keyProvided;
+  const enabled = !!endpointsConfig?.[endpoint] && keyProvided;
+  const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
   return useQuery(
     [QueryKeys.assistant, assistant_id],
-    () => dataService.getAssistantById(assistant_id),
+    () =>
+      dataService.getAssistantById({
+        endpoint,
+        assistant_id,
+        version,
+      }),
     {
       refetchOnWindowFocus: false,
       refetchOnReconnect: false,
@@ -311,43 +325,53 @@ export const useGetAssistantByIdQuery = (
  * Hook for retrieving user's saved Assistant Actions
  */
 export const useGetActionsQuery = (
+  endpoint: t.AssistantsEndpoint,
   config?: UseQueryOptions,
 ): QueryObserverResult => {
   const queryClient = useQueryClient();
   const endpointsConfig = queryClient.getQueryData([QueryKeys.endpoints]);
-  const keyExpiry = queryClient.getQueryData([
-    QueryKeys.name,
-    EModelEndpoint.assistants,
-  ]);
-  const userProvidesKey = !!endpointsConfig?.[EModelEndpoint.assistants]?.userProvide;
+  const keyExpiry = queryClient.getQueryData([QueryKeys.name, endpoint]);
+  const userProvidesKey = !!endpointsConfig?.[endpoint]?.userProvide;
   const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
-  const enabled = !!endpointsConfig?.[EModelEndpoint.assistants] && keyProvided;
-  return useQuery([QueryKeys.actions], () => dataService.getActions(), {
-    refetchOnWindowFocus: false,
-    refetchOnReconnect: false,
-    refetchOnMount: false,
-    ...config,
-    enabled: config?.enabled !== undefined ? config?.enabled && enabled : enabled,
-  });
+  const enabled = !!endpointsConfig?.[endpoint] && keyProvided;
+  const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
+  return useQuery(
+    [QueryKeys.actions],
+    () =>
+      dataService.getActions({
+        endpoint,
+        version,
+      }),
+    {
+      refetchOnWindowFocus: false,
+      refetchOnReconnect: false,
+      refetchOnMount: false,
+      ...config,
+      enabled: config?.enabled !== undefined ? config?.enabled && enabled : enabled,
+    },
+  );
 };

 /**
  * Hook for retrieving user's saved Assistant Documents (metadata saved to Database)
  */
 export const useGetAssistantDocsQuery = (
+  endpoint: t.AssistantsEndpoint,
   config?: UseQueryOptions,
 ): QueryObserverResult => {
   const queryClient = useQueryClient();
   const endpointsConfig = queryClient.getQueryData([QueryKeys.endpoints]);
-  const keyExpiry = queryClient.getQueryData([
-    QueryKeys.name,
-    EModelEndpoint.assistants,
-  ]);
-  const userProvidesKey = !!endpointsConfig?.[EModelEndpoint.assistants]?.userProvide;
+  const keyExpiry = queryClient.getQueryData([QueryKeys.name, endpoint]);
+  const userProvidesKey = !!endpointsConfig?.[endpoint]?.userProvide;
   const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
-  const enabled = !!endpointsConfig?.[EModelEndpoint.assistants] && keyProvided;
+  const enabled = !!endpointsConfig?.[endpoint] && keyProvided;
+  const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
   return useQuery(
     [QueryKeys.assistantDocs],
-    () => dataService.getAssistantDocs(),
+    () =>
+      dataService.getAssistantDocs({
+        endpoint,
+        version,
+      }),
     {
       refetchOnWindowFocus: false,
       refetchOnReconnect: false,
diff --git a/client/src/hooks/Assistants/index.ts b/client/src/hooks/Assistants/index.ts
index 198473ffcf5..078e4f018d5 100644
--- a/client/src/hooks/Assistants/index.ts
+++ b/client/src/hooks/Assistants/index.ts
@@ -1,2 +1,3 @@
 export { default as useAssistantsMap } from './useAssistantsMap';
 export { default as useSelectAssistant } from './useSelectAssistant';
+export { default as useAssistantListMap } from './useAssistantListMap';
diff --git a/client/src/hooks/Assistants/useAssistantListMap.ts b/client/src/hooks/Assistants/useAssistantListMap.ts
new file mode 100644
index 00000000000..5b1b975a472
--- /dev/null
+++ b/client/src/hooks/Assistants/useAssistantListMap.ts
@@ -0,0 +1,44 @@
+import { useMemo } from 'react';
+import { EModelEndpoint } from 'librechat-data-provider';
+import type { AssistantListResponse, AssistantsEndpoint } from 'librechat-data-provider';
+import type { AssistantListItem } from '~/common';
+import { useListAssistantsQuery } from '~/data-provider';
+
+const selectAssistantsResponse = (res: AssistantListResponse): AssistantListItem[] =>
+  res.data.map(({ id, name, metadata, model }) => ({
+    id,
+    name: name ?? '',
+    metadata,
+    model,
+  }));
+
+export default function useAssistantListMap(
+  selector: (res: AssistantListResponse) => T = selectAssistantsResponse as (
+    res: AssistantListResponse,
+  ) => T,
+): Record {
+  const { data: assistantsList = null } = useListAssistantsQuery(
+    EModelEndpoint.assistants,
+    undefined,
+    {
+      select: selector,
+    },
+  );
+
+  const { data: azureAssistants = null } = useListAssistantsQuery(
+    EModelEndpoint.azureAssistants,
+    undefined,
+    {
+      select: selector,
+    },
+  );
+
+  const assistantListMap = useMemo(() => {
+    return {
+      [EModelEndpoint.assistants]: assistantsList as T,
+      [EModelEndpoint.azureAssistants]: azureAssistants as T,
+    };
+  }, [assistantsList, azureAssistants]);
+
+  return assistantListMap;
+}
diff --git a/client/src/hooks/Assistants/useAssistantsMap.ts b/client/src/hooks/Assistants/useAssistantsMap.ts
index d88d6eea4a7..46ddd6cc33c 100644
--- a/client/src/hooks/Assistants/useAssistantsMap.ts
+++ b/client/src/hooks/Assistants/useAssistantsMap.ts
@@ -1,12 +1,28 @@
-import { defaultOrderQuery } from 'librechat-data-provider';
+import { EModelEndpoint } from 'librechat-data-provider';
+import type { TAssistantsMap } from 'librechat-data-provider';
 import { useListAssistantsQuery } from '~/data-provider';
 import { mapAssistants } from '~/utils';

-export default function useAssistantsMap({ isAuthenticated }: { isAuthenticated: boolean }) {
-  const { data: assistantMap = {} } = useListAssistantsQuery(defaultOrderQuery, {
+export default function useAssistantsMap({
+  isAuthenticated,
+}: {
+  isAuthenticated: boolean;
+}): TAssistantsMap {
+  const { data: assistants = {} } = useListAssistantsQuery(EModelEndpoint.assistants, undefined, {
     select: (res) => mapAssistants(res.data),
     enabled: isAuthenticated,
   });
+  const { data: azureAssistants = {} } = useListAssistantsQuery(
+    EModelEndpoint.azureAssistants,
+    undefined,
+    {
+      select: (res) => mapAssistants(res.data),
+      enabled: isAuthenticated,
+    },
+  );

-  return assistantMap;
+  return {
+    [EModelEndpoint.assistants]: assistants,
+    [EModelEndpoint.azureAssistants]: azureAssistants,
+  };
 }
diff --git a/client/src/hooks/Assistants/useSelectAssistant.ts b/client/src/hooks/Assistants/useSelectAssistant.ts
index 929d2df55b0..327f0f582ce 100644
--- a/client/src/hooks/Assistants/useSelectAssistant.ts
+++ b/client/src/hooks/Assistants/useSelectAssistant.ts
@@ -1,32 +1,30 @@
 import { useCallback } from 'react';
-import { EModelEndpoint, defaultOrderQuery } from 'librechat-data-provider';
-import type { TConversation, TPreset } from 'librechat-data-provider';
+import { isAssistantsEndpoint } from 'librechat-data-provider';
+import type { AssistantsEndpoint, TConversation, TPreset } from 'librechat-data-provider';
 import useDefaultConvo from '~/hooks/Conversations/useDefaultConvo';
-import { useListAssistantsQuery } from '~/data-provider';
 import { useChatContext } from '~/Providers/ChatContext';
+import useAssistantListMap from './useAssistantListMap';
 import { mapAssistants } from '~/utils';

-export default function useSelectAssistant() {
+export default function useSelectAssistant(endpoint: AssistantsEndpoint) {
   const getDefaultConversation = useDefaultConvo();
   const { conversation, newConversation } = useChatContext();
-  const { data: assistantMap = {} } = useListAssistantsQuery(defaultOrderQuery, {
-    select: (res) => mapAssistants(res.data),
-  });
+  const assistantMap = useAssistantListMap((res) => mapAssistants(res.data));

   const onSelect = useCallback(
     (value: string) => {
-      const assistant =
assistantMap?.[value]; + const assistant = assistantMap?.[endpoint]?.[value]; if (!assistant) { return; } const template: Partial = { - endpoint: EModelEndpoint.assistants, + endpoint, assistant_id: assistant.id, model: assistant.model, conversationId: 'new', }; - if (conversation?.endpoint === EModelEndpoint.assistants) { + if (isAssistantsEndpoint(conversation?.endpoint)) { const currentConvo = getDefaultConversation({ conversation: { ...(conversation ?? {}) }, preset: template, @@ -44,7 +42,7 @@ export default function useSelectAssistant() { preset: template as Partial, }); }, - [assistantMap, conversation, getDefaultConversation, newConversation], + [endpoint, assistantMap, conversation, getDefaultConversation, newConversation], ); return { onSelect }; diff --git a/client/src/hooks/Conversations/useConversation.ts b/client/src/hooks/Conversations/useConversation.ts index 1a977f14562..7f58a136f9a 100644 --- a/client/src/hooks/Conversations/useConversation.ts +++ b/client/src/hooks/Conversations/useConversation.ts @@ -1,4 +1,5 @@ import { useCallback } from 'react'; +import { useNavigate } from 'react-router-dom'; import { useSetRecoilState, useResetRecoilState, useRecoilCallback } from 'recoil'; import { useGetEndpointsQuery, useGetModelsQuery } from 'librechat-data-provider/react-query'; import type { @@ -10,11 +11,10 @@ import type { TEndpointsConfig, } from 'librechat-data-provider'; import { buildDefaultConvo, getDefaultEndpoint, getEndpointField } from '~/utils'; -import useOriginNavigate from '../useOriginNavigate'; import store from '~/store'; const useConversation = () => { - const navigate = useOriginNavigate(); + const navigate = useNavigate(); const setConversation = useSetRecoilState(store.conversation); const resetLatestMessage = useResetRecoilState(store.latestMessage); const setMessages = useSetRecoilState(store.messages); @@ -59,7 +59,7 @@ const useConversation = () => { resetLatestMessage(); if (conversation.conversationId === 'new' && !modelsData) { - navigate('new'); + navigate('/c/new'); } }, [endpointsConfig, modelsQuery.data], diff --git a/client/src/hooks/Conversations/useNavigateToConvo.tsx b/client/src/hooks/Conversations/useNavigateToConvo.tsx index f2384becac3..17f2563ab43 100644 --- a/client/src/hooks/Conversations/useNavigateToConvo.tsx +++ b/client/src/hooks/Conversations/useNavigateToConvo.tsx @@ -1,14 +1,14 @@ +import { useNavigate } from 'react-router-dom'; import { useQueryClient } from '@tanstack/react-query'; import { useSetRecoilState, useResetRecoilState } from 'recoil'; import { QueryKeys, EModelEndpoint, LocalStorageKeys } from 'librechat-data-provider'; import type { TConversation, TEndpointsConfig, TModelsConfig } from 'librechat-data-provider'; import { buildDefaultConvo, getDefaultEndpoint, getEndpointField } from '~/utils'; -import useOriginNavigate from '../useOriginNavigate'; import store from '~/store'; const useNavigateToConvo = (index = 0) => { + const navigate = useNavigate(); const queryClient = useQueryClient(); - const navigate = useOriginNavigate(); const { setConversation } = store.useCreateConversationAtom(index); const setSubmission = useSetRecoilState(store.submissionByIndex(index)); const resetLatestMessage = useResetRecoilState(store.latestMessageFamily(index)); @@ -48,7 +48,7 @@ const useNavigateToConvo = (index = 0) => { }); } setConversation(convo); - navigate(convo?.conversationId); + navigate(`/c/${convo.conversationId ?? 
'new'}`); }; const navigateWithLastTools = (conversation: TConversation) => { diff --git a/client/src/hooks/Conversations/usePresets.ts b/client/src/hooks/Conversations/usePresets.ts index 51253efece5..be7ea69f37c 100644 --- a/client/src/hooks/Conversations/usePresets.ts +++ b/client/src/hooks/Conversations/usePresets.ts @@ -3,7 +3,7 @@ import exportFromJSON from 'export-from-json'; import { useCallback, useEffect, useRef } from 'react'; import { useQueryClient } from '@tanstack/react-query'; import { useRecoilState, useSetRecoilState, useRecoilValue } from 'recoil'; -import { QueryKeys, modularEndpoints, EModelEndpoint } from 'librechat-data-provider'; +import { QueryKeys, modularEndpoints, isAssistantsEndpoint } from 'librechat-data-provider'; import { useCreatePresetMutation, useGetModelsQuery } from 'librechat-data-provider/react-query'; import type { TPreset, TEndpointsConfig } from 'librechat-data-provider'; import { @@ -174,8 +174,8 @@ export default function usePresets() { const currentEndpointType = getEndpointField(endpointsConfig, endpoint, 'type'); const endpointType = getEndpointField(endpointsConfig, newPreset.endpoint, 'type'); const isAssistantSwitch = - newPreset.endpoint === EModelEndpoint.assistants && - conversation?.endpoint === EModelEndpoint.assistants && + isAssistantsEndpoint(newPreset.endpoint) && + isAssistantsEndpoint(conversation?.endpoint) && conversation?.endpoint === newPreset.endpoint; if ( diff --git a/client/src/hooks/Files/useFileDeletion.ts b/client/src/hooks/Files/useFileDeletion.ts index 81a46fbdcdf..13eb5dd4c4b 100644 --- a/client/src/hooks/Files/useFileDeletion.ts +++ b/client/src/hooks/Files/useFileDeletion.ts @@ -1,5 +1,5 @@ import debounce from 'lodash/debounce'; -import { FileSources } from 'librechat-data-provider'; +import { FileSources, EToolResources } from 'librechat-data-provider'; import { useCallback, useState, useEffect } from 'react'; import type { BatchFile, @@ -16,18 +16,20 @@ type FileMapSetter = GenericSetter>; const useFileDeletion = ({ mutateAsync, assistant_id, + tool_resource, }: { mutateAsync: UseMutateAsyncFunction; assistant_id?: string; + tool_resource?: EToolResources; }) => { // eslint-disable-next-line @typescript-eslint/no-unused-vars const [_batch, setFileDeleteBatch] = useState([]); const setFilesToDelete = useSetFilesToDelete(); const executeBatchDelete = useCallback( - (filesToDelete: BatchFile[], assistant_id?: string) => { - console.log('Deleting files:', filesToDelete, assistant_id); - mutateAsync({ files: filesToDelete, assistant_id }); + (filesToDelete: BatchFile[], assistant_id?: string, tool_resource?: EToolResources) => { + console.log('Deleting files:', filesToDelete, assistant_id, tool_resource); + mutateAsync({ files: filesToDelete, assistant_id, tool_resource }); setFileDeleteBatch([]); }, [mutateAsync], @@ -81,11 +83,11 @@ const useFileDeletion = ({ setFileDeleteBatch((prevBatch) => { const newBatch = [...prevBatch, file]; - debouncedDelete(newBatch, assistant_id); + debouncedDelete(newBatch, assistant_id, tool_resource); return newBatch; }); }, - [debouncedDelete, setFilesToDelete, assistant_id], + [debouncedDelete, setFilesToDelete, assistant_id, tool_resource], ); const deleteFiles = useCallback( diff --git a/client/src/hooks/Files/useFileHandling.ts b/client/src/hooks/Files/useFileHandling.ts index 482f6c373cf..3a83b452b87 100644 --- a/client/src/hooks/Files/useFileHandling.ts +++ b/client/src/hooks/Files/useFileHandling.ts @@ -1,13 +1,18 @@ import { v4 } from 'uuid'; import debounce from 
'lodash/debounce'; +import { useQueryClient } from '@tanstack/react-query'; import { useState, useEffect, useCallback } from 'react'; import { megabyte, + QueryKeys, EModelEndpoint, codeTypeMapping, mergeFileConfig, + isAssistantsEndpoint, + defaultAssistantsVersion, fileConfig as defaultFileConfig, } from 'librechat-data-provider'; +import type { TEndpointsConfig } from 'librechat-data-provider'; import type { ExtendedFile, FileSetter } from '~/common'; import { useUploadFileMutation, useGetFileConfig } from '~/data-provider'; import { useDelayedUploadToast } from './useDelayedUploadToast'; @@ -20,10 +25,12 @@ const { checkType } = defaultFileConfig; type UseFileHandling = { overrideEndpoint?: EModelEndpoint; fileSetter?: FileSetter; - additionalMetadata?: Record; + fileFilter?: (file: File) => boolean; + additionalMetadata?: Record; }; const useFileHandling = (params?: UseFileHandling) => { + const queryClient = useQueryClient(); const { showToast } = useToastContext(); const [errors, setErrors] = useState([]); const { startUploadTimer, clearUploadTimer } = useDelayedUploadToast(); @@ -141,15 +148,20 @@ const useFileHandling = (params?: UseFileHandling) => { if (params?.additionalMetadata) { for (const [key, value] of Object.entries(params.additionalMetadata)) { - formData.append(key, value); + if (value) { + formData.append(key, value); + } } } if ( - endpoint === EModelEndpoint.assistants && + isAssistantsEndpoint(endpoint) && !formData.get('assistant_id') && conversation?.assistant_id ) { + const endpointsConfig = queryClient.getQueryData([QueryKeys.endpoints]); + const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint]; + formData.append('version', version); formData.append('assistant_id', conversation.assistant_id); formData.append('model', conversation?.model ?? ''); formData.append('message_file', 'true'); diff --git a/client/src/hooks/Input/useMentions.ts b/client/src/hooks/Input/useMentions.ts index bd9b956104b..3cb616acae4 100644 --- a/client/src/hooks/Input/useMentions.ts +++ b/client/src/hooks/Input/useMentions.ts @@ -5,15 +5,42 @@ import { useGetEndpointsQuery, } from 'librechat-data-provider/react-query'; import { getConfigDefaults, EModelEndpoint, alternateName } from 'librechat-data-provider'; -import type { Assistant } from 'librechat-data-provider'; -import { useGetPresetsQuery, useListAssistantsQuery } from '~/data-provider'; +import type { AssistantsEndpoint, TAssistantsMap, TEndpointsConfig } from 'librechat-data-provider'; +import type { MentionOption } from '~/common'; +import useAssistantListMap from '~/hooks/Assistants/useAssistantListMap'; import { mapEndpoints, getPresetTitle } from '~/utils'; import { EndpointIcon } from '~/components/Endpoints'; +import { useGetPresetsQuery } from '~/data-provider'; import useSelectMention from './useSelectMention'; const defaultInterface = getConfigDefaults().interface; -export default function useMentions({ assistantMap }: { assistantMap: Record }) { +const assistantMapFn = + ({ + endpoint, + assistantMap, + endpointsConfig, + }: { + endpoint: AssistantsEndpoint; + assistantMap: TAssistantsMap; + endpointsConfig: TEndpointsConfig; + }) => + ({ id, name, description }) => ({ + type: endpoint, + label: name ?? '', + value: id, + description: description ?? 
'', + icon: EndpointIcon({ + conversation: { assistant_id: id, endpoint }, + containerClassName: 'shadow-stroke overflow-hidden rounded-full', + endpointsConfig: endpointsConfig, + context: 'menu-item', + assistantMap, + size: 20, + }), + }); + +export default function useMentions({ assistantMap }: { assistantMap: TAssistantsMap }) { const { data: presets } = useGetPresetsQuery(); const { data: modelsConfig } = useGetModelsQuery(); const { data: startupConfig } = useGetStartupConfig(); @@ -21,30 +48,43 @@ export default function useMentions({ assistantMap }: { assistantMap: Record - res.data - .map(({ id, name, description }) => ({ - type: 'assistant', - label: name ?? '', - value: id, - description: description ?? '', - icon: EndpointIcon({ - conversation: { assistant_id: id, endpoint: EModelEndpoint.assistants }, - containerClassName: 'shadow-stroke overflow-hidden rounded-full', - endpointsConfig: endpointsConfig, - context: 'menu-item', + const listMap = useAssistantListMap((res) => + res.data.map(({ id, name, description }) => ({ + id, + name, + description, + })), + ); + const assistantListMap = useMemo( + () => ({ + [EModelEndpoint.assistants]: listMap[EModelEndpoint.assistants] + ?.map( + assistantMapFn({ + endpoint: EModelEndpoint.assistants, assistantMap, - size: 20, + endpointsConfig, }), - })) - .filter(Boolean), - }); + ) + ?.filter(Boolean), + [EModelEndpoint.azureAssistants]: listMap[EModelEndpoint.azureAssistants] + ?.map( + assistantMapFn({ + endpoint: EModelEndpoint.azureAssistants, + assistantMap, + endpointsConfig, + }), + ) + ?.filter(Boolean), + }), + [listMap, assistantMap, endpointsConfig], + ); + const modelSpecs = useMemo(() => startupConfig?.modelSpecs?.list ?? [], [startupConfig]); const interfaceConfig = useMemo( () => startupConfig?.interface ?? defaultInterface, [startupConfig], ); + const { onSelectMention } = useSelectMention({ modelSpecs, endpointsConfig, @@ -52,7 +92,7 @@ export default function useMentions({ assistantMap }: { assistantMap: Record { + const options: MentionOption[] = useMemo(() => { const mentions = [ ...(modelSpecs?.length > 0 ? modelSpecs : []).map((modelSpec) => ({ value: modelSpec.name, @@ -67,12 +107,12 @@ export default function useMentions({ assistantMap }: { assistantMap: Record ({ value: endpoint, label: alternateName[endpoint] ?? endpoint ?? '', - type: 'endpoint', + type: 'endpoint' as const, icon: EndpointIcon({ conversation: { endpoint }, endpointsConfig, @@ -80,7 +120,12 @@ export default function useMentions({ assistantMap }: { assistantMap: Record ({ value: preset.presetId ?? `preset-${index}`, label: preset.title ?? preset.modelLabel ?? preset.chatGptLabel ?? '', @@ -93,7 +138,7 @@ export default function useMentions({ assistantMap }: { assistantMap: Record; + assistantMap: TAssistantsMap; }) { const { conversation } = useChatContext(); const { newConversation } = useNewConvo(); @@ -194,10 +194,10 @@ export default function useSelectMention({ onSelectEndpoint(key, { model: option.label }); } else if (option.type === 'endpoint') { onSelectEndpoint(key); - } else if (option.type === 'assistant') { - onSelectEndpoint(EModelEndpoint.assistants, { + } else if (isAssistantsEndpoint(option.type)) { + onSelectEndpoint(option.type, { assistant_id: key, - model: assistantMap?.[key]?.model ?? '', + model: assistantMap?.[option.type]?.[key]?.model ?? 
'', }); } }, diff --git a/client/src/hooks/Input/useTextarea.ts b/client/src/hooks/Input/useTextarea.ts index 8c39e76ddee..9e84d076e92 100644 --- a/client/src/hooks/Input/useTextarea.ts +++ b/client/src/hooks/Input/useTextarea.ts @@ -1,6 +1,6 @@ import debounce from 'lodash/debounce'; import { useEffect, useRef, useCallback } from 'react'; -import { EModelEndpoint } from 'librechat-data-provider'; +import { isAssistantsEndpoint } from 'librechat-data-provider'; import { useRecoilValue, useSetRecoilState } from 'recoil'; import type { TEndpointOption } from 'librechat-data-provider'; import type { KeyboardEvent } from 'react'; @@ -45,10 +45,11 @@ export default function useTextarea({ const { conversationId, jailbreak, endpoint = '', assistant_id } = conversation || {}; const isNotAppendable = ((latestMessage?.unfinished && !isSubmitting) || latestMessage?.error) && - endpoint !== EModelEndpoint.assistants; + !isAssistantsEndpoint(endpoint); // && (conversationId?.length ?? 0) > 6; // also ensures that we don't show the wrong placeholder - const assistant = endpoint === EModelEndpoint.assistants && assistantMap?.[assistant_id ?? '']; + const assistant = + isAssistantsEndpoint(endpoint) && assistantMap?.[endpoint ?? '']?.[assistant_id ?? '']; const assistantName = (assistant && assistant?.name) || ''; // auto focus to input, when enter a conversation. @@ -86,9 +87,11 @@ export default function useTextarea({ if (disabled) { return localize('com_endpoint_config_placeholder'); } + const currentEndpoint = conversation?.endpoint ?? ''; + const currentAssistantId = conversation?.assistant_id ?? ''; if ( - conversation?.endpoint === EModelEndpoint.assistants && - (!conversation?.assistant_id || !assistantMap?.[conversation?.assistant_id ?? '']) + isAssistantsEndpoint(currentEndpoint) && + (!currentAssistantId || !assistantMap?.[currentEndpoint]?.[currentAssistantId ?? '']) ) { return localize('com_endpoint_assistant_placeholder'); } @@ -97,10 +100,9 @@ export default function useTextarea({ return localize('com_endpoint_message_not_appendable'); } - const sender = - conversation?.endpoint === EModelEndpoint.assistants - ? getAssistantName({ name: assistantName, localize }) - : getSender(conversation as TEndpointOption); + const sender = isAssistantsEndpoint(currentEndpoint) + ? getAssistantName({ name: assistantName, localize }) + : getSender(conversation as TEndpointOption); return `${localize('com_endpoint_message')} ${sender ? sender : 'ChatGPT'}…`; }; diff --git a/client/src/hooks/Messages/useMessageHelpers.tsx b/client/src/hooks/Messages/useMessageHelpers.tsx index f3e42ac4cd3..c8b86522afd 100644 --- a/client/src/hooks/Messages/useMessageHelpers.tsx +++ b/client/src/hooks/Messages/useMessageHelpers.tsx @@ -1,5 +1,5 @@ import { useEffect, useRef, useCallback } from 'react'; -import { EModelEndpoint } from 'librechat-data-provider'; +import { isAssistantsEndpoint } from 'librechat-data-provider'; import type { TMessageProps } from '~/common'; import { useChatContext, useAssistantsMapContext } from '~/Providers'; import useCopyToClipboard from './useCopyToClipboard'; @@ -55,7 +55,8 @@ export default function useMessageHelpers(props: TMessageProps) { }, [isSubmitting, setAbortScroll]); const assistant = - conversation?.endpoint === EModelEndpoint.assistants && assistantMap?.[message?.model ?? '']; + isAssistantsEndpoint(conversation?.endpoint) && + assistantMap?.[conversation?.endpoint ?? '']?.[message?.model ?? 
'']; const regenerateMessage = () => { if ((isSubmitting && isCreatedByUser) || !message) { diff --git a/client/src/hooks/Messages/useProgress.ts b/client/src/hooks/Messages/useProgress.ts index 0be816d304c..4a1e254bf8b 100644 --- a/client/src/hooks/Messages/useProgress.ts +++ b/client/src/hooks/Messages/useProgress.ts @@ -1,35 +1,44 @@ -import { useState, useEffect } from 'react'; +import { useState, useEffect, useMemo, useCallback } from 'react'; export default function useProgress(initialProgress = 0.01, increment = 0.007, fileSize?: number) { - const calculateIncrement = (size?: number) => { - const baseRate = 0.05; - const minRate = 0.002; - const sizeMB = size ? size / (1024 * 1024) : 0; + const calculateIncrement = useCallback( + (size?: number) => { + const baseRate = 0.05; + const minRate = 0.002; + const sizeMB = size ? size / (1024 * 1024) : 0; - if (!size) { - return increment; - } + if (!size) { + return increment; + } - if (sizeMB <= 1) { - return baseRate * 2; - } else { - return Math.max(baseRate / Math.sqrt(sizeMB), minRate); - } - }; + if (sizeMB <= 1) { + return baseRate * 2; + } else { + return Math.max(baseRate / Math.sqrt(sizeMB), minRate); + } + }, + [increment], + ); - const incrementValue = calculateIncrement(fileSize); + const incrementValue = useMemo( + () => calculateIncrement(fileSize), + [fileSize, calculateIncrement], + ); const [progress, setProgress] = useState(initialProgress); - const getDynamicIncrement = (currentProgress: number) => { - if (!fileSize) { - return incrementValue; - } - if (currentProgress < 0.7) { - return incrementValue; - } else { - return Math.max(0.0005, incrementValue * (1 - currentProgress)); - } - }; + const getDynamicIncrement = useCallback( + (currentProgress: number) => { + if (!fileSize) { + return incrementValue; + } + if (currentProgress < 0.7) { + return incrementValue; + } else { + return Math.max(0.0005, incrementValue * (1 - currentProgress)); + } + }, + [incrementValue, fileSize], + ); useEffect(() => { let timeout: ReturnType; @@ -58,7 +67,7 @@ export default function useProgress(initialProgress = 0.01, increment = 0.007, f clearInterval(timer); clearTimeout(timeout); }; - }, [progress, initialProgress, incrementValue, fileSize]); + }, [progress, initialProgress, incrementValue, fileSize, getDynamicIncrement]); return progress; } diff --git a/client/src/hooks/Nav/useSideNavLinks.ts b/client/src/hooks/Nav/useSideNavLinks.ts index 4c1a8265075..2f31bdfcd73 100644 --- a/client/src/hooks/Nav/useSideNavLinks.ts +++ b/client/src/hooks/Nav/useSideNavLinks.ts @@ -3,7 +3,7 @@ import { ArrowRightToLine, // Settings2, } from 'lucide-react'; -import { EModelEndpoint } from 'librechat-data-provider'; +import { EModelEndpoint, isAssistantsEndpoint } from 'librechat-data-provider'; import type { TConfig, TInterfaceConfig } from 'librechat-data-provider'; import type { NavLink } from '~/common'; import PanelSwitch from '~/components/SidePanel/Builder/PanelSwitch'; @@ -26,7 +26,7 @@ export default function useSideNavLinks({ }) { const Links = useMemo(() => { const links: NavLink[] = []; - // if (endpoint !== EModelEndpoint.assistants) { + // if (!isAssistantsEndpoint(endpoint)) { // links.push({ // title: 'com_sidepanel_parameters', // label: '', @@ -36,7 +36,7 @@ export default function useSideNavLinks({ // }); // } if ( - endpoint === EModelEndpoint.assistants && + isAssistantsEndpoint(endpoint) && assistants && assistants.disableBuilder !== true && keyProvided && diff --git a/client/src/hooks/SSE/useSSE.ts 
b/client/src/hooks/SSE/useSSE.ts index f12e0996703..9e8c34d09ee 100644 --- a/client/src/hooks/SSE/useSSE.ts +++ b/client/src/hooks/SSE/useSSE.ts @@ -12,10 +12,10 @@ import { createPayload, tPresetSchema, tMessageSchema, - EModelEndpoint, LocalStorageKeys, tConvoUpdateSchema, removeNullishValues, + isAssistantsEndpoint, } from 'librechat-data-provider'; import { useGetUserBalance, useGetStartupConfig } from 'librechat-data-provider/react-query'; import type { @@ -441,7 +441,7 @@ export default function useSSE(submission: TSubmission | null, index = 0) { Authorization: `Bearer ${token}`, }, body: JSON.stringify({ - abortKey: _endpoint === EModelEndpoint.assistants ? runAbortKey : conversationId, + abortKey: isAssistantsEndpoint(_endpoint) ? runAbortKey : conversationId, endpoint, }), }); @@ -513,7 +513,7 @@ export default function useSSE(submission: TSubmission | null, index = 0) { const payloadData = createPayload(submission); let { payload } = payloadData; - if (payload.endpoint === EModelEndpoint.assistants) { + if (isAssistantsEndpoint(payload.endpoint)) { payload = removeNullishValues(payload); } diff --git a/client/src/hooks/index.ts b/client/src/hooks/index.ts index eeeef0164da..8925a379128 100644 --- a/client/src/hooks/index.ts +++ b/client/src/hooks/index.ts @@ -18,10 +18,8 @@ export { default as useNewConvo } from './useNewConvo'; export { default as useLocalize } from './useLocalize'; export { default as useMediaQuery } from './useMediaQuery'; export { default as useChatHelpers } from './useChatHelpers'; -export { default as useGenerations } from './useGenerations'; export { default as useScrollToRef } from './useScrollToRef'; export { default as useLocalStorage } from './useLocalStorage'; export { default as useDelayedRender } from './useDelayedRender'; export { default as useOnClickOutside } from './useOnClickOutside'; -export { default as useOriginNavigate } from './useOriginNavigate'; export { default as useGenerationsByLatest } from './useGenerationsByLatest'; diff --git a/client/src/hooks/useChatHelpers.ts b/client/src/hooks/useChatHelpers.ts index 0a3d42b957e..1a9a7b72d14 100644 --- a/client/src/hooks/useChatHelpers.ts +++ b/client/src/hooks/useChatHelpers.ts @@ -3,10 +3,10 @@ import { useCallback, useState } from 'react'; import { useQueryClient } from '@tanstack/react-query'; import { Constants, - EModelEndpoint, QueryKeys, - parseCompactConvo, ContentTypes, + parseCompactConvo, + isAssistantsEndpoint, } from 'librechat-data-provider'; import { useRecoilState, useResetRecoilState, useSetRecoilState } from 'recoil'; import { useGetMessagesByConvoId } from 'librechat-data-provider/react-query'; @@ -215,7 +215,7 @@ export default function useChatHelpers(index = 0, paramId: string | undefined) { error: false, }; - if (endpoint === EModelEndpoint.assistants) { + if (isAssistantsEndpoint(endpoint)) { initialResponse.model = conversation?.assistant_id ?? 
''; initialResponse.text = ''; initialResponse.content = [ diff --git a/client/src/hooks/useGenerations.ts b/client/src/hooks/useGenerations.ts deleted file mode 100644 index f0744256ab1..00000000000 --- a/client/src/hooks/useGenerations.ts +++ /dev/null @@ -1,68 +0,0 @@ -import type { TMessage } from 'librechat-data-provider'; -import { EModelEndpoint } from 'librechat-data-provider'; -import { useRecoilValue } from 'recoil'; -import store from '~/store'; - -type TUseGenerations = { - endpoint?: string; - message: TMessage; - isSubmitting: boolean; - isEditing?: boolean; - latestMessage?: TMessage | null; -}; - -export default function useGenerations({ - endpoint, - message, - isSubmitting, - isEditing = false, - latestMessage: _latestMessage, -}: TUseGenerations) { - const latestMessage = useRecoilValue(store.latestMessage) ?? _latestMessage; - - const { error, messageId, searchResult, finish_reason, isCreatedByUser } = message ?? {}; - const isEditableEndpoint = !![ - EModelEndpoint.openAI, - EModelEndpoint.google, - EModelEndpoint.assistants, - EModelEndpoint.anthropic, - EModelEndpoint.gptPlugins, - EModelEndpoint.azureOpenAI, - ].find((e) => e === endpoint); - - const continueSupported = - latestMessage?.messageId === messageId && - finish_reason && - finish_reason !== 'stop' && - !isEditing && - !searchResult && - isEditableEndpoint; - - const branchingSupported = - // 5/21/23: Bing is allowing editing and Message regenerating - !![ - EModelEndpoint.azureOpenAI, - EModelEndpoint.openAI, - EModelEndpoint.chatGPTBrowser, - EModelEndpoint.google, - EModelEndpoint.bingAI, - EModelEndpoint.gptPlugins, - EModelEndpoint.anthropic, - ].find((e) => e === endpoint); - - const regenerateEnabled = - !isCreatedByUser && !searchResult && !isEditing && !isSubmitting && branchingSupported; - - const hideEditButton = - isSubmitting || - error || - searchResult || - !branchingSupported || - (!isEditableEndpoint && !isCreatedByUser); - - return { - continueSupported, - regenerateEnabled, - hideEditButton, - }; -} diff --git a/client/src/hooks/useGenerationsByLatest.ts b/client/src/hooks/useGenerationsByLatest.ts index 361370e509a..8dc37062f26 100644 --- a/client/src/hooks/useGenerationsByLatest.ts +++ b/client/src/hooks/useGenerationsByLatest.ts @@ -1,5 +1,5 @@ import type { TMessage } from 'librechat-data-provider'; -import { EModelEndpoint } from 'librechat-data-provider'; +import { EModelEndpoint, isAssistantsEndpoint } from 'librechat-data-provider'; type TUseGenerations = { endpoint?: string; @@ -21,7 +21,6 @@ export default function useGenerationsByLatest({ EModelEndpoint.openAI, EModelEndpoint.custom, EModelEndpoint.google, - EModelEndpoint.assistants, EModelEndpoint.anthropic, EModelEndpoint.gptPlugins, EModelEndpoint.azureOpenAI, @@ -58,12 +57,13 @@ export default function useGenerationsByLatest({ !branchingSupported || (!isEditableEndpoint && !isCreatedByUser); - const forkingSupported = endpoint !== EModelEndpoint.assistants && !searchResult; + const forkingSupported = !isAssistantsEndpoint(endpoint) && !searchResult; return { forkingSupported, continueSupported, regenerateEnabled, + isEditableEndpoint, hideEditButton, }; } diff --git a/client/src/hooks/useNewConvo.ts b/client/src/hooks/useNewConvo.ts index 6923240e281..6b47b545d5a 100644 --- a/client/src/hooks/useNewConvo.ts +++ b/client/src/hooks/useNewConvo.ts @@ -4,12 +4,8 @@ import { useGetStartupConfig, useGetEndpointsQuery, } from 'librechat-data-provider/react-query'; -import { - FileSources, - EModelEndpoint, - LocalStorageKeys, - 
defaultOrderQuery, -} from 'librechat-data-provider'; +import { useNavigate } from 'react-router-dom'; +import { FileSources, LocalStorageKeys, isAssistantsEndpoint } from 'librechat-data-provider'; import { useRecoilState, useRecoilValue, @@ -24,6 +20,7 @@ import type { TConversation, TEndpointsConfig, } from 'librechat-data-provider'; +import type { AssistantListItem } from '~/common'; import { getEndpointField, buildDefaultConvo, @@ -32,13 +29,14 @@ import { getModelSpecIconURL, updateLastSelectedModel, } from '~/utils'; -import { useDeleteFilesMutation, useListAssistantsQuery } from '~/data-provider'; -import useOriginNavigate from './useOriginNavigate'; +import useAssistantListMap from './Assistants/useAssistantListMap'; +import { useDeleteFilesMutation } from '~/data-provider'; + import { mainTextareaId } from '~/common'; import store from '~/store'; const useNewConvo = (index = 0) => { - const navigate = useOriginNavigate(); + const navigate = useNavigate(); const { data: startupConfig } = useGetStartupConfig(); const defaultPreset = useRecoilValue(store.defaultPreset); const { setConversation } = store.useCreateConversationAtom(index); @@ -48,11 +46,7 @@ const useNewConvo = (index = 0) => { const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery(); const modelsQuery = useGetModelsQuery(); const timeoutIdRef = useRef(); - - const { data: assistants = [] } = useListAssistantsQuery(defaultOrderQuery, { - select: (res) => - res.data.map(({ id, name, metadata, model }) => ({ id, name, metadata, model })), - }); + const assistantsListMap = useAssistantListMap(); const { mutateAsync } = useDeleteFilesMutation({ onSuccess: () => { @@ -100,12 +94,21 @@ const useNewConvo = (index = 0) => { conversation.endpointType = undefined; } - const isAssistantEndpoint = defaultEndpoint === EModelEndpoint.assistants; + const isAssistantEndpoint = isAssistantsEndpoint(defaultEndpoint); + const assistants: AssistantListItem[] = assistantsListMap[defaultEndpoint] ?? []; + + if ( + conversation.assistant_id && + !assistantsListMap[defaultEndpoint]?.[conversation.assistant_id] + ) { + conversation.assistant_id = undefined; + } if (!conversation.assistant_id && isAssistantEndpoint) { conversation.assistant_id = - localStorage.getItem(`${LocalStorageKeys.ASST_ID_PREFIX}${index}`) ?? - assistants[0]?.id; + localStorage.getItem( + `${LocalStorageKeys.ASST_ID_PREFIX}${index}${defaultEndpoint}`, + ) ?? 
assistants[0]?.id; } if ( @@ -116,7 +119,7 @@ const useNewConvo = (index = 0) => { const assistant = assistants.find((asst) => asst.id === conversation.assistant_id); conversation.model = assistant?.model; updateLastSelectedModel({ - endpoint: EModelEndpoint.assistants, + endpoint: defaultEndpoint, model: conversation.model, }); } @@ -145,7 +148,7 @@ const useNewConvo = (index = 0) => { if (appTitle) { document.title = appTitle; } - navigate('new'); + navigate('/c/new'); } clearTimeout(timeoutIdRef.current); @@ -156,7 +159,7 @@ const useNewConvo = (index = 0) => { } }, 150); }, - [endpointsConfig, defaultPreset, assistants, modelsQuery.data], + [endpointsConfig, defaultPreset, assistantsListMap, modelsQuery.data], ); const newConversation = useCallback( diff --git a/client/src/hooks/useOriginNavigate.ts b/client/src/hooks/useOriginNavigate.ts deleted file mode 100644 index 9c5ca68e504..00000000000 --- a/client/src/hooks/useOriginNavigate.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { useNavigate, useLocation } from 'react-router-dom'; - -const useOriginNavigate = () => { - const _navigate = useNavigate(); - const location = useLocation(); - - const navigate = (url?: string | null, opts = {}) => { - if (!url) { - return; - } - const path = location.pathname.match(/^\/[^/]+\//); - _navigate(`${path ? path[0] : '/c/'}${url}`, opts); - }; - - return navigate; -}; - -export default useOriginNavigate; diff --git a/client/src/localization/languages/Ar.ts b/client/src/localization/languages/Ar.ts index 8307181b26e..a755fa7ab8f 100644 --- a/client/src/localization/languages/Ar.ts +++ b/client/src/localization/languages/Ar.ts @@ -297,6 +297,15 @@ export default { com_nav_setting_general: 'عام', com_nav_setting_data: 'تحكم في البيانات', /* The following are AI translated */ + com_assistants_file_search: 'بحث الملفات', + com_assistants_file_search_info: + 'لا يتم دعم إرفاق مخازن الكتل الرقمية لميزة البحث في الملفات بعد. يمكنك إرفاقها من ملعب المزود أو إرفاق ملفات إلى الرسائل للبحث في الملفات على أساس المحادثة.', + com_assistants_non_retrieval_model: + 'البحث في الملفات غير مُمكّن على هذا النموذج. يرجى تحديد نموذج آخر.', + com_ui_attach_error_openai: 'لا يمكن إرفاق ملفات المساعد إلى نقاط نهائية أخرى', + com_ui_attach_warn_endpoint: 'قد يتم تجاهل الملفات غير المساعدة دون وجود أداة متوافقة', + com_ui_assistant_deleted: 'تم حذف المساعد بنجاح', + com_ui_assistant_delete_error: 'حدث خطأ أثناء حذف المساعد', com_ui_copied: 'تم النسخ', com_ui_copy_code: 'نسخ الكود', com_ui_copy_link: 'نسخ الرابط', @@ -1636,6 +1645,36 @@ export const comparisons = { english: 'Data controls', translated: 'تحكم في البيانات', }, + com_assistants_file_search: { + english: 'File Search', + translated: 'بحث الملفات', + }, + com_assistants_file_search_info: { + english: + 'Attaching vector stores for File Search is not yet supported. You can attach them from the Provider Playground or attach files to messages for file search on a thread basis.', + translated: + 'لا يتم دعم إرفاق مخازن الكتل الرقمية لميزة البحث في الملفات بعد. يمكنك إرفاقها من ملعب المزود أو إرفاق ملفات إلى الرسائل للبحث في الملفات على أساس المحادثة.', + }, + com_assistants_non_retrieval_model: { + english: 'File search is not enabled on this model. Please select another model.', + translated: 'البحث في الملفات غير مُمكّن على هذا النموذج. 
يرجى تحديد نموذج آخر.', + }, + com_ui_attach_error_openai: { + english: 'Cannot attach Assistant files to other endpoints', + translated: 'لا يمكن إرفاق ملفات المساعد إلى نقاط نهائية أخرى', + }, + com_ui_attach_warn_endpoint: { + english: 'Non-Assistant files may be ignored without a compatible tool', + translated: 'قد يتم تجاهل الملفات غير المساعدة دون وجود أداة متوافقة', + }, + com_ui_assistant_deleted: { + english: 'Successfully deleted assistant', + translated: 'تم حذف المساعد بنجاح', + }, + com_ui_assistant_delete_error: { + english: 'There was an error deleting the assistant', + translated: 'حدث خطأ أثناء حذف المساعد', + }, com_ui_copied: { english: 'Copied!', translated: 'تم النسخ', diff --git a/client/src/localization/languages/De.ts b/client/src/localization/languages/De.ts index 08e023d7ccd..a548fcb3d00 100644 --- a/client/src/localization/languages/De.ts +++ b/client/src/localization/languages/De.ts @@ -481,6 +481,16 @@ export default { com_nav_setting_account: 'Konto', com_nav_language: 'Sprache', /* The following are AI Translated */ + com_assistants_file_search: 'Dateisuche', + com_assistants_file_search_info: + 'Das Anhängen von Vektorspeichern für die Dateisuche wird derzeit noch nicht unterstützt. Du kannst sie im Provider Playground anhängen oder Dateien für die Dateisuche pro Thread anhängen.', + com_assistants_non_retrieval_model: + 'Die Dateisuche ist für dieses Modell nicht aktiviert. Bitte wähle ein anderes Modell aus.', + com_ui_attach_error_openai: 'Assistent-Dateien können nicht an andere Endpunkte angehängt werden', + com_ui_attach_warn_endpoint: + 'Nicht-Assistent-Dateien könnten ohne ein kompatibles Werkzeug ignoriert werden', + com_ui_assistant_deleted: 'Assistent erfolgreich gelöscht', + com_ui_assistant_delete_error: 'Beim Löschen des Assistenten ist ein Fehler aufgetreten.', com_ui_copied: 'Kopiert', com_ui_copy_code: 'Code kopieren', com_ui_copy_link: 'Link kopieren', @@ -2305,6 +2315,37 @@ export const comparisons = { english: 'Language', translated: 'Sprache', }, + com_assistants_file_search: { + english: 'File Search', + translated: 'Dateisuche', + }, + com_assistants_file_search_info: { + english: + 'Attaching vector stores for File Search is not yet supported. You can attach them from the Provider Playground or attach files to messages for file search on a thread basis.', + translated: + 'Das Anhängen von Vektorspeichern für die Dateisuche wird derzeit noch nicht unterstützt. Du kannst sie im Provider Playground anhängen oder Dateien für die Dateisuche pro Thread anhängen.', + }, + com_assistants_non_retrieval_model: { + english: 'File search is not enabled on this model. Please select another model.', + translated: + 'Die Dateisuche ist für dieses Modell nicht aktiviert. 
Bitte wähle ein anderes Modell aus.', + }, + com_ui_attach_error_openai: { + english: 'Cannot attach Assistant files to other endpoints', + translated: 'Assistent-Dateien können nicht an andere Endpunkte angehängt werden', + }, + com_ui_attach_warn_endpoint: { + english: 'Non-Assistant files may be ignored without a compatible tool', + translated: 'Nicht-Assistent-Dateien könnten ohne ein kompatibles Werkzeug ignoriert werden', + }, + com_ui_assistant_deleted: { + english: 'Successfully deleted assistant', + translated: 'Assistent erfolgreich gelöscht', + }, + com_ui_assistant_delete_error: { + english: 'There was an error deleting the assistant', + translated: 'Beim Löschen des Assistenten ist ein Fehler aufgetreten.', + }, com_ui_copied: { english: 'Copied!', translated: 'Kopiert', diff --git a/client/src/localization/languages/Eng.ts b/client/src/localization/languages/Eng.ts index f1b3374a25b..00d63238560 100644 --- a/client/src/localization/languages/Eng.ts +++ b/client/src/localization/languages/Eng.ts @@ -20,6 +20,9 @@ export default { com_sidepanel_attach_files: 'Attach Files', com_sidepanel_manage_files: 'Manage Files', com_assistants_capabilities: 'Capabilities', + com_assistants_file_search: 'File Search', + com_assistants_file_search_info: + 'Attaching vector stores for File Search is not yet supported. You can attach them from the Provider Playground or attach files to messages for file search on a thread basis.', com_assistants_knowledge: 'Knowledge', com_assistants_knowledge_info: 'If you upload files under Knowledge, conversations with your Assistant may include file contents.', @@ -35,6 +38,8 @@ export default { com_assistants_actions: 'Actions', com_assistants_add_tools: 'Add Tools', com_assistants_add_actions: 'Add Actions', + com_assistants_non_retrieval_model: + 'File search is not enabled on this model. Please select another model.', com_assistants_available_actions: 'Available Actions', com_assistants_running_action: 'Running action', com_assistants_completed_action: 'Talked to {0}', @@ -73,6 +78,8 @@ export default { com_ui_field_required: 'This field is required', com_ui_download_error: 'Error downloading file. The file may have been deleted.', com_ui_attach_error_type: 'Unsupported file type for endpoint:', + com_ui_attach_error_openai: 'Cannot attach Assistant files to other endpoints', + com_ui_attach_warn_endpoint: 'Non-Assistant files may be ignored without a compatible tool', com_ui_attach_error_size: 'File size limit exceeded for endpoint:', com_ui_attach_error: 'Cannot attach file. Create or select a conversation, or try refreshing the page.', @@ -196,6 +203,8 @@ export default { com_ui_result: 'Result', com_ui_image_gen: 'Image Gen', com_ui_assistant: 'Assistant', + com_ui_assistant_deleted: 'Successfully deleted assistant', + com_ui_assistant_delete_error: 'There was an error deleting the assistant', com_ui_assistants: 'Assistants', com_ui_attachment: 'Attachment', com_ui_assistants_output: 'Assistants Output', diff --git a/client/src/localization/languages/Es.ts b/client/src/localization/languages/Es.ts index 6b258b4337b..b708d1efa43 100644 --- a/client/src/localization/languages/Es.ts +++ b/client/src/localization/languages/Es.ts @@ -475,6 +475,17 @@ export default { com_nav_lang_auto: 'Detección automática', com_nav_lang_spanish: 'Español', /* The following are AI Translated */ + com_assistants_file_search: 'Búsqueda de Archivos', + com_assistants_file_search_info: + 'Adjuntar almacenes vectoriales para la Búsqueda de Archivos aún no está soportado. 
Puede adjuntarlos desde el Área de Pruebas del Proveedor o adjuntar archivos a los mensajes para la búsqueda de archivos en una conversación específica.', + com_assistants_non_retrieval_model: + 'La búsqueda de archivos no está habilitada en este modelo. Por favor, seleccione otro modelo.', + com_ui_attach_error_openai: + 'No se pueden adjuntar archivos del Asistente a otros puntos de conexión', + com_ui_attach_warn_endpoint: + 'Es posible que los archivos no compatibles con la herramienta sean ignorados', + com_ui_assistant_deleted: 'Asistente eliminado con éxito', + com_ui_assistant_delete_error: 'Hubo un error al eliminar el asistente', com_ui_copied: '¡Copiado!', com_ui_copy_code: 'Copiar código', com_ui_copy_link: 'Copiar enlace', @@ -2286,6 +2297,37 @@ export const comparisons = { english: 'Español', translated: 'Español', }, + com_assistants_file_search: { + english: 'File Search', + translated: 'Búsqueda de Archivos', + }, + com_assistants_file_search_info: { + english: + 'Attaching vector stores for File Search is not yet supported. You can attach them from the Provider Playground or attach files to messages for file search on a thread basis.', + translated: + 'Adjuntar almacenes vectoriales para la Búsqueda de Archivos aún no está soportado. Puede adjuntarlos desde el Área de Pruebas del Proveedor o adjuntar archivos a los mensajes para la búsqueda de archivos en una conversación específica.', + }, + com_assistants_non_retrieval_model: { + english: 'File search is not enabled on this model. Please select another model.', + translated: + 'La búsqueda de archivos no está habilitada en este modelo. Por favor, seleccione otro modelo.', + }, + com_ui_attach_error_openai: { + english: 'Cannot attach Assistant files to other endpoints', + translated: 'No se pueden adjuntar archivos del Asistente a otros puntos de conexión', + }, + com_ui_attach_warn_endpoint: { + english: 'Non-Assistant files may be ignored without a compatible tool', + translated: 'Es posible que los archivos no compatibles con la herramienta sean ignorados', + }, + com_ui_assistant_deleted: { + english: 'Successfully deleted assistant', + translated: 'Asistente eliminado con éxito', + }, + com_ui_assistant_delete_error: { + english: 'There was an error deleting the assistant', + translated: 'Hubo un error al eliminar el asistente', + }, com_ui_copied: { english: 'Copied!', translated: '¡Copiado!', diff --git a/client/src/localization/languages/Fr.ts b/client/src/localization/languages/Fr.ts index fb193547d91..1eb2b6c8eaf 100644 --- a/client/src/localization/languages/Fr.ts +++ b/client/src/localization/languages/Fr.ts @@ -364,6 +364,16 @@ export default { com_nav_setting_data: 'Contrôles des données', com_nav_setting_account: 'Compte', /* The following are AI Translated */ + com_assistants_file_search: 'Recherche de fichiers', + com_assistants_file_search_info: + 'L\'ajout de vecteurs de stockage pour la recherche de fichiers n\'est pas encore pris en charge. Vous pouvez les ajouter depuis le terrain de jeu du fournisseur ou joindre des fichiers aux messages pour une recherche de fichiers au niveau du fil de discussion.', + com_assistants_non_retrieval_model: + 'La recherche de fichiers n\'est pas activée pour ce modèle. 
Veuillez sélectionner un autre modèle.', + com_ui_attach_error_openai: + 'Impossible de joindre les fichiers de l\'Assistant à d\'autres points d\'accès', + com_ui_attach_warn_endpoint: 'Les fichiers non compatibles avec l\'outil peuvent être ignorés', + com_ui_assistant_deleted: 'Assistant supprimé avec succès', + com_ui_assistant_delete_error: 'Une erreur s\'est produite lors de la suppression de l\'assistant.', com_ui_copied: 'Copié !', com_ui_copy_code: 'Copier le code', com_ui_copy_link: 'Copier le lien', @@ -1863,6 +1873,37 @@ export const comparisons = { english: 'Account', translated: 'Compte', }, + com_assistants_file_search: { + english: 'File Search', + translated: 'Recherche de fichiers', + }, + com_assistants_file_search_info: { + english: + 'Attaching vector stores for File Search is not yet supported. You can attach them from the Provider Playground or attach files to messages for file search on a thread basis.', + translated: + 'L\'ajout de vecteurs de stockage pour la recherche de fichiers n\'est pas encore pris en charge. Vous pouvez les ajouter depuis le terrain de jeu du fournisseur ou joindre des fichiers aux messages pour une recherche de fichiers au niveau du fil de discussion.', + }, + com_assistants_non_retrieval_model: { + english: 'File search is not enabled on this model. Please select another model.', + translated: + 'La recherche de fichiers n\'est pas activée pour ce modèle. Veuillez sélectionner un autre modèle.', + }, + com_ui_attach_error_openai: { + english: 'Cannot attach Assistant files to other endpoints', + translated: 'Impossible de joindre les fichiers de l\'Assistant à d\'autres points d\'accès', + }, + com_ui_attach_warn_endpoint: { + english: 'Non-Assistant files may be ignored without a compatible tool', + translated: 'Les fichiers non compatibles avec l\'outil peuvent être ignorés', + }, + com_ui_assistant_deleted: { + english: 'Successfully deleted assistant', + translated: 'Assistant supprimé avec succès', + }, + com_ui_assistant_delete_error: { + english: 'There was an error deleting the assistant', + translated: 'Une erreur s\'est produite lors de la suppression de l\'assistant.', + }, com_ui_copied: { english: 'Copied!', translated: 'Copié !', diff --git a/client/src/localization/languages/It.ts b/client/src/localization/languages/It.ts index c5ea45bafe7..73d79050996 100644 --- a/client/src/localization/languages/It.ts +++ b/client/src/localization/languages/It.ts @@ -525,6 +525,16 @@ export default { com_nav_setting_data: 'Controlli dati', com_nav_setting_account: 'Account', /* The following are AI Translated */ + com_assistants_file_search: 'Ricerca File', + com_assistants_file_search_info: + 'L\'aggiunta di archivi vettoriali per la Ricerca File non è ancora supportata. Puoi aggiungerli dal Provider Playground o allegare file ai messaggi per la ricerca file su base di thread.', + com_assistants_non_retrieval_model: + 'La ricerca di file non è abilitata su questo modello. 
Seleziona un altro modello.', + com_ui_attach_error_openai: 'Non è possibile allegare file dell\'Assistente ad altri endpoint', + com_ui_attach_warn_endpoint: + 'Attenzione: i file non compatibili con lo strumento potrebbero essere ignorati', + com_ui_assistant_deleted: 'Assistente eliminato con successo', + com_ui_assistant_delete_error: 'Si è verificato un errore durante l\'eliminazione dell\'assistente', com_ui_copied: 'Copiato!', com_ui_copy_code: 'Copia codice', com_ui_copy_link: 'Copia link', @@ -2443,6 +2453,36 @@ export const comparisons = { english: 'Account', translated: 'Account', }, + com_assistants_file_search: { + english: 'File Search', + translated: 'Ricerca File', + }, + com_assistants_file_search_info: { + english: + 'Attaching vector stores for File Search is not yet supported. You can attach them from the Provider Playground or attach files to messages for file search on a thread basis.', + translated: + 'L\'aggiunta di archivi vettoriali per la Ricerca File non è ancora supportata. Puoi aggiungerli dal Provider Playground o allegare file ai messaggi per la ricerca file su base di thread.', + }, + com_assistants_non_retrieval_model: { + english: 'File search is not enabled on this model. Please select another model.', + translated: 'La ricerca di file non è abilitata su questo modello. Seleziona un altro modello.', + }, + com_ui_attach_error_openai: { + english: 'Cannot attach Assistant files to other endpoints', + translated: 'Non è possibile allegare file dell\'Assistente ad altri endpoint', + }, + com_ui_attach_warn_endpoint: { + english: 'Non-Assistant files may be ignored without a compatible tool', + translated: 'Attenzione: i file non compatibili con lo strumento potrebbero essere ignorati', + }, + com_ui_assistant_deleted: { + english: 'Successfully deleted assistant', + translated: 'Assistente eliminato con successo', + }, + com_ui_assistant_delete_error: { + english: 'There was an error deleting the assistant', + translated: 'Si è verificato un errore durante l\'eliminazione dell\'assistente', + }, com_ui_copied: { english: 'Copied!', translated: 'Copiato!', diff --git a/client/src/localization/languages/Jp.ts b/client/src/localization/languages/Jp.ts index f99d2d8f478..bd73c541941 100644 --- a/client/src/localization/languages/Jp.ts +++ b/client/src/localization/languages/Jp.ts @@ -473,6 +473,16 @@ export default { com_nav_setting_data: 'データ管理', com_nav_setting_account: 'アカウント', /* The following are AI translated */ + com_assistants_file_search: 'ファイル検索', + com_assistants_file_search_info: + 'ファイル検索用のベクトル ストアを添付することはまだサポートされていません。Provider Playgroundからそれらを添付するか、スレッド単位でメッセージにファイルを添付してファイル検索を行うことができます。', + com_assistants_non_retrieval_model: + 'このモデルではファイル検索機能は有効になっていません。別のモデルを選択してください。', + com_ui_attach_error_openai: '他のエンドポイントにAssistantファイルを添付することはできません', + com_ui_attach_warn_endpoint: + '互換性のあるツールがない場合、非アシスタントのファイルは無視される可能性があります', + com_ui_assistant_deleted: 'アシスタントが正常に削除されました', + com_ui_assistant_delete_error: 'アシスタントの削除中にエラーが発生しました。', com_ui_copied: 'コピーしました', com_ui_copy_code: 'コードをコピーする', com_ui_copy_link: 'リンクをコピー', @@ -2296,6 +2306,38 @@ export const comparisons = { english: 'Account', translated: 'アカウント', }, + com_assistants_file_search: { + english: 'File Search', + translated: 'ファイル検索', + }, + com_assistants_file_search_info: { + english: + 'Attaching vector stores for File Search is not yet supported. 
You can attach them from the Provider Playground or attach files to messages for file search on a thread basis.', + translated: + 'ファイル検索用のベクトル ストアを添付することはまだサポートされていません。Provider Playgroundからそれらを添付するか、スレッド単位でメッセージにファイルを添付してファイル検索を行うことができます。', + }, + com_assistants_non_retrieval_model: { + english: 'File search is not enabled on this model. Please select another model.', + translated: + 'このモデルではファイル検索機能は有効になっていません。別のモデルを選択してください。', + }, + com_ui_attach_error_openai: { + english: 'Cannot attach Assistant files to other endpoints', + translated: '他のエンドポイントにAssistantファイルを添付することはできません', + }, + com_ui_attach_warn_endpoint: { + english: 'Non-Assistant files may be ignored without a compatible tool', + translated: + '互換性のあるツールがない場合、非アシスタントのファイルは無視される可能性があります', + }, + com_ui_assistant_deleted: { + english: 'Successfully deleted assistant', + translated: 'アシスタントが正常に削除されました', + }, + com_ui_assistant_delete_error: { + english: 'There was an error deleting the assistant', + translated: 'アシスタントの削除中にエラーが発生しました。', + }, com_ui_copied: { english: 'Copied!', translated: 'コピーしました', diff --git a/client/src/localization/languages/Ko.ts b/client/src/localization/languages/Ko.ts index ef0aef5f8e9..0c2bc6a95ce 100644 --- a/client/src/localization/languages/Ko.ts +++ b/client/src/localization/languages/Ko.ts @@ -278,6 +278,15 @@ export default { com_nav_setting_general: '일반', com_nav_setting_data: '데이터 제어', /* The following are AI Translated */ + com_assistants_file_search: '파일 검색', + com_assistants_file_search_info: + '파일 검색을 위한 벡터 저장소 연결은 아직 지원되지 않습니다. Provider Playground에서 연결하거나 스레드 기반으로 메시지에 파일을 첨부하여 파일 검색을 할 수 있습니다.', + com_assistants_non_retrieval_model: + '이 모델에서는 파일 검색 기능을 사용할 수 없습니다. 다른 모델을 선택하세요.', + com_ui_attach_error_openai: '어시스턴트 파일을 다른 엔드포인트에 첨부할 수 없습니다.', + com_ui_attach_warn_endpoint: '호환되는 도구가 없으면 비어시스턴트 파일이 무시될 수 있습니다.', + com_ui_assistant_deleted: '어시스턴트가 성공적으로 삭제되었습니다', + com_ui_assistant_delete_error: '어시스턴트 삭제 중 오류가 발생했습니다.', com_ui_copied: '복사됨', com_ui_copy_code: '코드 복사', com_ui_copy_link: '링크 복사', @@ -1581,6 +1590,36 @@ export const comparisons = { english: 'Data controls', translated: '데이터 제어', }, + com_assistants_file_search: { + english: 'File Search', + translated: '파일 검색', + }, + com_assistants_file_search_info: { + english: + 'Attaching vector stores for File Search is not yet supported. You can attach them from the Provider Playground or attach files to messages for file search on a thread basis.', + translated: + '파일 검색을 위한 벡터 저장소 연결은 아직 지원되지 않습니다. Provider Playground에서 연결하거나 스레드 기반으로 메시지에 파일을 첨부하여 파일 검색을 할 수 있습니다.', + }, + com_assistants_non_retrieval_model: { + english: 'File search is not enabled on this model. Please select another model.', + translated: '이 모델에서는 파일 검색 기능을 사용할 수 없습니다. 
다른 모델을 선택하세요.', + }, + com_ui_attach_error_openai: { + english: 'Cannot attach Assistant files to other endpoints', + translated: '어시스턴트 파일을 다른 엔드포인트에 첨부할 수 없습니다.', + }, + com_ui_attach_warn_endpoint: { + english: 'Non-Assistant files may be ignored without a compatible tool', + translated: '호환되는 도구가 없으면 비어시스턴트 파일이 무시될 수 있습니다.', + }, + com_ui_assistant_deleted: { + english: 'Successfully deleted assistant', + translated: '어시스턴트가 성공적으로 삭제되었습니다', + }, + com_ui_assistant_delete_error: { + english: 'There was an error deleting the assistant', + translated: '어시스턴트 삭제 중 오류가 발생했습니다.', + }, com_ui_copied: { english: 'Copied!', translated: '복사됨', diff --git a/client/src/localization/languages/Ru.ts b/client/src/localization/languages/Ru.ts index 0c5b2eabd97..862802c89a2 100644 --- a/client/src/localization/languages/Ru.ts +++ b/client/src/localization/languages/Ru.ts @@ -381,6 +381,16 @@ export default { com_ui_upload_error: 'Произошла ошибка при загрузке вашего файла', com_user_message: 'Вы', /* The following are AI Translated */ + com_assistants_file_search: 'Поиск файлов', + com_assistants_file_search_info: + 'Прикрепление векторных хранилищ для Поиска по файлам пока не поддерживается. Вы можете прикрепить их из Песочницы провайдера или прикрепить файлы к сообщениям для поиска по файлам в отдельных диалогах.', + com_assistants_non_retrieval_model: + 'Поиск по файлам недоступен для этой модели. Пожалуйста, выберите другую модель.', + com_ui_attach_error_openai: 'Невозможно прикрепить файлы ассистента к другим режимам', + com_ui_attach_warn_endpoint: + 'Файлы сторонних приложений могут быть проигнорированы без совместимого плагина', + com_ui_assistant_deleted: 'Ассистент успешно удален', + com_ui_assistant_delete_error: 'Произошла ошибка при удалении ассистента', com_ui_copied: 'Скопировано', com_ui_copy_code: 'Копировать код', com_ui_copy_link: 'Копировать ссылку', @@ -1948,6 +1958,36 @@ export const comparisons = { english: 'You', translated: 'Вы', }, + com_assistants_file_search: { + english: 'File Search', + translated: 'Поиск файлов', + }, + com_assistants_file_search_info: { + english: + 'Attaching vector stores for File Search is not yet supported. You can attach them from the Provider Playground or attach files to messages for file search on a thread basis.', + translated: + 'Прикрепление векторных хранилищ для Поиска по файлам пока не поддерживается. Вы можете прикрепить их из Песочницы провайдера или прикрепить файлы к сообщениям для поиска по файлам в отдельных диалогах.', + }, + com_assistants_non_retrieval_model: { + english: 'File search is not enabled on this model. Please select another model.', + translated: 'Поиск по файлам недоступен для этой модели. 
Пожалуйста, выберите другую модель.', + }, + com_ui_attach_error_openai: { + english: 'Cannot attach Assistant files to other endpoints', + translated: 'Невозможно прикрепить файлы ассистента к другим режимам', + }, + com_ui_attach_warn_endpoint: { + english: 'Non-Assistant files may be ignored without a compatible tool', + translated: 'Файлы сторонних приложений могут быть проигнорированы без совместимого плагина', + }, + com_ui_assistant_deleted: { + english: 'Successfully deleted assistant', + translated: 'Ассистент успешно удален', + }, + com_ui_assistant_delete_error: { + english: 'There was an error deleting the assistant', + translated: 'Произошла ошибка при удалении ассистента', + }, com_ui_copied: { english: 'Copied!', translated: 'Скопировано', diff --git a/client/src/localization/languages/Zh.ts b/client/src/localization/languages/Zh.ts index 4df8edc696b..0093c761b26 100644 --- a/client/src/localization/languages/Zh.ts +++ b/client/src/localization/languages/Zh.ts @@ -434,6 +434,14 @@ export default { com_nav_setting_data: '数据管理', com_nav_setting_account: '账户', /* The following are AI Translated */ + com_assistants_file_search: '文件搜索', + com_assistants_file_search_info: + '暂不支持为文件搜索附加向量存储。您可以从提供程序游乐场附加它们,或者在线程基础上为文件搜索附加文件。', + com_assistants_non_retrieval_model: '此模型未启用文件搜索功能。请选择其他模型。', + com_ui_attach_error_openai: '无法将助手文件附加到其他渠道', + com_ui_attach_warn_endpoint: '不兼容的工具可能会忽略非助手文件', + com_ui_assistant_deleted: '助手已成功删除', + com_ui_assistant_delete_error: '删除助手时出错。', com_ui_date_october: '十月', com_ui_date_november: '十一月', com_ui_date_december: '十二月', @@ -2198,6 +2206,36 @@ export const comparisons = { english: 'Account', translated: '账户', }, + com_assistants_file_search: { + english: 'File Search', + translated: '文件搜索', + }, + com_assistants_file_search_info: { + english: + 'Attaching vector stores for File Search is not yet supported. You can attach them from the Provider Playground or attach files to messages for file search on a thread basis.', + translated: + '暂不支持为文件搜索附加向量存储。您可以从提供程序游乐场附加它们,或者在线程基础上为文件搜索附加文件。', + }, + com_assistants_non_retrieval_model: { + english: 'File search is not enabled on this model. 
Please select another model.', + translated: '此模型未启用文件搜索功能。请选择其他模型。', + }, + com_ui_attach_error_openai: { + english: 'Cannot attach Assistant files to other endpoints', + translated: '无法将助手文件附加到其他渠道', + }, + com_ui_attach_warn_endpoint: { + english: 'Non-Assistant files may be ignored without a compatible tool', + translated: '不兼容的工具可能会忽略非助手文件', + }, + com_ui_assistant_deleted: { + english: 'Successfully deleted assistant', + translated: '助手已成功删除', + }, + com_ui_assistant_delete_error: { + english: 'There was an error deleting the assistant', + translated: '删除助手时出错。', + }, com_ui_date_october: { english: 'October', translated: '十月', diff --git a/client/src/localization/languages/ZhTraditional.ts b/client/src/localization/languages/ZhTraditional.ts index 23a59dc3112..5150436922e 100644 --- a/client/src/localization/languages/ZhTraditional.ts +++ b/client/src/localization/languages/ZhTraditional.ts @@ -283,6 +283,14 @@ export default { com_nav_setting_general: '一般', com_nav_setting_data: '資料控制', /* The following are AI translated */ + com_assistants_file_search: '檔案搜尋', + com_assistants_file_search_info: + '目前尚不支援為檔案搜尋附加向量存儲。您可以從提供者遊樂場附加它們,或在每個主題的基礎上為檔案搜尋附加檔案。', + com_assistants_non_retrieval_model: '此模型未啟用檔案搜尋功能。請選擇其他模型。', + com_ui_attach_error_openai: '無法將助理檔案附加至其他端點', + com_ui_attach_warn_endpoint: '非相容工具的非助理檔案可能會被忽略', + com_ui_assistant_deleted: '已成功刪除助理', + com_ui_assistant_delete_error: '刪除助理時發生錯誤', com_ui_copied: '已複製!', com_ui_copy_code: '複製程式碼', com_ui_copy_link: '複製連結', @@ -1611,6 +1619,36 @@ export const comparisons = { english: 'Data controls', translated: '資料控制', }, + com_assistants_file_search: { + english: 'File Search', + translated: '檔案搜尋', + }, + com_assistants_file_search_info: { + english: + 'Attaching vector stores for File Search is not yet supported. You can attach them from the Provider Playground or attach files to messages for file search on a thread basis.', + translated: + '目前尚不支援為檔案搜尋附加向量存儲。您可以從提供者遊樂場附加它們,或在每個主題的基礎上為檔案搜尋附加檔案。', + }, + com_assistants_non_retrieval_model: { + english: 'File search is not enabled on this model. 
Please select another model.', + translated: '此模型未啟用檔案搜尋功能。請選擇其他模型。', + }, + com_ui_attach_error_openai: { + english: 'Cannot attach Assistant files to other endpoints', + translated: '無法將助理檔案附加至其他端點', + }, + com_ui_attach_warn_endpoint: { + english: 'Non-Assistant files may be ignored without a compatible tool', + translated: '非相容工具的非助理檔案可能會被忽略', + }, + com_ui_assistant_deleted: { + english: 'Successfully deleted assistant', + translated: '已成功刪除助理', + }, + com_ui_assistant_delete_error: { + english: 'There was an error deleting the assistant', + translated: '刪除助理時發生錯誤', + }, com_ui_copied: { english: 'Copied!', translated: '已複製!', diff --git a/client/src/mobile.css b/client/src/mobile.css index 50d84c1efed..80235fc107b 100644 --- a/client/src/mobile.css +++ b/client/src/mobile.css @@ -270,4 +270,8 @@ .radix-side-top\:animate-slideDownAndFade[data-side=top] { -webkit-animation:slideDownAndFade .4s cubic-bezier(.16,1,.3,1); animation:slideDownAndFade .4s cubic-bezier(.16,1,.3,1) +} + +.azure-bg-color { + background: linear-gradient(0.375turn, #61bde2, #4389d0); } \ No newline at end of file diff --git a/client/src/routes/ChatRoute.tsx b/client/src/routes/ChatRoute.tsx index 8b215e67b3d..3f9e0a50243 100644 --- a/client/src/routes/ChatRoute.tsx +++ b/client/src/routes/ChatRoute.tsx @@ -1,15 +1,15 @@ import { useEffect, useRef } from 'react'; import { useParams } from 'react-router-dom'; -import { defaultOrderQuery } from 'librechat-data-provider'; +import { EModelEndpoint } from 'librechat-data-provider'; import { useGetModelsQuery, useGetStartupConfig, useGetEndpointsQuery, } from 'librechat-data-provider/react-query'; import type { TPreset } from 'librechat-data-provider'; -import { useGetConvoIdQuery, useListAssistantsQuery } from '~/data-provider'; +import { useNewConvo, useAppStartup, useAssistantListMap } from '~/hooks'; import { getDefaultModelSpec, getModelSpecIconURL } from '~/utils'; -import { useNewConvo, useAppStartup } from '~/hooks'; +import { useGetConvoIdQuery } from '~/data-provider'; import ChatView from '~/components/Chat/ChatView'; import useAuthRedirect from './useAuthRedirect'; import { Spinner } from '~/components/svg'; @@ -35,10 +35,7 @@ export default function ChatRoute() { enabled: isAuthenticated && conversationId !== 'new', }); const endpointsQuery = useGetEndpointsQuery({ enabled: isAuthenticated }); - const { data: assistants = null } = useListAssistantsQuery(defaultOrderQuery, { - select: (res) => - res.data.map(({ id, name, metadata, model }) => ({ id, name, metadata, model })), - }); + const assistantListMap = useAssistantListMap(); useEffect(() => { if ( @@ -87,7 +84,8 @@ export default function ChatRoute() { !hasSetConversation.current && !modelsQuery.data?.initial && conversationId === 'new' && - assistants + assistantListMap[EModelEndpoint.assistants] && + assistantListMap[EModelEndpoint.azureAssistants] ) { const spec = getDefaultModelSpec(startupConfig.modelSpecs?.list); newConversation({ @@ -108,7 +106,8 @@ export default function ChatRoute() { startupConfig && !hasSetConversation.current && !modelsQuery.data?.initial && - assistants + assistantListMap[EModelEndpoint.assistants] && + assistantListMap[EModelEndpoint.azureAssistants] ) { newConversation({ template: initialConvoQuery.data, @@ -120,7 +119,13 @@ export default function ChatRoute() { } /* Creates infinite render if all dependencies included due to newConversation invocations exceeding call stack before hasSetConversation.current becomes truthy */ // eslint-disable-next-line 
react-hooks/exhaustive-deps - }, [startupConfig, initialConvoQuery.data, endpointsQuery.data, modelsQuery.data, assistants]); + }, [ + startupConfig, + initialConvoQuery.data, + endpointsQuery.data, + modelsQuery.data, + assistantListMap, + ]); if (endpointsQuery.isLoading || modelsQuery.isLoading) { return ; diff --git a/client/src/store/endpoints.ts b/client/src/store/endpoints.ts index aff35169871..b06a0c8d9ff 100644 --- a/client/src/store/endpoints.ts +++ b/client/src/store/endpoints.ts @@ -4,6 +4,7 @@ import type { TEndpointsConfig } from 'librechat-data-provider'; const defaultConfig: TEndpointsConfig = { [EModelEndpoint.azureOpenAI]: null, + [EModelEndpoint.azureAssistants]: null, [EModelEndpoint.assistants]: null, [EModelEndpoint.openAI]: null, [EModelEndpoint.bingAI]: null, diff --git a/client/src/store/families.ts b/client/src/store/families.ts index bfbde64cf14..7657d5b5635 100644 --- a/client/src/store/families.ts +++ b/client/src/store/families.ts @@ -21,7 +21,10 @@ const conversationByIndex = atomFamily({ onSet(async (newValue) => { const index = Number(node.key.split('__')[1]); if (newValue?.assistant_id) { - localStorage.setItem(`${LocalStorageKeys.ASST_ID_PREFIX}${index}`, newValue.assistant_id); + localStorage.setItem( + `${LocalStorageKeys.ASST_ID_PREFIX}${index}${newValue?.endpoint}`, + newValue.assistant_id, + ); } if (newValue?.spec) { localStorage.setItem(LocalStorageKeys.LAST_SPEC, newValue.spec); diff --git a/client/src/utils/buildDefaultConvo.ts b/client/src/utils/buildDefaultConvo.ts index c8b00b18f9a..b7dc50475f2 100644 --- a/client/src/utils/buildDefaultConvo.ts +++ b/client/src/utils/buildDefaultConvo.ts @@ -1,4 +1,4 @@ -import { parseConvo, EModelEndpoint } from 'librechat-data-provider'; +import { parseConvo, EModelEndpoint, isAssistantsEndpoint } from 'librechat-data-provider'; import type { TConversation } from 'librechat-data-provider'; import getLocalStorageItems from './getLocalStorageItems'; @@ -65,7 +65,7 @@ const buildDefaultConvo = ({ }; // Ensures assistant_id is always defined - if (endpoint === EModelEndpoint.assistants && !defaultConvo.assistant_id && convo.assistant_id) { + if (isAssistantsEndpoint(endpoint) && !defaultConvo.assistant_id && convo.assistant_id) { defaultConvo.assistant_id = convo.assistant_id; } diff --git a/client/src/utils/buildTree.ts b/client/src/utils/buildTree.ts index a38907665b6..d75b29f8cba 100644 --- a/client/src/utils/buildTree.ts +++ b/client/src/utils/buildTree.ts @@ -31,7 +31,7 @@ export default function buildTree({ if (message.files && fileMap) { messageMap[message.messageId].files = message.files.map( - (file) => fileMap[file.file_id] ?? file, + (file) => fileMap[file.file_id ?? ''] ?? 
file, ); } diff --git a/client/src/utils/endpoints.ts b/client/src/utils/endpoints.ts index cfbf1a0942d..fd557295c1c 100644 --- a/client/src/utils/endpoints.ts +++ b/client/src/utils/endpoints.ts @@ -3,6 +3,7 @@ import { defaultEndpoints, modularEndpoints, LocalStorageKeys, + isAssistantsEndpoint, } from 'librechat-data-provider'; import type { TConfig, @@ -139,8 +140,8 @@ export function getConvoSwitchLogic(params: ConversationInitParams): InitiatedTe }; const isAssistantSwitch = - newEndpoint === EModelEndpoint.assistants && - currentEndpoint === EModelEndpoint.assistants && + isAssistantsEndpoint(newEndpoint) && + isAssistantsEndpoint(currentEndpoint) && currentEndpoint === newEndpoint; const conversationId = conversation?.conversationId; diff --git a/librechat.example.yaml b/librechat.example.yaml index a2720b7fe7b..2699d3146aa 100644 --- a/librechat.example.yaml +++ b/librechat.example.yaml @@ -41,7 +41,7 @@ registration: endpoints: # assistants: # disableBuilder: false # Disable Assistants Builder Interface by setting to `true` - # pollIntervalMs: 750 # Polling interval for checking assistant updates + # pollIntervalMs: 3000 # Polling interval for checking assistant updates # timeoutMs: 180000 # Timeout for assistant operations # # Should only be one or the other, either `supportedIds` or `excludedIds` # supportedIds: ["asst_supportedAssistantId1", "asst_supportedAssistantId2"] diff --git a/package-lock.json b/package-lock.json index 64254638c88..83167f3b780 100644 --- a/package-lock.json +++ b/package-lock.json @@ -84,7 +84,7 @@ "nodejs-gpt": "^1.37.4", "nodemailer": "^6.9.4", "ollama": "^0.5.0", - "openai": "4.36.0", + "openai": "^4.47.1", "openai-chat-tokens": "^0.2.8", "openid-client": "^5.4.2", "passport": "^0.6.0", @@ -1071,9 +1071,9 @@ } }, "api/node_modules/openai": { - "version": "4.36.0", - "resolved": "https://registry.npmjs.org/openai/-/openai-4.36.0.tgz", - "integrity": "sha512-AtYrhhWY64LhB9P6f3H0nV8nTSaQJ89mWPnfNU5CnYg81zlYaV8nkyO+aTNfprdqP/9xv10woNNUgefXINT4Dg==", + "version": "4.47.1", + "resolved": "https://registry.npmjs.org/openai/-/openai-4.47.1.tgz", + "integrity": "sha512-WWSxhC/69ZhYWxH/OBsLEirIjUcfpQ5+ihkXKp06hmeYXgBBIUCa9IptMzYx6NdkiOCsSGYCnTIsxaic3AjRCQ==", "dependencies": { "@types/node": "^18.11.18", "@types/node-fetch": "^2.6.4", diff --git a/packages/data-provider/package.json b/packages/data-provider/package.json index add6ed869a5..8f638a75850 100644 --- a/packages/data-provider/package.json +++ b/packages/data-provider/package.json @@ -1,6 +1,6 @@ { "name": "librechat-data-provider", - "version": "0.6.4", + "version": "0.6.5", "description": "data services for librechat apps", "main": "dist/index.js", "module": "dist/index.es.js", diff --git a/packages/data-provider/src/actions.ts b/packages/data-provider/src/actions.ts index a13a9790aa7..120ab3d7b4c 100644 --- a/packages/data-provider/src/actions.ts +++ b/packages/data-provider/src/actions.ts @@ -212,6 +212,10 @@ export function resolveRef( return schema as OpenAPIV3.SchemaObject; } +function sanitizeOperationId(input: string) { + return input.replace(/[^a-zA-Z0-9_-]/g, ''); +} + /** Function to convert OpenAPI spec to function signatures and request builders */ export function openapiToFunction(openapiSpec: OpenAPIV3.Document): { functionSignatures: FunctionSignature[]; @@ -231,7 +235,8 @@ export function openapiToFunction(openapiSpec: OpenAPIV3.Document): { }; // Operation ID is used as the function name - const operationId = operationObj.operationId || `${method}_${path}`; + const 
defaultOperationId = `${method}_${path}`; + const operationId = operationObj.operationId || sanitizeOperationId(defaultOperationId); const description = operationObj.summary || operationObj.description || ''; const parametersSchema: ParametersSchema = { type: 'object', properties: {}, required: [] }; diff --git a/packages/data-provider/src/api-endpoints.ts b/packages/data-provider/src/api-endpoints.ts index e1fdd55fb06..d1f035d782e 100644 --- a/packages/data-provider/src/api-endpoints.ts +++ b/packages/data-provider/src/api-endpoints.ts @@ -1,3 +1,5 @@ +import type { AssistantsEndpoint } from './schemas'; + export const user = () => '/api/user'; export const balance = () => '/api/balance'; @@ -83,15 +85,32 @@ export const plugins = () => '/api/plugins'; export const config = () => '/api/config'; -export const assistants = (id?: string, options?: Record) => { - let url = '/api/assistants'; +export const assistants = ({ + path, + options, + version, + endpoint, +}: { + path?: string; + options?: object; + endpoint?: AssistantsEndpoint; + version: number | string; +}) => { + let url = `/api/assistants/v${version}`; + + if (path) { + url += `/${path}`; + } - if (id) { - url += `/${id}`; + if (endpoint) { + options = { + ...(options ?? {}), + endpoint, + }; } if (options && Object.keys(options).length > 0) { - const queryParams = new URLSearchParams(options).toString(); + const queryParams = new URLSearchParams(options as Record).toString(); url += `?${queryParams}`; } diff --git a/packages/data-provider/src/config.ts b/packages/data-provider/src/config.ts index a6a402b2aaf..7deccbed0b6 100644 --- a/packages/data-provider/src/config.ts +++ b/packages/data-provider/src/config.ts @@ -10,6 +10,8 @@ import { TModelsConfig } from './types'; export const defaultSocialLogins = ['google', 'facebook', 'openid', 'github', 'discord']; export const defaultRetrievalModels = [ + 'gpt-4o', + 'gpt-4o-2024-05-13', 'gpt-4-turbo-preview', 'gpt-3.5-turbo-0125', 'gpt-4-0125-preview', @@ -129,11 +131,17 @@ export enum Capabilities { tools = 'tools', } +export const defaultAssistantsVersion = { + [EModelEndpoint.assistants]: 2, + [EModelEndpoint.azureAssistants]: 1, +}; + export const assistantEndpointSchema = z.object({ /* assistants specific */ disableBuilder: z.boolean().optional(), pollIntervalMs: z.number().optional(), timeoutMs: z.number().optional(), + version: z.union([z.string(), z.number()]).default(2), supportedIds: z.array(z.string()).min(1).optional(), excludedIds: z.array(z.string()).min(1).optional(), retrievalModels: z.array(z.string()).min(1).optional().default(defaultRetrievalModels), @@ -287,6 +295,7 @@ export const configSchema = z.object({ endpoints: z .object({ [EModelEndpoint.azureOpenAI]: azureEndpointSchema.optional(), + [EModelEndpoint.azureAssistants]: assistantEndpointSchema.optional(), [EModelEndpoint.assistants]: assistantEndpointSchema.optional(), custom: z.array(endpointSchema.partial()).optional(), }) @@ -324,6 +333,7 @@ export enum FetchTokenConfig { export const defaultEndpoints: EModelEndpoint[] = [ EModelEndpoint.openAI, EModelEndpoint.assistants, + EModelEndpoint.azureAssistants, EModelEndpoint.azureOpenAI, EModelEndpoint.bingAI, EModelEndpoint.chatGPTBrowser, @@ -336,6 +346,7 @@ export const defaultEndpoints: EModelEndpoint[] = [ export const alternateName = { [EModelEndpoint.openAI]: 'OpenAI', [EModelEndpoint.assistants]: 'Assistants', + [EModelEndpoint.azureAssistants]: 'Azure Assistants', [EModelEndpoint.azureOpenAI]: 'Azure OpenAI', [EModelEndpoint.bingAI]: 'Bing', 
[EModelEndpoint.chatGPTBrowser]: 'ChatGPT', @@ -345,24 +356,27 @@ export const alternateName = { [EModelEndpoint.custom]: 'Custom', }; +const sharedOpenAIModels = [ + 'gpt-3.5-turbo', + 'gpt-3.5-turbo-0125', + 'gpt-4-turbo', + 'gpt-4-turbo-2024-04-09', + 'gpt-4-0125-preview', + 'gpt-4-turbo-preview', + 'gpt-4-1106-preview', + 'gpt-3.5-turbo-1106', + 'gpt-3.5-turbo-16k-0613', + 'gpt-3.5-turbo-16k', + 'gpt-4', + 'gpt-4-0314', + 'gpt-4-32k-0314', + 'gpt-4-0613', + 'gpt-3.5-turbo-0613', +]; + export const defaultModels = { - [EModelEndpoint.assistants]: [ - 'gpt-3.5-turbo', - 'gpt-3.5-turbo-0125', - 'gpt-4-turbo', - 'gpt-4-turbo-2024-04-09', - 'gpt-4-0125-preview', - 'gpt-4-turbo-preview', - 'gpt-4-1106-preview', - 'gpt-3.5-turbo-1106', - 'gpt-3.5-turbo-16k-0613', - 'gpt-3.5-turbo-16k', - 'gpt-4', - 'gpt-4-0314', - 'gpt-4-32k-0314', - 'gpt-4-0613', - 'gpt-3.5-turbo-0613', - ], + [EModelEndpoint.azureAssistants]: sharedOpenAIModels, + [EModelEndpoint.assistants]: ['gpt-4o', ...sharedOpenAIModels], [EModelEndpoint.google]: [ 'gemini-pro', 'gemini-pro-vision', @@ -391,25 +405,12 @@ export const defaultModels = { ], [EModelEndpoint.openAI]: [ 'gpt-4o', - 'gpt-3.5-turbo-0125', - 'gpt-4-turbo', - 'gpt-4-turbo-2024-04-09', - 'gpt-3.5-turbo-16k-0613', - 'gpt-3.5-turbo-16k', - 'gpt-4-turbo-preview', - 'gpt-4-0125-preview', - 'gpt-4-1106-preview', - 'gpt-3.5-turbo', - 'gpt-3.5-turbo-1106', + ...sharedOpenAIModels, 'gpt-4-vision-preview', - 'gpt-4', 'gpt-3.5-turbo-instruct-0914', - 'gpt-3.5-turbo-0613', 'gpt-3.5-turbo-0301', 'gpt-3.5-turbo-instruct', - 'gpt-4-0613', 'text-davinci-003', - 'gpt-4-0314', ], }; @@ -440,7 +441,8 @@ export const EndpointURLs: { [key in EModelEndpoint]: string } = { [EModelEndpoint.gptPlugins]: `/api/ask/${EModelEndpoint.gptPlugins}`, [EModelEndpoint.azureOpenAI]: `/api/ask/${EModelEndpoint.azureOpenAI}`, [EModelEndpoint.chatGPTBrowser]: `/api/ask/${EModelEndpoint.chatGPTBrowser}`, - [EModelEndpoint.assistants]: '/api/assistants/chat', + [EModelEndpoint.azureAssistants]: '/api/assistants/v1/chat', + [EModelEndpoint.assistants]: '/api/assistants/v2/chat', }; export const modularEndpoints = new Set([ @@ -458,6 +460,7 @@ export const supportsBalanceCheck = { [EModelEndpoint.anthropic]: true, [EModelEndpoint.gptPlugins]: true, [EModelEndpoint.assistants]: true, + [EModelEndpoint.azureAssistants]: true, [EModelEndpoint.azureOpenAI]: true, }; @@ -680,7 +683,7 @@ export enum Constants { /** Key for the app's version. */ VERSION = 'v0.7.2', /** Key for the Custom Config's version (librechat.yaml). */ - CONFIG_VERSION = '1.1.0', + CONFIG_VERSION = '1.1.1', /** Standard value for the first message's `parentMessageId` value, to indicate no parent exists. */ NO_PARENT = '00000000-0000-0000-0000-000000000000', /** Fixed, encoded domain length for Azure OpenAI Assistants Function name parsing. 
*/ diff --git a/packages/data-provider/src/createPayload.ts b/packages/data-provider/src/createPayload.ts index 5ab4f3cfc82..32b222747d0 100644 --- a/packages/data-provider/src/createPayload.ts +++ b/packages/data-provider/src/createPayload.ts @@ -1,5 +1,5 @@ import type { TSubmission, TMessage, TEndpointOption } from './types'; -import { tConvoUpdateSchema, EModelEndpoint } from './schemas'; +import { tConvoUpdateSchema, EModelEndpoint, isAssistantsEndpoint } from './schemas'; import { EndpointURLs } from './config'; export default function createPayload(submission: TSubmission) { @@ -12,7 +12,7 @@ export default function createPayload(submission: TSubmission) { let server = EndpointURLs[endpointType ?? endpoint]; - if (isEdited && endpoint === EModelEndpoint.assistants) { + if (isEdited && isAssistantsEndpoint(endpoint)) { server += '/modify'; } else if (isEdited) { server = server.replace('/ask/', '/edit/'); diff --git a/packages/data-provider/src/data-service.ts b/packages/data-provider/src/data-service.ts index 55fe8f459b1..3554884184f 100644 --- a/packages/data-provider/src/data-service.ts +++ b/packages/data-provider/src/data-service.ts @@ -166,39 +166,105 @@ export const getEndpointsConfigOverride = (): Promise => { /* Assistants */ -export const createAssistant = (data: a.AssistantCreateParams): Promise => { - return request.post(endpoints.assistants(), data); -}; - -export const getAssistantById = (assistant_id: string): Promise => { - return request.get(endpoints.assistants(assistant_id)); +export const createAssistant = ({ + version, + ...data +}: a.AssistantCreateParams): Promise => { + return request.post(endpoints.assistants({ version }), data); +}; + +export const getAssistantById = ({ + endpoint, + assistant_id, + version, +}: { + endpoint: s.AssistantsEndpoint; + assistant_id: string; + version: number | string | number; +}): Promise => { + return request.get( + endpoints.assistants({ + path: assistant_id, + endpoint, + version, + }), + ); }; -export const updateAssistant = ( - assistant_id: string, - data: a.AssistantUpdateParams, -): Promise => { - return request.patch(endpoints.assistants(assistant_id), data); +export const updateAssistant = ({ + assistant_id, + data, + version, +}: { + assistant_id: string; + data: a.AssistantUpdateParams; + version: number | string; +}): Promise => { + return request.patch( + endpoints.assistants({ + path: assistant_id, + version, + }), + data, + ); }; -export const deleteAssistant = (assistant_id: string, model: string): Promise => { - return request.delete(endpoints.assistants(assistant_id, { model })); +export const deleteAssistant = ({ + assistant_id, + model, + endpoint, + version, +}: m.DeleteAssistantBody & { version: number | string }): Promise => { + return request.delete( + endpoints.assistants({ + path: assistant_id, + options: { model, endpoint }, + version, + }), + ); }; export const listAssistants = ( - params?: a.AssistantListParams, + params: a.AssistantListParams, + version: number | string, ): Promise => { - return request.get(endpoints.assistants(), { params }); + return request.get( + endpoints.assistants({ + version, + options: params, + }), + ); }; -export function getAssistantDocs(): Promise { - return request.get(endpoints.assistants('documents')); +export function getAssistantDocs({ + endpoint, + version, +}: { + endpoint: s.AssistantsEndpoint; + version: number | string; +}): Promise { + return request.get( + endpoints.assistants({ + path: 'documents', + version, + endpoint, + }), + ); } /* Tools */ -export 
const getAvailableTools = (): Promise => { - return request.get(`${endpoints.assistants()}/tools`); +export const getAvailableTools = ( + version: number | string, + endpoint: s.AssistantsEndpoint, +): Promise => { + return request.get( + endpoints.assistants({ + path: 'tools', + endpoint, + version, + }), + ); }; /* Files */ @@ -247,7 +313,11 @@ export const uploadAvatar = (data: FormData): Promise => export const uploadAssistantAvatar = (data: m.AssistantAvatarVariables): Promise => { return request.postMultiPart( - endpoints.assistants(`avatar/${data.assistant_id}`, { model: data.model }), + endpoints.assistants({ + path: `avatar/${data.assistant_id}`, + options: { model: data.model, endpoint: data.endpoint }, + version: data.version, + }), data.formData, ); }; @@ -264,28 +334,55 @@ export const getFileDownload = async (userId: string, file_id: string): Promise< export const deleteFiles = async ( files: f.BatchFile[], assistant_id?: string, + tool_resource?: a.EToolResources, ): Promise => request.deleteWithOptions(endpoints.files(), { - data: { files, assistant_id }, + data: { files, assistant_id, tool_resource }, }); /* actions */ export const updateAction = (data: m.UpdateActionVariables): Promise => { - const { assistant_id, ...body } = data; - return request.post(endpoints.assistants(`actions/${assistant_id}`), body); + const { assistant_id, version, ...body } = data; + return request.post( + endpoints.assistants({ + path: `actions/${assistant_id}`, + version, + }), + body, + ); }; -export function getActions(): Promise { - return request.get(endpoints.assistants('actions')); +export function getActions({ + endpoint, + version, +}: { + endpoint: s.AssistantsEndpoint; + version: number | string; +}): Promise { + return request.get( + endpoints.assistants({ + path: 'actions', + version, + endpoint, + }), + ); } -export const deleteAction = async ( - assistant_id: string, - action_id: string, - model: string, -): Promise => - request.delete(endpoints.assistants(`actions/${assistant_id}/${action_id}/${model}`)); +export const deleteAction = async ({ + assistant_id, + action_id, + model, + version, + endpoint, +}: m.DeleteActionVariables & { version: number | string }): Promise => + request.delete( + endpoints.assistants({ + path: `actions/${assistant_id}/${action_id}/${model}`, + version, + endpoint, + }), + ); /* conversations */ diff --git a/packages/data-provider/src/file-config.ts b/packages/data-provider/src/file-config.ts index 337f9256aba..dbaeb553273 100644 --- a/packages/data-provider/src/file-config.ts +++ b/packages/data-provider/src/file-config.ts @@ -7,6 +7,7 @@ export const supportsFiles = { [EModelEndpoint.openAI]: true, [EModelEndpoint.google]: true, [EModelEndpoint.assistants]: true, + [EModelEndpoint.azureAssistants]: true, [EModelEndpoint.azureOpenAI]: true, [EModelEndpoint.anthropic]: true, [EModelEndpoint.custom]: true, @@ -152,24 +153,28 @@ export const megabyte = 1024 * 1024; /** Helper function to get megabytes value */ export const mbToBytes = (mb: number): number => mb * megabyte; +const defaultSizeLimit = mbToBytes(512); +const assistantsFileConfig = { + fileLimit: 10, + fileSizeLimit: defaultSizeLimit, + totalSizeLimit: defaultSizeLimit, + supportedMimeTypes, + disabled: false, +}; + export const fileConfig = { endpoints: { - [EModelEndpoint.assistants]: { - fileLimit: 10, - fileSizeLimit: mbToBytes(512), - totalSizeLimit: mbToBytes(512), - supportedMimeTypes, - disabled: false, - }, + [EModelEndpoint.assistants]: assistantsFileConfig, + 
[EModelEndpoint.azureAssistants]: assistantsFileConfig, default: { fileLimit: 10, - fileSizeLimit: mbToBytes(512), - totalSizeLimit: mbToBytes(512), + fileSizeLimit: defaultSizeLimit, + totalSizeLimit: defaultSizeLimit, supportedMimeTypes, disabled: false, }, }, - serverFileSizeLimit: mbToBytes(512), + serverFileSizeLimit: defaultSizeLimit, avatarSizeLimit: mbToBytes(2), checkType: function (fileType: string, supportedTypes: RegExp[] = supportedMimeTypes) { return supportedTypes.some((regex) => regex.test(fileType)); diff --git a/packages/data-provider/src/parsers.ts b/packages/data-provider/src/parsers.ts index 72f20a063a8..5bf27cc1dc9 100644 --- a/packages/data-provider/src/parsers.ts +++ b/packages/data-provider/src/parsers.ts @@ -38,6 +38,7 @@ const endpointSchemas: Record = { [EModelEndpoint.chatGPTBrowser]: chatGPTBrowserSchema, [EModelEndpoint.gptPlugins]: gptPluginsSchema, [EModelEndpoint.assistants]: assistantSchema, + [EModelEndpoint.azureAssistants]: assistantSchema, }; // const schemaCreators: Record EndpointSchema> = { @@ -49,6 +50,7 @@ export function getEnabledEndpoints() { const defaultEndpoints: string[] = [ EModelEndpoint.openAI, EModelEndpoint.assistants, + EModelEndpoint.azureAssistants, EModelEndpoint.azureOpenAI, EModelEndpoint.google, EModelEndpoint.bingAI, @@ -273,6 +275,7 @@ const compactEndpointSchemas: Record = { [EModelEndpoint.azureOpenAI]: compactOpenAISchema, [EModelEndpoint.custom]: compactOpenAISchema, [EModelEndpoint.assistants]: compactAssistantSchema, + [EModelEndpoint.azureAssistants]: compactAssistantSchema, [EModelEndpoint.google]: compactGoogleSchema, /* BingAI needs all fields */ [EModelEndpoint.bingAI]: bingAISchema, diff --git a/packages/data-provider/src/react-query/react-query-service.ts b/packages/data-provider/src/react-query/react-query-service.ts index 796cf6c79b1..2ad3735edc3 100644 --- a/packages/data-provider/src/react-query/react-query-service.ts +++ b/packages/data-provider/src/react-query/react-query-service.ts @@ -1,13 +1,11 @@ -import { +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import type { UseQueryOptions, - useQuery, - useMutation, - useQueryClient, UseMutationResult, QueryObserverResult, } from '@tanstack/react-query'; -import { defaultOrderQuery } from '../types/assistants'; import { initialModelsConfig, LocalStorageKeys } from '../config'; +import { defaultOrderQuery } from '../types/assistants'; import * as dataService from '../data-service'; import * as m from '../types/mutations'; import { QueryKeys } from '../keys'; @@ -154,8 +152,8 @@ export const useRevokeUserKeyMutation = (name: string): UseMutationResult dataService.revokeUserKey(name), { onSuccess: () => { queryClient.invalidateQueries([QueryKeys.name, name]); - if (name === s.EModelEndpoint.assistants) { - queryClient.invalidateQueries([QueryKeys.assistants, defaultOrderQuery]); + if (s.isAssistantsEndpoint(name)) { + queryClient.invalidateQueries([QueryKeys.assistants, name, defaultOrderQuery]); queryClient.invalidateQueries([QueryKeys.assistantDocs]); queryClient.invalidateQueries([QueryKeys.assistants]); queryClient.invalidateQueries([QueryKeys.assistant]); @@ -171,7 +169,16 @@ export const useRevokeAllUserKeysMutation = (): UseMutationResult => { return useMutation(() => dataService.revokeAllUserKeys(), { onSuccess: () => { queryClient.invalidateQueries([QueryKeys.name]); - queryClient.invalidateQueries([QueryKeys.assistants, defaultOrderQuery]); + queryClient.invalidateQueries([ + QueryKeys.assistants, + 
s.EModelEndpoint.assistants, + defaultOrderQuery, + ]); + queryClient.invalidateQueries([ + QueryKeys.assistants, + s.EModelEndpoint.azureAssistants, + defaultOrderQuery, + ]); queryClient.invalidateQueries([QueryKeys.assistantDocs]); queryClient.invalidateQueries([QueryKeys.assistants]); queryClient.invalidateQueries([QueryKeys.assistant]); diff --git a/packages/data-provider/src/schemas.ts b/packages/data-provider/src/schemas.ts index 942b2f8fd1e..0b4c3dca526 100644 --- a/packages/data-provider/src/schemas.ts +++ b/packages/data-provider/src/schemas.ts @@ -22,9 +22,19 @@ export enum EModelEndpoint { gptPlugins = 'gptPlugins', anthropic = 'anthropic', assistants = 'assistants', + azureAssistants = 'azureAssistants', custom = 'custom', } +export type AssistantsEndpoint = EModelEndpoint.assistants | EModelEndpoint.azureAssistants; + +export const isAssistantsEndpoint = (endpoint?: AssistantsEndpoint | null | string): boolean => { + if (!endpoint) { + return false; + } + return endpoint.toLowerCase().endsWith(EModelEndpoint.assistants); +}; + export enum ImageDetail { low = 'low', auto = 'auto', diff --git a/packages/data-provider/src/types.ts b/packages/data-provider/src/types.ts index 5efdece1623..2c3e8a236ca 100644 --- a/packages/data-provider/src/types.ts +++ b/packages/data-provider/src/types.ts @@ -183,6 +183,7 @@ export type TConfig = { plugins?: Record; name?: string; iconURL?: string; + version?: string; modelDisplayLabel?: string; userProvide?: boolean | null; userProvideURL?: boolean | null; diff --git a/packages/data-provider/src/types/assistants.ts b/packages/data-provider/src/types/assistants.ts index 76017f44139..423e56b61ec 100644 --- a/packages/data-provider/src/types/assistants.ts +++ b/packages/data-provider/src/types/assistants.ts @@ -1,4 +1,5 @@ import type { OpenAPIV3 } from 'openapi-types'; +import type { AssistantsEndpoint } from 'src/schemas'; import type { TFile } from './files'; export type Schema = OpenAPIV3.SchemaObject & { description?: string }; @@ -10,10 +11,16 @@ export type Metadata = { export enum Tools { code_interpreter = 'code_interpreter', + file_search = 'file_search', retrieval = 'retrieval', function = 'function', } +export enum EToolResources { + code_interpreter = 'code_interpreter', + file_search = 'file_search', +} + export type Tool = { [type: string]: Tools; }; @@ -27,6 +34,35 @@ export type FunctionTool = { }; }; +/** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ +export interface ToolResources { + code_interpreter?: CodeInterpreterResource; + file_search?: FileSearchResource; +} +export interface CodeInterpreterResource { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter`` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; +} + +export interface FileSearchResource { + /** + * The ID of the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this assistant. There can be a maximum of 1 vector store attached to + * the assistant. 
+ */ + vector_store_ids?: Array; +} + export type Assistant = { id: string; created_at: number; @@ -38,8 +74,11 @@ export type Assistant = { name: string | null; object: string; tools: FunctionTool[]; + tool_resources?: ToolResources; }; +export type TAssistantsMap = Record>; + export type AssistantCreateParams = { model: string; description?: string | null; @@ -48,6 +87,8 @@ export type AssistantCreateParams = { metadata?: Metadata | null; name?: string | null; tools?: Array; + endpoint: AssistantsEndpoint; + version: number | string; }; export type AssistantUpdateParams = { @@ -58,6 +99,8 @@ export type AssistantUpdateParams = { metadata?: Metadata | null; name?: string | null; tools?: Array; + tool_resources?: ToolResources; + endpoint: AssistantsEndpoint; }; export type AssistantListParams = { @@ -65,6 +108,7 @@ export type AssistantListParams = { before?: string | null; after?: string | null; order?: 'asc' | 'desc'; + endpoint: AssistantsEndpoint; }; export type AssistantListResponse = { @@ -123,12 +167,22 @@ export type RetrievalToolCall = { type: 'retrieval'; // The type of tool call, always 'retrieval'. }; +/** + * Details of a Retrieval tool call the run step was involved in. + * Includes the tool call ID and the type of tool call. + */ +export type FileSearchToolCall = { + id: string; // The ID of the tool call object. + file_search: unknown; // An empty object for now. + type: 'file_search'; // The type of tool call, always 'retrieval'. +}; + /** * Details of the tool calls involved in a run step. * Can be associated with one of three types of tools: `code_interpreter`, `retrieval`, or `function`. */ export type ToolCallsStepDetails = { - tool_calls: Array; // An array of tool calls the run step was involved in. + tool_calls: Array; // An array of tool calls the run step was involved in. type: 'tool_calls'; // Always 'tool_calls'. 
}; @@ -203,6 +257,7 @@ export enum StepTypes { export enum ToolCallTypes { FUNCTION = 'function', RETRIEVAL = 'retrieval', + FILE_SEARCH = 'file_search', CODE_INTERPRETER = 'code_interpreter', } @@ -239,7 +294,14 @@ export type PartMetadata = { action?: boolean; }; -export type ContentPart = (CodeToolCall | RetrievalToolCall | FunctionToolCall | ImageFile | Text) & +export type ContentPart = ( + | CodeToolCall + | RetrievalToolCall + | FileSearchToolCall + | FunctionToolCall + | ImageFile + | Text +) & PartMetadata; export type TMessageContentParts = @@ -247,7 +309,8 @@ export type TMessageContentParts = | { type: ContentTypes.TEXT; text: Text & PartMetadata } | { type: ContentTypes.TOOL_CALL; - tool_call: (CodeToolCall | RetrievalToolCall | FunctionToolCall) & PartMetadata; + tool_call: (CodeToolCall | RetrievalToolCall | FileSearchToolCall | FunctionToolCall) & + PartMetadata; } | { type: ContentTypes.IMAGE_FILE; image_file: ImageFile & PartMetadata }; @@ -315,6 +378,7 @@ export type Action = { type?: string; settings?: Record; metadata: ActionMetadata; + version: number | string; }; export type AssistantAvatar = { @@ -334,6 +398,7 @@ export type AssistantDocument = { }; export enum FilePurpose { + Vision = 'vision', FineTune = 'fine-tune', FineTuneResults = 'fine-tune-results', Assistants = 'assistants', diff --git a/packages/data-provider/src/types/files.ts b/packages/data-provider/src/types/files.ts index 484c471a4e1..3459b0782c8 100644 --- a/packages/data-provider/src/types/files.ts +++ b/packages/data-provider/src/types/files.ts @@ -1,11 +1,17 @@ +import { EToolResources } from './assistants'; + export enum FileSources { local = 'local', firebase = 'firebase', + azure = 'azure', openai = 'openai', s3 = 's3', vectordb = 'vectordb', } +export const checkOpenAIStorage = (source: string) => + source === FileSources.openai || source === FileSources.azure; + export enum FileContext { avatar = 'avatar', unknown = 'unknown', @@ -54,6 +60,7 @@ export type TFile = { usage: number; context?: FileContext; source?: FileSources; + filterSource?: FileSources; width?: number; height?: number; expiresAt?: string | Date; @@ -97,6 +104,7 @@ export type BatchFile = { export type DeleteFilesBody = { files: BatchFile[]; assistant_id?: string; + tool_resource?: EToolResources; }; export type DeleteMutationOptions = { diff --git a/packages/data-provider/src/types/mutations.ts b/packages/data-provider/src/types/mutations.ts index 421d0dd42cf..5ef8a925195 100644 --- a/packages/data-provider/src/types/mutations.ts +++ b/packages/data-provider/src/types/mutations.ts @@ -45,6 +45,8 @@ export type AssistantAvatarVariables = { model: string; formData: FormData; postCreation?: boolean; + endpoint: types.AssistantsEndpoint; + version: number | string; }; export type UpdateActionVariables = { @@ -53,6 +55,8 @@ export type UpdateActionVariables = { metadata: ActionMetadata; action_id?: string; model: string; + endpoint: types.AssistantsEndpoint; + version: number | string; }; export type UploadAssistantAvatarOptions = MutationOptions; @@ -66,7 +70,11 @@ export type UpdateAssistantVariables = { export type UpdateAssistantMutationOptions = MutationOptions; -export type DeleteAssistantBody = { assistant_id: string; model: string }; +export type DeleteAssistantBody = { + assistant_id: string; + model: string; + endpoint: types.AssistantsEndpoint; +}; export type DeleteAssistantMutationOptions = MutationOptions< void, @@ -77,6 +85,7 @@ export type UpdateActionResponse = [AssistantDocument, Assistant, Action]; export type 
UpdateActionOptions = MutationOptions; export type DeleteActionVariables = { + endpoint: types.AssistantsEndpoint; assistant_id: string; action_id: string; model: string;