Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

🎉 feat: Code Interpreter API and Agents Release #4860

Merged
merged 27 commits into from
Dec 4, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
95d6bd2
feat: Code Interpreter API & File Search Agent Uploads
danny-avila Nov 4, 2024
d469d58
WIP: PoC, sequential agents
danny-avila Nov 18, 2024
d69a22e
refactor: tool context handling & update Code API Key Dialog
danny-avila Nov 26, 2024
0b4dcd5
feat: Agent Permissions Admin Controls
danny-avila Nov 27, 2024
3a9afeb
fix: update types in useAssistantListMap and useMentions hooks for be…
danny-avila Nov 29, 2024
4df6406
feat: mention agents
danny-avila Nov 29, 2024
0a01d54
fix: agent tool resource race conditions when deleting agent tool res…
danny-avila Nov 29, 2024
d744d37
feat: add error handling for code execution with user feedback
danny-avila Nov 29, 2024
42cba69
refactor: rename AdminControls to AdminSettings for clarity
danny-avila Nov 29, 2024
2e4aeca
style: add gap to button in AdminSettings for improved layout
danny-avila Nov 29, 2024
4daaf20
refactor: separate agent query hooks and check access to enable fetching
danny-avila Nov 29, 2024
53842c2
fix: remove unused provider from agent initialization options, create…
danny-avila Nov 29, 2024
a6180f7
refactor: remove redundant/deprecated modelOptions from AgentClient p…
danny-avila Dec 2, 2024
6f1ef57
chore: update @librechat/agents to version 1.8.5 in package.json and …
danny-avila Dec 2, 2024
5bb063f
fix: minor styling issues + agent panel uniformity
danny-avila Dec 2, 2024
fb14949
fix: agent edge cases when set endpoint is no longer defined
danny-avila Dec 3, 2024
b7e963a
refactor: remove unused cleanup function call from AppService
danny-avila Dec 3, 2024
47048a9
fix: update link in ApiKeyDialog to point to pricing page
danny-avila Dec 3, 2024
a13be3c
fix: improve type handling and layout calculations in SidePanel compo…
danny-avila Dec 3, 2024
b8c1add
fix: add missing localization string for agent selection in SidePanel
danny-avila Dec 3, 2024
96f3a00
chore: form styling and localizations for upload filesearch/code inte…
danny-avila Dec 3, 2024
63388b0
fix: model selection placeholder logic in AgentConfig component
danny-avila Dec 3, 2024
9d415d7
style: agent capabilities
danny-avila Dec 4, 2024
d9de06f
fix: add localization for provider selection and improve dropdown sty…
danny-avila Dec 4, 2024
bc5a0a1
refactor: use gpt-4o-mini > gpt-3.5-turbo
danny-avila Dec 4, 2024
3ee42aa
fix: agents configuration for loadDefaultInterface and update related…
danny-avila Dec 4, 2024
5d6e7e6
feat: DALLE Agents support
danny-avila Dec 4, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -177,10 +177,10 @@ OPENAI_API_KEY=user_provided
DEBUG_OPENAI=false

# TITLE_CONVO=false
# OPENAI_TITLE_MODEL=gpt-3.5-turbo
# OPENAI_TITLE_MODEL=gpt-4o-mini

# OPENAI_SUMMARIZE=true
# OPENAI_SUMMARY_MODEL=gpt-3.5-turbo
# OPENAI_SUMMARY_MODEL=gpt-4o-mini

# OPENAI_FORCE_PROMPT=true

Expand Down
45 changes: 40 additions & 5 deletions api/app/clients/BaseClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,8 @@ class BaseClient {
/** The key for the usage object's output tokens
* @type {string} */
this.outputTokensKey = 'completion_tokens';
/** @type {Set<string>} */
this.savedMessageIds = new Set();
}

setOptions() {
Expand Down Expand Up @@ -84,7 +86,7 @@ class BaseClient {
return this.options.agent.id;
}

return this.modelOptions.model;
return this.modelOptions?.model ?? this.model;
}

/**
Expand Down Expand Up @@ -508,7 +510,7 @@ class BaseClient {
conversationId,
parentMessageId: userMessage.messageId,
isCreatedByUser: false,
model: this.modelOptions.model,
model: this.modelOptions?.model ?? this.model,
sender: this.sender,
text: generation,
};
Expand Down Expand Up @@ -545,6 +547,7 @@ class BaseClient {

if (!isEdited && !this.skipSaveUserMessage) {
this.userMessagePromise = this.saveMessageToDatabase(userMessage, saveOptions, user);
this.savedMessageIds.add(userMessage.messageId);
if (typeof opts?.getReqData === 'function') {
opts.getReqData({
userMessagePromise: this.userMessagePromise,
Expand All @@ -563,8 +566,8 @@ class BaseClient {
user: this.user,
tokenType: 'prompt',
amount: promptTokens,
model: this.modelOptions.model,
endpoint: this.options.endpoint,
model: this.modelOptions?.model ?? this.model,
endpointTokenConfig: this.options.endpointTokenConfig,
},
});
Expand All @@ -574,6 +577,7 @@ class BaseClient {
const completion = await this.sendCompletion(payload, opts);
this.abortController.requestCompleted = true;

/** @type {TMessage} */
const responseMessage = {
messageId: responseMessageId,
conversationId,
Expand Down Expand Up @@ -635,7 +639,16 @@ class BaseClient {
responseMessage.attachments = (await Promise.all(this.artifactPromises)).filter((a) => a);
}

if (this.options.attachments) {
try {
saveOptions.files = this.options.attachments.map((attachments) => attachments.file_id);
} catch (error) {
logger.error('[BaseClient] Error mapping attachments for conversation', error);
}
}

this.responsePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user);
this.savedMessageIds.add(responseMessage.messageId);
const messageCache = getLogStores(CacheKeys.MESSAGES);
messageCache.set(
responseMessageId,
Expand Down Expand Up @@ -902,8 +915,9 @@ class BaseClient {
// Note: gpt-3.5-turbo and gpt-4 may update over time. Use default for these as well as for unknown models
let tokensPerMessage = 3;
let tokensPerName = 1;
const model = this.modelOptions?.model ?? this.model;

if (this.modelOptions.model === 'gpt-3.5-turbo-0301') {
if (model === 'gpt-3.5-turbo-0301') {
tokensPerMessage = 4;
tokensPerName = -1;
}
Expand Down Expand Up @@ -961,6 +975,15 @@ class BaseClient {
return _messages;
}

const seen = new Set();
const attachmentsProcessed =
this.options.attachments && !(this.options.attachments instanceof Promise);
if (attachmentsProcessed) {
for (const attachment of this.options.attachments) {
seen.add(attachment.file_id);
}
}

/**
*
* @param {TMessage} message
Expand All @@ -971,7 +994,19 @@ class BaseClient {
this.message_file_map = {};
}

const fileIds = message.files.map((file) => file.file_id);
const fileIds = [];
for (const file of message.files) {
if (seen.has(file.file_id)) {
continue;
}
fileIds.push(file.file_id);
seen.add(file.file_id);
}

if (fileIds.length === 0) {
return message;
}

const files = await getFiles({
file_id: { $in: fileIds },
});
Expand Down
6 changes: 3 additions & 3 deletions api/app/clients/OpenAIClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -688,7 +688,7 @@ class OpenAIClient extends BaseClient {
}

initializeLLM({
model = 'gpt-3.5-turbo',
model = 'gpt-4o-mini',
modelName,
temperature = 0.2,
presence_penalty = 0,
Expand Down Expand Up @@ -793,7 +793,7 @@ class OpenAIClient extends BaseClient {

const { OPENAI_TITLE_MODEL } = process.env ?? {};

let model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? 'gpt-3.5-turbo';
let model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? 'gpt-4o-mini';
if (model === Constants.CURRENT_MODEL) {
model = this.modelOptions.model;
}
Expand Down Expand Up @@ -982,7 +982,7 @@ ${convo}
let prompt;

// TODO: remove the gpt fallback and make it specific to endpoint
const { OPENAI_SUMMARY_MODEL = 'gpt-3.5-turbo' } = process.env ?? {};
const { OPENAI_SUMMARY_MODEL = 'gpt-4o-mini' } = process.env ?? {};
let model = this.options.summaryModel ?? OPENAI_SUMMARY_MODEL;
if (model === Constants.CURRENT_MODEL) {
model = this.modelOptions.model;
Expand Down
7 changes: 5 additions & 2 deletions api/app/clients/PluginsClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ class PluginsClient extends OpenAIClient {
chatHistory: new ChatMessageHistory(pastMessages),
});

this.tools = await loadTools({
const { loadedTools } = await loadTools({
user,
model,
tools: this.options.tools,
Expand All @@ -119,12 +119,15 @@ class PluginsClient extends OpenAIClient {
processFileURL,
message,
},
useSpecs: true,
});

if (this.tools.length === 0) {
if (loadedTools.length === 0) {
return;
}

this.tools = loadedTools;

logger.debug('[PluginsClient] Requested Tools', this.options.tools);
logger.debug(
'[PluginsClient] Loaded Tools',
Expand Down
2 changes: 1 addition & 1 deletion api/app/clients/llm/createLLM.js
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ const { isEnabled } = require('~/server/utils');
*
* @example
* const llm = createLLM({
* modelOptions: { modelName: 'gpt-3.5-turbo', temperature: 0.2 },
* modelOptions: { modelName: 'gpt-4o-mini', temperature: 0.2 },
* configOptions: { basePath: 'https://example.api/path' },
* callbacks: { onMessage: handleMessage },
* openAIApiKey: 'your-api-key'
Expand Down
2 changes: 1 addition & 1 deletion api/app/clients/memory/summaryBuffer.demo.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ const { ChatOpenAI } = require('@langchain/openai');
const { getBufferString, ConversationSummaryBufferMemory } = require('langchain/memory');

const chatPromptMemory = new ConversationSummaryBufferMemory({
llm: new ChatOpenAI({ modelName: 'gpt-3.5-turbo', temperature: 0 }),
llm: new ChatOpenAI({ modelName: 'gpt-4o-mini', temperature: 0 }),
maxTokenLimit: 10,
returnMessages: true,
});
Expand Down
2 changes: 1 addition & 1 deletion api/app/clients/prompts/formatMessages.js
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@ const formatAgentMessages = (payload) => {
new ToolMessage({
tool_call_id: tool_call.id,
name: tool_call.name,
content: output,
content: output || '',
}),
);
} else {
Expand Down
2 changes: 1 addition & 1 deletion api/app/clients/specs/BaseClient.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ describe('BaseClient', () => {
const options = {
// debug: true,
modelOptions: {
model: 'gpt-3.5-turbo',
model: 'gpt-4o-mini',
temperature: 0,
},
};
Expand Down
4 changes: 2 additions & 2 deletions api/app/clients/specs/OpenAIClient.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -221,7 +221,7 @@ describe('OpenAIClient', () => {

it('should set isChatCompletion based on useOpenRouter, reverseProxyUrl, or model', () => {
client.setOptions({ reverseProxyUrl: null });
// true by default since default model will be gpt-3.5-turbo
// true by default since default model will be gpt-4o-mini
expect(client.isChatCompletion).toBe(true);
client.isChatCompletion = undefined;

Expand All @@ -230,7 +230,7 @@ describe('OpenAIClient', () => {
expect(client.isChatCompletion).toBe(false);
client.isChatCompletion = undefined;

client.setOptions({ modelOptions: { model: 'gpt-3.5-turbo' }, reverseProxyUrl: null });
client.setOptions({ modelOptions: { model: 'gpt-4o-mini' }, reverseProxyUrl: null });
expect(client.isChatCompletion).toBe(true);
});

Expand Down
36 changes: 28 additions & 8 deletions api/app/clients/tools/structured/DALLE3.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@ class DALLE3 extends Tool {

this.userId = fields.userId;
this.fileStrategy = fields.fileStrategy;
/** @type {boolean} */
this.isAgent = fields.isAgent;
if (fields.processFileURL) {
/** @type {processFileURL} Necessary for output to contain all image metadata. */
this.processFileURL = fields.processFileURL.bind(this);
Expand Down Expand Up @@ -108,6 +110,19 @@ class DALLE3 extends Tool {
return `![generated image](${imageUrl})`;
}

/**
 * Normalizes a tool result for return to the caller, depending on whether
 * this tool instance runs inside an agent.
 * - When `this.isAgent` is true and `value` is a string (e.g. an error
 *   message), it is wrapped as a two-element array with an empty object —
 *   presumably a [content, artifact] pair expected by the agents pipeline;
 *   TODO confirm against the agents library contract.
 * - When `this.isAgent` is true and `value` is an object (e.g. image
 *   metadata), the first element is a fixed instruction string telling the
 *   model not to repeat image descriptions or mention download links, and
 *   the object rides along as the second element.
 * - For non-agent callers, `value` is returned unchanged.
 * @param {string|Object} value - raw result produced by the tool
 * @returns {string|Object|Array} the value, possibly wrapped in a 2-tuple
 */
returnValue(value) {
if (this.isAgent === true && typeof value === 'string') {
// Plain-text result (typically an error): no artifact payload to attach
return [value, {}];
} else if (this.isAgent === true && typeof value === 'object') {
return [
'DALL-E displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.',
value,
];
}

// Non-agent path: pass the result through untouched
return value;
}

async _call(data) {
const { prompt, quality = 'standard', size = '1024x1024', style = 'vivid' } = data;
if (!prompt) {
Expand All @@ -126,18 +141,23 @@ class DALLE3 extends Tool {
});
} catch (error) {
logger.error('[DALL-E-3] Problem generating the image:', error);
return `Something went wrong when trying to generate the image. The DALL-E API may be unavailable:
Error Message: ${error.message}`;
return this
.returnValue(`Something went wrong when trying to generate the image. The DALL-E API may be unavailable:
Error Message: ${error.message}`);
}

if (!resp) {
return 'Something went wrong when trying to generate the image. The DALL-E API may be unavailable';
return this.returnValue(
'Something went wrong when trying to generate the image. The DALL-E API may be unavailable',
);
}

const theImageUrl = resp.data[0].url;

if (!theImageUrl) {
return 'No image URL returned from OpenAI API. There may be a problem with the API or your configuration.';
return this.returnValue(
'No image URL returned from OpenAI API. There may be a problem with the API or your configuration.',
);
}

const imageBasename = getImageBasename(theImageUrl);
Expand All @@ -157,11 +177,11 @@ Error Message: ${error.message}`;

try {
const result = await this.processFileURL({
fileStrategy: this.fileStrategy,
userId: this.userId,
URL: theImageUrl,
fileName: imageName,
basePath: 'images',
userId: this.userId,
fileName: imageName,
fileStrategy: this.fileStrategy,
context: FileContext.image_generation,
});

Expand All @@ -175,7 +195,7 @@ Error Message: ${error.message}`;
this.result = `Failed to save the image locally. ${error.message}`;
}

return this.result;
return this.returnValue(this.result);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,20 +10,50 @@ const { logger } = require('~/config');
* @param {Object} options
* @param {ServerRequest} options.req
* @param {Agent['tool_resources']} options.tool_resources
* @returns
* @returns {Promise<{
* files: Array<{ file_id: string; filename: string }>,
* toolContext: string
* }>}
*/
const createFileSearchTool = async (options) => {
const { req, tool_resources } = options;
const primeFiles = async (options) => {
const { tool_resources } = options;
const file_ids = tool_resources?.[EToolResources.file_search]?.file_ids ?? [];
const files = (await getFiles({ file_id: { $in: file_ids } })).map((file) => ({
file_id: file.file_id,
filename: file.filename,
}));
const agentResourceIds = new Set(file_ids);
const resourceFiles = tool_resources?.[EToolResources.file_search]?.files ?? [];
const dbFiles = ((await getFiles({ file_id: { $in: file_ids } })) ?? []).concat(resourceFiles);

let toolContext = `- Note: Semantic search is available through the ${Tools.file_search} tool but no files are currently loaded. Request the user to upload documents to search through.`;

const files = [];
for (let i = 0; i < dbFiles.length; i++) {
const file = dbFiles[i];
if (!file) {
continue;
}
if (i === 0) {
toolContext = `- Note: Use the ${Tools.file_search} tool to find relevant information within:`;
}
toolContext += `\n\t- ${file.filename}${
agentResourceIds.has(file.file_id) ? '' : ' (just attached by user)'
}`;
files.push({
file_id: file.file_id,
filename: file.filename,
});
}

const fileList = files.map((file) => `- ${file.filename}`).join('\n');
const toolDescription = `Performs a semantic search based on a natural language query across the following files:\n${fileList}`;
return { files, toolContext };
};

const FileSearch = tool(
/**
*
* @param {Object} options
* @param {ServerRequest} options.req
* @param {Array<{ file_id: string; filename: string }>} options.files
* @returns
*/
const createFileSearchTool = async ({ req, files }) => {
return tool(
async ({ query }) => {
if (files.length === 0) {
return 'No files to search. Instruct the user to add files for the search.';
Expand Down Expand Up @@ -87,7 +117,7 @@ const createFileSearchTool = async (options) => {
},
{
name: Tools.file_search,
description: toolDescription,
description: `Performs semantic search across attached "${Tools.file_search}" documents using natural language queries. This tool analyzes the content of uploaded files to find relevant information, quotes, and passages that best match your query. Use this to extract specific information or find relevant sections within the available documents.`,
schema: z.object({
query: z
.string()
Expand All @@ -97,8 +127,6 @@ const createFileSearchTool = async (options) => {
}),
},
);

return FileSearch;
};

module.exports = createFileSearchTool;
module.exports = { createFileSearchTool, primeFiles };
Loading
Loading