Skip to content

Commit

Permalink
🧠 feat: Reasoning UI for Agents (#5904)
Browse files Browse the repository at this point in the history
* chore: bump https-proxy-agent and @librechat/agents

* refactor: Improve error logging in OllamaClient for API fetch failures

* feat: Add DeepSeek provider support and enhance provider name handling

* refactor: Use Providers.OLLAMA constant for model name check in fetchModels function

* feat: Enhance formatAgentMessages to handle reasoning content type

* feat: OpenRouter Agent Reasoning

* hard work and dedication; git add .env.example :)

* fix: Handle Google social login with missing last name

Social login with Google was previously displaying 'undefined' when
a user's last name was empty or not provided.

Changes:
- Conditionally render last name only if it exists
- Prevent displaying 'undefined' when last name is missing

* fix: add missing file endings for developers: yml, yaml, and log

---------

Co-authored-by: Mohamed Al-Duraji <[email protected]>
Co-authored-by: Deepak Kendole <[email protected]>
Co-authored-by: Peter Rothlaender <[email protected]>
  • Loading branch information
4 people authored Feb 15, 2025
1 parent e3b5c59 commit 350e72d
Show file tree
Hide file tree
Showing 16 changed files with 1,738 additions and 1,599 deletions.
4 changes: 2 additions & 2 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -389,7 +389,7 @@ FACEBOOK_CALLBACK_URL=/oauth/facebook/callback
GITHUB_CLIENT_ID=
GITHUB_CLIENT_SECRET=
GITHUB_CALLBACK_URL=/oauth/github/callback
# GitHub Eenterprise
# GitHub Enterprise
# GITHUB_ENTERPRISE_BASE_URL=
# GITHUB_ENTERPRISE_USER_AGENT=

Expand Down Expand Up @@ -527,4 +527,4 @@ HELP_AND_FAQ_URL=https://librechat.ai
#=====================================================#
# OpenWeather #
#=====================================================#
OPENWEATHER_API_KEY=
OPENWEATHER_API_KEY=
4 changes: 2 additions & 2 deletions api/app/clients/OllamaClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ const { z } = require('zod');
const axios = require('axios');
const { Ollama } = require('ollama');
const { Constants } = require('librechat-data-provider');
const { deriveBaseURL } = require('~/utils');
const { deriveBaseURL, logAxiosError } = require('~/utils');
const { sleep } = require('~/server/utils');
const { logger } = require('~/config');

Expand Down Expand Up @@ -68,7 +68,7 @@ class OllamaClient {
} catch (error) {
const logMessage =
'Failed to fetch models from Ollama API. If you are not using Ollama directly, and instead, through some aggregator or reverse proxy that handles fetching via OpenAI spec, ensure the name of the endpoint doesn\'t start with `ollama` (case-insensitive).';
logger.error(logMessage, error);
logAxiosError({ message: logMessage, error });
return [];
}
}
Expand Down
7 changes: 2 additions & 5 deletions api/app/clients/OpenAIClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ const {
ImageDetail,
EModelEndpoint,
resolveHeaders,
KnownEndpoints,
openAISettings,
ImageDetailCost,
CohereConstants,
Expand Down Expand Up @@ -116,11 +117,7 @@ class OpenAIClient extends BaseClient {

const { reverseProxyUrl: reverseProxy } = this.options;

if (
!this.useOpenRouter &&
reverseProxy &&
reverseProxy.includes('https://openrouter.ai/api/v1')
) {
if (!this.useOpenRouter && reverseProxy && reverseProxy.includes(KnownEndpoints.openrouter)) {
this.useOpenRouter = true;
}

Expand Down
43 changes: 43 additions & 0 deletions api/app/clients/prompts/formatAgentMessages.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -282,4 +282,47 @@ describe('formatAgentMessages', () => {
// Additional check to ensure the consecutive assistant messages were combined
expect(result[1].content).toHaveLength(2);
});

it('should skip THINK type content parts', () => {
const payload = [
{
role: 'assistant',
content: [
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Initial response' },
{ type: ContentTypes.THINK, [ContentTypes.THINK]: 'Reasoning about the problem...' },
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Final answer' },
],
},
];

const result = formatAgentMessages(payload);

expect(result).toHaveLength(1);
expect(result[0]).toBeInstanceOf(AIMessage);
expect(result[0].content).toEqual('Initial response\nFinal answer');
});

it('should join TEXT content as string when THINK content type is present', () => {
const payload = [
{
role: 'assistant',
content: [
{ type: ContentTypes.THINK, [ContentTypes.THINK]: 'Analyzing the problem...' },
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'First part of response' },
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Second part of response' },
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Final part of response' },
],
},
];

const result = formatAgentMessages(payload);

expect(result).toHaveLength(1);
expect(result[0]).toBeInstanceOf(AIMessage);
expect(typeof result[0].content).toBe('string');
expect(result[0].content).toBe(
'First part of response\nSecond part of response\nFinal part of response',
);
expect(result[0].content).not.toContain('Analyzing the problem...');
});
});
15 changes: 15 additions & 0 deletions api/app/clients/prompts/formatMessages.js
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,7 @@ const formatAgentMessages = (payload) => {
let currentContent = [];
let lastAIMessage = null;

let hasReasoning = false;
for (const part of message.content) {
if (part.type === ContentTypes.TEXT && part.tool_call_ids) {
/*
Expand Down Expand Up @@ -207,11 +208,25 @@ const formatAgentMessages = (payload) => {
content: output || '',
}),
);
} else if (part.type === ContentTypes.THINK) {
hasReasoning = true;
continue;
} else {
currentContent.push(part);
}
}

if (hasReasoning) {
currentContent = currentContent
.reduce((acc, curr) => {
if (curr.type === ContentTypes.TEXT) {
return `${acc}${curr[ContentTypes.TEXT]}\n`;
}
return acc;
}, '')
.trim();
}

if (currentContent.length > 0) {
messages.push(new AIMessage({ content: currentContent }));
}
Expand Down
3 changes: 2 additions & 1 deletion api/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@
"@langchain/google-genai": "^0.1.7",
"@langchain/google-vertexai": "^0.1.8",
"@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.0.5",
"@librechat/agents": "^2.1.2",
"@waylaidwanderer/fetch-event-source": "^3.0.1",
"axios": "1.7.8",
"bcryptjs": "^2.4.3",
Expand All @@ -65,6 +65,7 @@
"firebase": "^11.0.2",
"googleapis": "^126.0.1",
"handlebars": "^4.7.7",
"https-proxy-agent": "^7.0.6",
"ioredis": "^5.3.2",
"js-yaml": "^4.1.0",
"jsonwebtoken": "^9.0.0",
Expand Down
16 changes: 16 additions & 0 deletions api/server/controllers/agents/callbacks.js
Original file line number Diff line number Diff line change
Expand Up @@ -199,6 +199,22 @@ function getDefaultHandlers({ res, aggregateContent, toolEndCallback, collectedU
aggregateContent({ event, data });
},
},
[GraphEvents.ON_REASONING_DELTA]: {
/**
* Handle ON_REASONING_DELTA event.
* @param {string} event - The event name.
* @param {StreamEventData} data - The event data.
* @param {GraphRunnableConfig['configurable']} [metadata] The runnable metadata.
*/
handle: (event, data, metadata) => {
if (metadata?.last_agent_index === metadata?.agent_index) {
sendEvent(res, { event, data });
} else if (!metadata?.hide_sequential_outputs) {
sendEvent(res, { event, data });
}
aggregateContent({ event, data });
},
},
};

return handlers;
Expand Down
20 changes: 1 addition & 19 deletions api/server/controllers/agents/client.js
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,6 @@ const {
bedrockOutputParser,
removeNullishValues,
} = require('librechat-data-provider');
const {
extractBaseURL,
// constructAzureURL,
// genAzureChatCompletion,
} = require('~/utils');
const {
formatMessage,
formatAgentMessages,
Expand Down Expand Up @@ -477,19 +472,6 @@ class AgentClient extends BaseClient {
abortController = new AbortController();
}

const baseURL = extractBaseURL(this.completionsUrl);
logger.debug('[api/server/controllers/agents/client.js] chatCompletion', {
baseURL,
payload,
});

// if (this.useOpenRouter) {
// opts.defaultHeaders = {
// 'HTTP-Referer': 'https://librechat.ai',
// 'X-Title': 'LibreChat',
// };
// }

// if (this.options.headers) {
// opts.defaultHeaders = { ...opts.defaultHeaders, ...this.options.headers };
// }
Expand Down Expand Up @@ -626,7 +608,7 @@ class AgentClient extends BaseClient {
let systemContent = [
systemMessage,
agent.instructions ?? '',
i !== 0 ? agent.additional_instructions ?? '' : '',
i !== 0 ? (agent.additional_instructions ?? '') : '',
]
.join('\n')
.trim();
Expand Down
10 changes: 9 additions & 1 deletion api/server/controllers/agents/run.js
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
const { Run, Providers } = require('@librechat/agents');
const { providerEndpointMap } = require('librechat-data-provider');
const { providerEndpointMap, KnownEndpoints } = require('librechat-data-provider');

/**
* @typedef {import('@librechat/agents').t} t
* @typedef {import('@librechat/agents').StandardGraphConfig} StandardGraphConfig
* @typedef {import('@librechat/agents').StreamEventData} StreamEventData
* @typedef {import('@librechat/agents').EventHandler} EventHandler
* @typedef {import('@librechat/agents').GraphEvents} GraphEvents
* @typedef {import('@librechat/agents').LLMConfig} LLMConfig
* @typedef {import('@librechat/agents').IState} IState
*/

Expand All @@ -32,6 +33,7 @@ async function createRun({
streamUsage = true,
}) {
const provider = providerEndpointMap[agent.provider] ?? agent.provider;
/** @type {LLMConfig} */
const llmConfig = Object.assign(
{
provider,
Expand All @@ -41,6 +43,11 @@ async function createRun({
agent.model_parameters,
);

/** @type {'reasoning_content' | 'reasoning'} */
let reasoningKey;
if (llmConfig.configuration?.baseURL.includes(KnownEndpoints.openrouter)) {
reasoningKey = 'reasoning';
}
if (/o1(?!-(?:mini|preview)).*$/.test(llmConfig.model)) {
llmConfig.streaming = false;
llmConfig.disableStreaming = true;
Expand All @@ -50,6 +57,7 @@ async function createRun({
const graphConfig = {
signal,
llmConfig,
reasoningKey,
tools: agent.tools,
instructions: agent.instructions,
additional_instructions: agent.additional_instructions,
Expand Down
12 changes: 8 additions & 4 deletions api/server/services/Endpoints/agents/initialize.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,14 @@ const { getAgent } = require('~/models/Agent');
const { logger } = require('~/config');

const providerConfigMap = {
[Providers.OLLAMA]: initCustom,
[Providers.DEEPSEEK]: initCustom,
[Providers.OPENROUTER]: initCustom,
[EModelEndpoint.openAI]: initOpenAI,
[EModelEndpoint.google]: initGoogle,
[EModelEndpoint.azureOpenAI]: initOpenAI,
[EModelEndpoint.anthropic]: initAnthropic,
[EModelEndpoint.bedrock]: getBedrockOptions,
[EModelEndpoint.google]: initGoogle,
[Providers.OLLAMA]: initCustom,
};

/**
Expand Down Expand Up @@ -100,8 +102,10 @@ const initializeAgentOptions = async ({

const provider = agent.provider;
let getOptions = providerConfigMap[provider];

if (!getOptions) {
if (!getOptions && providerConfigMap[provider.toLowerCase()] != null) {
agent.provider = provider.toLowerCase();
getOptions = providerConfigMap[agent.provider];
} else if (!getOptions) {
const customEndpointConfig = await getCustomEndpointConfig(provider);
if (!customEndpointConfig) {
throw new Error(`Provider ${provider} not supported`);
Expand Down
8 changes: 4 additions & 4 deletions api/server/services/Endpoints/openAI/llm.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
const { HttpsProxyAgent } = require('https-proxy-agent');
const { KnownEndpoints } = require('librechat-data-provider');
const { sanitizeModelName, constructAzureURL } = require('~/utils');
const { isEnabled } = require('~/server/utils');

Expand Down Expand Up @@ -57,10 +58,9 @@ function getLLMConfig(apiKey, options = {}) {

/** @type {OpenAIClientOptions['configuration']} */
const configOptions = {};

// Handle OpenRouter or custom reverse proxy
if (useOpenRouter || reverseProxyUrl === 'https://openrouter.ai/api/v1') {
configOptions.baseURL = 'https://openrouter.ai/api/v1';
if (useOpenRouter || reverseProxyUrl.includes(KnownEndpoints.openrouter)) {
llmConfig.include_reasoning = true;
configOptions.baseURL = reverseProxyUrl;
configOptions.defaultHeaders = Object.assign(
{
'HTTP-Referer': 'https://librechat.ai',
Expand Down
3 changes: 2 additions & 1 deletion api/server/services/ModelService.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
const axios = require('axios');
const { Providers } = require('@librechat/agents');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { EModelEndpoint, defaultModels, CacheKeys } = require('librechat-data-provider');
const { inputSchema, logAxiosError, extractBaseURL, processModelData } = require('~/utils');
Expand Down Expand Up @@ -57,7 +58,7 @@ const fetchModels = async ({
return models;
}

if (name && name.toLowerCase().startsWith('ollama')) {
if (name && name.toLowerCase().startsWith(Providers.OLLAMA)) {
return await OllamaClient.fetchModels(baseURL);
}

Expand Down
2 changes: 1 addition & 1 deletion api/strategies/googleStrategy.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ const getProfileDetails = ({ profile }) => ({
id: profile.id,
avatarUrl: profile.photos[0].value,
username: profile.name.givenName,
name: `${profile.name.givenName} ${profile.name.familyName}`,
name: `${profile.name.givenName}${profile.name.familyName ? ` ${profile.name.familyName}` : ''}`,
emailVerified: profile.emails[0].verified,
});

Expand Down
Loading

0 comments on commit 350e72d

Please sign in to comment.