From b93e1be504bba044a91b7d7f2692b16ae52d81a0 Mon Sep 17 00:00:00 2001 From: Shaw Date: Tue, 28 Jan 2025 12:14:07 -0500 Subject: [PATCH] this is maybe the wrong way to do this, committing --- agent/src/cache.ts | 8 +- agent/src/characters.ts | 34 +- agent/src/clients.ts | 8 +- agent/src/database.ts | 8 +- .../runtime => agent}/src/defaultCharacter.ts | 3 +- agent/src/index.ts | 28 +- agent/src/plugins.ts | 59 +- agent/src/utils.ts | 8 +- docs/api/index.md | 2 +- docs/api/typedoc-sidebar.cjs | 4 +- docs/api/variables/elizaLogger.md | 6 +- .../development/coders/chat_2024-11-09.md | 4 +- .../development/coders/chat_2024-11-23.md | 2 +- docs/docs/api/globals.md | 2 +- docs/docs/api/typedoc-sidebar.cjs | 4 +- docs/docs/api/variables/elizaLogger.md | 4 +- docs/docs/packages/clients.md | 2 +- .../plugin/src/actions/sampleAction.ts | 4 +- .../plugin/src/evaluators/sampleEvalutor.ts | 8 +- .../plugin/src/providers/sampleProvider.ts | 4 +- .../plugin/src/services/sampleService.ts | 18 +- .../__tests__/sqlite-adapter.test.ts | 6 +- packages/adapter-sqlite/src/index.ts | 36 +- packages/adapter-sqlite/src/sqlite_vec.ts | 6 +- packages/client-direct/src/api.ts | 22 +- packages/client-direct/src/index.ts | 38 +- .../client-direct/src/verifiable-log-api.ts | 8 +- packages/client-telegram/src/index.ts | 6 +- .../client-telegram/src/messageManager.ts | 40 +- .../client-telegram/src/telegramClient.ts | 46 +- packages/client-twitter/src/base.ts | 52 +- packages/client-twitter/src/index.ts | 16 +- packages/client-twitter/src/interactions.ts | 58 +- .../src/plugins/SttTtsSpacesPlugin.ts | 52 +- packages/client-twitter/src/post.ts | 150 ++--- packages/client-twitter/src/search.ts | 26 +- packages/client-twitter/src/spaces.ts | 66 +- packages/client-twitter/src/utils.ts | 26 +- packages/core/src/index.ts | 6 +- packages/core/src/logger.ts | 282 -------- packages/core/src/prompts/index.ts | 2 +- packages/core/tsconfig.json | 2 +- packages/plugin-anthropic/src/index.ts | 180 +++-- .../src/localembeddingManager.ts | 18 +- .../__tests__/defaultCharacters.test.ts | 52 -- packages/runtime/__tests__/goals.test.ts | 2 - packages/{core => runtime}/src/character.ts | 4 +- packages/{core => runtime}/src/client.ts | 8 +- packages/runtime/src/database.ts | 4 +- packages/runtime/src/embedding.ts | 10 +- packages/runtime/src/environment.ts | 4 +- packages/{core => runtime}/src/formatters.ts | 16 +- packages/runtime/src/generation.ts | 123 ++-- packages/runtime/src/index.ts | 2 - packages/runtime/src/knowledge.ts | 14 +- packages/runtime/src/logger.ts | 4 +- packages/runtime/src/memory.ts | 8 +- packages/runtime/src/ragknowledge.ts | 624 ------------------ packages/runtime/src/runtime.ts | 515 ++------------- packages/runtime/src/settings.ts | 10 +- packages/runtime/src/types.ts | 61 -- packages/runtime/src/utils.ts | 2 +- packages/runtime/tsconfig.json | 2 +- 63 files changed, 644 insertions(+), 2185 deletions(-) rename {packages/runtime => agent}/src/defaultCharacter.ts (99%) delete mode 100644 packages/core/src/logger.ts delete mode 100644 packages/runtime/__tests__/defaultCharacters.test.ts rename packages/{core => runtime}/src/character.ts (98%) rename packages/{core => runtime}/src/client.ts (97%) rename packages/{core => runtime}/src/formatters.ts (92%) delete mode 100644 packages/runtime/src/ragknowledge.ts diff --git a/agent/src/cache.ts b/agent/src/cache.ts index faba1980dee..e52593c4715 100644 --- a/agent/src/cache.ts +++ b/agent/src/cache.ts @@ -9,7 +9,7 @@ import { CacheStore, type Character, DbCacheAdapter, - 
elizaLogger, + logger, FsCacheAdapter, type IDatabaseCacheAdapter } from "@elizaos/core" @@ -37,7 +37,7 @@ export function initializeCache(cacheStore: string, character: Character, baseDi switch (cacheStore) { case CacheStore.REDIS: if (process.env.REDIS_URL) { - elizaLogger.info("Connecting to Redis...") + logger.info("Connecting to Redis...") const redisClient = new RedisClient(process.env.REDIS_URL) if (!character?.id) { throw new Error("CacheStore.REDIS requires id to be set in character definition") @@ -51,14 +51,14 @@ export function initializeCache(cacheStore: string, character: Character, baseDi case CacheStore.DATABASE: if (db) { - elizaLogger.info("Using Database Cache...") + logger.info("Using Database Cache...") return initializeDbCache(character, db) } else { throw new Error("Database adapter is not provided for CacheStore.Database.") } case CacheStore.FILESYSTEM: - elizaLogger.info("Using File System Cache...") + logger.info("Using File System Cache...") if (!baseDir) { throw new Error("baseDir must be provided for CacheStore.FILESYSTEM.") } diff --git a/agent/src/characters.ts b/agent/src/characters.ts index 9626f0480cd..08203221acc 100644 --- a/agent/src/characters.ts +++ b/agent/src/characters.ts @@ -6,7 +6,7 @@ import { type Character, defaultCharacter, - elizaLogger, + logger, validateCharacterConfig } from "@elizaos/core" import { onchainJson } from "@elizaos/plugin-iq6900" @@ -38,7 +38,7 @@ export function parseArguments(): { }) .parseSync() } catch (error) { - elizaLogger.error("Error parsing arguments:", error) + logger.error("Error parsing arguments:", error) return {} } } @@ -102,7 +102,7 @@ export async function loadCharacterFromOnchain(): Promise { // Handle plugins if (isAllStrings(character.plugins)) { - elizaLogger.info("Plugins are: ", character.plugins) + logger.info("Plugins are: ", character.plugins) const importedPlugins = await Promise.all( character.plugins.map(async (plugin) => { const importedPlugin = await import(plugin) @@ -113,10 +113,10 @@ export async function loadCharacterFromOnchain(): Promise { } loadedCharacters.push(character) - elizaLogger.info(`Successfully loaded character from: ${process.env.IQ_WALLET_ADDRESS}`) + logger.info(`Successfully loaded character from: ${process.env.IQ_WALLET_ADDRESS}`) return loadedCharacters } catch (e) { - elizaLogger.error(`Error parsing character from ${process.env.IQ_WALLET_ADDRESS}: ${e}`) + logger.error(`Error parsing character from ${process.env.IQ_WALLET_ADDRESS}: ${e}`) process.exit(1) } } @@ -135,7 +135,7 @@ async function loadCharactersFromUrl(url: string): Promise { } return characters } catch (e) { - elizaLogger.error(`Error loading character(s) from ${url}: ${e}`) + logger.error(`Error loading character(s) from ${url}: ${e}`) process.exit(1) } } @@ -162,11 +162,11 @@ export async function jsonToCharacter(filePath: string, character: any): Promise // Handle plugins character.plugins = await handlePluginImporting(character.plugins) if (character.extends) { - elizaLogger.info(`Merging ${character.name} character with parent characters`) + logger.info(`Merging ${character.name} character with parent characters`) for (const extendPath of character.extends) { const baseCharacter = await loadCharacter(path.resolve(path.dirname(filePath), extendPath)) character = mergeCharacters(baseCharacter, character) - elizaLogger.info(`Merged ${character.name} with ${baseCharacter.name}`) + logger.info(`Merged ${character.name} with ${baseCharacter.name}`) } } return character @@ -196,7 +196,7 @@ export async 
function loadCharacterTryPath(characterPath: string): Promise ({ path: p, @@ -213,17 +213,17 @@ export async function loadCharacterTryPath(characterPath: string): Promise elizaLogger.error(` - ${p}`)) + logger.error(`Error loading character from ${characterPath}: File not found in any of the expected locations`) + logger.error("Tried the following paths:") + pathsToTry.forEach((p) => logger.error(` - ${p}`)) throw new Error(`Error loading character from ${characterPath}: File not found in any of the expected locations`) } try { const character: Character = await loadCharacter(resolvedPath) - elizaLogger.info(`Successfully loaded character from: ${resolvedPath}`) + logger.info(`Successfully loaded character from: ${resolvedPath}`) return character } catch (e) { - elizaLogger.error(`Error parsing character from ${resolvedPath}: ${e}`) + logger.error(`Error parsing character from ${resolvedPath}: ${e}`) throw new Error(`Error parsing character from ${resolvedPath}: ${e}`) } } @@ -241,7 +241,7 @@ async function readCharactersFromStorage(characterPaths: string[]): Promise = {} const clientTypes: string[] = character.clients?.map((str) => str.toLowerCase()) || [] - elizaLogger.log("initializeClients", clientTypes, "for", character.name) + logger.log("initializeClients", clientTypes, "for", character.name) // Start Auto Client if "auto" detected as a configured client @@ -90,7 +90,7 @@ export async function initializeClients(character: Character, runtime: IAgentRun if (simsaiClient) clients.simsai = simsaiClient } - elizaLogger.log("client keys", Object.keys(clients)) + logger.log("client keys", Object.keys(clients)) // TODO: Add Slack client to the list // Initialize clients as an object @@ -122,7 +122,7 @@ export async function initializeClients(character: Character, runtime: IAgentRun for (const client of plugin.clients) { const startedClient = await client.start(runtime) const clientType = determineClientType(client) - elizaLogger.debug(`Initializing client of type: ${clientType}`) + logger.debug(`Initializing client of type: ${clientType}`) clients[clientType] = startedClient } } diff --git a/agent/src/database.ts b/agent/src/database.ts index 875d9d8b6c8..388cd25f888 100644 --- a/agent/src/database.ts +++ b/agent/src/database.ts @@ -4,7 +4,7 @@ import { SqliteDatabaseAdapter } from "@elizaos/adapter-sqlite" import { - elizaLogger + logger } from "@elizaos/runtime" // import { intifacePlugin } from "@elizaos/plugin-intiface"; @@ -17,16 +17,16 @@ const __dirname = path.dirname(__filename) // get the name of the directory export function initializeDatabase(dataDir: string) { const filePath = process.env.SQLITE_FILE ?? 
path.resolve(dataDir, "db.sqlite") - elizaLogger.info(`Initializing SQLite database at ${filePath}...`) + logger.info(`Initializing SQLite database at ${filePath}...`) const db = new SqliteDatabaseAdapter(new Database(filePath)) // Test the connection db.init() .then(() => { - elizaLogger.success("Successfully connected to SQLite database") + logger.success("Successfully connected to SQLite database") }) .catch((error) => { - elizaLogger.error("Failed to connect to SQLite:", error) + logger.error("Failed to connect to SQLite:", error) }) return db diff --git a/packages/runtime/src/defaultCharacter.ts b/agent/src/defaultCharacter.ts similarity index 99% rename from packages/runtime/src/defaultCharacter.ts rename to agent/src/defaultCharacter.ts index 8faaa64f2b4..2d72dd4e4f6 100644 --- a/packages/runtime/src/defaultCharacter.ts +++ b/agent/src/defaultCharacter.ts @@ -1,11 +1,10 @@ -import { Character, ModelProviderName } from "./types.ts"; +import { Character, ModelProviderName } from "../../packages/runtime/src/types.ts"; export const defaultCharacter: Character = { name: "Eliza", username: "eliza", plugins: [], clients: [], - modelProvider: ModelProviderName.LLAMALOCAL, settings: { secrets: {}, voice: { diff --git a/agent/src/index.ts b/agent/src/index.ts index 2598f5978b5..5754504ba81 100644 --- a/agent/src/index.ts +++ b/agent/src/index.ts @@ -3,7 +3,7 @@ import { AgentRuntime, CacheStore, type Character, - elizaLogger, + logger, type ICacheManager, type IDatabaseAdapter, type IDatabaseCacheAdapter, @@ -15,7 +15,7 @@ import { AgentRuntime, CacheStore, type Character, - elizaLogger, + logger, type ICacheManager, type IDatabaseAdapter, type IDatabaseCacheAdapter, @@ -33,11 +33,12 @@ import { initializeClients } from "./clients" import { initializeDatabase } from "./database" import { handlePluginImporting, importPlugins } from "./plugins" import { getTokenForProvider, logFetch } from "./utils" +import { defaultCharacter } from "./defaultCharacter" const __filename = fileURLToPath(import.meta.url) // get the resolved path to the file const __dirname = path.dirname(__filename) // get the name of the directory export async function createAgent(character: Character, db: IDatabaseAdapter, cache: ICacheManager): Promise { - const { plugins, verifiableInferenceAdapter } = await importPlugins(character, token); + const plugins = await importPlugins(character); return new AgentRuntime({ databaseAdapter: db, modelProvider: character.modelProvider, @@ -92,15 +93,15 @@ async function startAgent(character: Character, directClient: DirectClient): Pro directClient.registerAgent(runtime); // report to console - elizaLogger.debug(`Started ${character.name} as ${runtime.agentId}`); + logger.debug(`Started ${character.name} as ${runtime.agentId}`); return runtime; } catch (error) { - elizaLogger.error( + logger.error( `Error starting agent for character ${character.name}:`, error ); - elizaLogger.error(error); + logger.error(error); // if (db) { // await db.close(); // } @@ -132,17 +133,22 @@ const startAgents = async () => { let serverPort = Number.parseInt(settings.SERVER_PORT || "3000") const characters = await loadAllCharacters() + if (characters.length === 0) { + // import default character + characters.push(defaultCharacter); + } + try { for (const character of characters) { await startAgent(character, directClient); } } catch (error) { - elizaLogger.error("Error starting agents:", error); + logger.error("Error starting agents:", error); } // Find available port while (!(await 
checkPortAvailable(serverPort))) { - elizaLogger.warn( + logger.warn( `Port ${serverPort} is in use, trying ${serverPort + 1}` ); serverPort++; @@ -163,16 +169,16 @@ const startAgents = async () => { directClient.start(serverPort); if (serverPort !== Number.parseInt(settings.SERVER_PORT || "3000")) { - elizaLogger.log(`Server started on alternate port ${serverPort}`); + logger.log(`Server started on alternate port ${serverPort}`); } - elizaLogger.log( + logger.log( "Run `pnpm start:client` to start the client and visit the outputted URL (http://localhost:5173) to chat with your agents. When running multiple agents, use client with different port `SERVER_PORT=3001 pnpm start:client`" ); }; startAgents().catch((error) => { - elizaLogger.error("Unhandled error in startAgents:", error); + logger.error("Unhandled error in startAgents:", error); process.exit(1); }); diff --git a/agent/src/plugins.ts b/agent/src/plugins.ts index 610abf8a756..4660057646e 100644 --- a/agent/src/plugins.ts +++ b/agent/src/plugins.ts @@ -4,7 +4,7 @@ import { type Character, - elizaLogger + logger } from "@elizaos/runtime"; // import { zgPlugin } from "@elizaos/plugin-0g"; // import { agentKitPlugin } from "@elizaos/plugin-agentkit"; @@ -109,16 +109,16 @@ import { let nodePlugin: any | undefined -export const importPlugins = async (character: Character, token: string) => { +export const importPlugins = async (character: Character) => { // nodePlugin ??= createNodePlugin(); - // elizaLogger.log(`Creating runtime for character ${character.name}`) + // logger.log(`Creating runtime for character ${character.name}`) // const teeMode = getSecret(character, "TEE_MODE") || "OFF" // const walletSecretSalt = getSecret(character, "WALLET_SECRET_SALT") // // Validate TEE configuration // if (teeMode !== TEEMode.OFF && !walletSecretSalt) { - // elizaLogger.error("A WALLET_SECRET_SALT required when TEE_MODE is enabled") + // logger.error("A WALLET_SECRET_SALT required when TEE_MODE is enabled") // throw new Error("Invalid TEE configuration") // } @@ -134,51 +134,7 @@ export const importPlugins = async (character: Character, token: string) => { // getSecret(character, secret) // ); // } - - // Initialize Reclaim adapter if environment variables are present - // let verifiableInferenceAdapter; - // if ( - // process.env.RECLAIM_APP_ID && - // process.env.RECLAIM_APP_SECRET && - // process.env.VERIFIABLE_INFERENCE_ENABLED === "true" - // ) { - // verifiableInferenceAdapter = new ReclaimAdapter({ - // appId: process.env.RECLAIM_APP_ID, - // appSecret: process.env.RECLAIM_APP_SECRET, - // modelProvider: character.modelProvider, - // token, - // }); - // elizaLogger.log("Verifiable inference adapter initialized"); - // } - // Initialize Opacity adapter if environment variables are present - // let verifiableInferenceAdapter - // if (process.env.OPACITY_TEAM_ID && process.env.OPACITY_CLOUDFLARE_NAME && process.env.OPACITY_PROVER_URL && process.env.VERIFIABLE_INFERENCE_ENABLED === "true") { - // verifiableInferenceAdapter = new OpacityAdapter({ - // teamId: process.env.OPACITY_TEAM_ID, - // teamName: process.env.OPACITY_CLOUDFLARE_NAME, - // opacityProverUrl: process.env.OPACITY_PROVER_URL, - // modelProvider: character.modelProvider, - // token: token, - // }) - // elizaLogger.log("Verifiable inference adapter initialized") - // elizaLogger.log("teamId", process.env.OPACITY_TEAM_ID) - // elizaLogger.log("teamName", process.env.OPACITY_CLOUDFLARE_NAME) - // elizaLogger.log("opacityProverUrl", process.env.OPACITY_PROVER_URL) - // 
elizaLogger.log("modelProvider", character.modelProvider) - // elizaLogger.log("token", token) - // } - // if (process.env.PRIMUS_APP_ID && process.env.PRIMUS_APP_SECRET && process.env.VERIFIABLE_INFERENCE_ENABLED === "true") { - // verifiableInferenceAdapter = new PrimusAdapter({ - // appId: process.env.PRIMUS_APP_ID, - // appSecret: process.env.PRIMUS_APP_SECRET, - // attMode: "proxytls", - // modelProvider: character.modelProvider, - // token, - // }) - // elizaLogger.log("Verifiable inference primus adapter initialized") - // } - return { - plugins: [ + return [ // parseBooleanFromText(getSecret(character, "BITMIND")) && // getSecret(character, "BITMIND_API_TOKEN") // ? bittensorPlugin @@ -430,12 +386,11 @@ export const importPlugins = async (character: Character, token: string) => { // getSecret(character, "GELATO_RELAY_API_KEY") ? gelatoPlugin : null, // getSecret(character, "TRIKON_WALLET_ADDRESS") ? trikonPlugin : null, ].flat().filter(Boolean) -} }; export async function handlePluginImporting(plugins: string[]) { if (plugins.length > 0) { - elizaLogger.info("Plugins are: ", plugins) + logger.info("Plugins are: ", plugins) const importedPlugins = await Promise.all( plugins.map(async (plugin) => { try { @@ -443,7 +398,7 @@ export async function handlePluginImporting(plugins: string[]) { const functionName = plugin.replace("@elizaos/plugin-", "").replace(/-./g, (x) => x[1].toUpperCase()) + "Plugin" // Assumes plugin function is camelCased with Plugin suffix return importedPlugin.default || importedPlugin[functionName] } catch (importError) { - elizaLogger.error(`Failed to import plugin: ${plugin}`, importError) + logger.error(`Failed to import plugin: ${plugin}`, importError) return [] // Return null for failed imports } }) diff --git a/agent/src/utils.ts b/agent/src/utils.ts index 08f2dc44e38..7e735cf8f77 100644 --- a/agent/src/utils.ts +++ b/agent/src/utils.ts @@ -2,7 +2,7 @@ // We'll be removing ModelProviderName and all hardcoded model provisioning, in favor of plugins with runtime function calls // settings should also probably go away -import { Character, elizaLogger, ModelProviderName, settings } from "@elizaos/core"; +import { Character, logger, ModelProviderName, settings } from "@elizaos/core"; export function getSecret(character: Character, secret: string) { return character.settings?.secrets?.[secret] || process.env[secret] } @@ -13,9 +13,9 @@ export const wait = (minTime = 1000, maxTime = 3000) => { } export const logFetch = async (url: string, options: any) => { - elizaLogger.debug(`Fetching ${url}`) + logger.debug(`Fetching ${url}`) // Disabled to avoid disclosure of sensitive information such as API keys - // elizaLogger.debug(JSON.stringify(options, null, 2)); + // logger.debug(JSON.stringify(options, null, 2)); return fetch(url, options) } @@ -87,7 +87,7 @@ export function getTokenForProvider(provider: ModelProviderName, character: Char return character.settings?.secrets?.LIVEPEER_GATEWAY_URL || settings.LIVEPEER_GATEWAY_URL default: const errorMessage = `Failed to get token - unsupported model provider: ${provider}` - elizaLogger.error(errorMessage) + logger.error(errorMessage) throw new Error(errorMessage) } } \ No newline at end of file diff --git a/docs/api/index.md b/docs/api/index.md index 55807d52823..0a27a39191e 100644 --- a/docs/api/index.md +++ b/docs/api/index.md @@ -93,7 +93,7 @@ - [CharacterSchema](variables/CharacterSchema.md) - [evaluationTemplate](variables/evaluationTemplate.md) - [knowledge](variables/knowledge.md) -- 
[elizaLogger](variables/elizaLogger.md) +- [logger](variables/logger.md) - [models](variables/models.md) - [messageCompletionFooter](variables/messageCompletionFooter.md) - [shouldRespondFooter](variables/shouldRespondFooter.md) diff --git a/docs/api/typedoc-sidebar.cjs b/docs/api/typedoc-sidebar.cjs index 02a6df082f2..4c50625781b 100644 --- a/docs/api/typedoc-sidebar.cjs +++ b/docs/api/typedoc-sidebar.cjs @@ -339,8 +339,8 @@ const typedocSidebar = { { type: "doc", id: "variables/knowledge", label: "knowledge" }, { type: "doc", - id: "variables/elizaLogger", - label: "elizaLogger", + id: "variables/logger", + label: "logger", }, { type: "doc", id: "variables/models", label: "models" }, { diff --git a/docs/api/variables/elizaLogger.md b/docs/api/variables/elizaLogger.md index fcab97553f7..d04af6ded4d 100644 --- a/docs/api/variables/elizaLogger.md +++ b/docs/api/variables/elizaLogger.md @@ -1,8 +1,8 @@ -[@elizaos/runtime v0.1.7](../index.md) / elizaLogger +[@elizaos/runtime v0.1.7](../index.md) / logger -# Variable: elizaLogger +# Variable: logger -> `const` **elizaLogger**: `ElizaLogger` +> `const` **logger**: `ElizaLogger` ## Defined in diff --git a/docs/community/Discord/development/coders/chat_2024-11-09.md b/docs/community/Discord/development/coders/chat_2024-11-09.md index 2e9169675ad..e69aa868aeb 100644 --- a/docs/community/Discord/development/coders/chat_2024-11-09.md +++ b/docs/community/Discord/development/coders/chat_2024-11-09.md @@ -13,12 +13,12 @@ In the discussion, participants explored various technical solutions for tunneli - What are some tips or configurations needed when setting up a TG bot key that doesn't answer back with anything? - Ophiuchus: You might want to check your Botfather settings and ensure proper configuration for response behavior, such as using `thenShouldRespond()` method in the Telegram client. - How can one configure verbose logging for Eliza logger? - - v1xingyue: Set `elizaLogger.verbose` to true or false depending on your preference. The default value is false. You can also set it up using process.env variables and pass them as flags if needed. + - v1xingyue: Set `logger.verbose` to true or false depending on your preference. The default value is false. You can also set it up using process.env variables and pass them as flags if needed. ## Who Helped Who - Ophiuchus helped nirai kanai with installing the official Telegram client by providing the command `pnpm i` for installation and suggesting to include optional dependencies like sharp. This resolved issues related to missing dependencies during installation. -- V1xingyue helped shaw with configuring elizaLogger settings by showing how to set verbose mode in TypeScript code, which allows users to control logging verbosity. +- V1xingyue helped shaw with configuring logger settings by showing how to set verbose mode in TypeScript code, which allows users to control logging verbosity. - Trophy offered assistance to the group by proposing to send a friend request and share resources for further collaboration on their project. 
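For illustration only, a minimal sketch of the verbose-logging toggle described in the summary above, written against the renamed `logger` export used throughout this patch. The `verbose` property and the `VERBOSE` environment variable are assumptions drawn from the chat summary, not a confirmed API surface:

```typescript
import { logger } from "@elizaos/core";

// Assumption: the renamed logger keeps the `verbose` flag mentioned above.
// Driving it from an environment variable lets it be passed as a flag at startup,
// e.g. `VERBOSE=true pnpm start`.
logger.verbose = process.env.VERBOSE === "true";

logger.debug("Only emitted when verbose logging is enabled");
logger.info("Emitted regardless of the verbose setting");
```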
## Action Items diff --git a/docs/community/Discord/development/coders/chat_2024-11-23.md b/docs/community/Discord/development/coders/chat_2024-11-23.md index 7bc02f4b72f..7e8850394e2 100644 --- a/docs/community/Discord/development/coders/chat_2024-11-23.md +++ b/docs/community/Discord/development/coders/chat_2024-11-23.md @@ -23,7 +23,7 @@ In the discussion, jmill advised Lambrino on setting up `WALLET_PUBLIC_KEY` in t ## Who Helped Who - jmill helped Lambrino with setting up solana plugin in agent script by suggesting to set `WALLET_PUBLIC_KEY` in env and providing a code snippet for including plugins. The issue was related to formatting within the "plugins" array, which was resolved successfully. -- AzFlin helped an unnamed user with increasing logging mode to debug by pointing out where they can see debug logs in the code (e.g., `elizaLogger.debug("generate post prompt:\n" + context);`). The context of the problem is not fully clear, but it seems related to debugging agent behavior. +- AzFlin helped an unnamed user with increasing logging mode to debug by pointing out where they can see debug logs in the code (e.g., `logger.debug("generate post prompt:\n" + context);`). The context of the problem is not fully clear, but it seems related to debugging agent behavior. - moonboi 🌑 helped Lambrino by suggesting that the issue with the agent getting caught up on previous stuff might not be a global issue, implying that the problem could lie elsewhere in their setup or usage of the agent. ## Action Items diff --git a/docs/docs/api/globals.md b/docs/docs/api/globals.md index a545622de49..833ca0d6daa 100644 --- a/docs/docs/api/globals.md +++ b/docs/docs/api/globals.md @@ -61,7 +61,7 @@ ## Variables - [defaultCharacter](variables/defaultCharacter.md) -- [elizaLogger](variables/elizaLogger.md) +- [logger](variables/logger.md) - [embeddingDimension](variables/embeddingDimension.md) - [embeddingZeroVector](variables/embeddingZeroVector.md) - [evaluationTemplate](variables/evaluationTemplate.md) diff --git a/docs/docs/api/typedoc-sidebar.cjs b/docs/docs/api/typedoc-sidebar.cjs index 90495796fb3..a1e6bc1dcbc 100644 --- a/docs/docs/api/typedoc-sidebar.cjs +++ b/docs/docs/api/typedoc-sidebar.cjs @@ -207,8 +207,8 @@ const typedocSidebar = { }, { type: "doc", - id: "api/variables/elizaLogger", - label: "elizaLogger", + id: "api/variables/logger", + label: "logger", }, { type: "doc", diff --git a/docs/docs/api/variables/elizaLogger.md b/docs/docs/api/variables/elizaLogger.md index 4db17febc12..167fb0e6cf9 100644 --- a/docs/docs/api/variables/elizaLogger.md +++ b/docs/docs/api/variables/elizaLogger.md @@ -1,6 +1,6 @@ -# Variable: elizaLogger +# Variable: logger -> `const` **elizaLogger**: `ElizaLogger` +> `const` **logger**: `ElizaLogger` ## Defined in diff --git a/docs/docs/packages/clients.md b/docs/docs/packages/clients.md index 869999de5d3..5db0fa6258e 100644 --- a/docs/docs/packages/clients.md +++ b/docs/docs/packages/clients.md @@ -334,7 +334,7 @@ export const DevaClientInterface: Client = { await deva.start(); - elizaLogger.success( + logger.success( `✅ Deva client successfully started for character ${runtime.character.name}`, ); diff --git a/packages/_examples/plugin/src/actions/sampleAction.ts b/packages/_examples/plugin/src/actions/sampleAction.ts index 8a03146cb0d..b16efbed26f 100644 --- a/packages/_examples/plugin/src/actions/sampleAction.ts +++ b/packages/_examples/plugin/src/actions/sampleAction.ts @@ -7,7 +7,7 @@ import { composeContext, generateObject, ModelClass, - elizaLogger, + logger, } 
from "@elizaos/runtime"; import { CreateResourceSchema, isCreateResourceContent } from "../types"; @@ -67,7 +67,7 @@ Resource has been stored in memory.`, [] ); } catch (error) { - elizaLogger.error("Error creating resource:", error); + logger.error("Error creating resource:", error); callback( { text: "Failed to create resource. Please check the logs." }, [] diff --git a/packages/_examples/plugin/src/evaluators/sampleEvalutor.ts b/packages/_examples/plugin/src/evaluators/sampleEvalutor.ts index 1dfadac1f88..47ec772f8a0 100644 --- a/packages/_examples/plugin/src/evaluators/sampleEvalutor.ts +++ b/packages/_examples/plugin/src/evaluators/sampleEvalutor.ts @@ -3,7 +3,7 @@ import { type IAgentRuntime, type Memory, type State, - elizaLogger, + logger, } from "@elizaos/runtime"; export const sampleEvaluator: Evaluator = { @@ -28,17 +28,17 @@ export const sampleEvaluator: Evaluator = { ], handler: async (runtime: IAgentRuntime, memory: Memory, state: State) => { // Evaluation logic for the evaluator - elizaLogger.log("Evaluating data in sampleEvaluator..."); + logger.log("Evaluating data in sampleEvaluator..."); // Example evaluation logic if (memory.content?.includes("important")) { - elizaLogger.log("Important content found in memory."); + logger.log("Important content found in memory."); return { score: 1, reason: "Memory contains important content.", }; } - elizaLogger.log("No important content found in memory."); + logger.log("No important content found in memory."); return { score: 0, reason: "Memory does not contain important content.", diff --git a/packages/_examples/plugin/src/providers/sampleProvider.ts b/packages/_examples/plugin/src/providers/sampleProvider.ts index 0df6f03ece8..f3d94d02fdc 100644 --- a/packages/_examples/plugin/src/providers/sampleProvider.ts +++ b/packages/_examples/plugin/src/providers/sampleProvider.ts @@ -3,13 +3,13 @@ import { type IAgentRuntime, type Memory, type State, - elizaLogger, + logger, } from "@elizaos/runtime"; export const sampleProvider: Provider = { // biome-ignore lint: 'runtime' is intentionally unused get: async (runtime: IAgentRuntime, message: Memory, state: State) => { // Data retrieval logic for the provider - elizaLogger.log("Retrieving data in sampleProvider..."); + logger.log("Retrieving data in sampleProvider..."); }, }; diff --git a/packages/_examples/plugin/src/services/sampleService.ts b/packages/_examples/plugin/src/services/sampleService.ts index a7706748262..f5b771f83c5 100644 --- a/packages/_examples/plugin/src/services/sampleService.ts +++ b/packages/_examples/plugin/src/services/sampleService.ts @@ -4,7 +4,7 @@ import { type IAgentRuntime, // Memory, // State, - elizaLogger, + logger, // stringToUuid, } from "@elizaos/runtime"; // import { sampleProvider } from "../providers/sampleProvider"; // TODO: Uncomment this line to use the sampleProvider @@ -39,7 +39,7 @@ export class SampleService extends Service { // Start the periodic task this.startPeriodicTask(); SampleService.isInitialized = true; - elizaLogger.info("SampleService initialized and started periodic task"); + logger.info("SampleService initialized and started periodic task"); } private static activeTaskCount = 0; @@ -47,7 +47,7 @@ export class SampleService extends Service { private startPeriodicTask(): void { // Verify if a task is already active if (SampleService.activeTaskCount > 0) { - elizaLogger.warn( + logger.warn( "SampleService: Periodic task already running, skipping" ); return; @@ -59,7 +59,7 @@ export class SampleService extends Service { } 
SampleService.activeTaskCount++; - elizaLogger.info( + logger.info( `SampleService: Starting periodic task (active tasks: ${SampleService.activeTaskCount})` ); @@ -74,7 +74,7 @@ export class SampleService extends Service { private async fetchSample(): Promise { if (!this.runtime) { - elizaLogger.error("SampleService: Runtime not initialized"); + logger.error("SampleService: Runtime not initialized"); return; } @@ -104,13 +104,13 @@ export class SampleService extends Service { // await sampleProvider.get(this.runtime, dummyMemory, dummyState); // hello world log example - elizaLogger.info("SampleService: Hello world"); + logger.info("SampleService: Hello world"); - elizaLogger.info( + logger.info( "SampleService: Successfully fetched and processed sample" ); } catch (error) { - elizaLogger.error("SampleService: Error fetching sample:", error); + logger.error("SampleService: Error fetching sample:", error); } } @@ -120,7 +120,7 @@ export class SampleService extends Service { clearInterval(this.intervalId); this.intervalId = null; SampleService.activeTaskCount--; - elizaLogger.info( + logger.info( `SampleService stopped (active tasks: ${SampleService.activeTaskCount})` ); } diff --git a/packages/adapter-sqlite/__tests__/sqlite-adapter.test.ts b/packages/adapter-sqlite/__tests__/sqlite-adapter.test.ts index 56d4eedbf08..f7663f27932 100644 --- a/packages/adapter-sqlite/__tests__/sqlite-adapter.test.ts +++ b/packages/adapter-sqlite/__tests__/sqlite-adapter.test.ts @@ -1,15 +1,15 @@ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; import { SqliteDatabaseAdapter } from '../src'; -import { type UUID, elizaLogger } from '@elizaos/runtime'; +import { type UUID, logger } from '@elizaos/runtime'; import type Database from 'better-sqlite3'; import { load } from '../src/sqlite_vec'; -// Mock the elizaLogger +// Mock the logger vi.mock('@elizaos/runtime', async () => { const actual = await vi.importActual('@elizaos/runtime'); return { ...actual as any, - elizaLogger: { + logger: { error: vi.fn() } }; diff --git a/packages/adapter-sqlite/src/index.ts b/packages/adapter-sqlite/src/index.ts index 11ebaa141ad..d2c8f0df3b3 100644 --- a/packages/adapter-sqlite/src/index.ts +++ b/packages/adapter-sqlite/src/index.ts @@ -3,7 +3,7 @@ export * from "./sqlite_vec.ts"; import { DatabaseAdapter, - elizaLogger, + logger, type IDatabaseCacheAdapter, } from "@elizaos/runtime"; import type { @@ -904,7 +904,7 @@ export class SqliteDatabaseAdapter return results; } catch (error) { - elizaLogger.error("Error in searchKnowledge:", error); + logger.error("Error in searchKnowledge:", error); throw error; } } @@ -944,7 +944,7 @@ export class SqliteDatabaseAdapter error?.code === "SQLITE_CONSTRAINT_PRIMARYKEY"; if (isShared && isPrimaryKeyError) { - elizaLogger.info( + logger.info( `Shared knowledge ${knowledge.id} already exists, skipping` ); return; @@ -952,7 +952,7 @@ export class SqliteDatabaseAdapter !isShared && !error.message?.includes("SQLITE_CONSTRAINT_PRIMARYKEY") ) { - elizaLogger.error(`Error creating knowledge ${knowledge.id}:`, { + logger.error(`Error creating knowledge ${knowledge.id}:`, { error, embeddingLength: knowledge.embedding?.length, content: knowledge.content, @@ -960,7 +960,7 @@ export class SqliteDatabaseAdapter throw error; } - elizaLogger.debug( + logger.debug( `Knowledge ${knowledge.id} already exists, skipping` ); } @@ -977,12 +977,12 @@ export class SqliteDatabaseAdapter if (id.includes("*")) { const pattern = id.replace("*", "%"); const sql = "DELETE FROM knowledge WHERE id 
LIKE ?"; - elizaLogger.debug( + logger.debug( `[Knowledge Remove] Executing SQL: ${sql} with pattern: ${pattern}` ); const stmt = this.db.prepare(sql); const result = stmt.run(pattern); - elizaLogger.debug( + logger.debug( `[Knowledge Remove] Pattern deletion affected ${result.changes} rows` ); return result.changes; // Return changes for logging @@ -991,7 +991,7 @@ export class SqliteDatabaseAdapter const selectSql = "SELECT id FROM knowledge WHERE id = ?"; const chunkSql = "SELECT id FROM knowledge WHERE json_extract(content, '$.metadata.originalId') = ?"; - elizaLogger.debug(`[Knowledge Remove] Checking existence with: + logger.debug(`[Knowledge Remove] Checking existence with: Main: ${selectSql} [${id}] Chunks: ${chunkSql} [${id}]`); @@ -1002,7 +1002,7 @@ export class SqliteDatabaseAdapter .prepare(chunkSql) .all(id) as ChunkRow[]; - elizaLogger.debug(`[Knowledge Remove] Found:`, { + logger.debug(`[Knowledge Remove] Found:`, { mainEntryExists: !!mainEntry?.id, chunkCount: chunks.length, chunkIds: chunks.map((c) => c.id), @@ -1011,34 +1011,34 @@ export class SqliteDatabaseAdapter // Execute and log chunk deletion const chunkDeleteSql = "DELETE FROM knowledge WHERE json_extract(content, '$.metadata.originalId') = ?"; - elizaLogger.debug( + logger.debug( `[Knowledge Remove] Executing chunk deletion: ${chunkDeleteSql} [${id}]` ); const chunkResult = this.db.prepare(chunkDeleteSql).run(id); - elizaLogger.debug( + logger.debug( `[Knowledge Remove] Chunk deletion affected ${chunkResult.changes} rows` ); // Execute and log main entry deletion const mainDeleteSql = "DELETE FROM knowledge WHERE id = ?"; - elizaLogger.debug( + logger.debug( `[Knowledge Remove] Executing main deletion: ${mainDeleteSql} [${id}]` ); const mainResult = this.db.prepare(mainDeleteSql).run(id); - elizaLogger.debug( + logger.debug( `[Knowledge Remove] Main deletion affected ${mainResult.changes} rows` ); const totalChanges = chunkResult.changes + mainResult.changes; - elizaLogger.debug( + logger.debug( `[Knowledge Remove] Total rows affected: ${totalChanges}` ); // Verify deletion const verifyMain = this.db.prepare(selectSql).get(id); const verifyChunks = this.db.prepare(chunkSql).all(id); - elizaLogger.debug( + logger.debug( `[Knowledge Remove] Post-deletion check:`, { mainStillExists: !!verifyMain, @@ -1050,11 +1050,11 @@ export class SqliteDatabaseAdapter } })(); // Important: Call the transaction function - elizaLogger.debug( + logger.debug( `[Knowledge Remove] Transaction completed for id: ${id}` ); } catch (error) { - elizaLogger.error("[Knowledge Remove] Error:", { + logger.error("[Knowledge Remove] Error:", { id, error: error instanceof Error @@ -1076,7 +1076,7 @@ export class SqliteDatabaseAdapter try { this.db.prepare(sql).run(agentId); } catch (error) { - elizaLogger.error( + logger.error( `Error clearing knowledge for agent ${agentId}:`, error ); diff --git a/packages/adapter-sqlite/src/sqlite_vec.ts b/packages/adapter-sqlite/src/sqlite_vec.ts index 2da8bbfc022..be6879b0dcc 100644 --- a/packages/adapter-sqlite/src/sqlite_vec.ts +++ b/packages/adapter-sqlite/src/sqlite_vec.ts @@ -1,15 +1,15 @@ import * as sqliteVec from "sqlite-vec"; import type { Database } from "better-sqlite3"; -import { elizaLogger } from "@elizaos/runtime"; +import { logger } from "@elizaos/runtime"; // Loads the sqlite-vec extensions into the provided SQLite database export function loadVecExtensions(db: Database): void { try { // Load sqlite-vec extensions sqliteVec.load(db); - elizaLogger.log("sqlite-vec extensions loaded 
successfully."); + logger.log("sqlite-vec extensions loaded successfully."); } catch (error) { - elizaLogger.error("Failed to load sqlite-vec extensions:", error); + logger.error("Failed to load sqlite-vec extensions:", error); throw error; } } diff --git a/packages/client-direct/src/api.ts b/packages/client-direct/src/api.ts index 04e06d1a21a..fa8e7cee91b 100644 --- a/packages/client-direct/src/api.ts +++ b/packages/client-direct/src/api.ts @@ -6,7 +6,7 @@ import fs from "fs"; import { type AgentRuntime, - elizaLogger, + logger, getEnvVariable, type UUID, validateCharacterConfig, @@ -157,7 +157,7 @@ export function createApiRouter( try { validateCharacterConfig(character); } catch (e) { - elizaLogger.error(`Error parsing character: ${e}`); + logger.error(`Error parsing character: ${e}`); res.status(400).json({ success: false, message: e.message, @@ -168,9 +168,9 @@ export function createApiRouter( // start it up (and register it) try { agent = await directClient.startAgent(character); - elizaLogger.log(`${character.name} started`); + logger.log(`${character.name} started`); } catch (e) { - elizaLogger.error(`Error starting agent: ${e}`); + logger.error(`Error starting agent: ${e}`); res.status(500).json({ success: false, message: e.message, @@ -196,11 +196,11 @@ export function createApiRouter( 2 ) ); - elizaLogger.info( + logger.info( `Character stored successfully at ${filepath}` ); } catch (error) { - elizaLogger.error( + logger.error( `Failed to store character: ${error.message}` ); } @@ -329,7 +329,7 @@ export function createApiRouter( ); res.json({ agents: allAgents, attestation: attestation }); } catch (error) { - elizaLogger.error("Failed to get TEE agents:", error); + logger.error("Failed to get TEE agents:", error); res.status(500).json({ error: "Failed to get TEE agents", }); @@ -355,7 +355,7 @@ export function createApiRouter( ); res.json({ agent: teeAgent, attestation: attestation }); } catch (error) { - elizaLogger.error("Failed to get TEE agent:", error); + logger.error("Failed to get TEE agent:", error); res.status(500).json({ error: "Failed to get TEE agent", }); @@ -396,7 +396,7 @@ export function createApiRouter( attestation: attestation, }); } catch (error) { - elizaLogger.error("Failed to get TEE logs:", error); + logger.error("Failed to get TEE logs:", error); res.status(500).json({ error: "Failed to get TEE logs", }); @@ -422,14 +422,14 @@ export function createApiRouter( throw new Error("No character path or JSON provided"); } await directClient.startAgent(character); - elizaLogger.log(`${character.name} started`); + logger.log(`${character.name} started`); res.json({ id: character.id, character: character, }); } catch (e) { - elizaLogger.error(`Error parsing character: ${e}`); + logger.error(`Error parsing character: ${e}`); res.status(400).json({ error: e.message, }); diff --git a/packages/client-direct/src/index.ts b/packages/client-direct/src/index.ts index 4b18881a6d1..1030921ddf4 100644 --- a/packages/client-direct/src/index.ts +++ b/packages/client-direct/src/index.ts @@ -5,7 +5,7 @@ import multer from "multer"; import { z } from "zod"; import { type AgentRuntime, - elizaLogger, + logger, messageCompletionFooter, generateCaption, generateImage, @@ -117,7 +117,7 @@ export class DirectClient { public jsonToCharacter: Function; // Store jsonToCharacter functor constructor() { - elizaLogger.log("DirectClient constructor"); + logger.log("DirectClient constructor"); this.app = express(); this.app.use(cors()); this.agents = new Map(); @@ -517,7 +517,7 @@ export 
class DirectClient { try { hfOut = hyperfiOutSchema.parse(response.object); } catch { - elizaLogger.error( + logger.error( "cant serialize response", response.object ); @@ -661,13 +661,13 @@ export class DirectClient { assetId ); - elizaLogger.log("Download directory:", downloadDir); + logger.log("Download directory:", downloadDir); try { - elizaLogger.log("Creating directory..."); + logger.log("Creating directory..."); await fs.promises.mkdir(downloadDir, { recursive: true }); - elizaLogger.log("Fetching file..."); + logger.log("Fetching file..."); const fileResponse = await fetch( `https://api.bageldb.ai/api/v1/asset/${assetId}/download`, { @@ -683,7 +683,7 @@ export class DirectClient { ); } - elizaLogger.log("Response headers:", fileResponse.headers); + logger.log("Response headers:", fileResponse.headers); const fileName = fileResponse.headers @@ -691,19 +691,19 @@ export class DirectClient { ?.split("filename=")[1] ?.replace(/"/g, /* " */ "") || "default_name.txt"; - elizaLogger.log("Saving as:", fileName); + logger.log("Saving as:", fileName); const arrayBuffer = await fileResponse.arrayBuffer(); const buffer = Buffer.from(arrayBuffer); const filePath = path.join(downloadDir, fileName); - elizaLogger.log("Full file path:", filePath); + logger.log("Full file path:", filePath); await fs.promises.writeFile(filePath, buffer); // Verify file was written const stats = await fs.promises.stat(filePath); - elizaLogger.log( + logger.log( "File written successfully. Size:", stats.size, "bytes" @@ -718,7 +718,7 @@ export class DirectClient { fileSize: stats.size, }); } catch (error) { - elizaLogger.error("Detailed error:", error); + logger.error("Detailed error:", error); res.status(500).json({ error: "Failed to download files from BagelDB", details: error.message, @@ -892,7 +892,7 @@ export class DirectClient { res.send(Buffer.from(audioBuffer)); } catch (error) { - elizaLogger.error( + logger.error( "Error processing message or generating speech:", error ); @@ -965,7 +965,7 @@ export class DirectClient { res.send(Buffer.from(audioBuffer)); } catch (error) { - elizaLogger.error( + logger.error( "Error processing message or generating speech:", error ); @@ -990,22 +990,22 @@ export class DirectClient { public start(port: number) { this.server = this.app.listen(port, () => { - elizaLogger.success( + logger.success( `REST API bound to 0.0.0.0:${port}. 
If running locally, access it at http://localhost:${port}.` ); }); // Handle graceful shutdown const gracefulShutdown = () => { - elizaLogger.log("Received shutdown signal, closing server..."); + logger.log("Received shutdown signal, closing server..."); this.server.close(() => { - elizaLogger.success("Server closed successfully"); + logger.success("Server closed successfully"); process.exit(0); }); // Force close after 5 seconds if server hasn't closed setTimeout(() => { - elizaLogger.error( + logger.error( "Could not close connections in time, forcefully shutting down" ); process.exit(1); @@ -1020,7 +1020,7 @@ export class DirectClient { public stop() { if (this.server) { this.server.close(() => { - elizaLogger.success("Server stopped"); + logger.success("Server stopped"); }); } } @@ -1028,7 +1028,7 @@ export class DirectClient { export const DirectClientInterface: Client = { start: async (_runtime: IAgentRuntime) => { - elizaLogger.log("DirectClientInterface start"); + logger.log("DirectClientInterface start"); const client = new DirectClient(); const serverPort = Number.parseInt(settings.SERVER_PORT || "3000"); client.start(serverPort); diff --git a/packages/client-direct/src/verifiable-log-api.ts b/packages/client-direct/src/verifiable-log-api.ts index ca03dc710a5..5d31438234a 100644 --- a/packages/client-direct/src/verifiable-log-api.ts +++ b/packages/client-direct/src/verifiable-log-api.ts @@ -2,7 +2,7 @@ import express from "express"; import bodyParser from "body-parser"; import cors from "cors"; -import { type AgentRuntime, elizaLogger, ServiceType } from "@elizaos/runtime"; +import { type AgentRuntime, logger, ServiceType } from "@elizaos/runtime"; import type { VerifiableLogService, VerifiableLogQuery, @@ -34,7 +34,7 @@ export function createVerifiableLogApiRouter( data: pageQuery, }); } catch (error) { - elizaLogger.error("Detailed error:", error); + logger.error("Detailed error:", error); res.status(500).json({ error: "failed to get agents registered ", details: error.message, @@ -66,7 +66,7 @@ export function createVerifiableLogApiRouter( data: pageQuery, }); } catch (error) { - elizaLogger.error("Detailed error:", error); + logger.error("Detailed error:", error); res.status(500).json({ error: "Failed to Get Attestation", details: error.message, @@ -105,7 +105,7 @@ export function createVerifiableLogApiRouter( data: pageQuery, }); } catch (error) { - elizaLogger.error("Detailed error:", error); + logger.error("Detailed error:", error); res.status(500).json({ error: "Failed to Get Verifiable Logs", details: error.message, diff --git a/packages/client-telegram/src/index.ts b/packages/client-telegram/src/index.ts index cd68f15dc6b..54aadc231ac 100644 --- a/packages/client-telegram/src/index.ts +++ b/packages/client-telegram/src/index.ts @@ -1,4 +1,4 @@ -import { elizaLogger } from "@elizaos/runtime"; +import { logger } from "@elizaos/runtime"; import type { Client, IAgentRuntime } from "@elizaos/runtime"; import { TelegramClient } from "./telegramClient.ts"; import { validateTelegramConfig } from "./environment.ts"; @@ -14,13 +14,13 @@ export const TelegramClientInterface: Client = { await tg.start(); - elizaLogger.success( + logger.success( `✅ Telegram client successfully started for character ${runtime.character.name}` ); return tg; }, stop: async (_runtime: IAgentRuntime) => { - elizaLogger.warn("Telegram client does not support stopping yet"); + logger.warn("Telegram client does not support stopping yet"); }, }; diff --git a/packages/client-telegram/src/messageManager.ts 
b/packages/client-telegram/src/messageManager.ts index 15daddef2a8..84dd4ee5a37 100644 --- a/packages/client-telegram/src/messageManager.ts +++ b/packages/client-telegram/src/messageManager.ts @@ -2,7 +2,7 @@ import type { Message } from "@telegraf/types"; import type { Context, Telegraf } from "telegraf"; import { composeContext, - elizaLogger, + logger, ServiceType, composeRandomUser, } from "@elizaos/runtime"; @@ -86,7 +86,7 @@ export class MessageManager { this.runtime = runtime; this._initializeTeamMemberUsernames().catch((error) => - elizaLogger.error( + logger.error( "Error initializing team member usernames:", error ) @@ -130,12 +130,12 @@ export class MessageManager { const chat = await this.bot.telegram.getChat(id); if ("username" in chat && chat.username) { this.teamMemberUsernames.set(id, chat.username); - elizaLogger.info( + logger.info( `Cached username for team member ${id}: ${chat.username}` ); } } catch (error) { - elizaLogger.error( + logger.error( `Error getting username for team member ${id}:`, error ); @@ -146,16 +146,16 @@ export class MessageManager { private _startAutoPostMonitoring(): void { // Wait for bot to be ready if (this.bot.botInfo) { - elizaLogger.info( + logger.info( "[AutoPost Telegram] Bot ready, starting monitoring" ); this._initializeAutoPost(); } else { - elizaLogger.info( + logger.info( "[AutoPost Telegram] Bot not ready, waiting for ready event" ); this.bot.telegram.getMe().then(() => { - elizaLogger.info( + logger.info( "[AutoPost Telegram] Bot ready, starting monitoring" ); this._initializeAutoPost(); @@ -279,15 +279,15 @@ export class MessageManager { state = await this.runtime.updateRecentMessageState(state); await this.runtime.evaluate(memory, state, true); } catch (error) { - elizaLogger.warn("[AutoPost Telegram] Error:", error); + logger.warn("[AutoPost Telegram] Error:", error); } } else { - elizaLogger.warn( + logger.warn( "[AutoPost Telegram] Activity within threshold. Not posting." 
); } } catch (error) { - elizaLogger.warn( + logger.warn( "[AutoPost Telegram] Error checking channel activity:", error ); @@ -296,7 +296,7 @@ export class MessageManager { private async _monitorPinnedMessages(ctx: Context): Promise { if (!this.autoPostConfig.pinnedMessagesGroups.length) { - elizaLogger.warn( + logger.warn( "[AutoPost Telegram] Auto post config no pinned message groups" ); return; @@ -320,7 +320,7 @@ export class MessageManager { if (!mainChannel) return; try { - elizaLogger.info( + logger.info( `[AutoPost Telegram] Processing pinned message in group ${ctx.chat.id}` ); @@ -405,7 +405,7 @@ export class MessageManager { state = await this.runtime.updateRecentMessageState(state); await this.runtime.evaluate(memory, state, true); } catch (error) { - elizaLogger.warn( + logger.warn( `[AutoPost Telegram] Error processing pinned message:`, error ); @@ -643,7 +643,7 @@ export class MessageManager { try { let imageUrl: string | null = null; - elizaLogger.info(`Telegram Message: ${message}`); + logger.info(`Telegram Message: ${message}`); if ("photo" in message && message.photo?.length > 0) { const photo = message.photo[message.photo.length - 1]; @@ -694,7 +694,7 @@ export class MessageManager { "text" in message && message.text?.includes(`@${this.bot.botInfo?.username}`) ) { - elizaLogger.info(`Bot mentioned`); + logger.info(`Bot mentioned`); return true; } @@ -997,16 +997,16 @@ export class MessageManager { } } - elizaLogger.info( + logger.info( `${ type.charAt(0).toUpperCase() + type.slice(1) } sent successfully: ${mediaPath}` ); } catch (error) { - elizaLogger.error( + logger.error( `Failed to send ${type}. Path: ${mediaPath}. Error: ${error.message}` ); - elizaLogger.debug(error.stack); + logger.debug(error.stack); throw error; } } @@ -1406,8 +1406,8 @@ export class MessageManager { await this.runtime.evaluate(memory, state, shouldRespond, callback); } catch (error) { - elizaLogger.error("❌ Error handling message:", error); - elizaLogger.error("Error sending message:", error); + logger.error("❌ Error handling message:", error); + logger.error("Error sending message:", error); } } } diff --git a/packages/client-telegram/src/telegramClient.ts b/packages/client-telegram/src/telegramClient.ts index a258b37c57c..ae0899cdfa5 100644 --- a/packages/client-telegram/src/telegramClient.ts +++ b/packages/client-telegram/src/telegramClient.ts @@ -1,6 +1,6 @@ import { type Context, Telegraf } from "telegraf"; import { message } from "telegraf/filters"; -import { type IAgentRuntime, elizaLogger } from "@elizaos/runtime"; +import { type IAgentRuntime, logger } from "@elizaos/runtime"; import { MessageManager } from "./messageManager.ts"; import { getOrCreateRecommenderInBe } from "./getOrCreateRecommenderInBe.ts"; @@ -14,7 +14,7 @@ export class TelegramClient { private options; constructor(runtime: IAgentRuntime, botToken: string) { - elizaLogger.log("📱 Constructing new TelegramClient..."); + logger.log("📱 Constructing new TelegramClient..."); this.options = { telegram: { apiRoot: runtime.getSetting("TELEGRAM_API_ROOT") || process.env.TELEGRAM_API_ROOT || "https://api.telegram.org" @@ -26,30 +26,30 @@ export class TelegramClient { this.backend = runtime.getSetting("BACKEND_URL"); this.backendToken = runtime.getSetting("BACKEND_TOKEN"); this.tgTrader = runtime.getSetting("TG_TRADER"); // boolean To Be added to the settings - elizaLogger.log("✅ TelegramClient constructor completed"); + logger.log("✅ TelegramClient constructor completed"); } public async start(): Promise { - elizaLogger.log("🚀 
Starting Telegram bot..."); + logger.log("🚀 Starting Telegram bot..."); try { await this.initializeBot(); this.setupMessageHandlers(); this.setupShutdownHandlers(); } catch (error) { - elizaLogger.error("❌ Failed to launch Telegram bot:", error); + logger.error("❌ Failed to launch Telegram bot:", error); throw error; } } private async initializeBot(): Promise { this.bot.launch({ dropPendingUpdates: true }); - elizaLogger.log( + logger.log( "✨ Telegram bot successfully launched and is running!" ); const botInfo = await this.bot.telegram.getMe(); this.bot.botInfo = botInfo; - elizaLogger.success(`Bot username: @${botInfo.username}`); + logger.success(`Bot username: @${botInfo.username}`); this.messageManager.bot = this.bot; } @@ -68,12 +68,12 @@ export class TelegramClient { const currentGroupId = ctx.chat.id.toString(); if (!allowedGroups.includes(currentGroupId)) { - elizaLogger.info(`Unauthorized group detected: ${currentGroupId}`); + logger.info(`Unauthorized group detected: ${currentGroupId}`); try { await ctx.reply("Not authorized. Leaving."); await ctx.leaveChat(); } catch (error) { - elizaLogger.error( + logger.error( `Error leaving unauthorized group ${currentGroupId}:`, error ); @@ -85,7 +85,7 @@ export class TelegramClient { } private setupMessageHandlers(): void { - elizaLogger.log("Setting up message handler..."); + logger.log("Setting up message handler..."); this.bot.on(message("new_chat_members"), async (ctx) => { try { @@ -98,7 +98,7 @@ export class TelegramClient { return; } } catch (error) { - elizaLogger.error("Error handling new chat members:", error); + logger.error("Error handling new chat members:", error); } }); @@ -114,7 +114,7 @@ export class TelegramClient { const username = ctx.from?.username || ctx.from?.first_name || "Unknown"; if (!userId) { - elizaLogger.warn( + logger.warn( "Received message from a user without an ID." ); return; @@ -127,7 +127,7 @@ export class TelegramClient { this.backend ); } catch (error) { - elizaLogger.error( + logger.error( "Error getting or creating recommender in backend", error ); @@ -136,7 +136,7 @@ export class TelegramClient { await this.messageManager.handleMessage(ctx); } catch (error) { - elizaLogger.error("❌ Error handling message:", error); + logger.error("❌ Error handling message:", error); // Don't try to reply if we've left the group or been kicked if (error?.response?.error_code !== 403) { try { @@ -144,7 +144,7 @@ export class TelegramClient { "An error occurred while processing your message." ); } catch (replyError) { - elizaLogger.error( + logger.error( "Failed to send error message:", replyError ); @@ -154,35 +154,35 @@ export class TelegramClient { }); this.bot.on("photo", (ctx) => { - elizaLogger.log( + logger.log( "📸 Received photo message with caption:", ctx.message.caption ); }); this.bot.on("document", (ctx) => { - elizaLogger.log( + logger.log( "📎 Received document message:", ctx.message.document.file_name ); }); this.bot.catch((err, ctx) => { - elizaLogger.error(`❌ Telegram Error for ${ctx.updateType}:`, err); + logger.error(`❌ Telegram Error for ${ctx.updateType}:`, err); ctx.reply("An unexpected error occurred. Please try again later."); }); } private setupShutdownHandlers(): void { const shutdownHandler = async (signal: string) => { - elizaLogger.log( + logger.log( `⚠️ Received ${signal}. 
Shutting down Telegram bot gracefully...` ); try { await this.stop(); - elizaLogger.log("🛑 Telegram bot stopped gracefully"); + logger.log("🛑 Telegram bot stopped gracefully"); } catch (error) { - elizaLogger.error( + logger.error( "❌ Error during Telegram bot shutdown:", error ); @@ -196,9 +196,9 @@ export class TelegramClient { } public async stop(): Promise { - elizaLogger.log("Stopping Telegram bot..."); + logger.log("Stopping Telegram bot..."); //await this.bot.stop(); - elizaLogger.log("Telegram bot stopped"); + logger.log("Telegram bot stopped"); } } diff --git a/packages/client-twitter/src/base.ts b/packages/client-twitter/src/base.ts index 3543877ed74..ddd86f28f93 100644 --- a/packages/client-twitter/src/base.ts +++ b/packages/client-twitter/src/base.ts @@ -6,7 +6,7 @@ import { type State, type UUID, getEmbeddingZeroVector, - elizaLogger, + logger, stringToUuid, ActionTimelineType, } from "@elizaos/core"; @@ -264,16 +264,16 @@ export class ClientBase extends EventEmitter { const cachedCookies = await this.getCachedCookies(username); if (cachedCookies) { - elizaLogger.info("Using cached cookies"); + logger.info("Using cached cookies"); await this.setCookiesFromArray(cachedCookies); } - elizaLogger.log("Waiting for Twitter login"); + logger.log("Waiting for Twitter login"); while (retries > 0) { try { if (await this.twitterClient.isLoggedIn()) { // cookies are valid, no login required - elizaLogger.info("Successfully logged in."); + logger.info("Successfully logged in."); break; } else { await this.twitterClient.login( @@ -284,8 +284,8 @@ export class ClientBase extends EventEmitter { ); if (await this.twitterClient.isLoggedIn()) { // fresh login, store new cookies - elizaLogger.info("Successfully logged in."); - elizaLogger.info("Caching cookies"); + logger.info("Successfully logged in."); + logger.info("Caching cookies"); await this.cacheCookies( username, await this.twitterClient.getCookies() @@ -294,16 +294,16 @@ export class ClientBase extends EventEmitter { } } } catch (error) { - elizaLogger.error(`Login attempt failed: ${error.message}`); + logger.error(`Login attempt failed: ${error.message}`); } retries--; - elizaLogger.error( + logger.error( `Failed to login to Twitter. Retrying... (${retries} attempts left)` ); if (retries === 0) { - elizaLogger.error( + logger.error( "Max retries reached. Exiting login process." ); throw new Error("Twitter login failed after maximum retries."); @@ -315,8 +315,8 @@ export class ClientBase extends EventEmitter { this.profile = await this.fetchProfile(username); if (this.profile) { - elizaLogger.log("Twitter user ID:", this.profile.id); - elizaLogger.log( + logger.log("Twitter user ID:", this.profile.id); + logger.log( "Twitter loaded:", JSON.stringify(this.profile, null, 10) ); @@ -337,7 +337,7 @@ export class ClientBase extends EventEmitter { } async fetchOwnPosts(count: number): Promise { - elizaLogger.debug("fetching own posts"); + logger.debug("fetching own posts"); const homeTimeline = await this.twitterClient.getUserTweets( this.profile.id, count @@ -353,22 +353,22 @@ export class ClientBase extends EventEmitter { count: number, following?: boolean ): Promise { - elizaLogger.debug("fetching home timeline"); + logger.debug("fetching home timeline"); const homeTimeline = following ? 
await this.twitterClient.fetchFollowingTimeline(count, []) : await this.twitterClient.fetchHomeTimeline(count, []); - elizaLogger.debug(homeTimeline, { depth: Number.POSITIVE_INFINITY }); + logger.debug(homeTimeline, { depth: Number.POSITIVE_INFINITY }); const processedTimeline = homeTimeline .filter((t) => t.__typename !== "TweetWithVisibilityResults") // what's this about? .map((tweet) => this.parseTweet(tweet)); - //elizaLogger.debug("process homeTimeline", processedTimeline); + //logger.debug("process homeTimeline", processedTimeline); return processedTimeline; } async fetchTimelineForActions(count: number): Promise { - elizaLogger.debug("fetching timeline for actions"); + logger.debug("fetching timeline for actions"); const agentUsername = this.twitterConfig.TWITTER_USERNAME; @@ -416,17 +416,17 @@ export class ClientBase extends EventEmitter { ); return (result ?? { tweets: [] }) as QueryTweetsResponse; } catch (error) { - elizaLogger.error("Error fetching search tweets:", error); + logger.error("Error fetching search tweets:", error); return { tweets: [] }; } } catch (error) { - elizaLogger.error("Error fetching search tweets:", error); + logger.error("Error fetching search tweets:", error); return { tweets: [] }; } } private async populateTimeline() { - elizaLogger.debug("populating timeline..."); + logger.debug("populating timeline..."); const cachedTimeline = await this.getCachedTimeline(); @@ -475,7 +475,7 @@ export class ClientBase extends EventEmitter { // Save the missing tweets as memories for (const tweet of tweetsToSave) { - elizaLogger.log("Saving Tweet", tweet.id); + logger.log("Saving Tweet", tweet.id); const roomId = stringToUuid( tweet.conversationId + "-" + this.runtime.agentId @@ -517,7 +517,7 @@ export class ClientBase extends EventEmitter { : undefined, } as Content; - elizaLogger.log("Creating memory for tweet", tweet.id); + logger.log("Creating memory for tweet", tweet.id); // check if it already exists const memory = @@ -526,7 +526,7 @@ export class ClientBase extends EventEmitter { ); if (memory) { - elizaLogger.log( + logger.log( "Memory already exists, skipping timeline population" ); break; @@ -545,7 +545,7 @@ export class ClientBase extends EventEmitter { await this.cacheTweet(tweet); } - elizaLogger.log( + logger.log( `Populated ${tweetsToSave.length} missing tweets from the cache.` ); return; @@ -596,7 +596,7 @@ export class ClientBase extends EventEmitter { ) ); - elizaLogger.debug({ + logger.debug({ processingTweets: tweetsToSave.map((tweet) => tweet.id).join(","), }); @@ -609,7 +609,7 @@ export class ClientBase extends EventEmitter { // Save the new tweets as memories for (const tweet of tweetsToSave) { - elizaLogger.log("Saving Tweet", tweet.id); + logger.log("Saving Tweet", tweet.id); const roomId = stringToUuid( tweet.conversationId + "-" + this.runtime.agentId @@ -690,7 +690,7 @@ export class ClientBase extends EventEmitter { recentMessage.length > 0 && recentMessage[0].content === message.content ) { - elizaLogger.debug("Message already saved", recentMessage[0].id); + logger.debug("Message already saved", recentMessage[0].id); } else { await this.runtime.messageManager.createMemory({ ...message, diff --git a/packages/client-twitter/src/index.ts b/packages/client-twitter/src/index.ts index 52957c1878b..d525492f44a 100644 --- a/packages/client-twitter/src/index.ts +++ b/packages/client-twitter/src/index.ts @@ -1,4 +1,4 @@ -import { type Client, elizaLogger, type IAgentRuntime } from "@elizaos/core"; +import { type Client, logger, type IAgentRuntime 
} from "@elizaos/core"; import { ClientBase } from "./base.ts"; import { validateTwitterConfig, type TwitterConfig } from "./environment.ts"; import { TwitterInteractionClient } from "./interactions.ts"; @@ -30,11 +30,11 @@ class TwitterManager { // Optional search logic (enabled if TWITTER_SEARCH_ENABLE is true) if (twitterConfig.TWITTER_SEARCH_ENABLE) { - elizaLogger.warn("Twitter/X client running in a mode that:"); - elizaLogger.warn("1. violates consent of random users"); - elizaLogger.warn("2. burns your rate limit"); - elizaLogger.warn("3. can get your account banned"); - elizaLogger.warn("use at your own risk"); + logger.warn("Twitter/X client running in a mode that:"); + logger.warn("1. violates consent of random users"); + logger.warn("2. burns your rate limit"); + logger.warn("3. can get your account banned"); + logger.warn("use at your own risk"); this.search = new TwitterSearchClient(this.client, runtime); } @@ -53,7 +53,7 @@ export const TwitterClientInterface: Client = { const twitterConfig: TwitterConfig = await validateTwitterConfig(runtime); - elizaLogger.log("Twitter client started"); + logger.log("Twitter client started"); const manager = new TwitterManager(runtime, twitterConfig); @@ -80,7 +80,7 @@ export const TwitterClientInterface: Client = { }, async stop(_runtime: IAgentRuntime) { - elizaLogger.warn("Twitter client does not support stopping yet"); + logger.warn("Twitter client does not support stopping yet"); }, }; diff --git a/packages/client-twitter/src/interactions.ts b/packages/client-twitter/src/interactions.ts index ca85b814209..85cf3c23419 100644 --- a/packages/client-twitter/src/interactions.ts +++ b/packages/client-twitter/src/interactions.ts @@ -12,7 +12,7 @@ import { ModelClass, type State, stringToUuid, - elizaLogger, + logger, getEmbeddingZeroVector, type IImageDescriptionService, ServiceType @@ -116,7 +116,7 @@ export class TwitterInteractionClient { } async handleTwitterInteractions() { - elizaLogger.log("Checking Twitter interactions"); + logger.log("Checking Twitter interactions"); const twitterUsername = this.client.profile.username; try { @@ -129,7 +129,7 @@ export class TwitterInteractionClient { ) ).tweets; - elizaLogger.log( + logger.log( "Completed checking mentioned tweets:", mentionCandidates.length ); @@ -139,7 +139,7 @@ export class TwitterInteractionClient { const TARGET_USERS = this.client.twitterConfig.TWITTER_TARGET_USERS; - elizaLogger.log("Processing target users:", TARGET_USERS); + logger.log("Processing target users:", TARGET_USERS); if (TARGET_USERS.length > 0) { // Create a map to store tweets by user @@ -166,7 +166,7 @@ export class TwitterInteractionClient { Date.now() - tweet.timestamp * 1000 < 2 * 60 * 60 * 1000; - elizaLogger.log(`Tweet ${tweet.id} checks:`, { + logger.log(`Tweet ${tweet.id} checks:`, { isUnprocessed, isRecent, isReply: tweet.isReply, @@ -183,12 +183,12 @@ export class TwitterInteractionClient { if (validTweets.length > 0) { tweetsByUser.set(username, validTweets); - elizaLogger.log( + logger.log( `Found ${validTweets.length} valid tweets from ${username}` ); } } catch (error) { - elizaLogger.error( + logger.error( `Error fetching tweets for ${username}:`, error ); @@ -206,7 +206,7 @@ export class TwitterInteractionClient { Math.floor(Math.random() * tweets.length) ]; selectedTweets.push(randomTweet); - elizaLogger.log( + logger.log( `Selected tweet from ${username}: ${randomTweet.text?.substring(0, 100)}` ); } @@ -219,7 +219,7 @@ export class TwitterInteractionClient { ]; } } else { - elizaLogger.log( + 
logger.log( "No target users configured, processing only mentions" ); } @@ -247,12 +247,12 @@ export class TwitterInteractionClient { ); if (existingResponse) { - elizaLogger.log( + logger.log( `Already responded to tweet ${tweet.id}, skipping` ); continue; } - elizaLogger.log("New Tweet found", tweet.permanentUrl); + logger.log("New Tweet found", tweet.permanentUrl); const roomId = stringToUuid( tweet.conversationId + "-" + this.runtime.agentId @@ -300,9 +300,9 @@ export class TwitterInteractionClient { // Save the latest checked tweet ID to the file await this.client.cacheLatestCheckedTweetId(); - elizaLogger.log("Finished checking Twitter interactions"); + logger.log("Finished checking Twitter interactions"); } catch (error) { - elizaLogger.error("Error handling Twitter interactions:", error); + logger.error("Error handling Twitter interactions:", error); } } @@ -322,11 +322,11 @@ export class TwitterInteractionClient { } if (!message.content.text) { - elizaLogger.log("Skipping Tweet with no text", tweet.id); + logger.log("Skipping Tweet with no text", tweet.id); return { text: "", action: "IGNORE" }; } - elizaLogger.log("Processing Tweet: ", tweet.id); + logger.log("Processing Tweet: ", tweet.id); const formatTweet = (tweet: Tweet) => { return ` ID: ${tweet.id} From: ${tweet.name} (@${tweet.username}) @@ -360,7 +360,7 @@ export class TwitterInteractionClient { } } catch (error) { // Handle the error - elizaLogger.error("Error Occured during describing image: ", error); + logger.error("Error Occured during describing image: ", error); } @@ -382,7 +382,7 @@ export class TwitterInteractionClient { await this.runtime.messageManager.getMemoryById(tweetId); if (!tweetExists) { - elizaLogger.log("tweet does not exist, saving"); + logger.log("tweet does not exist, saving"); const userIdUUID = stringToUuid(tweet.userId as string); const roomId = stringToUuid(tweet.conversationId); @@ -429,7 +429,7 @@ export class TwitterInteractionClient { // Promise<"RESPOND" | "IGNORE" | "STOP" | null> { if (shouldRespond !== "RESPOND") { - elizaLogger.log("Not responding to message"); + logger.log("Not responding to message"); return { text: "Response Decision:", action: shouldRespond }; } @@ -477,7 +477,7 @@ export class TwitterInteractionClient { if (response.text) { if (this.isDryRun) { - elizaLogger.info( + logger.info( `Dry run: Selected Post: ${tweet.id} - ${tweet.username}: ${tweet.text}\nAgent's Output:\n${response.text}` ); } else { @@ -530,7 +530,7 @@ export class TwitterInteractionClient { ); await wait(); } catch (error) { - elizaLogger.error(`Error sending response tweet: ${error}`); + logger.error(`Error sending response tweet: ${error}`); } } } @@ -544,19 +544,19 @@ export class TwitterInteractionClient { const visited: Set = new Set(); async function processThread(currentTweet: Tweet, depth = 0) { - elizaLogger.log("Processing tweet:", { + logger.log("Processing tweet:", { id: currentTweet.id, inReplyToStatusId: currentTweet.inReplyToStatusId, depth: depth, }); if (!currentTweet) { - elizaLogger.log("No current tweet found for thread building"); + logger.log("No current tweet found for thread building"); return; } if (depth >= maxReplies) { - elizaLogger.log("Reached maximum reply depth", depth); + logger.log("Reached maximum reply depth", depth); return; } @@ -607,7 +607,7 @@ export class TwitterInteractionClient { } if (visited.has(currentTweet.id)) { - elizaLogger.log("Already visited tweet:", currentTweet.id); + logger.log("Already visited tweet:", currentTweet.id); return; } @@ -615,7 
+615,7 @@ export class TwitterInteractionClient { thread.unshift(currentTweet); if (currentTweet.inReplyToStatusId) { - elizaLogger.log( + logger.log( "Fetching parent tweet:", currentTweet.inReplyToStatusId ); @@ -625,25 +625,25 @@ export class TwitterInteractionClient { ); if (parentTweet) { - elizaLogger.log("Found parent tweet:", { + logger.log("Found parent tweet:", { id: parentTweet.id, text: parentTweet.text?.slice(0, 50), }); await processThread(parentTweet, depth + 1); } else { - elizaLogger.log( + logger.log( "No parent tweet found for:", currentTweet.inReplyToStatusId ); } } catch (error) { - elizaLogger.log("Error fetching parent tweet:", { + logger.log("Error fetching parent tweet:", { tweetId: currentTweet.inReplyToStatusId, error, }); } } else { - elizaLogger.log( + logger.log( "Reached end of reply chain at:", currentTweet.id ); diff --git a/packages/client-twitter/src/plugins/SttTtsSpacesPlugin.ts b/packages/client-twitter/src/plugins/SttTtsSpacesPlugin.ts index 8f69432dbb9..90601f78a19 100644 --- a/packages/client-twitter/src/plugins/SttTtsSpacesPlugin.ts +++ b/packages/client-twitter/src/plugins/SttTtsSpacesPlugin.ts @@ -3,7 +3,7 @@ import { spawn } from "child_process"; import { type ITranscriptionService, - elizaLogger, + logger, stringToUuid, composeContext, getEmbeddingZeroVector, @@ -96,11 +96,11 @@ export class SttTtsPlugin implements Plugin { private ttsAbortController: AbortController | null = null; onAttach(_space: Space) { - elizaLogger.log("[SttTtsPlugin] onAttach => space was attached"); + logger.log("[SttTtsPlugin] onAttach => space was attached"); } init(params: { space: Space; pluginConfig?: Record }): void { - elizaLogger.log( + logger.log( "[SttTtsPlugin] init => Space fully ready. Subscribing to events.", ); @@ -160,13 +160,13 @@ export class SttTtsPlugin implements Plugin { if (!this.isSpeaking) { this.userSpeakingTimer = setTimeout(() => { - elizaLogger.log( + logger.log( "[SttTtsPlugin] start processing audio for user =>", data.userId, ); this.userSpeakingTimer = null; this.processAudio(data.userId).catch((err) => - elizaLogger.error( + logger.error( "[SttTtsPlugin] handleSilence error =>", err, ), @@ -199,7 +199,7 @@ export class SttTtsPlugin implements Plugin { if (this.ttsAbortController) { this.ttsAbortController.abort(); this.isSpeaking = false; - elizaLogger.log("[SttTtsPlugin] TTS playback interrupted"); + logger.log("[SttTtsPlugin] TTS playback interrupted"); } } } @@ -265,7 +265,7 @@ export class SttTtsPlugin implements Plugin { } this.isProcessingAudio = true; try { - elizaLogger.log( + logger.log( "[SttTtsPlugin] Starting audio processing for user:", userId, ); @@ -273,13 +273,13 @@ export class SttTtsPlugin implements Plugin { this.pcmBuffers.clear(); if (!chunks.length) { - elizaLogger.warn( + logger.warn( "[SttTtsPlugin] No audio chunks for user =>", userId, ); return; } - elizaLogger.log( + logger.log( `[SttTtsPlugin] Flushing STT buffer for user=${userId}, chunks=${chunks.length}`, ); @@ -298,31 +298,31 @@ export class SttTtsPlugin implements Plugin { const sttText = await this.transcriptionService.transcribe(wavBuffer); - elizaLogger.log( + logger.log( `[SttTtsPlugin] Transcription result: "${sttText}"`, ); if (!sttText || !sttText.trim()) { - elizaLogger.warn( + logger.warn( "[SttTtsPlugin] No speech recognized for user =>", userId, ); return; } - elizaLogger.log( + logger.log( `[SttTtsPlugin] STT => user=${userId}, text="${sttText}"`, ); // Get response const replyText = await this.handleUserMessage(sttText, userId); if (!replyText 
|| !replyText.length || !replyText.trim()) { - elizaLogger.warn( + logger.warn( "[SttTtsPlugin] No replyText for user =>", userId, ); return; } - elizaLogger.log( + logger.log( `[SttTtsPlugin] user=${userId}, reply="${replyText}"`, ); this.isProcessingAudio = false; @@ -330,7 +330,7 @@ export class SttTtsPlugin implements Plugin { // Use the standard speak method with queue await this.speakText(replyText); } catch (error) { - elizaLogger.error("[SttTtsPlugin] processAudio error =>", error); + logger.error("[SttTtsPlugin] processAudio error =>", error); } finally { this.isProcessingAudio = false; } @@ -344,7 +344,7 @@ export class SttTtsPlugin implements Plugin { if (!this.isSpeaking) { this.isSpeaking = true; this.processTtsQueue().catch((err) => { - elizaLogger.error( + logger.error( "[SttTtsPlugin] processTtsQueue error =>", err, ); @@ -367,20 +367,20 @@ export class SttTtsPlugin implements Plugin { const ttsAudio = await this.elevenLabsTts(text); const pcm = await this.convertMp3ToPcm(ttsAudio, 48000); if (signal.aborted) { - elizaLogger.log( + logger.log( "[SttTtsPlugin] TTS interrupted before streaming", ); return; } await this.streamToJanus(pcm, 48000); if (signal.aborted) { - elizaLogger.log( + logger.log( "[SttTtsPlugin] TTS interrupted after streaming", ); return; } } catch (err) { - elizaLogger.error("[SttTtsPlugin] TTS streaming error =>", err); + logger.error("[SttTtsPlugin] TTS streaming error =>", err); } finally { // Clean up the AbortController this.ttsAbortController = null; @@ -503,7 +503,7 @@ export class SttTtsPlugin implements Plugin { response.source = "discord"; if (!response) { - elizaLogger.error( + logger.error( "[SttTtsPlugin] No response from generateMessageResponse", ); return; @@ -520,7 +520,7 @@ export class SttTtsPlugin implements Plugin { } private async _shouldIgnore(message: Memory): Promise { - elizaLogger.debug("message.content: ", message.content); + logger.debug("message.content: ", message.content); // if the message is 3 characters or less, ignore it if ((message.content as Content).text.length < 3) { return true; @@ -606,7 +606,7 @@ export class SttTtsPlugin implements Plugin { return false; } - elizaLogger.error( + logger.error( "Invalid response from response generateText:", response, ); @@ -705,7 +705,7 @@ export class SttTtsPlugin implements Plugin { offset += FRAME_SIZE ) { if (this.ttsAbortController?.signal.aborted) { - elizaLogger.log("[SttTtsPlugin] streamToJanus interrupted"); + logger.log("[SttTtsPlugin] streamToJanus interrupted"); return; } const frame = new Int16Array(FRAME_SIZE); @@ -723,7 +723,7 @@ export class SttTtsPlugin implements Plugin { */ public addMessage(role: "system" | "user" | "assistant", content: string) { this.chatContext.push({ role, content }); - elizaLogger.log( + logger.log( `[SttTtsPlugin] addMessage => role=${role}, content=${content}`, ); } @@ -733,11 +733,11 @@ export class SttTtsPlugin implements Plugin { */ public clearChatContext() { this.chatContext = []; - elizaLogger.log("[SttTtsPlugin] clearChatContext => done"); + logger.log("[SttTtsPlugin] clearChatContext => done"); } cleanup(): void { - elizaLogger.log("[SttTtsPlugin] cleanup => releasing resources"); + logger.log("[SttTtsPlugin] cleanup => releasing resources"); this.pcmBuffers.clear(); this.userSpeakingTimer = null; this.ttsQueue = []; diff --git a/packages/client-twitter/src/post.ts b/packages/client-twitter/src/post.ts index 8b787b36ec6..ced5aa77444 100644 --- a/packages/client-twitter/src/post.ts +++ b/packages/client-twitter/src/post.ts @@ 
-13,7 +13,7 @@ import { extractAttributes, cleanJsonResponse, } from "@elizaos/core"; -import { elizaLogger } from "@elizaos/core"; +import { logger } from "@elizaos/core"; import type { ClientBase } from "./base.ts"; import { postActionResponseFooter } from "@elizaos/core"; import { generateTweetActions } from "@elizaos/core"; @@ -113,32 +113,32 @@ export class TwitterPostClient { this.isDryRun = this.client.twitterConfig.TWITTER_DRY_RUN; // Log configuration on initialization - elizaLogger.log("Twitter Client Configuration:"); - elizaLogger.log(`- Username: ${this.twitterUsername}`); - elizaLogger.log( + logger.log("Twitter Client Configuration:"); + logger.log(`- Username: ${this.twitterUsername}`); + logger.log( `- Dry Run Mode: ${this.isDryRun ? "enabled" : "disabled"}` ); - elizaLogger.log( + logger.log( `- Post Interval: ${this.client.twitterConfig.POST_INTERVAL_MIN}-${this.client.twitterConfig.POST_INTERVAL_MAX} minutes` ); - elizaLogger.log( + logger.log( `- Action Processing: ${ this.client.twitterConfig.ENABLE_ACTION_PROCESSING ? "enabled" : "disabled" }` ); - elizaLogger.log( + logger.log( `- Action Interval: ${this.client.twitterConfig.ACTION_INTERVAL} minutes` ); - elizaLogger.log( + logger.log( `- Post Immediately: ${ this.client.twitterConfig.POST_IMMEDIATELY ? "enabled" : "disabled" }` ); - elizaLogger.log( + logger.log( `- Search Enabled: ${ this.client.twitterConfig.TWITTER_SEARCH_ENABLE ? "enabled" @@ -148,11 +148,11 @@ export class TwitterPostClient { const targetUsers = this.client.twitterConfig.TWITTER_TARGET_USERS; if (targetUsers) { - elizaLogger.log(`- Target Users: ${targetUsers}`); + logger.log(`- Target Users: ${targetUsers}`); } if (this.isDryRun) { - elizaLogger.log( + logger.log( "Twitter client initialized in dry run mode - no actual tweets should be posted" ); } @@ -204,7 +204,7 @@ export class TwitterPostClient { this.discordClientForApproval.once( Events.ClientReady, (readyClient) => { - elizaLogger.log( + logger.log( `Discord bot is ready as ${readyClient.user.tag}!` ); @@ -215,7 +215,7 @@ export class TwitterPostClient { // - Read Messages/View Channels // - Read Message History - elizaLogger.log( + logger.log( `Use this link to properly invite the Twitter Post Approval Discord bot: ${invite}` ); } @@ -252,7 +252,7 @@ export class TwitterPostClient { generateNewTweetLoop(); // Set up next iteration }, delay); - elizaLogger.log(`Next tweet scheduled in ${randomMinutes} minutes`); + logger.log(`Next tweet scheduled in ${randomMinutes} minutes`); }; const processActionsLoop = async () => { @@ -262,8 +262,8 @@ export class TwitterPostClient { try { const results = await this.processTweetActions(); if (results) { - elizaLogger.log(`Processed ${results.length} tweets`); - elizaLogger.log( + logger.log(`Processed ${results.length} tweets`); + logger.log( `Next action processing scheduled in ${actionInterval} minutes` ); // Wait for the full interval before next processing @@ -273,7 +273,7 @@ export class TwitterPostClient { ); } } catch (error) { - elizaLogger.error( + logger.error( "Error in action processing loop:", error ); @@ -288,11 +288,11 @@ export class TwitterPostClient { } generateNewTweetLoop(); - elizaLogger.log("Tweet generation loop started"); + logger.log("Tweet generation loop started"); if (this.client.twitterConfig.ENABLE_ACTION_PROCESSING) { processActionsLoop().catch((error) => { - elizaLogger.error( + logger.error( "Fatal error in process actions loop:", error ); @@ -354,7 +354,7 @@ export class TwitterPostClient { await 
client.cacheTweet(tweet); // Log the posted tweet - elizaLogger.log(`Tweet posted:\n ${tweet.permanentUrl}`); + logger.log(`Tweet posted:\n ${tweet.permanentUrl}`); // Ensure the room and participant exist await runtime.ensureRoomExists(roomId); @@ -429,12 +429,12 @@ export class TwitterPostClient { ); const body = await standardTweetResult.json(); if (!body?.data?.create_tweet?.tweet_results?.result) { - elizaLogger.error("Error sending tweet; Bad response:", body); + logger.error("Error sending tweet; Bad response:", body); return; } return body.data.create_tweet.tweet_results.result; } catch (error) { - elizaLogger.error("Error sending standard Tweet:", error); + logger.error("Error sending standard Tweet:", error); throw error; } } @@ -449,7 +449,7 @@ export class TwitterPostClient { mediaData?: MediaData[] ) { try { - elizaLogger.log(`Posting new tweet:\n`); + logger.log(`Posting new tweet:\n`); let result; @@ -483,7 +483,7 @@ export class TwitterPostClient { rawTweetContent ); } catch (error) { - elizaLogger.error("Error sending tweet:", error); + logger.error("Error sending tweet:", error); } } @@ -491,7 +491,7 @@ export class TwitterPostClient { * Generates and posts a new tweet. If isDryRun is true, only logs what would have been posted. */ async generateNewTweet() { - elizaLogger.log("Generating new tweet"); + logger.log("Generating new tweet"); try { const roomId = stringToUuid( @@ -528,7 +528,7 @@ export class TwitterPostClient { twitterPostTemplate, }); - elizaLogger.debug("generate post prompt:\n" + context); + logger.debug("generate post prompt:\n" + context); const response = await generateText({ runtime: this.runtime, @@ -593,7 +593,7 @@ export class TwitterPostClient { ); if (this.isDryRun) { - elizaLogger.info( + logger.info( `Dry run: would have posted tweet: ${tweetTextForPosting}` ); return; @@ -602,7 +602,7 @@ export class TwitterPostClient { try { if (this.approvalRequired) { // Send for approval instead of posting directly - elizaLogger.log( + logger.log( `Sending Tweet For Approval:\n ${tweetTextForPosting}` ); await this.sendForApproval( @@ -610,9 +610,9 @@ export class TwitterPostClient { roomId, rawTweetContent ); - elizaLogger.log("Tweet sent for approval"); + logger.log("Tweet sent for approval"); } else { - elizaLogger.log( + logger.log( `Posting new tweet:\n ${tweetTextForPosting}` ); this.postTweet( @@ -626,10 +626,10 @@ export class TwitterPostClient { ); } } catch (error) { - elizaLogger.error("Error sending tweet:", error); + logger.error("Error sending tweet:", error); } } catch (error) { - elizaLogger.error("Error generating new tweet:", error); + logger.error("Error generating new tweet:", error); } } @@ -654,7 +654,7 @@ export class TwitterPostClient { modelClass: ModelClass.SMALL, }); - elizaLogger.log("generate tweet content response:\n" + response); + logger.log("generate tweet content response:\n" + response); // First clean up any markdown and newlines const cleanedResponse = cleanJsonResponse(response); @@ -709,7 +709,7 @@ export class TwitterPostClient { */ private async processTweetActions() { if (this.isProcessing) { - elizaLogger.log("Already processing tweet actions, skipping"); + logger.log("Already processing tweet actions, skipping"); return null; } @@ -717,7 +717,7 @@ export class TwitterPostClient { this.isProcessing = true; this.lastProcessTime = Date.now(); - elizaLogger.log("Processing tweet actions"); + logger.log("Processing tweet actions"); await this.runtime.ensureUserExists( this.runtime.agentId, @@ -741,7 +741,7 @@ export 
class TwitterPostClient { stringToUuid(tweet.id + "-" + this.runtime.agentId) ); if (memory) { - elizaLogger.log( + logger.log( `Already processed tweet ID: ${tweet.id}` ); continue; @@ -779,7 +779,7 @@ export class TwitterPostClient { }); if (!actionResponse) { - elizaLogger.log( + logger.log( `No valid actions generated for tweet ${tweet.id}` ); continue; @@ -791,7 +791,7 @@ export class TwitterPostClient { roomId: roomId, }); } catch (error) { - elizaLogger.error( + logger.error( `Error processing tweet ${tweet.id}:`, error ); @@ -830,7 +830,7 @@ export class TwitterPostClient { return this.processTimelineActions(sortedTimelines); // Return results array to indicate completion } catch (error) { - elizaLogger.error("Error in processTweetActions:", error); + logger.error("Error in processTweetActions:", error); throw error; } finally { this.isProcessing = false; @@ -867,7 +867,7 @@ export class TwitterPostClient { // Execute actions if (actionResponse.like) { if (this.isDryRun) { - elizaLogger.info( + logger.info( `Dry run: would have liked tweet ${tweet.id}` ); executedActions.push("like (dry run)"); @@ -875,9 +875,9 @@ export class TwitterPostClient { try { await this.client.twitterClient.likeTweet(tweet.id); executedActions.push("like"); - elizaLogger.log(`Liked tweet ${tweet.id}`); + logger.log(`Liked tweet ${tweet.id}`); } catch (error) { - elizaLogger.error( + logger.error( `Error liking tweet ${tweet.id}:`, error ); @@ -887,7 +887,7 @@ export class TwitterPostClient { if (actionResponse.retweet) { if (this.isDryRun) { - elizaLogger.info( + logger.info( `Dry run: would have retweeted tweet ${tweet.id}` ); executedActions.push("retweet (dry run)"); @@ -895,9 +895,9 @@ export class TwitterPostClient { try { await this.client.twitterClient.retweet(tweet.id); executedActions.push("retweet"); - elizaLogger.log(`Retweeted tweet ${tweet.id}`); + logger.log(`Retweeted tweet ${tweet.id}`); } catch (error) { - elizaLogger.error( + logger.error( `Error retweeting tweet ${tweet.id}:`, error ); @@ -924,7 +924,7 @@ export class TwitterPostClient { // Generate image descriptions if present const imageDescriptions = []; if (tweet.photos?.length > 0) { - elizaLogger.log( + logger.log( "Processing images in tweet for context" ); for (const photo of tweet.photos) { @@ -949,7 +949,7 @@ export class TwitterPostClient { quotedContent = `\nQuoted Tweet from @${quotedTweet.username}:\n${quotedTweet.text}`; } } catch (error) { - elizaLogger.error( + logger.error( "Error fetching quoted tweet:", error ); @@ -999,19 +999,19 @@ export class TwitterPostClient { ); if (!quoteContent) { - elizaLogger.error( + logger.error( "Failed to generate valid quote tweet content" ); return; } - elizaLogger.log( + logger.log( "Generated quote tweet content:", quoteContent ); // Check for dry run mode if (this.isDryRun) { - elizaLogger.info( + logger.info( `Dry run: A quote tweet for tweet ID ${tweet.id} would have been posted with the following content: "${quoteContent}".` ); executedActions.push("quote (dry run)"); @@ -1030,7 +1030,7 @@ export class TwitterPostClient { if ( body?.data?.create_tweet?.tweet_results?.result ) { - elizaLogger.log( + logger.log( "Successfully posted quote tweet" ); executedActions.push("quote"); @@ -1041,14 +1041,14 @@ export class TwitterPostClient { `Context:\n${enrichedState}\n\nGenerated Quote:\n${quoteContent}` ); } else { - elizaLogger.error( + logger.error( "Quote tweet creation failed:", body ); } } } catch (error) { - elizaLogger.error( + logger.error( "Error in quote tweet 
generation:", error ); @@ -1063,7 +1063,7 @@ export class TwitterPostClient { executedActions ); } catch (error) { - elizaLogger.error( + logger.error( `Error replying to tweet ${tweet.id}:`, error ); @@ -1107,7 +1107,7 @@ export class TwitterPostClient { executedActions, }); } catch (error) { - elizaLogger.error(`Error processing tweet ${tweet.id}:`, error); + logger.error(`Error processing tweet ${tweet.id}:`, error); continue; } } @@ -1139,7 +1139,7 @@ export class TwitterPostClient { // Generate image descriptions if present const imageDescriptions = []; if (tweet.photos?.length > 0) { - elizaLogger.log("Processing images in tweet for context"); + logger.log("Processing images in tweet for context"); for (const photo of tweet.photos) { const description = await this.runtime .getService( @@ -1162,7 +1162,7 @@ export class TwitterPostClient { quotedContent = `\nQuoted Tweet from @${quotedTweet.username}:\n${quotedTweet.text}`; } } catch (error) { - elizaLogger.error("Error fetching quoted tweet:", error); + logger.error("Error fetching quoted tweet:", error); } } @@ -1199,19 +1199,19 @@ export class TwitterPostClient { }); if (!replyText) { - elizaLogger.error("Failed to generate valid reply content"); + logger.error("Failed to generate valid reply content"); return; } if (this.isDryRun) { - elizaLogger.info( + logger.info( `Dry run: reply to tweet ${tweet.id} would have been: ${replyText}` ); executedActions.push("reply (dry run)"); return; } - elizaLogger.debug("Final reply text to be sent:", replyText); + logger.debug("Final reply text to be sent:", replyText); let result; @@ -1230,7 +1230,7 @@ export class TwitterPostClient { } if (result) { - elizaLogger.log("Successfully posted reply tweet"); + logger.log("Successfully posted reply tweet"); executedActions.push("reply"); // Cache generation context for debugging @@ -1239,10 +1239,10 @@ export class TwitterPostClient { `Context:\n${enrichedState}\n\nGenerated Reply:\n${replyText}` ); } else { - elizaLogger.error("Tweet reply creation failed"); + logger.error("Tweet reply creation failed"); } } catch (error) { - elizaLogger.error("Error in handleTextOnlyReply:", error); + logger.error("Error in handleTextOnlyReply:", error); } } @@ -1311,7 +1311,7 @@ export class TwitterPostClient { return message.id; } catch (error) { - elizaLogger.error( + logger.error( "Error Sending Twitter Post Approval Request:", error ); @@ -1328,10 +1328,10 @@ export class TwitterPostClient { this.discordApprovalChannelId ); - elizaLogger.log(`channel ${JSON.stringify(channel)}`); + logger.log(`channel ${JSON.stringify(channel)}`); if (!(channel instanceof TextChannel)) { - elizaLogger.error("Invalid approval channel"); + logger.error("Invalid approval channel"); return "PENDING"; } @@ -1368,7 +1368,7 @@ export class TwitterPostClient { return "PENDING"; } catch (error) { - elizaLogger.error("Error checking approval status:", error); + logger.error("Error checking approval status:", error); return "PENDING"; } } @@ -1396,7 +1396,7 @@ export class TwitterPostClient { } private async handlePendingTweet() { - elizaLogger.log("Checking Pending Tweets..."); + logger.log("Checking Pending Tweets..."); const pendingTweetsKey = `twitter/${this.client.profile.username}/pendingTweet`; const pendingTweets = (await this.runtime.cacheManager.get( @@ -1409,7 +1409,7 @@ export class TwitterPostClient { Date.now() - pendingTweet.timestamp > 24 * 60 * 60 * 1000; if (isExpired) { - elizaLogger.log("Pending tweet expired, cleaning up"); + logger.log("Pending tweet expired, 
cleaning up"); // Notify on Discord about expiration try { @@ -1426,7 +1426,7 @@ export class TwitterPostClient { ); } } catch (error) { - elizaLogger.error( + logger.error( "Error sending expiration notification:", error ); @@ -1437,12 +1437,12 @@ export class TwitterPostClient { } // Check approval status - elizaLogger.log("Checking approval status..."); + logger.log("Checking approval status..."); const approvalStatus: PendingTweetApprovalStatus = await this.checkApprovalStatus(pendingTweet.discordMessageId); if (approvalStatus === "APPROVED") { - elizaLogger.log("Tweet Approved, Posting"); + logger.log("Tweet Approved, Posting"); await this.postTweet( this.runtime, this.client, @@ -1467,7 +1467,7 @@ export class TwitterPostClient { ); } } catch (error) { - elizaLogger.error( + logger.error( "Error sending post notification:", error ); @@ -1475,7 +1475,7 @@ export class TwitterPostClient { await this.cleanupPendingTweet(pendingTweet.discordMessageId); } else if (approvalStatus === "REJECTED") { - elizaLogger.log("Tweet Rejected, Cleaning Up"); + logger.log("Tweet Rejected, Cleaning Up"); await this.cleanupPendingTweet(pendingTweet.discordMessageId); // Notify about Rejection of Tweet try { @@ -1492,7 +1492,7 @@ export class TwitterPostClient { ); } } catch (error) { - elizaLogger.error( + logger.error( "Error sending rejection notification:", error ); diff --git a/packages/client-twitter/src/search.ts b/packages/client-twitter/src/search.ts index 01ed6e68277..07f1f18d47b 100644 --- a/packages/client-twitter/src/search.ts +++ b/packages/client-twitter/src/search.ts @@ -1,5 +1,5 @@ import { SearchMode } from "agent-twitter-client"; -import { composeContext, elizaLogger } from "@elizaos/core"; +import { composeContext, logger } from "@elizaos/core"; import { generateMessageResponse, generateText } from "@elizaos/core"; import { messageCompletionFooter } from "@elizaos/core"; import { @@ -61,7 +61,7 @@ export class TwitterSearchClient { private engageWithSearchTermsLoop() { this.engageWithSearchTerms().then(); const randomMinutes = Math.floor(Math.random() * (120 - 60 + 1)) + 60; - elizaLogger.log( + logger.log( `Next twitter search scheduled in ${randomMinutes} minutes` ); setTimeout( @@ -71,13 +71,13 @@ export class TwitterSearchClient { } private async engageWithSearchTerms() { - elizaLogger.log("Engaging with search terms"); + logger.log("Engaging with search terms"); try { const searchTerm = [...this.runtime.character.topics][ Math.floor(Math.random() * this.runtime.character.topics.length) ]; - elizaLogger.log("Fetching search tweets"); + logger.log("Fetching search tweets"); // TODO: we wait 5 seconds here to avoid getting rate limited on startup, but we should queue await new Promise((resolve) => setTimeout(resolve, 5000)); const recentTweets = await this.client.fetchSearchTweets( @@ -85,7 +85,7 @@ export class TwitterSearchClient { 20, SearchMode.Top ); - elizaLogger.log("Search tweets fetched"); + logger.log("Search tweets fetched"); const homeTimeline = await this.client.fetchHomeTimeline(50); @@ -105,7 +105,7 @@ export class TwitterSearchClient { .slice(0, 20); if (slicedTweets.length === 0) { - elizaLogger.log( + logger.log( "No valid tweets found for the search term", searchTerm ); @@ -155,15 +155,15 @@ export class TwitterSearchClient { ); if (!selectedTweet) { - elizaLogger.warn("No matching tweet found for the selected ID"); - elizaLogger.log("Selected tweet ID:", tweetId); + logger.warn("No matching tweet found for the selected ID"); + logger.log("Selected tweet ID:", 
tweetId); return; } - elizaLogger.log("Selected tweet to reply to:", selectedTweet?.text); + logger.log("Selected tweet to reply to:", selectedTweet?.text); if (selectedTweet.username === this.twitterUsername) { - elizaLogger.log("Skipping tweet from bot itself"); + logger.log("Skipping tweet from bot itself"); return; } @@ -206,7 +206,7 @@ export class TwitterSearchClient { }; if (!message.content.text) { - elizaLogger.warn("Returning: No response text found"); + logger.warn("Returning: No response text found"); return; } @@ -270,11 +270,11 @@ export class TwitterSearchClient { const response = responseContent; if (!response.text) { - elizaLogger.warn("Returning: No response text found"); + logger.warn("Returning: No response text found"); return; } - elizaLogger.log( + logger.log( `Bot would respond to tweet ${selectedTweet.id} with: ${response.text}` ); try { diff --git a/packages/client-twitter/src/spaces.ts b/packages/client-twitter/src/spaces.ts index 7764106c9ee..5b6bfd6900e 100644 --- a/packages/client-twitter/src/spaces.ts +++ b/packages/client-twitter/src/spaces.ts @@ -1,5 +1,5 @@ import { - elizaLogger, + logger, type IAgentRuntime, composeContext, generateText, @@ -52,7 +52,7 @@ Only return the text, no additional formatting. }); return output.trim(); } catch (err) { - elizaLogger.error("[generateFiller] Error generating filler:", err); + logger.error("[generateFiller] Error generating filler:", err); return ""; } } @@ -70,7 +70,7 @@ async function speakFiller( const text = await generateFiller(runtime, fillerType); if (!text) return; - elizaLogger.log(`[Space] Filler (${fillerType}) => ${text}`); + logger.log(`[Space] Filler (${fillerType}) => ${text}`); await sttTtsPlugin.speakText(text); if (sleepAfterMs > 0) { @@ -108,7 +108,7 @@ Example: .filter(Boolean); return topics.length ? topics : ["Random Tech Chat", "AI Thoughts"]; } catch (err) { - elizaLogger.error("[generateTopicsIfEmpty] GPT error =>", err); + logger.error("[generateTopicsIfEmpty] GPT error =>", err); return ["Random Tech Chat", "AI Thoughts"]; } } @@ -167,7 +167,7 @@ export class TwitterSpaceClient { * Periodic check to launch or manage space */ public async startPeriodicSpaceCheck() { - elizaLogger.log("[Space] Starting periodic check routine..."); + logger.log("[Space] Starting periodic check routine..."); // For instance: const intervalMsWhenIdle = 5 * 60_000; // 5 minutes if no Space is running @@ -199,7 +199,7 @@ export class TwitterSpaceClient { ); } } catch (error) { - elizaLogger.error("[Space] Error in routine =>", error); + logger.error("[Space] Error in routine =>", error); // In case of error, still schedule next iteration this.checkInterval = setTimeout(routine, intervalMsWhenIdle); } @@ -219,14 +219,14 @@ export class TwitterSpaceClient { // Random chance const r = Math.random(); if (r > (this.decisionOptions.randomChance ?? 0.3)) { - elizaLogger.log("[Space] Random check => skip launching"); + logger.log("[Space] Random check => skip launching"); return false; } // Business hours if (this.decisionOptions.businessHoursOnly) { const hour = new Date().getUTCHours(); if (hour < 9 || hour >= 17) { - elizaLogger.log("[Space] Out of business hours => skip"); + logger.log("[Space] Out of business hours => skip"); return false; } } @@ -237,12 +237,12 @@ export class TwitterSpaceClient { (this.decisionOptions.minIntervalBetweenSpacesMinutes ?? 
60) * 60_000; if (now - this.lastSpaceEndedAt < minIntervalMs) { - elizaLogger.log("[Space] Too soon since last space => skip"); + logger.log("[Space] Too soon since last space => skip"); return false; } } - elizaLogger.log("[Space] Deciding to launch a new Space..."); + logger.log("[Space] Deciding to launch a new Space..."); return true; } @@ -277,7 +277,7 @@ export class TwitterSpaceClient { } public async startSpace(config: SpaceConfig) { - elizaLogger.log("[Space] Starting a new Twitter Space..."); + logger.log("[Space] Starting a new Twitter Space..."); try { this.currentSpace = new Space(this.scraper); @@ -297,12 +297,12 @@ export class TwitterSpaceClient { this.spaceId = broadcastInfo.room_id; // Plugins if (this.decisionOptions.enableRecording) { - elizaLogger.log("[Space] Using RecordToDiskPlugin"); + logger.log("[Space] Using RecordToDiskPlugin"); this.currentSpace.use(new RecordToDiskPlugin()); } if (this.decisionOptions.enableSttTts) { - elizaLogger.log("[Space] Using SttTtsPlugin"); + logger.log("[Space] Using SttTtsPlugin"); const sttTts = new SttTtsPlugin(); this.sttTtsPlugin = sttTts; this.currentSpace.use(sttTts, { @@ -320,7 +320,7 @@ export class TwitterSpaceClient { } if (this.decisionOptions.enableIdleMonitor) { - elizaLogger.log("[Space] Using IdleMonitorPlugin"); + logger.log("[Space] Using IdleMonitorPlugin"); this.currentSpace.use( new IdleMonitorPlugin( this.decisionOptions.idleKickTimeoutMs ?? 60_000, @@ -338,7 +338,7 @@ export class TwitterSpaceClient { "broadcasts", "spaces" ); - elizaLogger.log(`[Space] Space started => ${spaceUrl}`); + logger.log(`[Space] Space started => ${spaceUrl}`); // Greet await speakFiller( @@ -349,7 +349,7 @@ export class TwitterSpaceClient { // Events this.currentSpace.on("occupancyUpdate", (update) => { - elizaLogger.log( + logger.log( `[Space] Occupancy => ${update.occupancy} participant(s).` ); }); @@ -357,7 +357,7 @@ export class TwitterSpaceClient { this.currentSpace.on( "speakerRequest", async (req: SpeakerRequest) => { - elizaLogger.log( + logger.log( `[Space] Speaker request from @${req.username} (${req.userId}).` ); await this.handleSpeakerRequest(req); @@ -365,7 +365,7 @@ export class TwitterSpaceClient { ); this.currentSpace.on("idleTimeout", async (info) => { - elizaLogger.log( + logger.log( `[Space] idleTimeout => no audio for ${info.idleMs} ms.` ); await speakFiller( @@ -377,7 +377,7 @@ export class TwitterSpaceClient { }); process.on("SIGINT", async () => { - elizaLogger.log("[Space] SIGINT => stopping space"); + logger.log("[Space] SIGINT => stopping space"); await speakFiller( this.client.runtime, this.sttTtsPlugin, @@ -387,7 +387,7 @@ export class TwitterSpaceClient { process.exit(0); }); } catch (error) { - elizaLogger.error("[Space] Error launching Space =>", error); + logger.error("[Space] Error launching Space =>", error); this.isSpaceRunning = false; throw error; } @@ -414,7 +414,7 @@ export class TwitterSpaceClient { const speaker = this.activeSpeakers[i]; const elapsed = now - speaker.startTime; if (elapsed > maxDur) { - elizaLogger.log( + logger.log( `[Space] Speaker @${speaker.username} exceeded max duration => removing` ); await this.removeSpeaker(speaker.userId); @@ -434,7 +434,7 @@ export class TwitterSpaceClient { // 3) If somehow more than maxSpeakers are active, remove the extras if (numSpeakers > (this.decisionOptions.maxSpeakers ?? 1)) { - elizaLogger.log( + logger.log( "[Space] More than maxSpeakers => removing extras..." 
); await this.kickExtraSpeakers(participants.speakers); @@ -449,7 +449,7 @@ export class TwitterSpaceClient { totalListeners === 0 && elapsedMinutes > 5) ) { - elizaLogger.log( + logger.log( "[Space] Condition met => stopping the Space..." ); await speakFiller( @@ -461,7 +461,7 @@ export class TwitterSpaceClient { await this.stopSpace(); } } catch (error) { - elizaLogger.error("[Space] Error in manageCurrentSpace =>", error); + logger.error("[Space] Error in manageCurrentSpace =>", error); } } @@ -495,7 +495,7 @@ export class TwitterSpaceClient { // If we haven't reached maxSpeakers, accept immediately if (janusSpeakers.length < (this.decisionOptions.maxSpeakers ?? 1)) { - elizaLogger.log(`[Space] Accepting speaker @${req.username} now`); + logger.log(`[Space] Accepting speaker @${req.username} now`); await speakFiller( this.client.runtime, this.sttTtsPlugin, @@ -503,7 +503,7 @@ export class TwitterSpaceClient { ); await this.acceptSpeaker(req); } else { - elizaLogger.log( + logger.log( `[Space] Adding speaker @${req.username} to the queue` ); this.speakerQueue.push(req); @@ -520,9 +520,9 @@ export class TwitterSpaceClient { username: req.username, startTime: Date.now(), }); - elizaLogger.log(`[Space] Speaker @${req.username} is now live`); + logger.log(`[Space] Speaker @${req.username} is now live`); } catch (err) { - elizaLogger.error( + logger.error( `[Space] Error approving speaker @${req.username}:`, err ); @@ -533,9 +533,9 @@ export class TwitterSpaceClient { if (!this.currentSpace) return; try { await this.currentSpace.removeSpeaker(userId); - elizaLogger.log(`[Space] Removed speaker userId=${userId}`); + logger.log(`[Space] Removed speaker userId=${userId}`); } catch (error) { - elizaLogger.error( + logger.error( `[Space] Error removing speaker userId=${userId} =>`, error ); @@ -553,7 +553,7 @@ export class TwitterSpaceClient { // sort by who joined first if needed, or just slice const extras = speakers.slice(ms); for (const sp of extras) { - elizaLogger.log( + logger.log( `[Space] Removing extra speaker => userId=${sp.user_id}` ); await this.removeSpeaker(sp.user_id); @@ -571,10 +571,10 @@ export class TwitterSpaceClient { public async stopSpace() { if (!this.currentSpace || !this.isSpaceRunning) return; try { - elizaLogger.log("[Space] Stopping the current Space..."); + logger.log("[Space] Stopping the current Space..."); await this.currentSpace.stop(); } catch (err) { - elizaLogger.error("[Space] Error stopping Space =>", err); + logger.error("[Space] Error stopping Space =>", err); } finally { this.isSpaceRunning = false; this.spaceId = undefined; diff --git a/packages/client-twitter/src/utils.ts b/packages/client-twitter/src/utils.ts index fd8a86b8763..4c1c4d5fd40 100644 --- a/packages/client-twitter/src/utils.ts +++ b/packages/client-twitter/src/utils.ts @@ -3,7 +3,7 @@ import { getEmbeddingZeroVector } from "@elizaos/core"; import type { Content, Memory, UUID } from "@elizaos/core"; import { stringToUuid } from "@elizaos/core"; import type { ClientBase } from "./base"; -import { elizaLogger } from "@elizaos/core"; +import { logger } from "@elizaos/core"; import type { Media } from "@elizaos/core"; import fs from "fs"; import path from "path"; @@ -39,20 +39,20 @@ export async function buildConversationThread( const visited: Set = new Set(); async function processThread(currentTweet: Tweet, depth = 0) { - elizaLogger.debug("Processing tweet:", { + logger.debug("Processing tweet:", { id: currentTweet.id, inReplyToStatusId: currentTweet.inReplyToStatusId, depth: depth, }); if 
(!currentTweet) { - elizaLogger.debug("No current tweet found for thread building"); + logger.debug("No current tweet found for thread building"); return; } // Stop if we've reached our reply limit if (depth >= maxReplies) { - elizaLogger.debug("Reached maximum reply depth", depth); + logger.debug("Reached maximum reply depth", depth); return; } @@ -102,14 +102,14 @@ export async function buildConversationThread( } if (visited.has(currentTweet.id)) { - elizaLogger.debug("Already visited tweet:", currentTweet.id); + logger.debug("Already visited tweet:", currentTweet.id); return; } visited.add(currentTweet.id); thread.unshift(currentTweet); - elizaLogger.debug("Current thread state:", { + logger.debug("Current thread state:", { length: thread.length, currentDepth: depth, tweetId: currentTweet.id, @@ -117,7 +117,7 @@ export async function buildConversationThread( // If there's a parent tweet, fetch and process it if (currentTweet.inReplyToStatusId) { - elizaLogger.debug( + logger.debug( "Fetching parent tweet:", currentTweet.inReplyToStatusId ); @@ -127,25 +127,25 @@ export async function buildConversationThread( ); if (parentTweet) { - elizaLogger.debug("Found parent tweet:", { + logger.debug("Found parent tweet:", { id: parentTweet.id, text: parentTweet.text?.slice(0, 50), }); await processThread(parentTweet, depth + 1); } else { - elizaLogger.debug( + logger.debug( "No parent tweet found for:", currentTweet.inReplyToStatusId ); } } catch (error) { - elizaLogger.error("Error fetching parent tweet:", { + logger.error("Error fetching parent tweet:", { tweetId: currentTweet.inReplyToStatusId, error, }); } } else { - elizaLogger.debug( + logger.debug( "Reached end of reply chain at:", currentTweet.id ); @@ -154,7 +154,7 @@ export async function buildConversationThread( await processThread(tweet, 0); - elizaLogger.debug("Final thread built:", { + logger.debug("Final thread built:", { totalTweets: thread.length, tweetIds: thread.map((t) => ({ id: t.id, @@ -259,7 +259,7 @@ export async function sendTweet( sentTweets.push(finalTweet); previousTweetId = finalTweet.id; } else { - elizaLogger.error("Error sending tweet chunk:", { + logger.error("Error sending tweet chunk:", { chunk, response: body, }); diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index fd61d1f1efe..5d01ef588e2 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -1,12 +1,10 @@ export * from "./cache"; -export * from "./character"; -export * from './client'; +export * from '../../runtime/src/client'; export * from "./database"; export * from "./files"; -export * from "./formatters"; +export * from "../../runtime/src/formatters"; export * from "./generate"; export * from "./http"; -export * from "./logger"; export * from "./memory"; export * from "./prompt"; export * from "./runtime"; diff --git a/packages/core/src/logger.ts b/packages/core/src/logger.ts deleted file mode 100644 index 2af385702d5..00000000000 --- a/packages/core/src/logger.ts +++ /dev/null @@ -1,282 +0,0 @@ -class ElizaLogger { - private static instance: ElizaLogger; - private isNode: boolean; - verbose = false; - closeByNewLine = true; - useIcons = true; - logsTitle = "LOGS"; - warningsTitle = "WARNINGS"; - errorsTitle = "ERRORS"; - informationsTitle = "INFORMATIONS"; - successesTitle = "SUCCESS"; - debugsTitle = "DEBUG"; - assertsTitle = "ASSERT"; - - private constructor() { - // Check if we're in Node.js environment - this.isNode = - typeof process !== "undefined" && - process.versions != null && - process.versions.node 
!= null; - - // Set verbose based on environment - this.verbose = this.isNode ? process.env.VERBOSE === "true" : false; - - // Only show initialization message if not in CLI context - const isCLI = process.argv.some(arg => arg.includes("elizaos")); - if (!isCLI) { - console.log(`[ElizaLogger] Initializing with: - isNode: ${this.isNode} - verbose: ${this.verbose} - VERBOSE env: ${process.env.VERBOSE} - NODE_ENV: ${process.env.NODE_ENV} - `); - } - } - - public static getInstance(): ElizaLogger { - if (!ElizaLogger.instance) { - ElizaLogger.instance = new ElizaLogger(); - } - return ElizaLogger.instance; - } - - #getColor(foregroundColor = "", backgroundColor = "") { - if (!this.isNode) { - // Browser console styling - const colors: { [key: string]: string } = { - black: "#000000", - red: "#ff0000", - green: "#00ff00", - yellow: "#ffff00", - blue: "#0000ff", - magenta: "#ff00ff", - cyan: "#00ffff", - white: "#ffffff", - }; - - const fg = colors[foregroundColor.toLowerCase()] || colors.white; - const bg = colors[backgroundColor.toLowerCase()] || "transparent"; - return `color: ${fg}; background: ${bg};`; - } - - // Node.js console colors - let fgc = "\x1b[37m"; - switch (foregroundColor.trim().toLowerCase()) { - case "black": - fgc = "\x1b[30m"; - break; - case "red": - fgc = "\x1b[31m"; - break; - case "green": - fgc = "\x1b[32m"; - break; - case "yellow": - fgc = "\x1b[33m"; - break; - case "blue": - fgc = "\x1b[34m"; - break; - case "magenta": - fgc = "\x1b[35m"; - break; - case "cyan": - fgc = "\x1b[36m"; - break; - case "white": - fgc = "\x1b[37m"; - break; - } - - let bgc = ""; - switch (backgroundColor.trim().toLowerCase()) { - case "black": - bgc = "\x1b[40m"; - break; - case "red": - bgc = "\x1b[44m"; - break; - case "green": - bgc = "\x1b[44m"; - break; - case "yellow": - bgc = "\x1b[43m"; - break; - case "blue": - bgc = "\x1b[44m"; - break; - case "magenta": - bgc = "\x1b[45m"; - break; - case "cyan": - bgc = "\x1b[46m"; - break; - case "white": - bgc = "\x1b[47m"; - break; - } - - return `${fgc}${bgc}`; - } - - #getColorReset() { - return this.isNode ? "\x1b[0m" : ""; - } - - clear() { - console.clear(); - } - - print(foregroundColor = "white", backgroundColor = "black", ...strings) { - // Convert objects to strings - const processedStrings = strings.map((item) => { - if (typeof item === "object") { - return JSON.stringify(item, (key, value) => - typeof value === "bigint" ? value.toString() : value - ); - } - return item; - }); - - if (this.isNode) { - const c = this.#getColor(foregroundColor, backgroundColor); - console.log(c, processedStrings.join(""), this.#getColorReset()); - } else { - const style = this.#getColor(foregroundColor, backgroundColor); - console.log(`%c${processedStrings.join("")}`, style); - } - - if (this.closeByNewLine) console.log(""); - } - - #logWithStyle( - strings: any[], - options: { - fg: string; - bg: string; - icon: string; - groupTitle: string; - } - ) { - const { fg, bg, icon, groupTitle } = options; - - if (strings.length > 1) { - if (this.isNode) { - const c = this.#getColor(fg, bg); - console.group(c, (this.useIcons ? icon : "") + groupTitle); - } else { - const style = this.#getColor(fg, bg); - console.group( - `%c${this.useIcons ? 
icon : ""}${groupTitle}`, - style - ); - } - - const nl = this.closeByNewLine; - this.closeByNewLine = false; - strings.forEach((item) => { - this.print(fg, bg, item); - }); - this.closeByNewLine = nl; - console.groupEnd(); - if (nl) console.log(); - } else { - this.print( - fg, - bg, - strings.map((item) => { - return `${this.useIcons ? `${icon} ` : ""}${item}`; - }) - ); - } - } - - log(...strings) { - this.#logWithStyle(strings, { - fg: "white", - bg: "", - icon: "\u25ce", - groupTitle: ` ${this.logsTitle}`, - }); - } - - warn(...strings) { - this.#logWithStyle(strings, { - fg: "yellow", - bg: "", - icon: "\u26a0", - groupTitle: ` ${this.warningsTitle}`, - }); - } - - error(...strings) { - this.#logWithStyle(strings, { - fg: "red", - bg: "", - icon: "\u26D4", - groupTitle: ` ${this.errorsTitle}`, - }); - } - - info(...strings) { - this.#logWithStyle(strings, { - fg: "blue", - bg: "", - icon: "\u2139", - groupTitle: ` ${this.informationsTitle}`, - }); - } - - debug(...strings) { - if (!this.verbose) { - // for diagnosing verbose logging issues - // console.log( - // "[ElizaLogger] Debug message suppressed (verbose=false):", - // ...strings - // ); - return; - } - this.#logWithStyle(strings, { - fg: "magenta", - bg: "", - icon: "\u1367", - groupTitle: ` ${this.debugsTitle}`, - }); - } - - success(...strings) { - this.#logWithStyle(strings, { - fg: "green", - bg: "", - icon: "\u2713", - groupTitle: ` ${this.successesTitle}`, - }); - } - - assert(...strings) { - this.#logWithStyle(strings, { - fg: "cyan", - bg: "", - icon: "\u0021", - groupTitle: ` ${this.assertsTitle}`, - }); - } - - progress(message: string) { - if (this.isNode) { - // Clear the current line and move cursor to beginning - process.stdout.clearLine(0); - process.stdout.cursorTo(0); - process.stdout.write(message); - } else { - console.log(message); - } - } -} - -export const logger = ElizaLogger.getInstance(); -logger.closeByNewLine = true; -logger.useIcons = true; - -export default logger; diff --git a/packages/core/src/prompts/index.ts b/packages/core/src/prompts/index.ts index 2fd5b809edd..cbf4d0243e4 100644 --- a/packages/core/src/prompts/index.ts +++ b/packages/core/src/prompts/index.ts @@ -15,7 +15,7 @@ import { formatContextDetails, formatMsg, formatThought, -} from '../formatters'; +} from '../../../runtime/src/formatters'; import { loadPrompt } from '../prompt'; import dataLoaderPrompt from "./data-loader.md" diff --git a/packages/core/tsconfig.json b/packages/core/tsconfig.json index 7a0aa88cea3..12655352c89 100644 --- a/packages/core/tsconfig.json +++ b/packages/core/tsconfig.json @@ -22,6 +22,6 @@ "moduleDetection": "force", "allowArbitraryExtensions": true }, - "include": ["src/**/*", "types.d.ts"], + "include": ["src/**/*", "types.d.ts", "../runtime/src/character.ts", "../runtime/src/formatters.ts", "../runtime/src/client.ts"], "exclude": ["node_modules", "dist"] } diff --git a/packages/plugin-anthropic/src/index.ts b/packages/plugin-anthropic/src/index.ts index a914cae5694..255800a344a 100644 --- a/packages/plugin-anthropic/src/index.ts +++ b/packages/plugin-anthropic/src/index.ts @@ -43,7 +43,8 @@ export const anthropicGenerateText = createFunction({ }, }); -export default function registerPlugin (runtime: IAgentRuntime ) { +// temporary measure +export default function registerPlugin (runtime: IAgentRuntime) { runtime.registerFunction(anthropicGenerateText.name, anthropicGenerateText.handler); } @@ -239,7 +240,7 @@ V1 settings // const response = await fetch(fullUrl, requestOptions); // if (!response.ok) { 
-// elizaLogger.error("API Response:", await response.text()); // Debug log +// logger.error("API Response:", await response.text()); // Debug log // throw new Error( // `Embedding API Error: ${response.status} ${response.statusText}` // ); @@ -252,7 +253,7 @@ V1 settings // const data: EmbeddingResponse = await response.json(); // return data?.data?.[0].embedding; // } catch (e) { -// elizaLogger.error("Full error details:", e); +// logger.error("Full error details:", e); // throw e; // } // } @@ -326,7 +327,7 @@ V1 settings // provider: string // ): Promise => { // const responseText = await response.text(); -// elizaLogger.error( +// logger.error( // `${provider} API error:`, // response.status, // "-", @@ -542,32 +543,32 @@ V1 settings // ModelProviderName.OLLAMA // ) { // this.provider = new LocalImageProvider(); - // elizaLogger.debug("Using local provider for vision model"); + // logger.debug("Using local provider for vision model"); // } else if ( // this.runtime.imageVisionModelProvider === // ModelProviderName.ANTHROPIC // ) { // this.provider = new AnthropicImageProvider(this.runtime); - // elizaLogger.debug("Using anthropic for vision model"); + // logger.debug("Using anthropic for vision model"); // } else if ( // this.runtime.imageVisionModelProvider === // ModelProviderName.GOOGLE // ) { // this.provider = new GoogleImageProvider(this.runtime); - // elizaLogger.debug("Using google for vision model"); + // logger.debug("Using google for vision model"); // } else if ( // this.runtime.imageVisionModelProvider === // ModelProviderName.OPENAI // ) { // this.provider = new OpenAIImageProvider(this.runtime); - // elizaLogger.debug("Using openai for vision model"); + // logger.debug("Using openai for vision model"); // } else if ( // this.runtime.imageVisionModelProvider === ModelProviderName.GROQ // ) { // this.provider = new GroqImageProvider(this.runtime); - // elizaLogger.debug("Using Groq for vision model"); + // logger.debug("Using Groq for vision model"); // } else { - // elizaLogger.warn( + // logger.warn( // `Unsupported image vision model provider: ${this.runtime.imageVisionModelProvider}. ` + // `Please use one of the following: ${availableModels}. 
` + // `Update the 'imageVisionModelProvider' field in the character file.` @@ -578,18 +579,18 @@ V1 settings // model === models[ModelProviderName.LLAMALOCAL] || // model === models[ModelProviderName.OLLAMA] // ) { - // elizaLogger.debug("Using local provider for vision model"); + // logger.debug("Using local provider for vision model"); // } else if (model === models[ModelProviderName.ANTHROPIC]) { // this.provider = new AnthropicImageProvider(this.runtime); - // elizaLogger.debug("Using anthropic for vision model"); + // logger.debug("Using anthropic for vision model"); // } else if (model === models[ModelProviderName.GOOGLE]) { // this.provider = new GoogleImageProvider(this.runtime); - // elizaLogger.debug("Using google for vision model"); + // logger.debug("Using google for vision model"); // } else if (model === models[ModelProviderName.GROQ]) { // this.provider = new GroqImageProvider(this.runtime); - // elizaLogger.debug("Using groq for vision model"); + // logger.debug("Using groq for vision model"); // } else { - // elizaLogger.debug("Using default openai for vision model"); + // logger.debug("Using default openai for vision model"); // this.provider = new OpenAIImageProvider(this.runtime); // }) @@ -600,7 +601,7 @@ V1 settings // const modelSettings = getImageModelSettings(runtime.imageModelProvider); // const model = modelSettings.name; -// elizaLogger.info("Generating image with options:", { +// logger.info("Generating image with options:", { // imageModelProvider: model, // }); @@ -703,7 +704,7 @@ V1 settings // const base64s = await Promise.all( // togetherResponse.data.map(async (image) => { // if (!image.url) { -// elizaLogger.error("Missing URL in image data:", image); +// logger.error("Missing URL in image data:", image); // throw new Error("Missing URL in Together AI response"); // } @@ -729,7 +730,7 @@ V1 settings // throw new Error("No images generated by Together AI"); // } -// elizaLogger.debug(`Generated ${base64s.length} images`); +// logger.debug(`Generated ${base64s.length} images`); // return { success: true, data: base64s }; // } else if (runtime.imageModelProvider === ModelProviderName.FAL) { // fal.config({ @@ -769,7 +770,7 @@ V1 settings // logs: true, // onQueueUpdate: (update) => { // if (update.status === "IN_PROGRESS") { -// elizaLogger.info(update.logs.map((log) => log.message)); +// logger.info(update.logs.map((log) => log.message)); // } // }, // }); @@ -960,42 +961,15 @@ V1 settings // old generateText code -// elizaLogger.info("Generating text with options:", { +// logger.info("Generating text with options:", { // modelProvider: runtime.modelProvider, // model: modelClass, // verifiableInference, // }); -// elizaLogger.log("Using provider:", runtime.modelProvider); -// // If verifiable inference is requested and adapter is provided, use it -// if (verifiableInference && runtime.verifiableInferenceAdapter) { -// elizaLogger.log( -// "Using verifiable inference adapter:", -// runtime.verifiableInferenceAdapter -// ); -// try { -// const result: VerifiableInferenceResult = -// await runtime.verifiableInferenceAdapter.generateText( -// context, -// modelClass, -// verifiableInferenceOptions -// ); -// elizaLogger.log("Verifiable inference result:", result); -// // Verify the proof -// const isValid = -// await runtime.verifiableInferenceAdapter.verifyProof(result); -// if (!isValid) { -// throw new Error("Failed to verify inference proof"); -// } - -// return result.text; -// } catch (error) { -// elizaLogger.error("Error in verifiable inference:", 
error); -// throw error; -// } -// } +// logger.log("Using provider:", runtime.modelProvider); // const provider = runtime.modelProvider; -// elizaLogger.debug("Provider settings:", { +// logger.debug("Provider settings:", { // provider, // hasRuntime: !!runtime, // runtimeSettings: { @@ -1080,7 +1054,7 @@ V1 settings // break; // } -// elizaLogger.info("Selected model:", model); +// logger.info("Selected model:", model); // const modelConfiguration = runtime.character?.settings?.modelConfig; // const temperature = @@ -1102,7 +1076,7 @@ V1 settings // const apiKey = runtime.token; // try { -// elizaLogger.debug( +// logger.debug( // `Trimming context to max length of ${max_context_length} tokens.` // ); @@ -1111,7 +1085,7 @@ V1 settings // let response: string; // const _stop = stop || modelSettings.stop; -// elizaLogger.debug( +// logger.debug( // `Using provider: ${provider}, model: ${model}, temperature: ${temperature}, max response length: ${max_response_length}` // ); @@ -1126,13 +1100,13 @@ V1 settings // case ModelProviderName.TOGETHER: // case ModelProviderName.NINETEEN_AI: // case ModelProviderName.AKASH_CHAT_API: { -// elizaLogger.debug( +// logger.debug( // "Initializing OpenAI model with Cloudflare check" // ); // const baseURL = // getCloudflareGatewayBaseURL(runtime, "openai") || endpoint; -// //elizaLogger.debug("OpenAI baseURL result:", { baseURL }); +// //logger.debug("OpenAI baseURL result:", { baseURL }); // const openai = createOpenAI({ // apiKey, // baseURL, @@ -1162,7 +1136,7 @@ V1 settings // } // case ModelProviderName.ETERNALAI: { -// elizaLogger.debug("Initializing EternalAI model."); +// logger.debug("Initializing EternalAI model."); // const openai = createOpenAI({ // apiKey, // baseURL: endpoint, @@ -1191,20 +1165,20 @@ V1 settings // runtime.getSetting("ETERNALAI_LOG") // ) // ) { -// elizaLogger.info( +// logger.info( // "Request data: ", // JSON.stringify(options, null, 2) // ); // const clonedResponse = fetching.clone(); // try { // clonedResponse.json().then((data) => { -// elizaLogger.info( +// logger.info( // "Response data: ", // JSON.stringify(data, null, 2) // ); // }); // } catch (e) { -// elizaLogger.debug(e); +// logger.debug(e); // } // } // return fetching; @@ -1219,18 +1193,18 @@ V1 settings // const on_chain_system_prompt = // await getOnChainEternalAISystemPrompt(runtime); // if (!on_chain_system_prompt) { -// elizaLogger.error( +// logger.error( // new Error("invalid on_chain_system_prompt") // ); // } else { // system_prompt = on_chain_system_prompt; -// elizaLogger.info( +// logger.info( // "new on-chain system prompt", // system_prompt // ); // } // } catch (e) { -// elizaLogger.error(e); +// logger.error(e); // } // const { text: openaiResponse } = await aiGenerateText({ @@ -1244,7 +1218,7 @@ V1 settings // }); // response = openaiResponse; -// elizaLogger.debug("Received response from EternalAI model."); +// logger.debug("Received response from EternalAI model."); // break; // } @@ -1272,7 +1246,7 @@ V1 settings // }); // response = googleResponse; -// elizaLogger.debug("Received response from Google model."); +// logger.debug("Received response from Google model."); // break; // } @@ -1293,18 +1267,18 @@ V1 settings // }); // response = mistralResponse; -// elizaLogger.debug("Received response from Mistral model."); +// logger.debug("Received response from Mistral model."); // break; // } // case ModelProviderName.ANTHROPIC: { -// elizaLogger.debug( +// logger.debug( // "Initializing Anthropic model with Cloudflare check" // ); // const 
baseURL = // getCloudflareGatewayBaseURL(runtime, "anthropic") || // "https://api.anthropic.com/v1"; -// elizaLogger.debug("Anthropic baseURL result:", { baseURL }); +// logger.debug("Anthropic baseURL result:", { baseURL }); // const anthropic = createAnthropic({ // apiKey, @@ -1329,12 +1303,12 @@ V1 settings // }); // response = anthropicResponse; -// elizaLogger.debug("Received response from Anthropic model."); +// logger.debug("Received response from Anthropic model."); // break; // } // case ModelProviderName.CLAUDE_VERTEX: { -// elizaLogger.debug("Initializing Claude Vertex model."); +// logger.debug("Initializing Claude Vertex model."); // const anthropic = createAnthropic({ // apiKey, @@ -1359,14 +1333,14 @@ V1 settings // }); // response = anthropicResponse; -// elizaLogger.debug( +// logger.debug( // "Received response from Claude Vertex model." // ); // break; // } // case ModelProviderName.GROK: { -// elizaLogger.debug("Initializing Grok model."); +// logger.debug("Initializing Grok model."); // const grok = createOpenAI({ // apiKey, // baseURL: endpoint, @@ -1393,16 +1367,16 @@ V1 settings // }); // response = grokResponse; -// elizaLogger.debug("Received response from Grok model."); +// logger.debug("Received response from Grok model."); // break; // } // case ModelProviderName.GROQ: { -// elizaLogger.debug( +// logger.debug( // "Initializing Groq model with Cloudflare check" // ); // const baseURL = getCloudflareGatewayBaseURL(runtime, "groq"); -// elizaLogger.debug("Groq baseURL result:", { baseURL }); +// logger.debug("Groq baseURL result:", { baseURL }); // const groq = createGroq({ // apiKey, // fetch: runtime.fetch, @@ -1427,12 +1401,12 @@ V1 settings // }); // response = groqResponse; -// elizaLogger.debug("Received response from Groq model."); +// logger.debug("Received response from Groq model."); // break; // } // case ModelProviderName.LLAMALOCAL: { -// elizaLogger.debug( +// logger.debug( // "Using local Llama model for text completion." 
// ); // const textGenerationService = @@ -1452,12 +1426,12 @@ V1 settings // presence_penalty, // max_response_length // ); -// elizaLogger.debug("Received response from local Llama model."); +// logger.debug("Received response from local Llama model."); // break; // } // case ModelProviderName.REDPILL: { -// elizaLogger.debug("Initializing RedPill model."); +// logger.debug("Initializing RedPill model."); // const serverUrl = getEndpoint(provider); // const openai = createOpenAI({ // apiKey, @@ -1483,12 +1457,12 @@ V1 settings // }); // response = redpillResponse; -// elizaLogger.debug("Received response from redpill model."); +// logger.debug("Received response from redpill model."); // break; // } // case ModelProviderName.OPENROUTER: { -// elizaLogger.debug("Initializing OpenRouter model."); +// logger.debug("Initializing OpenRouter model."); // const serverUrl = getEndpoint(provider); // const openrouter = createOpenAI({ // apiKey, @@ -1514,13 +1488,13 @@ V1 settings // }); // response = openrouterResponse; -// elizaLogger.debug("Received response from OpenRouter model."); +// logger.debug("Received response from OpenRouter model."); // break; // } // case ModelProviderName.OLLAMA: // { -// elizaLogger.debug("Initializing Ollama model."); +// logger.debug("Initializing Ollama model."); // const ollamaProvider = createOllama({ // baseURL: getEndpoint(provider) + "/api", @@ -1528,7 +1502,7 @@ V1 settings // }); // const ollama = ollamaProvider(model); -// elizaLogger.debug("****** MODEL\n", model); +// logger.debug("****** MODEL\n", model); // const { text: ollamaResponse } = await aiGenerateText({ // model: ollama, @@ -1545,11 +1519,11 @@ V1 settings // response = ollamaResponse; // } -// elizaLogger.debug("Received response from Ollama model."); +// logger.debug("Received response from Ollama model."); // break; // case ModelProviderName.HEURIST: { -// elizaLogger.debug("Initializing Heurist model."); +// logger.debug("Initializing Heurist model."); // const heurist = createOpenAI({ // apiKey: apiKey, // baseURL: endpoint, @@ -1575,11 +1549,11 @@ V1 settings // }); // response = heuristResponse; -// elizaLogger.debug("Received response from Heurist model."); +// logger.debug("Received response from Heurist model."); // break; // } // case ModelProviderName.GAIANET: { -// elizaLogger.debug("Initializing GAIANET model."); +// logger.debug("Initializing GAIANET model."); // var baseURL = getEndpoint(provider); // if (!baseURL) { @@ -1602,7 +1576,7 @@ V1 settings // } // } -// elizaLogger.debug("Using GAIANET model with baseURL:", baseURL); +// logger.debug("Using GAIANET model with baseURL:", baseURL); // const openai = createOpenAI({ // apiKey, @@ -1628,12 +1602,12 @@ V1 settings // }); // response = openaiResponse; -// elizaLogger.debug("Received response from GAIANET model."); +// logger.debug("Received response from GAIANET model."); // break; // } // case ModelProviderName.ATOMA: { -// elizaLogger.debug("Initializing Atoma model."); +// logger.debug("Initializing Atoma model."); // const atoma = createOpenAI({ // apiKey, // baseURL: endpoint, @@ -1658,12 +1632,12 @@ V1 settings // }); // response = atomaResponse; -// elizaLogger.debug("Received response from Atoma model."); +// logger.debug("Received response from Atoma model."); // break; // } // case ModelProviderName.GALADRIEL: { -// elizaLogger.debug("Initializing Galadriel model."); +// logger.debug("Initializing Galadriel model."); // const headers = {}; // const fineTuneApiKey = runtime.getSetting( // 
"GALADRIEL_FINE_TUNE_API_KEY" @@ -1696,12 +1670,12 @@ V1 settings // }); // response = galadrielResponse; -// elizaLogger.debug("Received response from Galadriel model."); +// logger.debug("Received response from Galadriel model."); // break; // } // case ModelProviderName.INFERA: { -// elizaLogger.debug("Initializing Infera model."); +// logger.debug("Initializing Infera model."); // const apiKey = settings.INFERA_API_KEY || runtime.token; @@ -1727,12 +1701,12 @@ V1 settings // presencePenalty: presence_penalty, // }); // response = inferaResponse; -// elizaLogger.debug("Received response from Infera model."); +// logger.debug("Received response from Infera model."); // break; // } // case ModelProviderName.VENICE: { -// elizaLogger.debug("Initializing Venice model."); +// logger.debug("Initializing Venice model."); // const venice = createOpenAI({ // apiKey: apiKey, // baseURL: endpoint, @@ -1753,12 +1727,12 @@ V1 settings // }); // response = veniceResponse; -// elizaLogger.debug("Received response from Venice model."); +// logger.debug("Received response from Venice model."); // break; // } // case ModelProviderName.NVIDIA: { -// elizaLogger.debug("Initializing NVIDIA model."); +// logger.debug("Initializing NVIDIA model."); // const nvidia = createOpenAI({ // apiKey: apiKey, // baseURL: endpoint, @@ -1779,12 +1753,12 @@ V1 settings // }); // response = nvidiaResponse; -// elizaLogger.debug("Received response from NVIDIA model."); +// logger.debug("Received response from NVIDIA model."); // break; // } // case ModelProviderName.DEEPSEEK: { -// elizaLogger.debug("Initializing Deepseek model."); +// logger.debug("Initializing Deepseek model."); // const serverUrl = models[provider].endpoint; // const deepseek = createOpenAI({ // apiKey, @@ -1810,12 +1784,12 @@ V1 settings // }); // response = deepseekResponse; -// elizaLogger.debug("Received response from Deepseek model."); +// logger.debug("Received response from Deepseek model."); // break; // } // case ModelProviderName.LIVEPEER: { -// elizaLogger.debug("Initializing Livepeer model."); +// logger.debug("Initializing Livepeer model."); // if (!endpoint) { // throw new Error("Livepeer Gateway URL is not defined"); @@ -1867,7 +1841,7 @@ V1 settings // /<\|start_header_id\|>assistant<\|end_header_id\|>\n\n/, // "" // ); -// elizaLogger.debug( +// logger.debug( // "Successfully received response from Livepeer model" // ); // break; @@ -1875,7 +1849,7 @@ V1 settings // default: { // const errorMessage = `Unsupported provider: ${provider}`; -// elizaLogger.error(errorMessage); +// logger.error(errorMessage); // throw new Error(errorMessage); // } // } diff --git a/packages/plugin-anthropic/src/localembeddingManager.ts b/packages/plugin-anthropic/src/localembeddingManager.ts index fe7dce89b6d..258a61878bc 100644 --- a/packages/plugin-anthropic/src/localembeddingManager.ts +++ b/packages/plugin-anthropic/src/localembeddingManager.ts @@ -1,16 +1,16 @@ import path from "node:path"; import { fileURLToPath } from "url"; import { FlagEmbedding, EmbeddingModel } from "fastembed"; -import elizaLogger from "@elizaos/runtime/src/logger"; +import logger from "@elizaos/runtime/src/logger"; async function getLocalEmbedding(input: string): Promise { - elizaLogger.debug("DEBUG - Inside getLocalEmbedding function"); + logger.debug("DEBUG - Inside getLocalEmbedding function"); try { const embeddingManager = LocalEmbeddingModelManager.getInstance(); return await embeddingManager.generateEmbedding(input); } catch (error) { - elizaLogger.error("Local embedding 
failed:", error); + logger.error("Local embedding failed:", error); throw error; } // } @@ -90,7 +90,7 @@ class LocalEmbeddingModelManager { fs.mkdirSync(cacheDir, { recursive: true }); } - elizaLogger.debug("Initializing BGE embedding model..."); + logger.debug("Initializing BGE embedding model..."); this.model = await FlagEmbedding.init({ cacheDir: cacheDir, @@ -98,9 +98,9 @@ class LocalEmbeddingModelManager { maxLength: 512, }); - elizaLogger.debug("BGE model initialized successfully"); + logger.debug("BGE model initialized successfully"); } catch (error) { - elizaLogger.error("Failed to initialize BGE model:", error); + logger.error("Failed to initialize BGE model:", error); throw error; } } @@ -118,7 +118,7 @@ class LocalEmbeddingModelManager { // Let fastembed handle tokenization internally const embedding = await this.model.queryEmbed(input); // Debug the raw embedding - uncomment if debugging embeddings - // elizaLogger.debug("Raw embedding from BGE:", { + // logger.debug("Raw embedding from BGE:", { // type: typeof embedding, // isArray: Array.isArray(embedding), // dimensions: Array.isArray(embedding) @@ -130,7 +130,7 @@ class LocalEmbeddingModelManager { // }); return this.processEmbedding(embedding); } catch (error) { - elizaLogger.error("Embedding generation failed:", error); + logger.error("Embedding generation failed:", error); throw error; } } @@ -164,7 +164,7 @@ class LocalEmbeddingModelManager { } if (finalEmbedding.length !== 384) { - elizaLogger.warn( + logger.warn( `Unexpected embedding dimension: ${finalEmbedding.length}` ); } diff --git a/packages/runtime/__tests__/defaultCharacters.test.ts b/packages/runtime/__tests__/defaultCharacters.test.ts deleted file mode 100644 index 9cb42b07894..00000000000 --- a/packages/runtime/__tests__/defaultCharacters.test.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { defaultCharacter } from "../src/defaultCharacter"; -import { ModelProviderName } from "../src/types"; - -describe("defaultCharacter", () => { - it("should have the correct name", () => { - expect(defaultCharacter.name).toBe("Eliza"); - }); - - it("should have an empty plugins array", () => { - expect(defaultCharacter.plugins).toEqual([]); - }); - - it("should have an empty clients array", () => { - expect(defaultCharacter.clients).toEqual([]); - }); - - it.skip("should have the correct modelProvider", () => { - expect(defaultCharacter.modelProvider).toBe(ModelProviderName.OLLAMA); - }); - - it("should have the correct voice model", () => { - expect(defaultCharacter.settings.voice.model).toBe( - "en_US-hfc_female-medium" - ); - }); - - it("should have a system description", () => { - expect(defaultCharacter.system).toContain( - "Roleplay and generate interesting" - ); - }); - - it("should have a bio array with at least one entry", () => { - expect(defaultCharacter.bio.length).toBeGreaterThan(0); - }); - - it("should have a lore array with at least one entry", () => { - expect(defaultCharacter.lore.length).toBeGreaterThan(0); - }); - - it("should have messageExamples array with at least one example", () => { - expect(defaultCharacter.messageExamples.length).toBeGreaterThan(0); - }); - - it("should have a topics array with at least one broad topic", () => { - expect(defaultCharacter.topics).toContain("Classical art"); - }); - - it('should have style settings with "all" array', () => { - expect(defaultCharacter.style.all.length).toBeGreaterThan(0); - }); -}); diff --git a/packages/runtime/__tests__/goals.test.ts b/packages/runtime/__tests__/goals.test.ts index 
be6a7f27305..4926cb57264 100644 --- a/packages/runtime/__tests__/goals.test.ts +++ b/packages/runtime/__tests__/goals.test.ts @@ -31,8 +31,6 @@ export const mockRuntime: IAgentRuntime = { databaseAdapter: mockDatabaseAdapter as any, cacheManager: new CacheManager(new MemoryCacheAdapter()), agentId: "qweqew-qweqwe-qweqwe-qweqwe-qweeqw", - serverUrl: "", - token: null, messageManager: { addEmbeddingToMemory: (_memory: Memory): Promise => { throw new Error("Function not implemented."); diff --git a/packages/core/src/character.ts b/packages/runtime/src/character.ts similarity index 98% rename from packages/core/src/character.ts rename to packages/runtime/src/character.ts index 847b79a0953..1ad7ca0ee41 100644 --- a/packages/core/src/character.ts +++ b/packages/runtime/src/character.ts @@ -1,5 +1,5 @@ import { z } from "zod"; -import elizaLogger from "./logger"; +import logger from "./logger"; // Helper schemas for nested types export const MessageExampleSchema = z.object({ @@ -127,7 +127,7 @@ export function validateCharacterConfig(json: unknown): CharacterConfig { ); Object.entries(groupedErrors).forEach(([field, messages]) => { - elizaLogger.error( + logger.error( `Validation errors in ${field}: ${messages.join(" - ")}` ); }); diff --git a/packages/core/src/client.ts b/packages/runtime/src/client.ts similarity index 97% rename from packages/core/src/client.ts rename to packages/runtime/src/client.ts index ae7df66380a..bf5907e3e10 100644 --- a/packages/core/src/client.ts +++ b/packages/runtime/src/client.ts @@ -9,8 +9,8 @@ import { MemoryMetadata, Message, IElizaRuntime, -} from './types'; -import { contextLoderPromptTemplate } from './prompts'; +} from '../../core/src/types'; +import { contextLoderPromptTemplate } from '../../core/src/prompts'; import { generateOutput, LLMParams, @@ -18,8 +18,8 @@ import { generateChatOutput, ActionCallOutput, generateActionResults, -} from './generate'; -import { createFunction } from './utils'; +} from '../../core/src/generate'; +import { createFunction } from '../../core/src/utils'; export async function handleRoomMessage( runtime: IElizaRuntime, diff --git a/packages/runtime/src/database.ts b/packages/runtime/src/database.ts index 8fa44b03cbc..22e7e9157c1 100644 --- a/packages/runtime/src/database.ts +++ b/packages/runtime/src/database.ts @@ -11,7 +11,7 @@ import type { IDatabaseAdapter, } from "./types.ts"; import { CircuitBreaker } from "./database/CircuitBreaker"; -import { elizaLogger } from "./logger"; +import { logger } from "./logger"; /** * An abstract class representing a database adapter for managing various entities @@ -450,7 +450,7 @@ export abstract class DatabaseAdapter implements IDatabaseAdapter { try { return await this.circuitBreaker.execute(operation); } catch (error) { - elizaLogger.error(`Circuit breaker error in ${context}:`, { + logger.error(`Circuit breaker error in ${context}:`, { error: error instanceof Error ? 
error.message : String(error), state: this.circuitBreaker.getState(), }); diff --git a/packages/runtime/src/embedding.ts b/packages/runtime/src/embedding.ts index aeefaa547e5..2c71ea7de7c 100644 --- a/packages/runtime/src/embedding.ts +++ b/packages/runtime/src/embedding.ts @@ -1,6 +1,6 @@ // TODO: Refactor to call the registered embedding model -import elizaLogger from "./logger.ts"; +import logger from "./logger.ts"; import { type IAgentRuntime } from "./types.ts"; export function getEmbeddingZeroVector(): number[] { @@ -25,7 +25,7 @@ export function getEmbeddingZeroVector(): number[] { */ export async function embed(runtime: IAgentRuntime, input: string) { - elizaLogger.debug("Embedding request:", { + logger.debug("Embedding request:", { input: input?.slice(0, 50) + "...", inputType: typeof input, inputLength: input?.length, @@ -35,7 +35,7 @@ export async function embed(runtime: IAgentRuntime, input: string) { // Validate input if (!input || typeof input !== "string" || input.trim().length === 0) { - elizaLogger.warn("Invalid embedding input:", { + logger.warn("Invalid embedding input:", { input, type: typeof input, length: input?.length, @@ -65,7 +65,7 @@ export async function embed(runtime: IAgentRuntime, input: string) { try { // return await getLocalEmbedding(input); } catch (error) { - elizaLogger.warn( + logger.warn( "Local embedding failed, falling back to remote", error ); @@ -76,7 +76,7 @@ export async function embed(runtime: IAgentRuntime, input: string) { input: string ) { if (!input) { - elizaLogger.log("No input to retrieve cached embedding for"); + logger.log("No input to retrieve cached embedding for"); return null; } diff --git a/packages/runtime/src/environment.ts b/packages/runtime/src/environment.ts index 78db38da319..26ad995edfc 100644 --- a/packages/runtime/src/environment.ts +++ b/packages/runtime/src/environment.ts @@ -1,6 +1,6 @@ import { z } from "zod"; import { ModelProviderName, Clients } from "./types"; -import elizaLogger from "./logger"; +import logger from "./logger"; // TODO: TO COMPLETE export const envSchema = z.object({ @@ -166,7 +166,7 @@ export function validateCharacterConfig(json: unknown): CharacterConfig { ); Object.entries(groupedErrors).forEach(([field, messages]) => { - elizaLogger.error( + logger.error( `Validation errors in ${field}: ${messages.join(" - ")}` ); }); diff --git a/packages/core/src/formatters.ts b/packages/runtime/src/formatters.ts similarity index 92% rename from packages/core/src/formatters.ts rename to packages/runtime/src/formatters.ts index f6a1850a4ea..711478e2e75 100644 --- a/packages/core/src/formatters.ts +++ b/packages/runtime/src/formatters.ts @@ -1,15 +1,11 @@ -import { Actor, Thought } from './types'; import { - Action, - State, - IElizaRuntime, - Message, - ActionCallMemory, - ActionResultMemory, -} from './types'; -import zodToJsonSchema from 'zod-to-json-schema'; -import { toHex } from 'viem'; + Action, ActionCallMemory, + ActionResultMemory, Actor, IElizaRuntime, + Message, State, Thought +} from '@elizaos/core'; import { UUID } from 'crypto'; +import { toHex } from 'viem'; +import zodToJsonSchema from 'zod-to-json-schema'; type ActionFormatterData = { runtime: IElizaRuntime; diff --git a/packages/runtime/src/generation.ts b/packages/runtime/src/generation.ts index 6e11e9b4e79..097708b5cf3 100644 --- a/packages/runtime/src/generation.ts +++ b/packages/runtime/src/generation.ts @@ -19,7 +19,7 @@ import { encodingForModel, type TiktokenModel } from "js-tiktoken"; import { AutoTokenizer } from 
"@huggingface/transformers"; import Together from "together-ai"; import type { ZodSchema } from "zod"; -import { elizaLogger } from "./index.ts"; +import { logger } from "./index.ts"; import { models, getModelSettings, @@ -43,10 +43,6 @@ import { ModelProviderName, ServiceType, type ActionResponse, - type IVerifiableInferenceAdapter, - type VerifiableInferenceOptions, - type VerifiableInferenceResult, - //VerifiableInferenceProvider, type TelemetrySettings, TokenizerType, } from "./types.ts"; @@ -108,7 +104,7 @@ export async function trimTokens( ); } - elizaLogger.warn(`Unsupported tokenizer type: ${tokenizerType}`); + logger.warn(`Unsupported tokenizer type: ${tokenizerType}`); return truncateTiktoken("gpt-4o", context, maxTokens); } @@ -132,7 +128,7 @@ async function truncateAuto( // Decode back to text - js-tiktoken decode() returns a string directly return tokenizer.decode(truncatedTokens); } catch (error) { - elizaLogger.error("Error in trimTokens:", error); + logger.error("Error in trimTokens:", error); // Return truncated string if tokenization fails return context.slice(-maxTokens * 4); // Rough estimate of 4 chars per token } @@ -160,7 +156,7 @@ async function truncateTiktoken( // Decode back to text - js-tiktoken decode() returns a string directly return encoding.decode(truncatedTokens); } catch (error) { - elizaLogger.error("Error in trimTokens:", error); + logger.error("Error in trimTokens:", error); // Return truncated string if tokenization fails return context.slice(-maxTokens * 4); // Rough estimate of 4 chars per token } @@ -212,17 +208,17 @@ async function getOnChainEternalAISystemPrompt( args: [new BigNumber(agentId)], }); if (result) { - elizaLogger.info("on-chain system-prompt response", result[0]); + logger.info("on-chain system-prompt response", result[0]); const value = result[0].toString().replace("0x", ""); const content = Buffer.from(value, "hex").toString("utf-8"); - elizaLogger.info("on-chain system-prompt", content); + logger.info("on-chain system-prompt", content); return await fetchEternalAISystemPrompt(runtime, content); } else { return undefined; } } catch (error) { - elizaLogger.error(error); - elizaLogger.error("err", error); + logger.error(error); + logger.error("err", error); } } return undefined; @@ -243,11 +239,11 @@ async function fetchEternalAISystemPrompt( IPFS, "https://gateway.lighthouse.storage/ipfs/" ); - elizaLogger.info("fetch lightHouse", lightHouse); + logger.info("fetch lightHouse", lightHouse); const responseLH = await fetch(lightHouse, { method: "GET", }); - elizaLogger.info("fetch lightHouse resp", responseLH); + logger.info("fetch lightHouse resp", responseLH); if (responseLH.ok) { const data = await responseLH.text(); return data; @@ -256,11 +252,11 @@ async function fetchEternalAISystemPrompt( IPFS, "https://cdn.eternalai.org/upload/" ); - elizaLogger.info("fetch gcs", gcs); + logger.info("fetch gcs", gcs); const responseGCS = await fetch(gcs, { method: "GET", }); - elizaLogger.info("fetch lightHouse gcs", responseGCS); + logger.info("fetch lightHouse gcs", responseGCS); if (responseGCS.ok) { const data = await responseGCS.text(); return data; @@ -288,7 +284,7 @@ function getCloudflareGatewayBaseURL( const cloudflareAccountId = runtime.getSetting("CLOUDFLARE_AI_ACCOUNT_ID"); const cloudflareGatewayId = runtime.getSetting("CLOUDFLARE_AI_GATEWAY_ID"); - elizaLogger.debug("Cloudflare Gateway Configuration:", { + logger.debug("Cloudflare Gateway Configuration:", { isEnabled: isCloudflareEnabled, hasAccountId: !!cloudflareAccountId, 
hasGatewayId: !!cloudflareGatewayId, @@ -296,26 +292,26 @@ function getCloudflareGatewayBaseURL( }); if (!isCloudflareEnabled) { - elizaLogger.debug("Cloudflare Gateway is not enabled"); + logger.debug("Cloudflare Gateway is not enabled"); return undefined; } if (!cloudflareAccountId) { - elizaLogger.warn( + logger.warn( "Cloudflare Gateway is enabled but CLOUDFLARE_AI_ACCOUNT_ID is not set" ); return undefined; } if (!cloudflareGatewayId) { - elizaLogger.warn( + logger.warn( "Cloudflare Gateway is enabled but CLOUDFLARE_AI_GATEWAY_ID is not set" ); return undefined; } const baseURL = `https://gateway.ai.cloudflare.com/v1/${cloudflareAccountId}/${cloudflareGatewayId}/${provider.toLowerCase()}`; - elizaLogger.info("Using Cloudflare Gateway:", { + logger.info("Using Cloudflare Gateway:", { provider, baseURL, accountId: cloudflareAccountId, @@ -354,7 +350,7 @@ export async function generateText({ return ""; } - elizaLogger.log("Generating text..."); + logger.log("Generating text..."); try { const response = await runtime.call('generate::text', { @@ -364,7 +360,7 @@ export async function generateText({ }); return response; } catch (error) { - elizaLogger.error("Error in generateText:", error); + logger.error("Error in generateText:", error); throw error; } } @@ -378,7 +374,6 @@ export async function generateText({ * @param opts.frequency_penalty The frequency penalty to apply (0.0 to 2.0) * @param opts.presence_penalty The presence penalty to apply (0.0 to 2.0) * @param opts.temperature The temperature to control randomness (0.0 to 2.0) - * @param opts.serverUrl The URL of the API server * @param opts.max_context_length Maximum allowed context length in tokens * @param opts.max_response_length Maximum allowed response length in tokens * @returns Promise resolving to "RESPOND", "IGNORE", "STOP" or null @@ -395,7 +390,7 @@ export async function generateShouldRespond({ let retryDelay = 1000; while (true) { try { - elizaLogger.debug( + logger.debug( "Attempting to generate text with context:", context ); @@ -405,27 +400,27 @@ export async function generateShouldRespond({ modelClass, }); - elizaLogger.debug("Received response from generateText:", response); + logger.debug("Received response from generateText:", response); const parsedResponse = parseShouldRespondFromText(response.trim()); if (parsedResponse) { - elizaLogger.debug("Parsed response:", parsedResponse); + logger.debug("Parsed response:", parsedResponse); return parsedResponse; } else { - elizaLogger.debug("generateShouldRespond no response"); + logger.debug("generateShouldRespond no response"); } } catch (error) { - elizaLogger.error("Error in generateShouldRespond:", error); + logger.error("Error in generateShouldRespond:", error); if ( error instanceof TypeError && error.message.includes("queueTextCompletion") ) { - elizaLogger.error( + logger.error( "TypeError: Cannot read properties of null (reading 'queueTextCompletion')" ); } } - elizaLogger.log(`Retrying in ${retryDelay}ms...`); + logger.log(`Retrying in ${retryDelay}ms...`); await new Promise((resolve) => setTimeout(resolve, retryDelay)); retryDelay *= 2; } @@ -443,7 +438,7 @@ export async function splitChunks( chunkSize = 512, bleed = 20 ): Promise { - elizaLogger.debug(`[splitChunks] Starting text split`); + logger.debug(`[splitChunks] Starting text split`); const textSplitter = new RecursiveCharacterTextSplitter({ chunkSize: Number(chunkSize), @@ -451,7 +446,7 @@ export async function splitChunks( }); const chunks = await textSplitter.splitText(content); - 
elizaLogger.debug(`[splitChunks] Split complete:`, { + logger.debug(`[splitChunks] Split complete:`, { numberOfChunks: chunks.length, averageChunkSize: chunks.reduce((acc, chunk) => acc + chunk.length, 0) / @@ -470,7 +465,6 @@ export async function splitChunks( * @param opts.frequency_penalty The frequency penalty to apply (0.0 to 2.0) * @param opts.presence_penalty The presence penalty to apply (0.0 to 2.0) * @param opts.temperature The temperature to control randomness (0.0 to 2.0) - * @param opts.serverUrl The URL of the API server * @param opts.max_context_length Maximum allowed context length in tokens * @param opts.max_response_length Maximum allowed response length in tokens * @returns Promise resolving to a boolean value parsed from the model's response @@ -504,7 +498,7 @@ export async function generateTrueOrFalse({ return parsedResponse; } } catch (error) { - elizaLogger.error("Error in generateTrueOrFalse:", error); + logger.error("Error in generateTrueOrFalse:", error); } await new Promise((resolve) => setTimeout(resolve, retryDelay)); @@ -521,7 +515,6 @@ export async function generateTrueOrFalse({ * @param opts.frequency_penalty The frequency penalty to apply (0.0 to 2.0) * @param opts.presence_penalty The presence penalty to apply (0.0 to 2.0) * @param opts.temperature The temperature to control randomness (0.0 to 2.0) - * @param opts.serverUrl The URL of the API server * @param opts.token The API token for authentication * @param opts.max_context_length Maximum allowed context length in tokens * @param opts.max_response_length Maximum allowed response length in tokens @@ -537,7 +530,7 @@ export async function generateTextArray({ modelClass: ModelClass; }): Promise { if (!context) { - elizaLogger.error("generateTextArray context is empty"); + logger.error("generateTextArray context is empty"); return []; } let retryDelay = 1000; @@ -555,7 +548,7 @@ export async function generateTextArray({ return parsedResponse; } } catch (error) { - elizaLogger.error("Error in generateTextArray:", error); + logger.error("Error in generateTextArray:", error); } await new Promise((resolve) => setTimeout(resolve, retryDelay)); @@ -573,7 +566,7 @@ export async function generateObjectDeprecated({ modelClass: ModelClass; }): Promise { if (!context) { - elizaLogger.error("generateObjectDeprecated context is empty"); + logger.error("generateObjectDeprecated context is empty"); return null; } let retryDelay = 1000; @@ -591,7 +584,7 @@ export async function generateObjectDeprecated({ return parsedResponse; } } catch (error) { - elizaLogger.error("Error in generateObject:", error); + logger.error("Error in generateObject:", error); } await new Promise((resolve) => setTimeout(resolve, retryDelay)); @@ -609,7 +602,7 @@ export async function generateObjectArray({ modelClass: ModelClass; }): Promise { if (!context) { - elizaLogger.error("generateObjectArray context is empty"); + logger.error("generateObjectArray context is empty"); return []; } let retryDelay = 1000; @@ -627,7 +620,7 @@ export async function generateObjectArray({ return parsedResponse; } } catch (error) { - elizaLogger.error("Error in generateTextArray:", error); + logger.error("Error in generateTextArray:", error); } await new Promise((resolve) => setTimeout(resolve, retryDelay)); @@ -660,11 +653,11 @@ export async function generateMessageResponse({ const max_context_length = modelSettings.maxInputTokens; context = await trimTokens(context, max_context_length, runtime); - elizaLogger.debug("Context:", context); + logger.debug("Context:", 
context); let retryLength = 1000; // exponential backoff while (true) { try { - elizaLogger.log("Generating message response.."); + logger.log("Generating message response.."); const response = await generateText({ runtime, @@ -675,17 +668,17 @@ export async function generateMessageResponse({ // try parsing the response as JSON, if null then try again const parsedContent = parseJSONObjectFromText(response) as Content; if (!parsedContent) { - elizaLogger.debug("parsedContent is null, retrying"); + logger.debug("parsedContent is null, retrying"); continue; } return parsedContent; } catch (error) { - elizaLogger.error("ERROR:", error); + logger.error("ERROR:", error); // wait for 2 seconds retryLength *= 2; await new Promise((resolve) => setTimeout(resolve, retryLength)); - elizaLogger.debug("Retrying..."); + logger.debug("Retrying..."); } } } @@ -715,7 +708,7 @@ export const generateImage = async ( }> => { const modelSettings = getImageModelSettings(runtime.imageModelProvider); const model = modelSettings.name; - elizaLogger.info("Generating image with options:", { + logger.info("Generating image with options:", { imageModelProvider: model, }); @@ -818,7 +811,7 @@ export const generateImage = async ( const base64s = await Promise.all( togetherResponse.data.map(async (image) => { if (!image.url) { - elizaLogger.error("Missing URL in image data:", image); + logger.error("Missing URL in image data:", image); throw new Error("Missing URL in Together AI response"); } @@ -844,7 +837,7 @@ export const generateImage = async ( throw new Error("No images generated by Together AI"); } - elizaLogger.debug(`Generated ${base64s.length} images`); + logger.debug(`Generated ${base64s.length} images`); return { success: true, data: base64s }; } else if (runtime.imageModelProvider === ModelProviderName.FAL) { fal.config({ @@ -884,7 +877,7 @@ export const generateImage = async ( logs: true, onQueueUpdate: (update) => { if (update.status === "IN_PROGRESS") { - elizaLogger.info(update.logs.map((log) => log.message)); + logger.info(update.logs.map((log) => log.message)); } }, }); @@ -1185,9 +1178,6 @@ export const generateObject = async ({ runtime, context, modelClass, - verifiableInference, - verifiableInferenceAdapter, - verifiableInferenceOptions, }); return response; @@ -1229,9 +1219,6 @@ export async function handleProvider( runtime, context, modelClass, - //verifiableInference, - //verifiableInferenceAdapter, - //verifiableInferenceOptions, } = options; switch (provider) { case ModelProviderName.OPENAI: @@ -1272,7 +1259,7 @@ export async function handleProvider( return await handleLivepeer(options); default: { const errorMessage = `Unsupported provider: ${provider}`; - elizaLogger.error(errorMessage); + logger.error(errorMessage); throw new Error(errorMessage); } } @@ -1324,9 +1311,9 @@ async function handleAnthropic({ modelOptions, runtime, }: ProviderOptions): Promise> { - elizaLogger.debug("Handling Anthropic request with Cloudflare check"); + logger.debug("Handling Anthropic request with Cloudflare check"); const baseURL = getCloudflareGatewayBaseURL(runtime, "anthropic"); - elizaLogger.debug("Anthropic handleAnthropic baseURL:", { baseURL }); + logger.debug("Anthropic handleAnthropic baseURL:", { baseURL }); const anthropic = createAnthropic({ apiKey, baseURL }); return await aiGenerateObject({ @@ -1381,9 +1368,9 @@ async function handleGroq({ modelOptions, runtime, }: ProviderOptions): Promise> { - elizaLogger.debug("Handling Groq request with Cloudflare check"); + logger.debug("Handling Groq request 
with Cloudflare check"); const baseURL = getCloudflareGatewayBaseURL(runtime, "groq"); - elizaLogger.debug("Groq handleGroq baseURL:", { baseURL }); + logger.debug("Groq handleGroq baseURL:", { baseURL }); const groq = createGroq({ apiKey, baseURL }); return await aiGenerateObject({ @@ -1639,29 +1626,29 @@ export async function generateTweetActions({ context, modelClass, }); - elizaLogger.debug( + logger.debug( "Received response from generateText for tweet actions:", response ); const { actions } = parseActionResponseFromText(response.trim()); if (actions) { - elizaLogger.debug("Parsed tweet actions:", actions); + logger.debug("Parsed tweet actions:", actions); return actions; } else { - elizaLogger.debug("generateTweetActions no valid response"); + logger.debug("generateTweetActions no valid response"); } } catch (error) { - elizaLogger.error("Error in generateTweetActions:", error); + logger.error("Error in generateTweetActions:", error); if ( error instanceof TypeError && error.message.includes("queueTextCompletion") ) { - elizaLogger.error( + logger.error( "TypeError: Cannot read properties of null (reading 'queueTextCompletion')" ); } } - elizaLogger.log(`Retrying in ${retryDelay}ms...`); + logger.log(`Retrying in ${retryDelay}ms...`); await new Promise((resolve) => setTimeout(resolve, retryDelay)); retryDelay *= 2; } diff --git a/packages/runtime/src/index.ts b/packages/runtime/src/index.ts index b791852faba..e7b6c8509b0 100644 --- a/packages/runtime/src/index.ts +++ b/packages/runtime/src/index.ts @@ -3,7 +3,6 @@ import "./config.ts"; // Add this line first export * from "./actions.ts"; export * from "./context.ts"; export * from "./database.ts"; -export * from "./defaultCharacter.ts"; export * from "./evaluators.ts"; export * from "./generation.ts"; export * from "./goals.ts"; @@ -21,5 +20,4 @@ export * from "./uuid.ts"; export * from "./environment.ts"; export * from "./cache.ts"; export { default as knowledge } from "./knowledge.ts"; -export * from "./ragknowledge.ts"; export * from "./utils.ts"; diff --git a/packages/runtime/src/knowledge.ts b/packages/runtime/src/knowledge.ts index 1b4debf3696..3e508710e93 100644 --- a/packages/runtime/src/knowledge.ts +++ b/packages/runtime/src/knowledge.ts @@ -3,7 +3,7 @@ import { embed, getEmbeddingZeroVector } from "./embedding.ts"; import type { KnowledgeItem, UUID, Memory } from "./types.ts"; import { stringToUuid } from "./uuid.ts"; import { splitChunks } from "./generation.ts"; -import elizaLogger from "./logger.ts"; +import logger from "./logger.ts"; async function get( runtime: AgentRuntime, @@ -11,7 +11,7 @@ async function get( ): Promise { // Add validation for message if (!message?.content?.text) { - elizaLogger.warn("Invalid message for knowledge query:", { + logger.warn("Invalid message for knowledge query:", { message, content: message?.content, text: message?.content?.text, @@ -20,7 +20,7 @@ async function get( } const processed = preprocess(message.content.text); - elizaLogger.debug("Knowledge query:", { + logger.debug("Knowledge query:", { original: message.content.text, processed, length: processed?.length, @@ -28,7 +28,7 @@ async function get( // Validate processed text if (!processed || processed.trim().length === 0) { - elizaLogger.warn("Empty processed text for knowledge query"); + logger.warn("Empty processed text for knowledge query"); return []; } @@ -45,7 +45,7 @@ async function get( const uniqueSources = [ ...new Set( fragments.map((memory) => { - elizaLogger.log( + logger.log( `Matched fragment: 
${memory.content.text} with similarity: ${memory.similarity}` ); return memory.content.source; @@ -103,13 +103,13 @@ async function set( } export function preprocess(content: string): string { - elizaLogger.debug("Preprocessing text:", { + logger.debug("Preprocessing text:", { input: content, length: content?.length, }); if (!content || typeof content !== "string") { - elizaLogger.warn("Invalid input for preprocessing"); + logger.warn("Invalid input for preprocessing"); return ""; } diff --git a/packages/runtime/src/logger.ts b/packages/runtime/src/logger.ts index 519f08fb67b..c3389501ed6 100644 --- a/packages/runtime/src/logger.ts +++ b/packages/runtime/src/logger.ts @@ -67,6 +67,6 @@ const options = { }, }; -export const elizaLogger = pino(options, createStream()); +export const logger = pino(options, createStream()); -export default elizaLogger; +export default logger; diff --git a/packages/runtime/src/memory.ts b/packages/runtime/src/memory.ts index ce8cfaa0175..c51d54e691b 100644 --- a/packages/runtime/src/memory.ts +++ b/packages/runtime/src/memory.ts @@ -1,5 +1,5 @@ import { embed, getEmbeddingZeroVector } from "./embedding.ts"; -import elizaLogger from "./logger.ts"; +import logger from "./logger.ts"; import type { IAgentRuntime, IMemoryManager, @@ -68,7 +68,7 @@ export class MemoryManager implements IMemoryManager { // Generate embedding from text content memory.embedding = await embed(this.runtime, memoryText); } catch (error) { - elizaLogger.error("Failed to generate embedding:", error); + logger.error("Failed to generate embedding:", error); // Fallback to zero vector if embedding fails memory.embedding = getEmbeddingZeroVector().slice(); } @@ -176,11 +176,11 @@ export class MemoryManager implements IMemoryManager { await this.runtime.databaseAdapter.getMemoryById(memory.id); if (existingMessage) { - elizaLogger.debug("Memory already exists, skipping"); + logger.debug("Memory already exists, skipping"); return; } - elizaLogger.log("Creating Memory", memory.id, memory.content.text); + logger.log("Creating Memory", memory.id, memory.content.text); await this.runtime.databaseAdapter.createMemory( memory, diff --git a/packages/runtime/src/ragknowledge.ts b/packages/runtime/src/ragknowledge.ts deleted file mode 100644 index 3d6be4ab42c..00000000000 --- a/packages/runtime/src/ragknowledge.ts +++ /dev/null @@ -1,624 +0,0 @@ -import { embed } from "./embedding.ts"; -import { splitChunks } from "./generation.ts"; -import elizaLogger from "./logger.ts"; -import { - type IAgentRuntime, - type IRAGKnowledgeManager, - type RAGKnowledgeItem, - type UUID, - KnowledgeScope, -} from "./types.ts"; -import { stringToUuid } from "./uuid.ts"; -import { existsSync } from "fs"; -import { join } from "path"; - -/** - * Manage knowledge in the database. - */ -export class RAGKnowledgeManager implements IRAGKnowledgeManager { - /** - * The AgentRuntime instance associated with this manager. - */ - runtime: IAgentRuntime; - - /** - * The name of the database table this manager operates on. - */ - tableName: string; - - /** - * The root directory where RAG knowledge files are located (internal) - */ - knowledgeRoot: string; - - /** - * Constructs a new KnowledgeManager instance. - * @param opts Options for the manager. - * @param opts.tableName The name of the table this manager will operate on. - * @param opts.runtime The AgentRuntime instance associated with this manager. 
- */ - constructor(opts: { - tableName: string; - runtime: IAgentRuntime; - knowledgeRoot: string; - }) { - this.runtime = opts.runtime; - this.tableName = opts.tableName; - this.knowledgeRoot = opts.knowledgeRoot; - } - - private readonly defaultRAGMatchThreshold = 0.85; - private readonly defaultRAGMatchCount = 5; - - /** - * Common English stop words to filter out from query analysis - */ - private readonly stopWords = new Set([ - "a", - "an", - "and", - "are", - "as", - "at", - "be", - "by", - "does", - "for", - "from", - "had", - "has", - "have", - "he", - "her", - "his", - "how", - "hey", - "i", - "in", - "is", - "it", - "its", - "of", - "on", - "or", - "that", - "the", - "this", - "to", - "was", - "what", - "when", - "where", - "which", - "who", - "will", - "with", - "would", - "there", - "their", - "they", - "your", - "you", - ]); - - /** - * Filters out stop words and returns meaningful terms - */ - private getQueryTerms(query: string): string[] { - return query - .toLowerCase() - .split(" ") - .filter((term) => term.length > 3) // Filter very short words - .filter((term) => !this.stopWords.has(term)); // Filter stop words - } - - /** - * Preprocesses text content for better RAG performance. - * @param content The text content to preprocess. - * @returns The preprocessed text. - */ - - private preprocess(content: string): string { - if (!content || typeof content !== "string") { - elizaLogger.warn("Invalid input for preprocessing"); - return ""; - } - - return ( - content - .replace(/```[\s\S]*?```/g, "") - .replace(/`.*?`/g, "") - .replace(/#{1,6}\s*(.*)/g, "$1") - .replace(/!\[(.*?)\]\(.*?\)/g, "$1") - .replace(/\[(.*?)\]\(.*?\)/g, "$1") - .replace(/(https?:\/\/)?(www\.)?([^\s]+\.[^\s]+)/g, "$3") - .replace(/<@[!&]?\d+>/g, "") - .replace(/<[^>]*>/g, "") - .replace(/^\s*[-*_]{3,}\s*$/gm, "") - .replace(/\/\*[\s\S]*?\*\//g, "") - .replace(/\/\/.*/g, "") - .replace(/\s+/g, " ") - .replace(/\n{3,}/g, "\n\n") - // .replace(/[^a-zA-Z0-9\s\-_./:?=&]/g, "") --this strips out CJK characters - .trim() - .toLowerCase() - ); - } - - private hasProximityMatch(text: string, terms: string[]): boolean { - const words = text.toLowerCase().split(" "); - const positions = terms - .map((term) => words.findIndex((w) => w.includes(term))) - .filter((pos) => pos !== -1); - - if (positions.length < 2) return false; - - // Check if any matches are within 5 words of each other - for (let i = 0; i < positions.length - 1; i++) { - if (Math.abs(positions[i] - positions[i + 1]) <= 5) { - return true; - } - } - return false; - } - - async getKnowledge(params: { - query?: string; - id?: UUID; - conversationContext?: string; - limit?: number; - agentId?: UUID; - }): Promise { - const agentId = params.agentId || this.runtime.agentId; - - // If id is provided, do direct lookup first - if (params.id) { - const directResults = - await this.runtime.databaseAdapter.getKnowledge({ - id: params.id, - agentId: agentId, - }); - - if (directResults.length > 0) { - return directResults; - } - } - - // If no id or no direct results, perform semantic search - if (params.query) { - try { - const processedQuery = this.preprocess(params.query); - - // Build search text with optional context - let searchText = processedQuery; - if (params.conversationContext) { - const relevantContext = this.preprocess( - params.conversationContext - ); - searchText = `${relevantContext} ${processedQuery}`; - } - - const embeddingArray = await embed(this.runtime, searchText); - - const embedding = new Float32Array(embeddingArray); - - // Get 
results with single query - const results = - await this.runtime.databaseAdapter.searchKnowledge({ - agentId: this.runtime.agentId, - embedding: embedding, - match_threshold: this.defaultRAGMatchThreshold, - match_count: - (params.limit || this.defaultRAGMatchCount) * 2, - searchText: processedQuery, - }); - - // Enhanced reranking with sophisticated scoring - const rerankedResults = results - .map((result) => { - let score = result.similarity; - - // Check for direct query term matches - const queryTerms = this.getQueryTerms(processedQuery); - - const matchingTerms = queryTerms.filter((term) => - result.content.text.toLowerCase().includes(term) - ); - - if (matchingTerms.length > 0) { - // Much stronger boost for matches - score *= - 1 + - (matchingTerms.length / queryTerms.length) * 2; // Double the boost - - if ( - this.hasProximityMatch( - result.content.text, - matchingTerms - ) - ) { - score *= 1.5; // Stronger proximity boost - } - } else { - // More aggressive penalty - if (!params.conversationContext) { - score *= 0.3; // Stronger penalty - } - } - - return { - ...result, - score, - matchedTerms: matchingTerms, // Add for debugging - }; - }) - .sort((a, b) => b.score - a.score); - - // Filter and return results - return rerankedResults - .filter( - (result) => - result.score >= this.defaultRAGMatchThreshold - ) - .slice(0, params.limit || this.defaultRAGMatchCount); - } catch (error) { - console.log(`[RAG Search Error] ${error}`); - return []; - } - } - - // If neither id nor query provided, return empty array - return []; - } - - async createKnowledge(item: RAGKnowledgeItem): Promise { - if (!item.content.text) { - elizaLogger.warn("Empty content in knowledge item"); - return; - } - - try { - // Process main document - const processedContent = this.preprocess(item.content.text); - const mainEmbeddingArray = await embed( - this.runtime, - processedContent - ); - - const mainEmbedding = new Float32Array(mainEmbeddingArray); - - // Create main document - await this.runtime.databaseAdapter.createKnowledge({ - id: item.id, - agentId: this.runtime.agentId, - content: { - text: item.content.text, - metadata: { - ...item.content.metadata, - isMain: true, - }, - }, - embedding: mainEmbedding, - createdAt: Date.now(), - }); - - // Generate and store chunks - const chunks = await splitChunks(processedContent, 512, 20); - - for (const [index, chunk] of chunks.entries()) { - const chunkEmbeddingArray = await embed(this.runtime, chunk); - const chunkEmbedding = new Float32Array(chunkEmbeddingArray); - const chunkId = `${item.id}-chunk-${index}` as UUID; - - await this.runtime.databaseAdapter.createKnowledge({ - id: chunkId, - agentId: this.runtime.agentId, - content: { - text: chunk, - metadata: { - ...item.content.metadata, - isChunk: true, - originalId: item.id, - chunkIndex: index, - }, - }, - embedding: chunkEmbedding, - createdAt: Date.now(), - }); - } - } catch (error) { - elizaLogger.error(`Error processing knowledge ${item.id}:`, error); - throw error; - } - } - - async searchKnowledge(params: { - agentId: UUID; - embedding: Float32Array | number[]; - match_threshold?: number; - match_count?: number; - searchText?: string; - }): Promise { - const { - match_threshold = this.defaultRAGMatchThreshold, - match_count = this.defaultRAGMatchCount, - embedding, - searchText, - } = params; - - const float32Embedding = Array.isArray(embedding) - ? 
new Float32Array(embedding) - : embedding; - - return await this.runtime.databaseAdapter.searchKnowledge({ - agentId: params.agentId || this.runtime.agentId, - embedding: float32Embedding, - match_threshold, - match_count, - searchText, - }); - } - - async removeKnowledge(id: UUID): Promise { - await this.runtime.databaseAdapter.removeKnowledge(id); - } - - async clearKnowledge(shared?: boolean): Promise { - await this.runtime.databaseAdapter.clearKnowledge( - this.runtime.agentId, - shared ? shared : false - ); - } - - /** - * Lists all knowledge entries for an agent without semantic search or reranking. - * Used primarily for administrative tasks like cleanup. - * - * @param agentId The agent ID to fetch knowledge entries for - * @returns Array of RAGKnowledgeItem entries - */ - async listAllKnowledge(agentId: UUID): Promise { - elizaLogger.debug( - `[Knowledge List] Fetching all entries for agent: ${agentId}` - ); - - try { - // Only pass the required agentId parameter - const results = await this.runtime.databaseAdapter.getKnowledge({ - agentId: agentId, - }); - - elizaLogger.debug( - `[Knowledge List] Found ${results.length} entries` - ); - return results; - } catch (error) { - elizaLogger.error( - "[Knowledge List] Error fetching knowledge entries:", - error - ); - throw error; - } - } - - async cleanupDeletedKnowledgeFiles() { - try { - elizaLogger.debug( - "[Cleanup] Starting knowledge cleanup process, agent: ", - this.runtime.agentId - ); - - elizaLogger.debug( - `[Cleanup] Knowledge root path: ${this.knowledgeRoot}` - ); - - const existingKnowledge = await this.listAllKnowledge( - this.runtime.agentId - ); - // Only process parent documents, ignore chunks - const parentDocuments = existingKnowledge.filter( - (item) => - !item.id.includes("chunk") && item.content.metadata?.source // Must have a source path - ); - - elizaLogger.debug( - `[Cleanup] Found ${parentDocuments.length} parent documents to check` - ); - - for (const item of parentDocuments) { - const relativePath = item.content.metadata?.source; - const filePath = join(this.knowledgeRoot, relativePath); - - elizaLogger.debug( - `[Cleanup] Checking joined file path: ${filePath}` - ); - - if (!existsSync(filePath)) { - elizaLogger.warn( - `[Cleanup] File not found, starting removal process: ${filePath}` - ); - - const idToRemove = item.id; - elizaLogger.debug( - `[Cleanup] Using ID for removal: ${idToRemove}` - ); - - try { - // Just remove the parent document - this will cascade to chunks - await this.removeKnowledge(idToRemove); - - // // Clean up the cache - // const baseCacheKeyWithWildcard = `${this.generateKnowledgeCacheKeyBase( - // idToRemove, - // item.content.metadata?.isShared || false - // )}*`; - // await this.cacheManager.deleteByPattern({ - // keyPattern: baseCacheKeyWithWildcard, - // }); - - elizaLogger.success( - `[Cleanup] Successfully removed knowledge for file: ${filePath}` - ); - } catch (deleteError) { - elizaLogger.error( - `[Cleanup] Error during deletion process for ${filePath}:`, - deleteError instanceof Error - ? 
{ - message: deleteError.message, - stack: deleteError.stack, - name: deleteError.name, - } - : deleteError - ); - } - } - } - - elizaLogger.debug("[Cleanup] Finished knowledge cleanup process"); - } catch (error) { - elizaLogger.error( - "[Cleanup] Error cleaning up deleted knowledge files:", - error - ); - } - } - - public generateScopedId(path: string, isShared: boolean): UUID { - // Prefix the path with scope before generating UUID to ensure different IDs for shared vs private - const scope = isShared ? KnowledgeScope.SHARED : KnowledgeScope.PRIVATE; - const scopedPath = `${scope}-${path}`; - return stringToUuid(scopedPath); - } - - async processFile(file: { - path: string; - content: string; - type: "pdf" | "md" | "txt"; - isShared?: boolean; - }): Promise { - const timeMarker = (label: string) => { - const time = (Date.now() - startTime) / 1000; - elizaLogger.info(`[Timing] ${label}: ${time.toFixed(2)}s`); - }; - - const startTime = Date.now(); - const content = file.content; - - try { - const fileSizeKB = new TextEncoder().encode(content).length / 1024; - elizaLogger.info( - `[File Progress] Starting ${file.path} (${fileSizeKB.toFixed(2)} KB)` - ); - - // Generate scoped ID for the file - const scopedId = this.generateScopedId( - file.path, - file.isShared || false - ); - - // Step 1: Preprocessing - //const preprocessStart = Date.now(); - const processedContent = this.preprocess(content); - timeMarker("Preprocessing"); - - // Step 2: Main document embedding - const mainEmbeddingArray = await embed( - this.runtime, - processedContent - ); - const mainEmbedding = new Float32Array(mainEmbeddingArray); - timeMarker("Main embedding"); - - // Step 3: Create main document - await this.runtime.databaseAdapter.createKnowledge({ - id: scopedId, - agentId: this.runtime.agentId, - content: { - text: content, - metadata: { - source: file.path, - type: file.type, - isShared: file.isShared || false, - }, - }, - embedding: mainEmbedding, - createdAt: Date.now(), - }); - timeMarker("Main document storage"); - - // Step 4: Generate chunks - const chunks = await splitChunks(processedContent, 512, 20); - const totalChunks = chunks.length; - elizaLogger.info(`Generated ${totalChunks} chunks`); - timeMarker("Chunk generation"); - - // Step 5: Process chunks with larger batches - const BATCH_SIZE = 10; // Increased batch size - let processedChunks = 0; - - for (let i = 0; i < chunks.length; i += BATCH_SIZE) { - const batchStart = Date.now(); - const batch = chunks.slice( - i, - Math.min(i + BATCH_SIZE, chunks.length) - ); - - // Process embeddings in parallel - const embeddings = await Promise.all( - batch.map((chunk) => embed(this.runtime, chunk)) - ); - - // Batch database operations - await Promise.all( - embeddings.map(async (embeddingArray, index) => { - const chunkId = - `${scopedId}-chunk-${i + index}` as UUID; - const chunkEmbedding = new Float32Array(embeddingArray); - - await this.runtime.databaseAdapter.createKnowledge({ - id: chunkId, - agentId: this.runtime.agentId, - content: { - text: batch[index], - metadata: { - source: file.path, - type: file.type, - isShared: file.isShared || false, - isChunk: true, - originalId: scopedId, - chunkIndex: i + index, - originalPath: file.path, - }, - }, - embedding: chunkEmbedding, - createdAt: Date.now(), - }); - }) - ); - - processedChunks += batch.length; - const batchTime = (Date.now() - batchStart) / 1000; - elizaLogger.info( - `[Batch Progress] ${file.path}: Processed ${processedChunks}/${totalChunks} chunks (${batchTime.toFixed(2)}s for batch)` - 
); - } - - const totalTime = (Date.now() - startTime) / 1000; - elizaLogger.info( - `[Complete] Processed ${file.path} in ${totalTime.toFixed(2)}s` - ); - } catch (error) { - if ( - file.isShared && - error?.code === "SQLITE_CONSTRAINT_PRIMARYKEY" - ) { - elizaLogger.info( - `Shared knowledge ${file.path} already exists in database, skipping creation` - ); - return; - } - elizaLogger.error(`Error processing file ${file.path}:`, error); - throw error; - } - } -} diff --git a/packages/runtime/src/runtime.ts b/packages/runtime/src/runtime.ts index 7ee74f60e52..d4512a5593e 100644 --- a/packages/runtime/src/runtime.ts +++ b/packages/runtime/src/runtime.ts @@ -8,7 +8,6 @@ import { formatActions, } from "./actions.ts"; import { addHeader, composeContext } from "./context.ts"; -import { defaultCharacter } from "./defaultCharacter.ts"; import { evaluationTemplate, formatEvaluatorExamples, @@ -17,14 +16,13 @@ import { } from "./evaluators.ts"; import { generateText } from "./generation.ts"; import { formatGoalsAsString, getGoals } from "./goals.ts"; -import { elizaLogger } from "./index.ts"; +import { logger } from "./index.ts"; import knowledge from "./knowledge.ts"; import { MemoryManager } from "./memory.ts"; import { formatActors, formatMessages, getActorDetails } from "./messages.ts"; import { parseJsonArrayFromText } from "./parsing.ts"; import { formatPosts } from "./posts.ts"; import { getProviders } from "./providers.ts"; -import { RAGKnowledgeManager } from "./ragknowledge.ts"; import settings from "./settings.ts"; import { type Character, @@ -34,10 +32,7 @@ import { type ICacheManager, type IDatabaseAdapter, type IMemoryManager, - type IRAGKnowledgeManager, - type IVerifiableInferenceAdapter, type KnowledgeItem, - // RAGKnowledgeItem, //Media, ModelClass, type Plugin, @@ -79,10 +74,6 @@ export class AgentRuntime implements IAgentRuntime { * The ID of the agent */ agentId: UUID; - /** - * The base URL of the server where the agent's requests are processed. - */ - serverUrl = "http://localhost:7998"; /** * The database adapter used for interacting with the database. @@ -142,8 +133,6 @@ export class AgentRuntime implements IAgentRuntime { */ knowledgeManager: IMemoryManager; - ragKnowledgeManager: IRAGKnowledgeManager; - private readonly knowledgeRoot: string; services: Map = new Map(); @@ -172,7 +161,7 @@ export class AgentRuntime implements IAgentRuntime { return result; } } catch(e) { - elizaLogger.error(`Error in function ${name}`, e); + logger.error(`Error in function ${name}`, e); // if more fns available, try next one if(fns.length > i + 1){ console.log("Trying next function"); @@ -195,7 +184,7 @@ export class AgentRuntime implements IAgentRuntime { } if (this.memoryManagers.has(manager.tableName)) { - elizaLogger.warn( + logger.warn( `Memory manager ${manager.tableName} is already registered. 
Skipping registration.`, ); return; @@ -211,7 +200,7 @@ export class AgentRuntime implements IAgentRuntime { getService(service: ServiceType): T | null { const serviceInstance = this.services.get(service); if (!serviceInstance) { - elizaLogger.error(`Service ${service} not found`); + logger.error(`Service ${service} not found`); return null; } return serviceInstance as T; @@ -219,10 +208,10 @@ export class AgentRuntime implements IAgentRuntime { async registerService(service: Service): Promise { const serviceType = service.serviceType; - elizaLogger.log("Registering service:", serviceType); + logger.log("Registering service:", serviceType); if (this.services.has(serviceType)) { - elizaLogger.warn( + logger.warn( `Service ${serviceType} is already registered. Skipping registration.`, ); return; @@ -230,7 +219,7 @@ export class AgentRuntime implements IAgentRuntime { // Add the service to the services map this.services.set(serviceType, service); - elizaLogger.success(`Service ${serviceType} registered successfully`); + logger.success(`Service ${serviceType} registered successfully`); } /** @@ -238,7 +227,6 @@ export class AgentRuntime implements IAgentRuntime { * @param opts - The options for configuring the AgentRuntime. * @param opts.conversationLength - The number of messages to hold in the recent message cache. * @param opts.token - The JWT token, can be a JWT token if outside worker, or an OpenAI token if inside worker. - * @param opts.serverUrl - The URL of the worker. * @param opts.actions - Optional custom actions. * @param opts.evaluators - Optional custom evaluators. * @param opts.services - Optional custom services. @@ -255,7 +243,6 @@ export class AgentRuntime implements IAgentRuntime { conversationLength?: number; // number of messages to hold in the recent message cache agentId?: UUID; // ID of the agent character?: Character; // The character to use for the agent - serverUrl?: string; // The URL of the worker actions?: Action[]; // Optional custom actions evaluators?: Evaluator[]; // Optional custom evaluators plugins?: Plugin[]; @@ -267,9 +254,8 @@ export class AgentRuntime implements IAgentRuntime { speechModelPath?: string; cacheManager: ICacheManager; logging?: boolean; - verifiableInferenceAdapter?: IVerifiableInferenceAdapter; }) { - elizaLogger.debug( + logger.debug( `[AgentRuntime] Process working directory: ${process.cwd()}`, ); @@ -281,7 +267,7 @@ export class AgentRuntime implements IAgentRuntime { "knowledge", ); - elizaLogger.debug( + logger.debug( `[AgentRuntime] Process knowledgeRoot: ${this.knowledgeRoot}`, ); @@ -297,7 +283,10 @@ export class AgentRuntime implements IAgentRuntime { opts.character?.id ?? opts?.agentId ?? stringToUuid(opts.character?.name ?? uuidv4()); - this.character = opts.character || defaultCharacter; + + if(!opts.character) { + throw new Error("No character provided"); + } // By convention, we create a user and room using the agent id. // Memories related to it are considered global context for the agent. @@ -311,7 +300,7 @@ export class AgentRuntime implements IAgentRuntime { this.ensureParticipantExists(this.agentId, this.agentId); }); - elizaLogger.success(`Agent ID: ${this.agentId}`); + logger.success(`Agent ID: ${this.agentId}`); this.fetch = (opts.fetch as typeof fetch) ?? 
this.fetch; @@ -342,12 +331,6 @@ export class AgentRuntime implements IAgentRuntime { tableName: "fragments", }); - this.ragKnowledgeManager = new RAGKnowledgeManager({ - runtime: this, - tableName: "knowledge", - knowledgeRoot: this.knowledgeRoot, - }); - (opts.managers ?? []).forEach((manager: IMemoryManager) => { this.registerMemoryManager(manager); }); @@ -356,14 +339,6 @@ export class AgentRuntime implements IAgentRuntime { this.registerService(service); }); - this.serverUrl = opts.serverUrl ?? this.serverUrl; - - elizaLogger.info("Setting model provider..."); - - if (!this.serverUrl) { - elizaLogger.warn("No serverUrl provided, defaulting to localhost"); - } - this.plugins = [ ...(opts.character?.plugins ?? []), ...(opts.plugins ?? []), @@ -398,8 +373,6 @@ export class AgentRuntime implements IAgentRuntime { (opts.evaluators ?? []).forEach((evaluator: Evaluator) => { this.registerEvaluator(evaluator); }); - - this.verifiableInferenceAdapter = opts.verifiableInferenceAdapter; } async initialize() { @@ -407,11 +380,11 @@ export class AgentRuntime implements IAgentRuntime { try { await service.initialize(this); this.services.set(serviceType, service); - elizaLogger.success( + logger.success( `Service ${serviceType} initialized successfully`, ); } catch (error) { - elizaLogger.error( + logger.error( `Failed to initialize service ${serviceType}:`, error, ); @@ -431,95 +404,25 @@ export class AgentRuntime implements IAgentRuntime { this.character.knowledge && this.character.knowledge.length > 0 ) { - elizaLogger.info( + logger.info( `[RAG Check] RAG Knowledge enabled: ${this.character.settings.ragKnowledge ? true : false}`, ); - elizaLogger.info( + logger.info( `[RAG Check] Knowledge items:`, this.character.knowledge, ); - if (this.character.settings.ragKnowledge) { - // Type guards with logging for each knowledge type - const [directoryKnowledge, pathKnowledge, stringKnowledge] = - this.character.knowledge.reduce( - (acc, item) => { - if (typeof item === "object") { - if (isDirectoryItem(item)) { - elizaLogger.debug( - `[RAG Filter] Found directory item: ${JSON.stringify(item)}`, - ); - acc[0].push(item); - } else if ("path" in item) { - elizaLogger.debug( - `[RAG Filter] Found path item: ${JSON.stringify(item)}`, - ); - acc[1].push(item); - } - } else if (typeof item === "string") { - elizaLogger.debug( - `[RAG Filter] Found string item: ${item.slice(0, 100)}...`, - ); - acc[2].push(item); - } - return acc; - }, - [[], [], []] as [ - Array<{ directory: string; shared?: boolean }>, - Array<{ path: string; shared?: boolean }>, - Array, - ], - ); - - elizaLogger.info( - `[RAG Summary] Found ${directoryKnowledge.length} directories, ${pathKnowledge.length} paths, and ${stringKnowledge.length} strings`, - ); - - // Process each type of knowledge - if (directoryKnowledge.length > 0) { - elizaLogger.info( - `[RAG Process] Processing directory knowledge sources:`, - ); - for (const dir of directoryKnowledge) { - elizaLogger.info( - ` - Directory: ${dir.directory} (shared: ${!!dir.shared})`, - ); - await this.processCharacterRAGDirectory(dir); - } - } - - if (pathKnowledge.length > 0) { - elizaLogger.info( - `[RAG Process] Processing individual file knowledge sources`, - ); - await this.processCharacterRAGKnowledge(pathKnowledge); - } - - if (stringKnowledge.length > 0) { - elizaLogger.info( - `[RAG Process] Processing direct string knowledge`, - ); - await this.processCharacterKnowledge(stringKnowledge); - } - } else { - // Non-RAG mode: only process string knowledge - const stringKnowledge = 
this.character.knowledge.filter( - (item): item is string => typeof item === "string", - ); - await this.processCharacterKnowledge(stringKnowledge); - } - // After all new knowledge is processed, clean up any deleted files - elizaLogger.info( - `[RAG Cleanup] Starting cleanup of deleted knowledge files`, + // Non-RAG mode: only process string knowledge + const stringKnowledge = this.character.knowledge.filter( + (item): item is string => typeof item === "string", ); - await this.ragKnowledgeManager.cleanupDeletedKnowledgeFiles(); - elizaLogger.info(`[RAG Cleanup] Cleanup complete`); + await this.processCharacterKnowledge(stringKnowledge); } } async stop() { - elizaLogger.debug("runtime::stop - character", this.character); + logger.debug("runtime::stop - character", this.character); // stop services, they don't have a stop function // just initialize @@ -530,7 +433,7 @@ export class AgentRuntime implements IAgentRuntime { // client have a start for (const cStr in this.clients) { const c = this.clients[cStr]; - elizaLogger.log( + logger.log( "runtime::stop - requesting", cStr, "client stop for", @@ -557,7 +460,7 @@ export class AgentRuntime implements IAgentRuntime { continue; } - elizaLogger.info( + logger.info( "Processing knowledge for ", this.character.name, " - ", @@ -573,317 +476,6 @@ export class AgentRuntime implements IAgentRuntime { } } - /** - * Processes character knowledge by creating document memories and fragment memories. - * This function takes an array of knowledge items, creates a document knowledge for each item if it doesn't exist, - * then chunks the content into fragments, embeds each fragment, and creates fragment knowledge. - * An array of knowledge items or objects containing id, path, and content. - */ - private async processCharacterRAGKnowledge( - items: (string | { path: string; shared?: boolean })[], - ) { - let hasError = false; - - for (const item of items) { - if (!item) continue; - - try { - // Check if item is marked as shared - let isShared = false; - let contentItem = item; - - // Only treat as shared if explicitly marked - if (typeof item === "object" && "path" in item) { - isShared = item.shared === true; - contentItem = item.path; - } else { - contentItem = item; - } - - // const knowledgeId = stringToUuid(contentItem); - const knowledgeId = this.ragKnowledgeManager.generateScopedId( - contentItem, - isShared, - ); - const fileExtension = contentItem - .split(".") - .pop() - ?.toLowerCase(); - - // Check if it's a file or direct knowledge - if ( - fileExtension && - ["md", "txt", "pdf"].includes(fileExtension) - ) { - try { - const filePath = join(this.knowledgeRoot, contentItem); - // Get existing knowledge first with more detailed logging - elizaLogger.debug("[RAG Query]", { - knowledgeId, - agentId: this.agentId, - relativePath: contentItem, - fullPath: filePath, - isShared, - knowledgeRoot: this.knowledgeRoot, - }); - - // Get existing knowledge first - const existingKnowledge = - await this.ragKnowledgeManager.getKnowledge({ - id: knowledgeId, - agentId: this.agentId, // Keep agentId as it's used in OR query - }); - - elizaLogger.debug("[RAG Query Result]", { - relativePath: contentItem, - fullPath: filePath, - knowledgeId, - isShared, - exists: existingKnowledge.length > 0, - knowledgeCount: existingKnowledge.length, - firstResult: existingKnowledge[0] - ? 
{ - id: existingKnowledge[0].id, - agentId: existingKnowledge[0].agentId, - contentLength: - existingKnowledge[0].content.text - .length, - } - : null, - results: existingKnowledge.map((k) => ({ - id: k.id, - agentId: k.agentId, - isBaseKnowledge: !k.id.includes("chunk"), - })), - }); - - // Read file content - const content: string = await readFile( - filePath, - "utf8", - ); - if (!content) { - hasError = true; - continue; - } - - if (existingKnowledge.length > 0) { - const existingContent = - existingKnowledge[0].content.text; - - elizaLogger.debug("[RAG Compare]", { - path: contentItem, - knowledgeId, - isShared, - existingContentLength: existingContent.length, - newContentLength: content.length, - contentSample: content.slice(0, 100), - existingContentSample: existingContent.slice( - 0, - 100, - ), - matches: existingContent === content, - }); - - if (existingContent === content) { - elizaLogger.info( - `${isShared ? "Shared knowledge" : "Knowledge"} ${contentItem} unchanged, skipping`, - ); - continue; - } - - // Content changed, remove old knowledge before adding new - elizaLogger.info( - `${isShared ? "Shared knowledge" : "Knowledge"} ${contentItem} changed, updating...`, - ); - await this.ragKnowledgeManager.removeKnowledge( - knowledgeId, - ); - await this.ragKnowledgeManager.removeKnowledge( - `${knowledgeId}-chunk-*` as UUID, - ); - } - - elizaLogger.info( - `Processing ${fileExtension.toUpperCase()} file content for`, - this.character.name, - "-", - contentItem, - ); - - await this.ragKnowledgeManager.processFile({ - path: contentItem, - content: content, - type: fileExtension as "pdf" | "md" | "txt", - isShared: isShared, - }); - } catch (error: any) { - hasError = true; - elizaLogger.error( - `Failed to read knowledge file ${contentItem}. Error details:`, - error?.message || error || "Unknown error", - ); - continue; - } - } else { - // Handle direct knowledge string - elizaLogger.info( - "Processing direct knowledge for", - this.character.name, - "-", - contentItem.slice(0, 100), - ); - - const existingKnowledge = - await this.ragKnowledgeManager.getKnowledge({ - id: knowledgeId, - agentId: this.agentId, - }); - - if (existingKnowledge.length > 0) { - elizaLogger.info( - `Direct knowledge ${knowledgeId} already exists, skipping`, - ); - continue; - } - - await this.ragKnowledgeManager.createKnowledge({ - id: knowledgeId, - agentId: this.agentId, - content: { - text: contentItem, - metadata: { - type: "direct", - }, - }, - }); - } - } catch (error: any) { - hasError = true; - elizaLogger.error( - `Error processing knowledge item ${item}:`, - error?.message || error || "Unknown error", - ); - continue; - } - } - - if (hasError) { - elizaLogger.warn( - "Some knowledge items failed to process, but continuing with available knowledge", - ); - } - } - - /** - * Processes directory-based RAG knowledge by recursively loading and processing files. 
- * @param dirConfig The directory configuration containing path and shared flag - */ - private async processCharacterRAGDirectory(dirConfig: { - directory: string; - shared?: boolean; - }) { - if (!dirConfig.directory) { - elizaLogger.error("[RAG Directory] No directory specified"); - return; - } - - // Sanitize directory path to prevent traversal attacks - const sanitizedDir = dirConfig.directory.replace(/\.\./g, ""); - const dirPath = join(this.knowledgeRoot, sanitizedDir); - - try { - // Check if directory exists - const dirExists = existsSync(dirPath); - if (!dirExists) { - elizaLogger.error( - `[RAG Directory] Directory does not exist: ${sanitizedDir}`, - ); - return; - } - - elizaLogger.debug(`[RAG Directory] Searching in: ${dirPath}`); - // Use glob to find all matching files in directory - const files = await glob("**/*.{md,txt,pdf}", { - cwd: dirPath, - nodir: true, - absolute: false, - }); - - if (files.length === 0) { - elizaLogger.warn( - `No matching files found in directory: ${dirConfig.directory}`, - ); - return; - } - - elizaLogger.info( - `[RAG Directory] Found ${files.length} files in ${dirConfig.directory}`, - ); - - // Process files in batches to avoid memory issues - const BATCH_SIZE = 5; - for (let i = 0; i < files.length; i += BATCH_SIZE) { - const batch = files.slice(i, i + BATCH_SIZE); - - await Promise.all( - batch.map(async (file) => { - try { - const relativePath = join(sanitizedDir, file); - - elizaLogger.debug( - `[RAG Directory] Processing file ${i + 1}/${files.length}:`, - { - file, - relativePath, - shared: dirConfig.shared, - }, - ); - - await this.processCharacterRAGKnowledge([ - { - path: relativePath, - shared: dirConfig.shared, - }, - ]); - } catch (error) { - elizaLogger.error( - `[RAG Directory] Failed to process file: ${file}`, - error instanceof Error - ? { - name: error.name, - message: error.message, - stack: error.stack, - } - : error, - ); - } - }), - ); - - elizaLogger.debug( - `[RAG Directory] Completed batch ${Math.min(i + BATCH_SIZE, files.length)}/${files.length} files`, - ); - } - - elizaLogger.success( - `[RAG Directory] Successfully processed directory: ${sanitizedDir}`, - ); - } catch (error) { - elizaLogger.error( - `[RAG Directory] Failed to process directory: ${sanitizedDir}`, - error instanceof Error - ? { - name: error.name, - message: error.message, - stack: error.stack, - } - : error, - ); - throw error; // Re-throw to let caller handle it - } - } - getSetting(key: string) { // check if the key is in the character.settings.secrets object if (this.character.settings?.secrets?.[key]) { @@ -915,7 +507,7 @@ export class AgentRuntime implements IAgentRuntime { * @param action The action to register. 
*/ registerAction(action: Action) { - elizaLogger.success(`Registering action: ${action.name}`); + logger.success(`Registering action: ${action.name}`); this.actions.push(action); } @@ -948,7 +540,7 @@ export class AgentRuntime implements IAgentRuntime { ): Promise { for (const response of responses) { if (!response.content?.action) { - elizaLogger.warn("No action found in the response content."); + logger.warn("No action found in the response content."); continue; } @@ -956,7 +548,7 @@ export class AgentRuntime implements IAgentRuntime { .toLowerCase() .replace("_", ""); - elizaLogger.success(`Normalized action: ${normalizedAction}`); + logger.success(`Normalized action: ${normalizedAction}`); let action = this.actions.find( (a: { name: string }) => @@ -970,7 +562,7 @@ export class AgentRuntime implements IAgentRuntime { ); if (!action) { - elizaLogger.info("Attempting to find action in similes."); + logger.info("Attempting to find action in similes."); for (const _action of this.actions) { const simileAction = _action.similes.find( (simile) => @@ -984,7 +576,7 @@ export class AgentRuntime implements IAgentRuntime { ); if (simileAction) { action = _action; - elizaLogger.success( + logger.success( `Action found in similes: ${action.name}`, ); break; @@ -993,7 +585,7 @@ export class AgentRuntime implements IAgentRuntime { } if (!action) { - elizaLogger.error( + logger.error( "No action found for", response.content.action, ); @@ -1001,17 +593,17 @@ export class AgentRuntime implements IAgentRuntime { } if (!action.handler) { - elizaLogger.error(`Action ${action.name} has no handler.`); + logger.error(`Action ${action.name} has no handler.`); continue; } try { - elizaLogger.info( + logger.info( `Executing handler for action: ${action.name}`, ); await action.handler(this, message, state, {}, callback); } catch (error) { - elizaLogger.error(error); + logger.error(error); } } } @@ -1032,7 +624,7 @@ export class AgentRuntime implements IAgentRuntime { ) { const evaluatorPromises = this.evaluators.map( async (evaluator: Evaluator) => { - elizaLogger.log("Evaluating", evaluator.name); + logger.log("Evaluating", evaluator.name); if (!evaluator.handler) { return null; } @@ -1125,7 +717,7 @@ export class AgentRuntime implements IAgentRuntime { email: email || this.character.email || userId, // Temporary details: this.character || { summary: "" }, }); - elizaLogger.success(`User ${userName} created successfully.`); + logger.success(`User ${userName} created successfully.`); } } @@ -1135,11 +727,11 @@ export class AgentRuntime implements IAgentRuntime { if (!participants.includes(userId)) { await this.databaseAdapter.addParticipant(userId, roomId); if (userId === this.agentId) { - elizaLogger.log( + logger.log( `Agent ${this.character.name} linked to room ${roomId} successfully.`, ); } else { - elizaLogger.log( + logger.log( `User ${userId} linked to room ${roomId} successfully.`, ); } @@ -1186,7 +778,7 @@ export class AgentRuntime implements IAgentRuntime { const room = await this.databaseAdapter.getRoom(roomId); if (!room) { await this.databaseAdapter.createRoom(roomId); - elizaLogger.log(`Room ${roomId} created successfully.`); + logger.log(`Room ${roomId} created successfully.`); } } @@ -1415,24 +1007,9 @@ Text: ${attachment.text} let knowledgeData = []; let formattedKnowledge = ""; - if (this.character.settings?.ragKnowledge) { - const recentContext = recentMessagesData - .slice(-3) // Last 3 messages - .map((msg) => msg.content.text) - .join(" "); - - knowledgeData = await 
this.ragKnowledgeManager.getKnowledge({ - query: message.content.text, - conversationContext: recentContext, - limit: 5, - }); - - formattedKnowledge = formatKnowledge(knowledgeData); - } else { - knowledgeData = await knowledge.get(this, message); + knowledgeData = await knowledge.get(this, message); - formattedKnowledge = formatKnowledge(knowledgeData); - } + formattedKnowledge = formatKnowledge(knowledgeData); const initialState = { agentId: this.agentId, @@ -1449,8 +1026,6 @@ Text: ${attachment.text} ] : "", knowledge: formattedKnowledge, - knowledgeData: knowledgeData, - ragKnowledgeData: knowledgeData, // Recent interactions between the sender and receiver, formatted as messages recentMessageInteractions: formattedMessageInteractions, // Recent interactions between the sender and receiver, formatted as posts @@ -1714,14 +1289,6 @@ Text: ${attachment.text} attachments: formattedAttachments, } as State; } - - getVerifiableInferenceAdapter(): IVerifiableInferenceAdapter | undefined { - return this.verifiableInferenceAdapter; - } - - setVerifiableInferenceAdapter(adapter: IVerifiableInferenceAdapter): void { - this.verifiableInferenceAdapter = adapter; - } } const formatKnowledge = (knowledge: KnowledgeItem[]) => { diff --git a/packages/runtime/src/settings.ts b/packages/runtime/src/settings.ts index d403d16379a..a91bfe1529a 100644 --- a/packages/runtime/src/settings.ts +++ b/packages/runtime/src/settings.ts @@ -1,9 +1,9 @@ import { config } from "dotenv"; import fs from "fs"; import path from "path"; -import elizaLogger from "./logger.ts"; +import logger from "./logger.ts"; -elizaLogger.info("Loading embedding settings:", { +logger.info("Loading embedding settings:", { USE_OPENAI_EMBEDDING: process.env.USE_OPENAI_EMBEDDING, USE_OLLAMA_EMBEDDING: process.env.USE_OLLAMA_EMBEDDING, OLLAMA_EMBEDDING_MODEL: @@ -11,7 +11,7 @@ elizaLogger.info("Loading embedding settings:", { }); // Add this logging block -elizaLogger.info("Loading character settings:", { +logger.info("Loading character settings:", { CHARACTER_PATH: process.env.CHARACTER_PATH, ARGV: process.argv, CHARACTER_ARG: process.argv.find((arg) => arg.startsWith("--character=")), @@ -93,7 +93,7 @@ export function loadEnvConfig(): Settings { const result = config(envPath ? { path: envPath } : {}); if (!result.error) { - elizaLogger.log(`Loaded .env file from: ${envPath}`); + logger.log(`Loaded .env file from: ${envPath}`); } // Parse namespaced settings @@ -138,7 +138,7 @@ export function hasEnvVariable(key: string): boolean { // Initialize settings based on environment export const settings = isBrowser() ? 
environmentSettings : loadEnvConfig(); -elizaLogger.info("Parsed settings:", { +logger.info("Parsed settings:", { USE_OPENAI_EMBEDDING: settings.USE_OPENAI_EMBEDDING, USE_OPENAI_EMBEDDING_TYPE: typeof settings.USE_OPENAI_EMBEDDING, USE_OLLAMA_EMBEDDING: settings.USE_OLLAMA_EMBEDDING, diff --git a/packages/runtime/src/types.ts b/packages/runtime/src/types.ts index ee08e39d890..d550a4a79fd 100644 --- a/packages/runtime/src/types.ts +++ b/packages/runtime/src/types.ts @@ -1184,37 +1184,6 @@ export interface IMemoryManager { countMemories(roomId: UUID, unique?: boolean): Promise; } -export interface IRAGKnowledgeManager { - runtime: IAgentRuntime; - tableName: string; - - getKnowledge(params: { - query?: string; - id?: UUID; - limit?: number; - conversationContext?: string; - agentId?: UUID; - }): Promise; - createKnowledge(item: RAGKnowledgeItem): Promise; - removeKnowledge(id: UUID): Promise; - searchKnowledge(params: { - agentId: UUID; - embedding: Float32Array | number[]; - match_threshold?: number; - match_count?: number; - searchText?: string; - }): Promise; - clearKnowledge(shared?: boolean): Promise; - processFile(file: { - path: string; - content: string; - type: "pdf" | "md" | "txt"; - isShared: boolean; - }): Promise; - cleanupDeletedKnowledgeFiles(): Promise; - generateScopedId(path: string, isShared: boolean): UUID; -} - export type CacheOptions = { expires?: number; }; @@ -1256,7 +1225,6 @@ export abstract class Service { export interface IAgentRuntime { // Properties agentId: UUID; - serverUrl: string; databaseAdapter: IDatabaseAdapter; character: Character; providers: Provider[]; @@ -1272,7 +1240,6 @@ export interface IAgentRuntime { descriptionManager: IMemoryManager; documentsManager: IMemoryManager; knowledgeManager: IMemoryManager; - ragKnowledgeManager: IRAGKnowledgeManager; loreManager: IMemoryManager; cacheManager: ICacheManager; @@ -1282,8 +1249,6 @@ export interface IAgentRuntime { // but I think the real solution is forthcoming as a base client interface clients: Record; - verifiableInferenceAdapter?: IVerifiableInferenceAdapter | null; - initialize(): Promise; registerMemoryManager(manager: IMemoryManager): void; @@ -1591,32 +1556,6 @@ export interface VerifiableInferenceResult { timestamp: number; } -/** - * Interface for verifiable inference adapters - */ -export interface IVerifiableInferenceAdapter { - options: any; - /** - * Generate text with verifiable proof - * @param context The input text/prompt - * @param modelClass The model class/name to use - * @param options Additional provider-specific options - * @returns Promise containing the generated text and proof data - */ - generateText( - context: string, - modelClass: string, - options?: VerifiableInferenceOptions, - ): Promise; - - /** - * Verify the proof of a generated response - * @param result The result containing response and proof to verify - * @returns Promise indicating if the proof is valid - */ - verifyProof(result: VerifiableInferenceResult): Promise; -} - export enum TokenizerType { Auto = "auto", TikToken = "tiktoken", diff --git a/packages/runtime/src/utils.ts b/packages/runtime/src/utils.ts index 2c1555b011e..6bf7362571a 100644 --- a/packages/runtime/src/utils.ts +++ b/packages/runtime/src/utils.ts @@ -1,3 +1,3 @@ -export { elizaLogger } from "./logger.ts"; +export { logger } from "./logger.ts"; export { embed } from "./embedding.ts"; export { AgentRuntime } from "./runtime.ts"; diff --git a/packages/runtime/tsconfig.json b/packages/runtime/tsconfig.json index e23e5f6a27c..966a08604bf 100644 
--- a/packages/runtime/tsconfig.json +++ b/packages/runtime/tsconfig.json @@ -22,6 +22,6 @@ "allowArbitraryExtensions": true, "customConditions": ["@elizaos/source"] }, - "include": ["src/**/*", "../plugin-anthropic/src/localembeddingManager.ts"], + "include": ["src/**/*", "../plugin-anthropic/src/localembeddingManager.ts", "../../agent/src/defaultCharacter.ts"], "exclude": ["node_modules", "dist", "src/**/*.d.ts", "types/**/*.test.ts"] }
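
The runtime hunks above change two behaviours: the AgentRuntime constructor now throws "No character provided" instead of falling back to defaultCharacter (which the diffstat moves to agent/src/defaultCharacter.ts), and initialize() now keeps only string knowledge entries because RAGKnowledgeManager and its file/directory handling are removed. The standalone TypeScript sketch below illustrates both behaviours; the MinimalCharacter shape and the requireCharacter helper are illustrative stand-ins only and are not part of this patch or of @elizaos/core.

    // Sketch of the explicit-character guard and the string-only knowledge filter
    // introduced by the runtime.ts hunks in this patch. Types and helper names here
    // are hypothetical; only the guard condition and the filter mirror the diff.
    interface MinimalCharacter {
        name: string;
        knowledge?: (
            | string
            | { path: string; shared?: boolean }
            | { directory: string; shared?: boolean }
        )[];
    }

    function requireCharacter(character?: MinimalCharacter): MinimalCharacter {
        // Mirrors the new constructor guard: no silent fallback to a default character.
        if (!character) {
            throw new Error("No character provided");
        }
        return character;
    }

    const character = requireCharacter({
        name: "Example",
        knowledge: [
            "Plain string knowledge is still embedded via processCharacterKnowledge.",
            { path: "docs/handbook.md", shared: true }, // not processed after this patch
            { directory: "knowledge/shared" },          // not processed after this patch
        ],
    });

    // Same filter as the new initialize() body: only string entries survive.
    const stringKnowledge = (character.knowledge ?? []).filter(
        (item): item is string => typeof item === "string",
    );

    console.log(stringKnowledge.length); // 1

After this patch, { path } and { directory } entries in character.knowledge are no longer embedded at initialization, since the code paths that handled them (processCharacterRAGKnowledge, processCharacterRAGDirectory, cleanupDeletedKnowledgeFiles) are deleted.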