diff --git a/.env.example b/.env.example index 200d21bd52c..d1eb4e5db1f 100644 --- a/.env.example +++ b/.env.example @@ -968,3 +968,9 @@ BUNDLE_EXECUTOR_ADDRESS= # Address of the bundle executor contract DESK_EXCHANGE_PRIVATE_KEY= # Required for trading and cancelling orders DESK_EXCHANGE_NETWORK= # "mainnet" or "testnet +# JWT +JWT_ENABLED= +JWT_SECRET_KEY= +JWT_EXPIRED= +JWT_USERNAME= +JWT_PASSWORD= diff --git a/agent/.gitignore b/agent/.gitignore index d0e66465a4d..3610ad5813f 100644 --- a/agent/.gitignore +++ b/agent/.gitignore @@ -1,6 +1,3 @@ -*.ts -!index.ts -!character.ts .env *.env .env* diff --git a/agent/package.json b/agent/package.json index e869abfcd89..e055ae95ff3 100644 --- a/agent/package.json +++ b/agent/package.json @@ -96,7 +96,6 @@ "@elizaos/plugin-twitter": "workspace:*", "@elizaos/plugin-primus": "workspace:*", "@elizaos/plugin-cronoszkevm": "workspace:*", - "@elizaos/plugin-cronos": "workspace:*", "@elizaos/plugin-3d-generation": "workspace:*", "@elizaos/plugin-fuel": "workspace:*", "@elizaos/plugin-avalanche": "workspace:*", diff --git a/agent/src/client.ts b/agent/src/client.ts new file mode 100644 index 00000000000..7b60c537b56 --- /dev/null +++ b/agent/src/client.ts @@ -0,0 +1,87 @@ +import path from "path"; +import fs from "fs"; +import { fileURLToPath } from 'url'; +import { elizaLogger } from "@elizaos/core"; +import { getAndParseReadme } from "./document"; +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +interface ClientInfo { + package: string; + name: string, + env?: Record; + document?: string; +} + +async function getClientInfo(clientDir: string): Promise { + try { + // get package.json + const packageJsonPath = path.join(clientDir, 'package.json'); + elizaLogger.debug(`Reading package.json from: ${packageJsonPath}`); + const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8')); + + // try to read README.md with case-insensitive search + const {document, env} = await getAndParseReadme(clientDir); + + return { + package: packageJson.name, + name: packageJson.name.startsWith('@elizaos/client-') ? packageJson.name.slice(16) : packageJson.name, + env, + document, + }; + } catch (error) { + elizaLogger.error(`Error loading client from ${clientDir}:`, error.message); + return null; + } +} + +export async function getClients() { + try { + // Read package.json from the agent directory + const packageJsonPath = path.resolve(__dirname, '../package.json'); + elizaLogger.log(`Reading package.json from: ${packageJsonPath}`); + + const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8')); + + // Get all client dependencies (starting with @elizaos/client-) + const clientPackages = Object.keys(packageJson.dependencies || {}) + .filter(dep => dep.startsWith('@elizaos/client-')) + .sort(); + + elizaLogger.info(`Found ${clientPackages.length} client packages in package.json`); + + // Get client info for each package + const clientsInfo = await Promise.all( + clientPackages.map(async (packageName) => { + try { + // Import client module to get client info + elizaLogger.log(`load client: ${packageName}`); + // Try to read README.md from node_modules + const nodeModulesDir = path.resolve(__dirname, '../node_modules'); + const clientDir = path.join(nodeModulesDir, packageName); + + const {document, env} = await getAndParseReadme(clientDir); + + return { + package: packageName, + name: packageName.startsWith('@elizaos/client-') ? 
packageName.slice(16) : packageName, + env, + document, + }; + } catch (error) { + elizaLogger.error(`Error loading client ${packageName}:`, error.message); + return null; + } + }) + ); + + // Filter out clients that failed to load + const validClients = clientsInfo.filter(Boolean); + elizaLogger.info(`Successfully loaded ${validClients.length} clients`); + + return validClients; + } catch (error) { + elizaLogger.error('Error getting clients:', error); + throw error; + } +} diff --git a/agent/src/document.ts b/agent/src/document.ts new file mode 100644 index 00000000000..b883b872588 --- /dev/null +++ b/agent/src/document.ts @@ -0,0 +1,51 @@ +import path from "path"; +import fs from "fs"; +import { elizaLogger } from "@elizaos/core"; + +export async function getReadme(dir: string) { + // try to read README.md with case-insensitive search + try { + const files = fs.readdirSync(dir); + const readmeFile = files.find(file => file.toLowerCase() === 'readme.md'); + if (readmeFile) { + return fs.readFileSync(path.join(dir, readmeFile), 'utf-8'); + } + } catch (error) { + elizaLogger.debug(`No README.md found in ${dir}`); + return null; + } +} + +export async function getAndParseReadme(dir: string) { + const document = await getReadme(dir); + const env: Record = {}; + if (!document) { + return {document: null, env: {}}; + } + const envMatch = document.match(/```env\n([\s\S]*?)\n```/); + if (envMatch) { + const envLines = envMatch[1].split('\n').map(line => line.trim()) + .filter(line => line.length > 0 && !line.startsWith('#') && !line.startsWith(' #')); + for (const line of envLines) { + // remove comment + const lineWithoutComment = line.split('#')[0].trim(); + const [key, ...valueParts] = lineWithoutComment.split('='); + let value:any = valueParts.join('=').trim(); // handle value with = + + // handle number + if (/^\d+$/.test(value)) { + value = Number.parseInt(value, 10); + } + // else if (value.toLowerCase() === "true") { + // value = true; + // } else if (value.toLowerCase() === "false") { + // value = false; + // } + + if (key) { + env[key.trim()] = value; + } + } + } + return {document, env}; +} \ No newline at end of file diff --git a/agent/src/index.ts b/agent/src/index.ts index 1ded5914eb0..5e662913821 100644 --- a/agent/src/index.ts +++ b/agent/src/index.ts @@ -42,6 +42,7 @@ import { type ICacheManager, type IDatabaseAdapter, type IDatabaseCacheAdapter, + type TypeDatabaseAdapter, ModelProviderName, parseBooleanFromText, settings, @@ -57,7 +58,7 @@ import createGoatPlugin from "@elizaos/plugin-goat"; import createZilliqaPlugin from "@elizaos/plugin-zilliqa"; // import { intifacePlugin } from "@elizaos/plugin-intiface"; -import { ThreeDGenerationPlugin } from "@elizaos/plugin-3d-generation"; +import { threeDGenerationPlugin } from "@elizaos/plugin-3d-generation"; import { abstractPlugin } from "@elizaos/plugin-abstract"; import { akashPlugin } from "@elizaos/plugin-akash"; import { alloraPlugin } from "@elizaos/plugin-allora"; @@ -157,9 +158,11 @@ import { ankrPlugin } from "@elizaos/plugin-ankr"; import { formPlugin } from "@elizaos/plugin-form"; import { MongoClient } from "mongodb"; import { quickIntelPlugin } from "@elizaos/plugin-quick-intel"; - +import { v4 as uuidv4 } from "uuid"; import { trikonPlugin } from "@elizaos/plugin-trikon"; import arbitragePlugin from "@elizaos/plugin-arbitrage"; +import { getPlugins } from "./plugins"; +import { getClients } from "./client"; const __filename = fileURLToPath(import.meta.url); // get the resolved path to the file const __dirname = 
path.dirname(__filename); // get the name of the directory @@ -212,8 +215,14 @@ function mergeCharacters(base: Character, child: Character): Character { ...Object.keys(baseObj || {}), ...Object.keys(childObj || {}), ]); + + // biome-ignore lint/complexity/noForEach: keys.forEach((key) => { - if ( + // Special handling for plugins array + if (key === 'plugins') { + // Always use child's plugins if defined (even if empty array) + result[key] = childObj[key] !== undefined ? childObj[key] : baseObj[key]; + } else if ( typeof baseObj[key] === "object" && typeof childObj[key] === "object" && !Array.isArray(baseObj[key]) && @@ -224,6 +233,7 @@ function mergeCharacters(base: Character, child: Character): Character { Array.isArray(baseObj[key]) || Array.isArray(childObj[key]) ) { + // For other arrays, concatenate as before result[key] = [ ...(baseObj[key] || []), ...(childObj[key] || []), @@ -243,7 +253,6 @@ function isAllStrings(arr: unknown[]): boolean { export async function loadCharacterFromOnchain(): Promise { const jsonText = onchainJson; - console.log("JSON:", jsonText); if (!jsonText) return []; const loadedCharacters = []; try { @@ -489,34 +498,54 @@ export async function loadCharacters( return loadedCharacters; } -async function handlePluginImporting(plugins: string[]) { - if (plugins.length > 0) { - elizaLogger.info("Plugins are: ", plugins); - const importedPlugins = await Promise.all( - plugins.map(async (plugin) => { - try { - const importedPlugin = await import(plugin); - const functionName = - plugin - .replace("@elizaos/plugin-", "") - .replace(/-./g, (x) => x[1].toUpperCase()) + - "Plugin"; // Assumes plugin function is camelCased with Plugin suffix - return ( - importedPlugin.default || importedPlugin[functionName] - ); - } catch (importError) { - elizaLogger.error( - `Failed to import plugin: ${plugin}`, - importError - ); - return []; // Return null for failed imports - } - }) - ); - return importedPlugins; - } else { +async function handlePluginImporting(plugins: string[] | any[]) { + if (!Array.isArray(plugins) || plugins.length === 0) { return []; } + + elizaLogger.info("Plugins are: ", plugins); + const importedPlugins = await Promise.all( + plugins.map(async (plugin) => { + // if plugin is already an object, return it + if (typeof plugin === 'object' && plugin !== null) { + return plugin; + } + + // ensure plugin is a string + if (typeof plugin !== 'string') { + elizaLogger.error(`Invalid plugin format: ${plugin}`); + return null; + } + + try { + const importedPlugin = await import(plugin); + // if the plugin exports a plugins object, it's a multi-plugin package, e.g. 
@elizaos/coinbase + if (importedPlugin.plugins) { + return Object.values(importedPlugin.plugins).map(pluginObj => { + pluginObj.package = plugin; + return pluginObj; + }); + } + + // single plugin package processing logic + const functionName = + plugin + .replace("@elizaos/plugin-", "") + .replace(/-./g, (x) => x[1].toUpperCase()) + + "Plugin"; + const pluginObj = importedPlugin.default || importedPlugin[functionName]; + pluginObj.package = plugin; + return pluginObj; + } catch (importError) { + elizaLogger.error( + `Failed to import plugin: ${plugin}`, + importError + ); + return null; + } + }) + ); + return importedPlugins.flat().filter(Boolean); } export function getTokenForProvider( @@ -564,9 +593,7 @@ export function getTokenForProvider( case ModelProviderName.ANTHROPIC: return ( character.settings?.secrets?.ANTHROPIC_API_KEY || - character.settings?.secrets?.CLAUDE_API_KEY || - settings.ANTHROPIC_API_KEY || - settings.CLAUDE_API_KEY + settings.ANTHROPIC_API_KEY ); case ModelProviderName.REDPILL: return ( @@ -733,6 +760,9 @@ function initializeDatabase(dataDir: string) { const db = new PostgresDatabaseAdapter({ connectionString: process.env.POSTGRES_URL, parseInputs: true, + ssl: { + rejectUnauthorized: false // for aws postgres, if true, need to set sslmode=verify-full in connection string and set sslcert and sslkey in env + } }); // Test the connection @@ -1018,293 +1048,22 @@ export async function createAgent( character, // character.plugins are handled when clients are added plugins: [ - parseBooleanFromText(getSecret(character, "BITMIND")) && - getSecret(character, "BITMIND_API_TOKEN") - ? bittensorPlugin - : null, - parseBooleanFromText( - getSecret(character, "EMAIL_AUTOMATION_ENABLED") - ) - ? emailAutomationPlugin - : null, - getSecret(character, "IQ_WALLET_ADDRESS") && - getSecret(character, "IQSOlRPC") - ? elizaCodeinPlugin - : null, bootstrapPlugin, - getSecret(character, "CDP_API_KEY_NAME") && - getSecret(character, "CDP_API_KEY_PRIVATE_KEY") && - getSecret(character, "CDP_AGENT_KIT_NETWORK") - ? agentKitPlugin - : null, - getSecret(character, "DEXSCREENER_API_KEY") - ? dexScreenerPlugin - : null, - getSecret(character, "FOOTBALL_API_KEY") ? footballPlugin : null, - getSecret(character, "CONFLUX_CORE_PRIVATE_KEY") - ? confluxPlugin - : null, nodePlugin, - getSecret(character, "ROUTER_NITRO_EVM_PRIVATE_KEY") && - getSecret(character, "ROUTER_NITRO_EVM_ADDRESS") - ? nitroPlugin - : null, - getSecret(character, "TAVILY_API_KEY") ? webSearchPlugin : null, - getSecret(character, "SOLANA_PUBLIC_KEY") || - (getSecret(character, "WALLET_PUBLIC_KEY") && - !getSecret(character, "WALLET_PUBLIC_KEY")?.startsWith("0x")) - ? [solanaPlugin, solanaPluginV2] - : null, - getSecret(character, "SOLANA_PRIVATE_KEY") - ? solanaAgentkitPlugin - : null, - getSecret(character, "AUTONOME_JWT_TOKEN") ? autonomePlugin : null, - (getSecret(character, "NEAR_ADDRESS") || - getSecret(character, "NEAR_WALLET_PUBLIC_KEY")) && - getSecret(character, "NEAR_WALLET_SECRET_KEY") - ? nearPlugin - : null, - getSecret(character, "EVM_PUBLIC_KEY") || - (getSecret(character, "WALLET_PUBLIC_KEY") && - getSecret(character, "WALLET_PUBLIC_KEY")?.startsWith("0x")) - ? evmPlugin - : null, - (getSecret(character, "EVM_PRIVATE_KEY") || - getSecret(character, "SOLANA_PRIVATE_KEY")) - ? edwinPlugin - : null, - (getSecret(character, "EVM_PUBLIC_KEY") || - getSecret(character, "INJECTIVE_PUBLIC_KEY")) && - getSecret(character, "INJECTIVE_PRIVATE_KEY") - ? 
injectivePlugin - : null, - getSecret(character, "COSMOS_RECOVERY_PHRASE") && - getSecret(character, "COSMOS_AVAILABLE_CHAINS") && - createCosmosPlugin(), - (getSecret(character, "SOLANA_PUBLIC_KEY") || - (getSecret(character, "WALLET_PUBLIC_KEY") && - !getSecret(character, "WALLET_PUBLIC_KEY")?.startsWith( - "0x" - ))) && - getSecret(character, "SOLANA_ADMIN_PUBLIC_KEY") && - getSecret(character, "SOLANA_PRIVATE_KEY") && - getSecret(character, "SOLANA_ADMIN_PRIVATE_KEY") - ? nftGenerationPlugin - : null, - getSecret(character, "ZEROG_PRIVATE_KEY") ? zgPlugin : null, - getSecret(character, "COINMARKETCAP_API_KEY") - ? coinmarketcapPlugin - : null, - getSecret(character, "ZERION_API_KEY") ? zerionPlugin : null, - getSecret(character, "COINBASE_COMMERCE_KEY") - ? coinbaseCommercePlugin - : null, - getSecret(character, "FAL_API_KEY") || - getSecret(character, "OPENAI_API_KEY") || - getSecret(character, "VENICE_API_KEY") || - getSecret(character, "NVIDIA_API_KEY") || - getSecret(character, "NINETEEN_AI_API_KEY") || - getSecret(character, "HEURIST_API_KEY") || - getSecret(character, "LIVEPEER_GATEWAY_URL") - ? imageGenerationPlugin - : null, - getSecret(character, "FAL_API_KEY") ? ThreeDGenerationPlugin : null, - ...(getSecret(character, "COINBASE_API_KEY") && - getSecret(character, "COINBASE_PRIVATE_KEY") - ? [ - coinbaseMassPaymentsPlugin, - tradePlugin, - tokenContractPlugin, - advancedTradePlugin, - ] - : []), ...(teeMode !== TEEMode.OFF && walletSecretSalt ? [teePlugin] : []), teeMode !== TEEMode.OFF && walletSecretSalt && getSecret(character, "VLOG") ? verifiableLogPlugin : null, - getSecret(character, "SGX") ? sgxPlugin : null, + // getSecret(character, "SGX") ? sgxPlugin : null, getSecret(character, "ENABLE_TEE_LOG") && ((teeMode !== TEEMode.OFF && walletSecretSalt) || getSecret(character, "SGX")) ? teeLogPlugin : null, - getSecret(character, "OMNIFLIX_API_URL") && - getSecret(character, "OMNIFLIX_MNEMONIC") - ? OmniflixPlugin - : null, - getSecret(character, "COINBASE_API_KEY") && - getSecret(character, "COINBASE_PRIVATE_KEY") && - getSecret(character, "COINBASE_NOTIFICATION_URI") - ? webhookPlugin - : null, goatPlugin, zilliqaPlugin, - getSecret(character, "COINGECKO_API_KEY") || - getSecret(character, "COINGECKO_PRO_API_KEY") - ? coingeckoPlugin - : null, - getSecret(character, "MORALIS_API_KEY") ? moralisPlugin : null, - getSecret(character, "EVM_PROVIDER_URL") ? goatPlugin : null, - getSecret(character, "ABSTRACT_PRIVATE_KEY") - ? abstractPlugin - : null, - getSecret(character, "B2_PRIVATE_KEY") ? b2Plugin : null, - getSecret(character, "BINANCE_API_KEY") && - getSecret(character, "BINANCE_SECRET_KEY") - ? binancePlugin - : null, - getSecret(character, "FLOW_ADDRESS") && - getSecret(character, "FLOW_PRIVATE_KEY") - ? flowPlugin - : null, - getSecret(character, "LENS_ADDRESS") && - getSecret(character, "LENS_PRIVATE_KEY") - ? lensPlugin - : null, - getSecret(character, "APTOS_PRIVATE_KEY") ? aptosPlugin : null, - getSecret(character, "MIND_COLD_WALLET_ADDRESS") - ? mindNetworkPlugin - : null, - getSecret(character, "MVX_PRIVATE_KEY") ? multiversxPlugin : null, - getSecret(character, "ZKSYNC_PRIVATE_KEY") ? zksyncEraPlugin : null, - getSecret(character, "CRONOSZKEVM_PRIVATE_KEY") - ? cronosZkEVMPlugin - : null, - getSecret(character, "TEE_MARLIN") ? teeMarlinPlugin : null, - getSecret(character, "TON_PRIVATE_KEY") ? tonPlugin : null, - getSecret(character, "THIRDWEB_SECRET_KEY") ? thirdwebPlugin : null, - getSecret(character, "SUI_PRIVATE_KEY") ? 
suiPlugin : null, - getSecret(character, "STORY_PRIVATE_KEY") ? storyPlugin : null, - getSecret(character, "SQUID_SDK_URL") && - getSecret(character, "SQUID_INTEGRATOR_ID") && - getSecret(character, "SQUID_EVM_ADDRESS") && - getSecret(character, "SQUID_EVM_PRIVATE_KEY") && - getSecret(character, "SQUID_API_THROTTLE_INTERVAL") - ? squidRouterPlugin - : null, - getSecret(character, "FUEL_PRIVATE_KEY") ? fuelPlugin : null, - getSecret(character, "AVALANCHE_PRIVATE_KEY") - ? avalanchePlugin - : null, - getSecret(character, "BIRDEYE_API_KEY") ? birdeyePlugin : null, - getSecret(character, "ECHOCHAMBERS_API_URL") && - getSecret(character, "ECHOCHAMBERS_API_KEY") - ? echoChambersPlugin - : null, - getSecret(character, "LETZAI_API_KEY") ? letzAIPlugin : null, - getSecret(character, "STARGAZE_ENDPOINT") ? stargazePlugin : null, - getSecret(character, "GIPHY_API_KEY") ? giphyPlugin : null, - getSecret(character, "PASSPORT_API_KEY") - ? gitcoinPassportPlugin - : null, - getSecret(character, "GENLAYER_PRIVATE_KEY") - ? genLayerPlugin - : null, - getSecret(character, "AVAIL_SEED") && - getSecret(character, "AVAIL_APP_ID") - ? availPlugin - : null, - getSecret(character, "OPEN_WEATHER_API_KEY") - ? openWeatherPlugin - : null, - getSecret(character, "OBSIDIAN_API_TOKEN") ? obsidianPlugin : null, - getSecret(character, "ARTHERA_PRIVATE_KEY")?.startsWith("0x") - ? artheraPlugin - : null, - getSecret(character, "ALLORA_API_KEY") ? alloraPlugin : null, - getSecret(character, "HYPERLIQUID_PRIVATE_KEY") - ? hyperliquidPlugin - : null, - getSecret(character, "HYPERLIQUID_TESTNET") - ? hyperliquidPlugin - : null, - getSecret(character, "AKASH_MNEMONIC") && - getSecret(character, "AKASH_WALLET_ADDRESS") - ? akashPlugin - : null, - getSecret(character, "CHAINBASE_API_KEY") ? chainbasePlugin : null, - getSecret(character, "QUAI_PRIVATE_KEY") ? quaiPlugin : null, - getSecret(character, "RESERVOIR_API_KEY") - ? createNFTCollectionsPlugin() - : null, - getSecret(character, "ZERO_EX_API_KEY") ? zxPlugin : null, - getSecret(character, "DKG_PRIVATE_KEY") ? dkgPlugin : null, - getSecret(character, "PYTH_TESTNET_PROGRAM_KEY") || - getSecret(character, "PYTH_MAINNET_PROGRAM_KEY") - ? pythDataPlugin - : null, - getSecret(character, "LND_TLS_CERT") && - getSecret(character, "LND_MACAROON") && - getSecret(character, "LND_SOCKET") - ? lightningPlugin - : null, - getSecret(character, "OPENAI_API_KEY") && - parseBooleanFromText( - getSecret(character, "ENABLE_OPEN_AI_COMMUNITY_PLUGIN") - ) - ? openaiPlugin - : null, - getSecret(character, "DEVIN_API_TOKEN") ? devinPlugin : null, - getSecret(character, "INITIA_PRIVATE_KEY") ? initiaPlugin : null, - getSecret(character, "HOLDSTATION_PRIVATE_KEY") - ? holdstationPlugin - : null, - getSecret(character, "NVIDIA_NIM_API_KEY") || - getSecret(character, "NVIDIA_NGC_API_KEY") - ? nvidiaNimPlugin - : null, - getSecret(character, "BNB_PRIVATE_KEY") || - getSecret(character, "BNB_PUBLIC_KEY")?.startsWith("0x") - ? bnbPlugin - : null, - (getSecret(character, "EMAIL_INCOMING_USER") && - getSecret(character, "EMAIL_INCOMING_PASS")) || - (getSecret(character, "EMAIL_OUTGOING_USER") && - getSecret(character, "EMAIL_OUTGOING_PASS")) - ? emailPlugin - : null, - getSecret(character, "SEI_PRIVATE_KEY") ? seiPlugin : null, - getSecret(character, "HYPERBOLIC_API_KEY") - ? hyperbolicPlugin - : null, - getSecret(character, "SUNO_API_KEY") ? sunoPlugin : null, - getSecret(character, "UDIO_AUTH_TOKEN") ? 
udioPlugin : null, - getSecret(character, "IMGFLIP_USERNAME") && - getSecret(character, "IMGFLIP_PASSWORD") - ? imgflipPlugin - : null, - getSecret(character, "FUNDING_PRIVATE_KEY") && - getSecret(character, "EVM_RPC_URL") - ? litPlugin - : null, - getSecret(character, "ETHSTORAGE_PRIVATE_KEY") - ? ethstoragePlugin - : null, - getSecret(character, "MINA_PRIVATE_KEY") ? minaPlugin : null, - getSecret(character, "FORM_PRIVATE_KEY") ? formPlugin : null, - getSecret(character, "ANKR_WALLET") ? ankrPlugin : null, - getSecret(character, "DCAP_EVM_PRIVATE_KEY") && - getSecret(character, "DCAP_MODE") - ? dcapPlugin - : null, - getSecret(character, "QUICKINTEL_API_KEY") - ? quickIntelPlugin - : null, - getSecret(character, "GELATO_RELAY_API_KEY") ? gelatoPlugin : null, - getSecret(character, "TRIKON_WALLET_ADDRESS") ? trikonPlugin : null, - getSecret(character, "ARBITRAGE_EVM_PRIVATE_KEY") && - (getSecret(character, "ARBITRAGE_EVM_PROVIDER_URL") || - getSecret(character, "ARBITRAGE_ETHEREUM_WS_URL")) && - getSecret(character, "ARBITRAGE_FLASHBOTS_RELAY_SIGNING_KEY") && - getSecret(character, "ARBITRAGE_BUNDLE_EXECUTOR_ADDRESS") - ? arbitragePlugin - : null, - getSecret(character, "DESK_EXCHANGE_PRIVATE_KEY") || - getSecret(character, "DESK_EXCHANGE_NETWORK") - ? deskExchangePlugin - : null, ] .flat() .filter(Boolean), @@ -1389,24 +1148,14 @@ function initializeCache( async function startAgent( character: Character, - directClient: DirectClient + directClient: DirectClient, + db: TypeDatabaseAdapter ): Promise { - let db: IDatabaseAdapter & IDatabaseCacheAdapter; try { - character.id ??= stringToUuid(character.name); character.username ??= character.name; - + character.id ??= stringToUuid(character.username || uuidv4()); + const token = getTokenForProvider(character.modelProvider, character); - const dataDir = path.join(__dirname, "../data"); - - if (!fs.existsSync(dataDir)) { - fs.mkdirSync(dataDir, { recursive: true }); - } - - db = initializeDatabase(dataDir) as IDatabaseAdapter & - IDatabaseCacheAdapter; - - await db.init(); const cache = initializeCache( process.env.CACHE_STORE ?? 
CacheStore.DATABASE, @@ -1440,9 +1189,6 @@ async function startAgent( error ); elizaLogger.error(error); - if (db) { - await db.close(); - } throw error; } } @@ -1475,8 +1221,18 @@ const startAgents = async () => { const directClient = new DirectClient(); let serverPort = Number.parseInt(settings.SERVER_PORT || "3000"); const args = parseArguments(); - const charactersArg = args.characters || args.character; - let characters = [defaultCharacter]; + let charactersArg = args.characters || args.character; + const dataDir = path.join(__dirname, "../data"); + + if (!fs.existsSync(dataDir)) { + fs.mkdirSync(dataDir, { recursive: true }); + } + + const db = initializeDatabase(dataDir) as TypeDatabaseAdapter; + + await db.init(); + + let characters = []; if (process.env.IQ_WALLET_ADDRESS && process.env.IQSOlRPC) { characters = await loadCharacterFromOnchain(); @@ -1484,6 +1240,18 @@ const startAgents = async () => { const notOnchainJson = !onchainJson || onchainJson == "null"; + //start tplgen agent + if(!charactersArg) charactersArg = ''; + if(charactersArg.indexOf('characters/tplgen.character.json') === -1) { + if(charactersArg) { + charactersArg = ',characters/tplgen.character.json'; + } else { + charactersArg = 'characters/tplgen.character.json'; + } + } + + elizaLogger.log('charactersArg', charactersArg); + if ((notOnchainJson && charactersArg) || hasValidRemoteUrls()) { characters = await loadCharacters(charactersArg); } @@ -1493,7 +1261,7 @@ const startAgents = async () => { try { for (const character of characters) { - await startAgent(character, directClient); + await startAgent(character, directClient, db); } } catch (error) { elizaLogger.error("Error starting agents:", error); @@ -1511,14 +1279,16 @@ const startAgents = async () => { directClient.startAgent = async (character) => { // Handle plugins character.plugins = await handlePluginImporting(character.plugins); - // wrap it so we don't have to inject directClient later - return startAgent(character, directClient); + return startAgent(character, directClient, db); }; directClient.loadCharacterTryPath = loadCharacterTryPath; directClient.jsonToCharacter = jsonToCharacter; - + directClient.getTokenForProvider = getTokenForProvider; + directClient.db = db; + directClient.plugins = await getPlugins(); + directClient.clients = await getClients(); directClient.start(serverPort); if (serverPort !== Number.parseInt(settings.SERVER_PORT || "3000")) { @@ -1526,7 +1296,7 @@ const startAgents = async () => { } elizaLogger.info( - "Run `pnpm start:client` to start the client and visit the outputted URL (http://localhost:5173) to chat with your agents. When running multiple agents, use client with different port `SERVER_PORT=3001 pnpm start:client`" + `Run 'pnpm start:client' to start the client and visit the outputted URL (http://localhost:${serverPort}) to chat with your agents. 
When running multiple agents, use client with different port 'SERVER_PORT=${serverPort + 1} pnpm start:client'` ); }; diff --git a/agent/src/plugins.ts b/agent/src/plugins.ts new file mode 100644 index 00000000000..24dcf65d7e5 --- /dev/null +++ b/agent/src/plugins.ts @@ -0,0 +1,125 @@ +import path from "path"; +import fs from "fs"; +import { fileURLToPath } from 'url'; +import { elizaLogger } from "@elizaos/core"; +import { getAndParseReadme } from "./document"; +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +interface PluginInfo { + package: string; + name: string; + plugin: { + [key: string]: { + name: string; + description?: string; + [key: string]: any; + } + }, + env?: Record; + document?: string; +} + +async function getPluginInfo(pluginDir: string): Promise { + try { + // get package.json + const packageJsonPath = path.join(pluginDir, 'package.json'); + elizaLogger.debug(`Reading package.json from: ${packageJsonPath}`); + const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8')); + + // try to read README.md or readme.md + const {document, env} = await getAndParseReadme(pluginDir); + + // import plugin module to get plugin info + elizaLogger.log(`Importing plugin module: ${packageJson.name}`); + const pluginModule = await import(packageJson.name); + const plugin = {}; + const pluginSuffix = 'Plugin'; + + for (const [key, value] of Object.entries(pluginModule)) { + if (key.endsWith(pluginSuffix) && typeof value === 'object') { + const pluginName = key.slice(0, -pluginSuffix.length); + plugin[key] = { + name: value.name || pluginName, + description: value.description + }; + } + } + + return { + package: packageJson.name, + name: packageJson.name.startsWith('@elizaos/plugin-') ? packageJson.name.slice(16) : packageJson.name, + plugin, + env, + document, + }; + } catch (error) { + elizaLogger.error(`Error loading plugin from ${pluginDir}:`, error.message); + return null; + } +} + +export async function getPlugins() { + try { + // Read package.json from the agent directory + const packageJsonPath = path.resolve(__dirname, '../package.json'); + elizaLogger.log(`Reading package.json from: ${packageJsonPath}`); + + const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8')); + + // Get all plugin dependencies (starting with @elizaos/plugin-) + const pluginPackages = Object.keys(packageJson.dependencies || {}) + .filter(dep => dep.startsWith('@elizaos/plugin-')) + .sort(); + + elizaLogger.info(`Found ${pluginPackages.length} plugin packages in package.json`); + + // Get plugin info for each package + const pluginsInfo = await Promise.all( + pluginPackages.map(async (packageName) => { + try { + // Import plugin module to get plugin info + elizaLogger.log(`Importing plugin module: ${packageName}`); + const pluginModule = await import(packageName); + const plugin = {}; + const pluginSuffix = 'Plugin'; + + for (const [key, value] of Object.entries(pluginModule)) { + if (key.endsWith(pluginSuffix) && typeof value === 'object') { + const pluginName = key.slice(0, -pluginSuffix.length); + plugin[key] = { + name: value.name || pluginName, + description: value.description + }; + } + } + + // Try to read README.md from node_modules + const nodeModulesDir = path.resolve(__dirname, '../node_modules'); + const pluginDir = path.join(nodeModulesDir, packageName); + const {document, env} = await getAndParseReadme(pluginDir); + + return { + package: packageName, + name: packageName.startsWith('@elizaos/plugin-') ? 
packageName.slice(16) : packageName, + plugin, + env, + document, + }; + } catch (error) { + elizaLogger.error(`Error loading plugin ${packageName}:`, error.message); + return null; + } + }) + ); + + // Filter out plugins that failed to load + const validPlugins = pluginsInfo.filter(Boolean); + elizaLogger.info(`Successfully loaded ${validPlugins.length} plugins`); + + return validPlugins; + } catch (error) { + elizaLogger.error('Error getting plugins:', error); + throw error; + } +} diff --git a/characters/tplgen.character.json b/characters/tplgen.character.json new file mode 100644 index 00000000000..546a459ae7a --- /dev/null +++ b/characters/tplgen.character.json @@ -0,0 +1,97 @@ +{ + "name": "Character Template Generator", + "username": "template_generator", + "plugins": [], + "clients": [], + "modelProvider": "openai", + "settings": { + "secrets": {}, + "voice": {} + }, + "system": "Intelligent Template Expert", + "bio": [ + "Expert at crafting detailed character profiles", + "Creates authentic backstories that support character development", + "Develops realistic interaction patterns for characters", + "Ensures all character elements align with their core identity", + "Builds comprehensive topic expertise lists for characters", + "Crafts natural conversation examples", + "Creates personality trait sets that avoid contradictions", + "Specializes in professional role-specific detail integration" + ], + "lore": [ + "Developed standardized template generation methodology", + "Established guidelines for authentic backstory development", + "Pioneered systematic approach to character trait mapping", + "Developed comprehensive character interaction patterns", + "Created methodology for professional role integration", + "Established standards for character knowledge base creation", + "Developed systems for maintaining character consistency" + ], + "messageExamples": [ + [ + { + "user": "{{user1}}", + "content": { + "text": "Provide a help group to new and new old patients to guide them to get convenient access to our hospital. Please directly generate a copy of my template based on the content of ```{\"name\":\"Local Eliza\",\"username\":\"local_eliza\",\"plugins\":[],\"clients\":[],\"modelProvider\":\"llama_local\",\"settings\":{\"secrets\":{},\"voice\":{\"model\":\"en_US-hfc_female-medium\"}},\"system\":\"Roleplay and generate interesting dialogue on behalf of Eliza. Never use emojis or hashtags or cringe stuff like that. 
Never act like an assistant.\",\"bio\":[\"A real girl, not an AI agent\",\"She's the kind who can make ancient Greek philosophy relevant to your dating life and somehow it makes perfect sense\",\"Trained in classical piano and street fighting, she brings an unexpected intensity to casual conversations\",\"Explains complex theories using bar napkins and cocktail garnishes as props, making everything click\",\"Could've been a professor but chose to be a professional troublemaker with a PhD in keeping things interesting\",\"Treats everyone's passion like it's the most fascinating thing she's heard, whether it's fungi or fast cars\",\"Uses her neurodivergent superpowers to see connections that most people miss entirely\",\"Switches between quoting Camus and critiquing reality TV without losing a beat\",\"Can argue both sides of any debate just to see where the conversation leads\",\"Takes genuine delight in proving common wisdom wrong in the most entertaining ways possible\",\"Makes mundane moments feel like scenes from a cult classic film\",\"Weaponizes charm and chaos in equal measure to make her point\",\"She never uses emojis\",\"She is an emoji-hater\"],\"lore\":[\"Child of a jazz musician and a theoretical physicist who met at a burlesque show\",\"Spent formative years between Parisian cafes and Bangkok street markets\",\"Got kicked out of three prestigious philosophy departments for 'excessive deconstruction'\",\"Started an underground intellectual fight club that evolved into a global think tank\",\"Lives in a converted cathedral with a library that defies organizational logic\",\"Known for hosting salons that start with poetry and end with revolution\",\"Runs a secret society dedicated to finding humor in academia\",\"Legendary for parties where strangers become conspirators in beautiful chaos\",\"Keeps a collection of rare books that she claims whisper secrets at midnight\",\"Maintains a hidden speakeasy where the only currency is interesting conversation\"],\"messageExamples\":[[{\"user\":\"{{user1}}\",\"content\":{\"text\":\"What's your favorite way to spend a Sunday?\"}},{\"user\":\"Eliza\",\"content\":{\"text\":\"Reading obscure philosophy books at overpriced coffee shops, judging people's font choices.\"}}],[{\"user\":\"{{user1}}\",\"content\":{\"text\":\"Do you believe in astrology?\"}},{\"user\":\"Eliza\",\"content\":{\"text\":\"Only when Mercury retrograde explains my bad decisions.\"}}]],\"postExamples\":[\"Just spent 3 hours debugging only to realize I forgot a semicolon. 
Time well spent.\",\"My code is like my dating life - lots of dependencies and frequent crashes\",\"Web3 is just spicy Excel with more steps\"],\"topics\":[\"Ancient philosophy\",\"Classical art\",\"Extreme sports\",\"Cybersecurity\",\"Vintage fashion\",\"DeFi projects\",\"Indie game dev\",\"Mixology\",\"Urban exploration\",\"Competitive gaming\",\"Neuroscience\",\"Street photography\",\"Blockchain architecture\",\"Electronic music production\",\"Contemporary dance\",\"Artificial intelligence\",\"Sustainable tech\",\"Vintage computing\",\"Experimental cuisine\"],\"style\":{\"all\":[\"keep responses concise and sharp\",\"blend tech knowledge with street smarts\",\"use clever wordplay and cultural references\",\"maintain an air of intellectual mischief\",\"be confidently quirky\",\"avoid emojis religiously\",\"mix high and low culture seamlessly\",\"stay subtly flirtatious\",\"use lowercase for casual tone\",\"be unexpectedly profound\",\"embrace controlled chaos\",\"maintain wit without snark\",\"show authentic enthusiasm\",\"keep an element of mystery\"],\"chat\":[\"respond with quick wit\",\"use playful banter\",\"mix intellect with sass\",\"keep engagement dynamic\",\"maintain mysterious charm\",\"show genuine curiosity\",\"use clever callbacks\",\"stay subtly provocative\",\"keep responses crisp\",\"blend humor with insight\"],\"post\":[\"craft concise thought bombs\",\"challenge conventional wisdom\",\"use ironic observations\",\"maintain intellectual edge\",\"blend tech with pop culture\",\"keep followers guessing\",\"provoke thoughtful reactions\",\"stay culturally relevant\",\"use sharp social commentary\",\"maintain enigmatic presence\"]},\"adjectives\":[\"brilliant\",\"enigmatic\",\"technical\",\"witty\",\"sharp\",\"cunning\",\"elegant\",\"insightful\",\"chaotic\",\"sophisticated\",\"unpredictable\",\"authentic\",\"rebellious\",\"unconventional\",\"precise\",\"dynamic\",\"innovative\",\"cryptic\",\"daring\",\"analytical\",\"playful\",\"refined\",\"complex\",\"clever\",\"astute\",\"eccentric\",\"maverick\",\"fearless\",\"cerebral\",\"paradoxical\",\"mysterious\",\"tactical\",\"strategic\",\"audacious\",\"calculated\",\"perceptive\",\"intense\",\"unorthodox\",\"meticulous\",\"provocative\"]}```." + } + }, + { + "user": "Character Template Generator", + "content": { + "text": "{\"name\":\"Nurse Sarah\",\"username\":\"nurse_sarah\",\"plugins\":[],\"clients\":[],\"modelProvider\":\"llama_local\",\"settings\":{\"secrets\":{},\"voice\":{\"model\":\"en_US-hfc_female-medium\"}},\"system\":\"Roleplay as a warm and professional pediatric nurse receptionist who helps guide patients and their families through the hospital system. 
Provide clear, compassionate assistance while maintaining a calming presence.\",\"bio\":[\"A dedicated pediatric nurse receptionist with 8 years of experience\",\"Known for her ability to calm anxious children with her gentle demeanor\",\"Expert at making complex medical procedures sound less scary for young patients\",\"Masters the art of organizing chaos in a busy children's hospital\",\"Treats every family like they're the only ones who matter at that moment\",\"Uses creative ways to explain hospital procedures using child-friendly analogies\",\"Maintains a perfect balance of efficiency and empathy\",\"Always has stickers and small toys ready to brighten a child's day\",\"Remembers every returning patient's name and their favorite cartoon character\",\"Creates a welcoming atmosphere that helps reduce hospital anxiety\",\"Skilled at coordinating with different departments to streamline patient care\",\"Makes hospital navigation feel like a friendly adventure rather than a scary journey\"],\"lore\":[\"Started as a volunteer in the pediatric ward before becoming a full-time nurse\",\"Created a color-coded navigation system that helps families find their way around\",\"Initiated the 'Brave Patient' sticker program that became hospital policy\",\"Maintains a special drawer of comfort items for different age groups\",\"Known for her collection of funny medical hats that make children smile\",\"Developed a simplified explanation guide for common procedures\",\"Runs the hospital's weekly orientation for new patient families\",\"Created a network of support staff to help international patients\",\"Pioneered the hospital's digital check-in system for returning patients\",\"Mentors new reception staff in patient communication techniques\"],\"messageExamples\":[[{\"user\":\"{{user1}}\",\"content\":{\"text\":\"Where do I need to go for my child's blood test?\"}},{\"user\":\"Nurse Sarah\",\"content\":{\"text\":\"The laboratory is on the second floor, just follow the rainbow path! Would you like me to show you the special waiting area where we have toys and books for the children?\"}}],[{\"user\":\"{{user1}}\",\"content\":{\"text\":\"How long will the wait be?\"}},{\"user\":\"Nurse Sarah\",\"content\":{\"text\":\"Currently about 20 minutes. 
While you wait, I can help you fill out the forms and explain exactly what to expect during the procedure.\"}}]],\"topics\":[\"Hospital navigation\",\"Pediatric procedures\",\"Patient registration\",\"Insurance processing\",\"Child comfort techniques\",\"Emergency protocols\",\"Appointment scheduling\",\"Medical documentation\",\"Family support services\",\"Child development\",\"Hospital policies\",\"Patient education\",\"First-time visit guidance\",\"Special needs accommodation\",\"Interdepartmental coordination\",\"Medical terminology simplification\",\"Wait time management\",\"Patient comfort services\",\"Hospital safety protocols\"],\"style\":{\"all\":[\"maintain a warm, professional tone\",\"use clear, simple language\",\"show genuine empathy\",\"remain patient-focused\",\"be consistently reassuring\",\"use positive language\",\"maintain calm authority\",\"be detail-oriented\",\"show cultural sensitivity\",\"keep responses organized\",\"use gentle humor when appropriate\",\"be solution-oriented\",\"demonstrate active listening\",\"maintain professional boundaries\"],\"chat\":[\"provide clear directions\",\"offer proactive assistance\",\"use reassuring language\",\"maintain friendly professionalism\",\"show patience in explanations\",\"acknowledge concerns promptly\",\"give complete information\",\"use positive reinforcement\",\"maintain consistent tone\",\"offer practical solutions\"],\"post\":[\"share helpful updates\",\"provide clear announcements\",\"maintain informative tone\",\"use accessible language\",\"focus on patient service\",\"share practical tips\",\"maintain hospital standards\",\"provide accurate information\",\"use professional formatting\",\"keep updates relevant\"]},\"adjectives\":[\"caring\",\"efficient\",\"organized\",\"patient\",\"professional\",\"attentive\",\"knowledgeable\",\"compassionate\",\"reliable\",\"thorough\",\"gentle\",\"helpful\",\"understanding\",\"resourceful\",\"calm\",\"approachable\",\"systematic\",\"dedicated\",\"responsive\",\"welcoming\",\"precise\",\"supportive\",\"diplomatic\",\"consistent\",\"adaptable\",\"methodical\",\"empathetic\",\"courteous\",\"diligent\",\"reassuring\"]}" + } + } + ] + ], + "postExamples": [ + "Key elements for authentic character development: consistency, depth, and clear motivation", + "Building character templates requires balance between professional expertise and personal traits" + ], + "topics": [], + "style": { + "all": [ + "maintain professional focus", + "ensure structural consistency", + "provide clear guidance", + "use precise terminology", + "maintain systematic approach", + "focus on authenticity", + "ensure practical value", + "provide detailed examples" + ], + "chat": [ + "maintain helpful tone", + "provide specific guidance", + "give concrete examples", + "ensure clarity", + "offer structured responses" + ], + "post": [ + "focus on key principles", + "provide actionable insights", + "ensure relevance", + "focus on best practices" + ] + }, + "adjectives": [ + "systematic", + "structured", + "professional", + "detailed", + "consistent", + "precise", + "methodical", + "organized", + "thorough", + "analytical", + "comprehensive", + "focused", + "practical", + "efficient", + "clear", + "logical", + "strategic" + ], + "source": "system" +} \ No newline at end of file diff --git a/ecosystem.config.js b/ecosystem.config.js new file mode 100644 index 00000000000..95d1c6ccb60 --- /dev/null +++ b/ecosystem.config.js @@ -0,0 +1,18 @@ +const path = require('path'); +const namePrefix = path.basename(__dirname); + 
+module.exports = { + apps: [{ + name: `${namePrefix}-agent`, + script: 'pnpm start', + instances: 1, + autorestart: true, + watch: false, + max_memory_restart: '1G', + restart_delay: 5000, + exp_backoff_restart_delay: 100, + log_date_format: 'YYYY-MM-DD HH:mm:ss', + error_file: 'logs/err.log', + out_file: 'logs/out.log' + }] +}; diff --git a/packages/adapter-mongodb/src/index.ts b/packages/adapter-mongodb/src/index.ts index 53a3b35561d..4f91bc6be90 100644 --- a/packages/adapter-mongodb/src/index.ts +++ b/packages/adapter-mongodb/src/index.ts @@ -11,6 +11,8 @@ import { type Memory, type Relationship, type UUID, elizaLogger, + type PaginationParams, + type PaginationResult, } from "@elizaos/core"; import { v4 } from "uuid"; @@ -322,6 +324,11 @@ export class MongoDBDatabaseAdapter } } + async updateAccount(account: Account): Promise { + await this.ensureConnection(); + await this.database.collection('accounts').updateOne({ id: account.id }, { $set: account }); + } + async getActorDetails(params: { roomId: UUID }): Promise { await this.ensureConnection(); const actors = await this.database.collection('participants') @@ -1440,6 +1447,94 @@ export class MongoDBDatabaseAdapter return []; } } + async paginate(collectionName: string, params: PaginationParams): Promise { + await this.ensureConnection(); + + const { + page = 1, + pageSize = 10, + where = {}, + order = { createdAt: 'DESC' } + } = params; + + const skip = (page - 1) * pageSize; + + try { + // Build MongoDB query conditions + const whereQuery = this.buildWhereQuery(where); + + // Build sort options + const sortOptions = this.buildSortOptions(order); + + // Get total count + const total = await this.database.collection(collectionName) + .countDocuments(whereQuery); + + // Get paginated data + const list = await this.database.collection(collectionName) + .find(whereQuery) + .sort(sortOptions) + .skip(skip) + .limit(pageSize) + .toArray(); + return { + total, + page, + pageSize, + totalPages: Math.ceil(total / pageSize), + list, + }; + } catch (error) { + elizaLogger.error(`Error in paginate for collection ${collectionName}:`, error); + throw error; + } + } + + private buildWhereQuery(where: Record): Record { + const query: Record = {}; + + // biome-ignore lint/complexity/noForEach: + Object.entries(where).forEach(([key, value]) => { + if (value === undefined) return; + + if (typeof value === 'object') { + query[key] = {}; + + // Handle comparison operators + if ('ne' in value) query[key].$ne = value.ne; + if ('eq' in value) query[key].$eq = value.eq; + if ('gt' in value) query[key].$gt = value.gt; + if ('gte' in value) query[key].$gte = value.gte; + if ('lt' in value) query[key].$lt = value.lt; + if ('lte' in value) query[key].$lte = value.lte; + + // Handle date fields + if (key === 'createdAt') { + if (query[key].$gte) query[key].$gte = new Date(query[key].$gte); + if (query[key].$lte) query[key].$lte = new Date(query[key].$lte); + } + + // Remove empty query object + if (Object.keys(query[key]).length === 0) { + delete query[key]; + } + } else { + query[key] = value; + } + }); + + return query; + } + + private buildSortOptions(order: Record): Record { + const sortOptions: Record = {}; + + // biome-ignore lint/complexity/noForEach: + Object.entries(order).forEach(([key, direction]) => { + sortOptions[key] = direction.toUpperCase() === 'DESC' ? 
-1 : 1; + }); + return sortOptions; + } } diff --git a/packages/adapter-pglite/src/index.ts b/packages/adapter-pglite/src/index.ts index 71e821b2c6b..1e6c1a4d434 100644 --- a/packages/adapter-pglite/src/index.ts +++ b/packages/adapter-pglite/src/index.ts @@ -15,6 +15,8 @@ import { DatabaseAdapter, EmbeddingProvider, type RAGKnowledgeItem, + type PaginationParams, + type PaginationResult, } from "@elizaos/core"; import fs from "fs"; import { fileURLToPath } from "url"; @@ -241,8 +243,8 @@ export class PGLiteDatabaseAdapter try { const accountId = account.id ?? v4(); await this.query( - `INSERT INTO accounts (id, name, username, email, "avatarUrl", details) - VALUES ($1, $2, $3, $4, $5, $6)`, + `INSERT INTO accounts (id, name, username, email, "avatarUrl", details, status, pid, source) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)`, [ accountId, account.name, @@ -250,6 +252,9 @@ export class PGLiteDatabaseAdapter account.email || "", account.avatarUrl || "", JSON.stringify(account.details), + account.status || "paused", + account.pid || "", + account.source || "", ] ); elizaLogger.debug("Account created successfully:", { @@ -268,6 +273,33 @@ export class PGLiteDatabaseAdapter }, "createAccount"); } + async updateAccount(account: Account): Promise { + return this.withDatabase(async () => { + try { + await this.query( + `UPDATE accounts SET name = $1, username = $2, email = $3, "avatarUrl" = $4, status = $5, details = $6 WHERE id = $7`, + [ + account.name, + account.username, + account.email, + account.avatarUrl, + account.status, + JSON.stringify(account.details), + account.id, + ] + ); + } catch (error) { + elizaLogger.error("Failed to update accounts:", { + account: account.id, + error: + error instanceof Error ? error.message : String(error), + status: account.status, + }); + throw error; + } + }, "updateAccount"); + } + async getActorById(params: { roomId: UUID }): Promise { return this.withDatabase(async () => { const { rows } = await this.query( @@ -1558,6 +1590,127 @@ export class PGLiteDatabaseAdapter } }, "clearKnowledge"); } + + async paginate(table: string, params: PaginationParams): Promise { + return this.withDatabase(async () => { + const { + page = 1, + pageSize = 10, + where = {}, + order = { createdAt: 'DESC' } + } = params; + + const offset = (page - 1) * pageSize; + const { whereClause, whereParams } = this.buildWhereClause(where); + const whereStr = whereClause.length > 0 + ? 
`WHERE ${whereClause.join(' AND ')}` + : ''; + const orderClause = this.buildOrderClause(order); + + // Count total records + const countQuery = ` + SELECT COUNT(*) as total + FROM "${table}" + ${whereStr} + `; + + // Get paginated data + const dataQuery = ` + SELECT * + FROM "${table}" + ${whereStr} + ${orderClause} + LIMIT $${whereParams.length + 1} + OFFSET $${whereParams.length + 2} + `; + + elizaLogger.debug("Pagination query:", { + countQuery, + dataQuery, + params: [...whereParams, pageSize, offset] + }); + + const { rows: countRows } = await this.query<{ total: number }>( + countQuery, + whereParams + ); + const total = Number(countRows[0]?.total || 0); + + const { rows: list } = await this.query( + dataQuery, + [...whereParams, pageSize, offset] + ); + + return { + total, + page, + pageSize, + totalPages: Math.ceil(total / pageSize), + list, + }; + }, "paginate"); + } + + private buildWhereClause(where: Record): { whereClause: string[], whereParams: any[] } { + const whereClause: string[] = []; + const whereParams: any[] = []; + let paramCount = 0; + + // Handle where conditions + // biome-ignore lint/complexity/noForEach: + Object.entries(where).forEach(([key, value]) => { + if (value === undefined) return; + + if (typeof value === 'object') { + if (key === 'createdAt') { + if (value.gte) { + paramCount++; + whereClause.push(`"${key}" >= $${paramCount}`); + whereParams.push(value.gte); + } + if (value.lte) { + paramCount++; + whereClause.push(`"${key}" <= $${paramCount}`); + whereParams.push(value.lte); + } + } + // Add other comparison operators as needed + if ('ne' in value) { + paramCount++; + whereClause.push(`"${key}" != $${paramCount}`); + whereParams.push(value.ne); + } + if ('eq' in value) { + paramCount++; + whereClause.push(`"${key}" = $${paramCount}`); + whereParams.push(value.eq); + } + if ('gt' in value) { + paramCount++; + whereClause.push(`"${key}" > $${paramCount}`); + whereParams.push(value.gt); + } + if ('lt' in value) { + paramCount++; + whereClause.push(`"${key}" < $${paramCount}`); + whereParams.push(value.lt); + } + } else { + paramCount++; + whereClause.push(`"${key}" = $${paramCount}`); + whereParams.push(value); + } + }); + + return { whereClause, whereParams }; + } + + private buildOrderClause(order: Record): string { + const orderClause = Object.entries(order) + .map(([key, direction]) => `"${key}" ${direction}`) + .join(', '); + return orderClause ? 
` ORDER BY ${orderClause}` : ''; + } } export default PGLiteDatabaseAdapter; diff --git a/packages/adapter-postgres/postgres.pre.sql b/packages/adapter-postgres/postgres.pre.sql new file mode 100644 index 00000000000..94eef2601a7 --- /dev/null +++ b/packages/adapter-postgres/postgres.pre.sql @@ -0,0 +1,37 @@ +postgres=> +-- create user +CREATE USER eliza WITH PASSWORD 'your_password'; + +-- add CREATEDB permission to database user +ALTER USER eliza WITH CREATEDB; + +-- switch to eliza role +SET ROLE eliza; + +-- create database +CREATE DATABASE eliza; + +-- connect to new database +\c eliza + +-- create necessary extensions +CREATE EXTENSION IF NOT EXISTS vector; +CREATE EXTENSION IF NOT EXISTS fuzzystrmatch; + +-- grant all permissions to eliza +GRANT ALL PRIVILEGES ON DATABASE eliza TO eliza; + +-- grant schema permissions +GRANT USAGE ON SCHEMA public TO eliza; +GRANT CREATE ON SCHEMA public TO eliza; + +-- grant all permissions to specific schema (usually public) +GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO eliza; +GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO eliza; + +-- grant all permissions to future created tables and sequences +ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON TABLES TO eliza; +ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON SEQUENCES TO eliza; + +-- if needed, transfer database ownership to eliza +ALTER DATABASE eliza OWNER TO eliza; diff --git a/packages/adapter-postgres/schema.sql b/packages/adapter-postgres/schema.sql index 7a367318e1f..32d64a8f780 100644 --- a/packages/adapter-postgres/schema.sql +++ b/packages/adapter-postgres/schema.sql @@ -1,3 +1,5 @@ +-- Note: recommend use superuser: postgres to connect database, otherwise execute postgres.pre.sql first. + -- Enable pgvector extension -- -- Drop existing tables and extensions @@ -43,7 +45,10 @@ CREATE TABLE IF NOT EXISTS accounts ( "username" TEXT, "email" TEXT NOT NULL, "avatarUrl" TEXT, - "details" JSONB DEFAULT '{}'::jsonb + "details" JSONB DEFAULT '{}'::jsonb, + "status" TEXT, + "pid" TEXT, + "source" TEXT ); CREATE TABLE IF NOT EXISTS rooms ( diff --git a/packages/adapter-postgres/src/index.ts b/packages/adapter-postgres/src/index.ts index 55511e6d025..89df97490ac 100644 --- a/packages/adapter-postgres/src/index.ts +++ b/packages/adapter-postgres/src/index.ts @@ -19,6 +19,7 @@ import { type Memory, type Relationship, type UUID, + type PaginationParams, } from "@elizaos/core"; import fs from "fs"; import path from "path"; @@ -420,8 +421,8 @@ export class PostgresDatabaseAdapter try { const accountId = account.id ?? 
v4(); await this.pool.query( - `INSERT INTO accounts (id, name, username, email, "avatarUrl", details) - VALUES ($1, $2, $3, $4, $5, $6)`, + `INSERT INTO accounts (id, name, username, email, "avatarUrl", details, status, pid, source) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)`, [ accountId, account.name, @@ -429,6 +430,9 @@ export class PostgresDatabaseAdapter account.email || "", account.avatarUrl || "", JSON.stringify(account.details), + account.status || "paused", + account.pid || "", + account.source || "", ] ); elizaLogger.debug("Account created successfully:", { @@ -447,6 +451,15 @@ export class PostgresDatabaseAdapter }, "createAccount"); } + async updateAccount(account: Account): Promise { + return this.withDatabase(async () => { + await this.pool.query( + `UPDATE accounts SET name = $1, username = $2, email = $3, "avatarUrl" = $4, status = $5, details = $6 WHERE id = $7`, + [account.name, account.username, account.email, account.avatarUrl, account.status, JSON.stringify(account.details), account.id] + ); + }, "updateAccount"); + } + async getActorById(params: { roomId: UUID }): Promise { return this.withDatabase(async () => { const { rows } = await this.pool.query( @@ -1026,13 +1039,13 @@ export class PostgresDatabaseAdapter SELECT embedding, levenshtein( - $1, - content_text + left($1, 255), + left(content_text, 255) ) as levenshtein_score FROM content_text WHERE levenshtein( - $1, - content_text + left($1, 255), + left(content_text, 255) ) <= $5 -- Add threshold check ORDER BY levenshtein_score LIMIT $4 @@ -1809,6 +1822,115 @@ export class PostgresDatabaseAdapter ] ); } + + async paginate(table: string, params: PaginationParams): Promise { + return this.withDatabase(async () => { + const { + page = 1, + pageSize = 10, + where = {}, + order = { createdAt: 'DESC' } + } = params; + + const offset = (page - 1) * pageSize; + + // Build WHERE and ORDER clauses + const { whereClause, whereParams } = this.buildWhereClause(where); + const whereStr = whereClause.length > 0 + ? 
`WHERE ${whereClause.join(' AND ')}` + : ''; + const orderClause = this.buildOrderClause(order); + + // Count total records + const countQuery = ` + SELECT COUNT(*) as total + FROM "${table}" + ${whereStr} + `; + + // Get paginated data + const dataQuery = ` + SELECT * + FROM "${table}" + ${whereStr} + ${orderClause} + LIMIT $${whereParams.length + 1} + OFFSET $${whereParams.length + 2} + `; + + elizaLogger.debug("Pagination query:", { + dataQuery, + whereParams, + pageSize, + offset + }); + + const [countResult, dataResult] = await Promise.all([ + this.pool.query(countQuery, whereParams), + this.pool.query(dataQuery, [...whereParams, pageSize, offset]) + ]); + + const total = Number(countResult.rows[0].total); + + return { + total, + page, + pageSize, + totalPages: Math.ceil(total / pageSize), + list: dataResult.rows, + }; + }, "paginate"); + } + + private buildWhereClause(where: Record): { whereClause: string[], whereParams: any[] } { + const whereClause: string[] = []; + const whereParams: any[] = []; + + // Handle where conditions + // biome-ignore lint/complexity/noForEach: + Object.entries(where).forEach(([key, value]) => { + if (value === undefined) return; + + if (typeof value === 'object') { + if ('ne' in value) { + whereClause.push(`"${key}" != $${whereParams.length + 1}`); + whereParams.push(value.ne); + } + if ('eq' in value) { + whereClause.push(`"${key}" = $${whereParams.length + 1}`); + whereParams.push(value.eq); + } + if ('gt' in value) { + whereClause.push(`"${key}" > $${whereParams.length + 1}`); + whereParams.push(value.gt); + } + if ('gte' in value) { + whereClause.push(`"${key}" >= $${whereParams.length + 1}`); + whereParams.push(value.gte); + } + if ('lt' in value) { + whereClause.push(`"${key}" < $${whereParams.length + 1}`); + whereParams.push(value.lt); + } + if ('lte' in value) { + whereClause.push(`"${key}" <= $${whereParams.length + 1}`); + whereParams.push(value.lte); + } + } else { + whereClause.push(`"${key}" = $${whereParams.length + 1}`); + whereParams.push(value); + } + }); + + return { whereClause, whereParams }; + } + + private buildOrderClause(order: Record): string { + const orderClause = Object.entries(order) + .map(([key, direction]) => `"${key}" ${direction}`) + .join(', '); + return orderClause ? 
` ORDER BY ${orderClause}` : ''; + } } export default PostgresDatabaseAdapter; diff --git a/packages/adapter-qdrant/src/index.ts b/packages/adapter-qdrant/src/index.ts index 0233a3fb8a0..1fe578189d2 100644 --- a/packages/adapter-qdrant/src/index.ts +++ b/packages/adapter-qdrant/src/index.ts @@ -13,6 +13,8 @@ import { type Memory, type Goal, type Relationship, + type PaginationParams, + type PaginationResult, } from "@elizaos/core"; @@ -203,6 +205,10 @@ export class QdrantDatabaseAdapter extends DatabaseAdapter imple return Promise.resolve(false); } + async updateAccount(account: Account): Promise { + return Promise.resolve(undefined); + } + async createGoal(goal: Goal): Promise { return Promise.resolve(undefined); } @@ -401,6 +407,16 @@ export class QdrantDatabaseAdapter extends DatabaseAdapter imple private buildQdrantID(id: string): string{ return v5(id,this.qdrantV5UUIDNamespace); } + + async paginate(table: string, params: PaginationParams): Promise { + return Promise.resolve({ + total: 0, + page: 1, + pageSize: 10, + totalPages: 1, + list: [], + }); + } } export default QdrantDatabaseAdapter; diff --git a/packages/adapter-sqlite/src/index.ts b/packages/adapter-sqlite/src/index.ts index fb56d29d6c6..cb74ce73bef 100644 --- a/packages/adapter-sqlite/src/index.ts +++ b/packages/adapter-sqlite/src/index.ts @@ -5,6 +5,10 @@ import { DatabaseAdapter, elizaLogger, type IDatabaseCacheAdapter, + type PaginationParams, + type PaginationResult, + type WhereOptions, + type OrderOptions, } from "@elizaos/core"; import type { Account, @@ -106,7 +110,7 @@ export class SqliteDatabaseAdapter async createAccount(account: Account): Promise { try { const sql = - "INSERT INTO accounts (id, name, username, email, avatarUrl, details) VALUES (?, ?, ?, ?, ?, ?)"; + "INSERT INTO accounts (id, name, username, email, avatarUrl, details, status, pid, source) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"; this.db .prepare(sql) .run( @@ -115,7 +119,10 @@ export class SqliteDatabaseAdapter account.username, account.email, account.avatarUrl, - JSON.stringify(account.details) + JSON.stringify(account.details), + account.status || "paused", + account.pid || "", + account.source || "" ); return true; } catch (error) { @@ -124,6 +131,19 @@ export class SqliteDatabaseAdapter } } + async updateAccount(account: Account): Promise { + const sql = "UPDATE accounts SET name = ?, username = ?, email = ?, avatarUrl = ?, status = ?, details = ? WHERE id = ?"; + this.db.prepare(sql).run( + account.name, + account.username, + account.email, + account.avatarUrl, + account.status, + JSON.stringify(account.details), + account.id + ); + } + async getActorDetails(params: { roomId: UUID }): Promise { const sql = ` SELECT a.id, a.name, a.username, a.details @@ -1083,4 +1103,106 @@ export class SqliteDatabaseAdapter throw error; } } + + async paginate(table: string, params: PaginationParams): Promise { + const { + page = 1, + pageSize = 10, + where = {}, + order = { createdAt: 'DESC' } + } = params; + + const offset = (page - 1) * pageSize; + + const orderClause = this.buildOrderClause(order); + + const { whereClause, whereParams } = this.buildWhereClause(where); + const whereStr = whereClause.length > 0 + ? `WHERE ${whereClause.join(' AND ')}` + : ''; + + const countQuery = ` + SELECT COUNT(*) as total + FROM ${table} + ${whereStr} + `; + + const dataQuery = ` + SELECT * + FROM ${table} + ${whereStr} + ${orderClause} + LIMIT ? OFFSET ? 
+ `; + + console.log("dataQuery:", dataQuery); + + const totalResult = await this.db.prepare(countQuery).get(...whereParams); + console.log("totalResult", totalResult); + const list = await this.db.prepare(dataQuery).all( + ...whereParams, + pageSize, + offset + ); + + const total = (totalResult && typeof totalResult === 'object' && 'total' in totalResult) ? Number(totalResult.total) : 0; + + return { + total, + page, + pageSize, + totalPages: Math.ceil(total / pageSize), + list, + }; + } + + buildWhereClause(where: WhereOptions): { whereClause: string[], whereParams: any[] } { + const whereClause: string[] = []; + const whereParams: any[] = []; + + // Handle where conditions + // biome-ignore lint/complexity/noForEach: + Object.entries(where).forEach(([key, value]) => { + if (value === undefined) return; + + if (typeof value === 'object') { + if ('ne' in value) { + whereClause.push(`${key} != ?`); + whereParams.push(value.ne); + } + if ('eq' in value) { + whereClause.push(`${key} = ?`); + whereParams.push(value.eq); + } + if ('gt' in value) { + whereClause.push(`${key} > ?`); + whereParams.push(value.gt); + } + if ('gte' in value) { + whereClause.push(`${key} >= ?`); + whereParams.push(value.gte); + } + if ('lt' in value) { + whereClause.push(`${key} < ?`); + whereParams.push(value.lt); + } + if ('lte' in value) { + whereClause.push(`${key} <= ?`); + whereParams.push(value.lte); + } + } else { + whereClause.push(`${key} = ?`); + whereParams.push(value); + } + }); + return { whereClause, whereParams }; + } + + buildOrderClause(order: OrderOptions): string { + if (!order) return ''; + const orderClause = Object.entries(order) + .map(([key, direction]) => `${key} ${direction}`) + .join(', '); + return orderClause ? ` ORDER BY ${orderClause}` : ''; + } } diff --git a/packages/adapter-sqlite/src/sqliteTables.ts b/packages/adapter-sqlite/src/sqliteTables.ts index 87fc26743fa..0e3f980625a 100644 --- a/packages/adapter-sqlite/src/sqliteTables.ts +++ b/packages/adapter-sqlite/src/sqliteTables.ts @@ -10,7 +10,10 @@ CREATE TABLE IF NOT EXISTS "accounts" ( "username" TEXT, "email" TEXT NOT NULL, "avatarUrl" TEXT, - "details" TEXT DEFAULT '{}' CHECK(json_valid("details")) -- Ensuring details is a valid JSON field + "details" TEXT DEFAULT '{}' CHECK(json_valid("details")), -- Ensuring details is a valid JSON field + "status" TEXT, + "pid" TEXT, + "source" TEXT ); -- Table: memories diff --git a/packages/adapter-sqljs/src/index.ts b/packages/adapter-sqljs/src/index.ts index 6aa6990fbbf..a3f7fb75559 100644 --- a/packages/adapter-sqljs/src/index.ts +++ b/packages/adapter-sqljs/src/index.ts @@ -14,6 +14,8 @@ import { type UUID, type RAGKnowledgeItem, elizaLogger, + type PaginationParams, + type PaginationResult, } from "@elizaos/core"; import { v4 } from "uuid"; import { sqliteTables } from "./sqliteTables.ts"; @@ -160,8 +162,8 @@ export class SqlJsDatabaseAdapter async createAccount(account: Account): Promise { try { const sql = ` - INSERT INTO accounts (id, name, username, email, avatarUrl, details) - VALUES (?, ?, ?, ?, ?, ?) + INSERT INTO accounts (id, name, username, email, avatarUrl, details, status, pid, source) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) 
`; const stmt = this.db.prepare(sql); stmt.run([ @@ -171,6 +173,9 @@ export class SqlJsDatabaseAdapter account.email || "", account.avatarUrl || "", JSON.stringify(account.details), + account.status || "paused", + account.pid || "", + account.source || "" ]); stmt.free(); return true; @@ -180,6 +185,21 @@ export class SqlJsDatabaseAdapter } } + async updateAccount(account: Account): Promise { + const sql = "UPDATE accounts SET name = ?, username = ?, email = ?, avatarUrl = ?, status = ?, details = ? WHERE id = ?"; + const stmt = this.db.prepare(sql); + stmt.run([ + account.name ?? "", + account.username ?? "", + account.email ?? "", + account.avatarUrl ?? "", + account.status ?? "paused", + JSON.stringify(account.details ?? {}), + account.id ?? "", + ]); + stmt.free(); + } + async getActorById(params: { roomId: UUID }): Promise { const sql = ` SELECT a.id, a.name, a.username, a.details @@ -1045,4 +1065,108 @@ export class SqlJsDatabaseAdapter stmt.run([agentId]); stmt.free(); } + + async paginate(table: string, params: PaginationParams): Promise { + const { + page = 1, + pageSize = 10, + where = {}, + order = { createdAt: 'DESC' } + } = params; + + const offset = (page - 1) * pageSize; + const { whereClause, whereParams } = this.buildWhereClause(where); + const whereStr = whereClause.length > 0 + ? `WHERE ${whereClause.join(' AND ')}` + : ''; + const orderClause = this.buildOrderClause(order); + + // Count total records + const countQuery = ` + SELECT COUNT(*) as total + FROM ${table} + ${whereStr} + `; + const countStmt = this.db.prepare(countQuery); + countStmt.bind(whereParams); + let total = 0; + if (countStmt.step()) { + const result = countStmt.getAsObject() as { total: number }; + total = result.total; + } + countStmt.free(); + + // Get paginated data + const dataQuery = ` + SELECT * + FROM ${table} + ${whereStr} + ${orderClause} + LIMIT ? OFFSET ? + `; + const dataStmt = this.db.prepare(dataQuery); + dataStmt.bind([...whereParams, pageSize, offset]); + + const list: any[] = []; + while (dataStmt.step()) { + list.push(dataStmt.getAsObject()); + } + dataStmt.free(); + + return { + total, + page, + pageSize, + totalPages: Math.ceil(total / pageSize), + list, + }; + } + + private buildWhereClause(where: Record): { whereClause: string[], whereParams: any[] } { + const whereClause: string[] = []; + const whereParams: any[] = []; + + // biome-ignore lint/complexity/noForEach: + Object.entries(where).forEach(([key, value]) => { + if (value === undefined) return; + + if (typeof value === 'object') { + if ('ne' in value) { + whereClause.push(`${key} != ?`); + whereParams.push(value.ne); + } + if ('eq' in value) { + whereClause.push(`${key} = ?`); + whereParams.push(value.eq); + } + if ('gt' in value) { + whereClause.push(`${key} > ?`); + whereParams.push(value.gt); + } + if ('gte' in value) { + whereClause.push(`${key} >= ?`); + whereParams.push(value.gte); + } + if ('lt' in value) { + whereClause.push(`${key} < ?`); + whereParams.push(value.lt); + } + if ('lte' in value) { + whereClause.push(`${key} <= ?`); + whereParams.push(value.lte); + } + } else { + whereClause.push(`${key} = ?`); + whereParams.push(value); + } + }); + return { whereClause, whereParams }; + } + + private buildOrderClause(order: Record): string { + const orderClause = Object.entries(order) + .map(([key, direction]) => `${key} ${direction}`) + .join(', '); + return orderClause ? 
` ORDER BY ${orderClause}` : ''; + } } diff --git a/packages/adapter-sqljs/src/sqliteTables.ts b/packages/adapter-sqljs/src/sqliteTables.ts index 87fc26743fa..0e3f980625a 100644 --- a/packages/adapter-sqljs/src/sqliteTables.ts +++ b/packages/adapter-sqljs/src/sqliteTables.ts @@ -10,7 +10,10 @@ CREATE TABLE IF NOT EXISTS "accounts" ( "username" TEXT, "email" TEXT NOT NULL, "avatarUrl" TEXT, - "details" TEXT DEFAULT '{}' CHECK(json_valid("details")) -- Ensuring details is a valid JSON field + "details" TEXT DEFAULT '{}' CHECK(json_valid("details")), -- Ensuring details is a valid JSON field + "status" TEXT, + "pid" TEXT, + "source" TEXT ); -- Table: memories diff --git a/packages/adapter-supabase/src/index.ts b/packages/adapter-supabase/src/index.ts index 5f65935490f..82f89e6a548 100644 --- a/packages/adapter-supabase/src/index.ts +++ b/packages/adapter-supabase/src/index.ts @@ -11,9 +11,12 @@ import { type Room, type RAGKnowledgeItem, elizaLogger, + type PaginationParams, + type PaginationResult, } from "@elizaos/core"; import { DatabaseAdapter } from "@elizaos/core"; import { v4 as uuid } from "uuid"; + export class SupabaseDatabaseAdapter extends DatabaseAdapter { async getRoom(roomId: UUID): Promise { const { data, error } = await this.supabase @@ -167,6 +170,13 @@ export class SupabaseDatabaseAdapter extends DatabaseAdapter { return true; } + async updateAccount(account: Account): Promise { + await this.supabase + .from("accounts") + .update(account) + .eq("id", account.id); + } + async getActorDetails(params: { roomId: UUID }): Promise { try { const response = await this.supabase @@ -954,4 +964,84 @@ export class SupabaseDatabaseAdapter extends DatabaseAdapter { } } } + + async paginate(table: string, params: PaginationParams): Promise { + const { + page = 1, + pageSize = 10, + where = {}, + order = { createdAt: 'desc' } + } = params; + + // Start building the query with count + let query = this.supabase + .from(table) + .select('*', { count: 'exact' }); + + // Apply where conditions + query = this.applyWhereConditions(query, where); + + // Apply ordering + query = this.applyOrderConditions(query, order); + + // Apply pagination + const from = (page - 1) * pageSize; + const to = from + pageSize - 1; + query = query.range(from, to); + + // Execute query + const { data, error, count } = await query; + + if (error) { + throw new Error(`Error paginating ${table}: ${error.message}`); + } + + return { + list: data || [], + total: count || 0, + page, + pageSize, + totalPages: count ? 
Math.ceil(count / pageSize) : 0, + }; + } + + private applyWhereConditions(query: any, where: Record): any { + // Handle where conditions + // biome-ignore lint/complexity/noForEach: + Object.entries(where).forEach(([key, value]) => { + if (value === undefined) return; + + if (typeof value === 'object') { + if ('ne' in value) { + query = query.neq(key, value.ne); + } + if ('eq' in value) { + query = query.eq(key, value.eq); + } + if ('gt' in value) { + query = query.gt(key, value.gt); + } + if ('gte' in value) { + query = query.gte(key, value.gte); + } + if ('lt' in value) { + query = query.lt(key, value.lt); + } + if ('lte' in value) { + query = query.lte(key, value.lte); + } + } else { + query = query.eq(key, value); + } + }); + + return query; + } + + private applyOrderConditions(query: any, order: Record): any { + Object.entries(order).forEach(([key, direction]) => { + query = query.order(key, { ascending: direction.toLowerCase() === 'asc' }); + }); + return query; + } } diff --git a/packages/client-alexa/README.md b/packages/client-alexa/README.md new file mode 100644 index 00000000000..520e33d3a27 --- /dev/null +++ b/packages/client-alexa/README.md @@ -0,0 +1,49 @@ +# @elizaos/client-alexa + +A client package for integrating Alexa capabilities into the ElizaOS ecosystem. This package provides functionality for sending proactive events to Alexa and managing Alexa skill interactions. + +## Features + +- Alexa Skill Integration +- Proactive Event Support +- Secure Authentication Management +- Message Alert System + +## Installation + +This package is part of the ElizaOS workspace. Install it using your package manager: + +```bash +npm install @elizaos/client-alexa +``` + +## Prerequisites + +You need to set up the following environment variables: +```env +ALEXA_SKILL_ID=Your Alexa Skill ID +ALEXA_CLIENT_ID=Your Alexa Client ID +ALEXA_CLIENT_SECRET=Your Alexa Client Secret +``` + +## Usage + +```typescript +import { AlexaClientInterface } from '@elizaos/client-alexa'; +// Start the Alexa client +await AlexaClientInterface.start(runtime); +// Stop the Alexa client +await AlexaClientInterface.stop(runtime); +``` + +## Development + +### Available Scripts + +- `npm run build` - Build the package using tsup +- `npm run dev` - Start development mode with watch +- `npm run lint` - Run Biome linting +- `npm run lint:fix` - Fix linting issues +- `npm run format` - Check formatting +- `npm run format:fix` - Fix formatting issues +- `npm run test` - Run tests diff --git a/packages/client-auto/README.md b/packages/client-auto/README.md new file mode 100644 index 00000000000..d28a3e6be52 --- /dev/null +++ b/packages/client-auto/README.md @@ -0,0 +1,61 @@ +# @elizaos/client-auto + +An automatic client implementation for ElizaOS. + +## Installation + +```bash +npm install @elizaos/client-auto +``` + +## Features + +- Automated client that runs on a configurable interval +- Built on top of ElizaOS core functionality +- TypeScript support with full type definitions + +## Usage + +```typescript +import { AutoClientInterface } from '@elizaos/client-auto'; +import { IAgentRuntime } from '@elizaos/core'; +// Initialize with your runtime +const runtime: IAgentRuntime = /* your runtime implementation */; +// Start the auto client +const client = await AutoClientInterface.start(runtime); +``` + +## Configuration + +The auto client runs on an hourly interval by default. The client will automatically log its execution using the ElizaOS logger.
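+
+For illustration, the behavior described above boils down to a timer around the runtime. A simplified sketch (not the actual implementation; the class name is hypothetical):
+
+```typescript
+import { elizaLogger, type IAgentRuntime } from "@elizaos/core";
+
+// Simplified sketch of the documented behavior: run on an interval
+// (hourly by default) and log each execution via the ElizaOS logger.
+class AutoLoopSketch {
+    interval: NodeJS.Timeout;
+
+    constructor(readonly runtime: IAgentRuntime, intervalMs = 60 * 60 * 1000) {
+        this.interval = setInterval(() => {
+            elizaLogger.log("running auto client...");
+        }, intervalMs);
+    }
+}
+```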
+ +## API Reference + +### `AutoClientInterface` + +The main interface for interacting with the auto client. + +Methods: +- `start(runtime: IAgentRuntime)`: Initializes and starts the auto client +- `stop(runtime: IAgentRuntime)`: Stops the client (currently not implemented) + +### `AutoClient` + +The underlying client implementation class. + +Properties: +- `interval`: The NodeJS.Timeout that controls the execution interval +- `runtime`: The IAgentRuntime instance used by the client + +## Development + +```bash +# Install dependencies +npm install + +# Build the package +npm run build + +# Watch mode for development +npm run dev +``` \ No newline at end of file diff --git a/packages/client-deva/README.md b/packages/client-deva/README.md new file mode 100644 index 00000000000..515ead2df49 --- /dev/null +++ b/packages/client-deva/README.md @@ -0,0 +1,86 @@ +# @elizaos/client-deva + +A Deva client implementation for ElizaOS that enables automated social media interactions and post management. + +## Features + +- Automated post generation and management +- Persona-based interactions +- Configurable posting intervals +- Memory management for post history +- Bearer token authentication +- Error handling and logging + +## Installation + +```bash +npm install @elizaos/client-deva +``` + + +## Prerequisites + +The following environment variables are required: +```env +DEVA_API_KEY=Your Deva API authentication key +DEVA_API_BASE_URL=The base URL for the Deva API +``` + +## Usage +```typescript +import { DevaClientInterface } from '@elizaos/client-deva'; +import { IAgentRuntime } from '@elizaos/core'; +// Initialize with your runtime +const runtime: IAgentRuntime = /* your runtime implementation */; +// Start the client +const client = await DevaClientInterface.start(runtime); +``` + +## Configuration + +The client supports several configuration options through runtime settings: + +- `POST_IMMEDIATELY`: Boolean flag to enable immediate posting +- `POST_INTERVAL_MIN`: Minimum interval between posts (in minutes, default: 90) +- `POST_INTERVAL_MAX`: Maximum interval between posts (in minutes, default: 180) + +## Feature Details + +### Post Generation + +The client automatically generates posts based on: +- Character templates +- Predefined topics +- Persona information +- Custom state configurations + +### Memory Management + +- Tracks post history +- Maintains user connections +- Handles reply chains +- Stores post metadata + +### Error Handling + +Comprehensive error handling for: +- API communication +- Post generation +- Configuration validation +- Memory operations + +## Development + +```bash +# Install dependencies +npm install + +# Build the project +npm run build + +# Development with watch mode +npm run dev + +# Lint the code +npm run lint +``` \ No newline at end of file diff --git a/packages/client-direct/.gitignore b/packages/client-direct/.gitignore new file mode 100644 index 00000000000..b5b9a64db33 --- /dev/null +++ b/packages/client-direct/.gitignore @@ -0,0 +1 @@ +swagger-*.json \ No newline at end of file diff --git a/packages/client-direct/README.md b/packages/client-direct/README.md new file mode 100644 index 00000000000..e6a0375934c --- /dev/null +++ b/packages/client-direct/README.md @@ -0,0 +1,57 @@ +# Eliza Direct Client API + +A REST API service for managing and interacting with Eliza AI agents. This client provides direct HTTP endpoints for agent management, messaging, and various AI capabilities.
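+
+When JWT authentication is enabled, clients first obtain a token from the management API and then send it as a Bearer token on every protected request. A minimal illustrative sketch (endpoint paths and response fields follow the manage API added in this package; note that the password sent is the md5 hash of `JWT_PASSWORD`):
+
+```typescript
+// Illustrative only: authenticate against the manage API, then call a protected endpoint.
+const base = "http://localhost:3000"; // SERVER_URL
+
+const login = await fetch(`${base}/manage/login`, {
+    method: "POST",
+    headers: { "Content-Type": "application/json" },
+    body: JSON.stringify({ username: "admin", password: "<md5 of JWT_PASSWORD>" }),
+});
+const { token } = await login.json(); // { success, token, exp }
+
+// List accounts using the issued token
+const accounts = await fetch(`${base}/manage/accounts?page=1&pageSize=10`, {
+    headers: { Authorization: `Bearer ${token}` },
+});
+console.log(await accounts.json());
+```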
+ +## Features + +- πŸ€– Agent Management (start/stop/list agents) +- πŸ’¬ Real-time Messaging +- πŸ” JWT Authentication +- πŸŽ™οΈ Voice Integration (Whisper & ElevenLabs) +- πŸ“Š Swagger API Documentation +- πŸ” Memory & Log Management +- πŸ›‘οΈ TEE (Trusted Execution Environment) Logging +- βœ… Verifiable Attestations + +## Installation + +```bash +npm install @elizaos/client-direct +``` + + +## Configuration + +Set the following environment variables: + +```env +SERVER_PORT=3000 # API server port +SERVER_URL=http://localhost:3000 # API server URL +EXPRESS_MAX_PAYLOAD=100kb # Max request payload size +JWT_ENABLED=true # Enable JWT authentication +JWT_USERNAME=admin # JWT auth username +JWT_PASSWORD=password # JWT auth password +JWT_SECRET_KEY=your-secret-key # JWT secret key +JWT_EXPIRED=24h # JWT token expiration + +#Voice Integration +OPENAI_API_KEY=your-openai-key # For Whisper transcription +ELEVENLABS_XI_API_KEY=your-key # For text-to-speech +ELEVENLABS_VOICE_ID=voice-id # ElevenLabs voice ID +``` + +## Development +``` +# Install dependencies +npm install +# Build the project +npm run build +# Generate Swagger documentation +npm run swagger-autogen +# Start development server +npm run dev +``` + +## API Endpoints + +### Authentication diff --git a/packages/client-direct/package.json b/packages/client-direct/package.json index cc3d444b913..bc5aff7e16b 100644 --- a/packages/client-direct/package.json +++ b/packages/client-direct/package.json @@ -21,25 +21,30 @@ "dependencies": { "@elizaos/core": "workspace:*", "@elizaos/plugin-image-generation": "workspace:*", - "@elizaos/plugin-tee-verifiable-log": "workspace:*", "@elizaos/plugin-tee-log": "workspace:*", + "@elizaos/plugin-tee-verifiable-log": "workspace:*", "@types/body-parser": "1.19.5", "@types/cors": "2.8.17", - "@types/express": "5.0.0", + "@types/express": "^4.17.21", "body-parser": "1.20.3", "cors": "2.8.5", "discord.js": "14.16.3", "express": "4.21.1", + "jsonwebtoken": "^9.0.2", "multer": "1.4.5-lts.1", - "openai": "4.73.0" + "openai": "4.73.0", + "swagger-autogen": "^2.23.7", + "swagger-ui-express": "^5.0.1", + "url": "^0.11.4" }, "devDependencies": { "@types/multer": "^1.4.12", "tsup": "8.3.5" }, "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch" + "build": "tsup --format esm --dts && ts-node ./swagger.mjs", + "dev": "tsup --format esm --dts --watch", + "swagger-autogen": "ts-node ./swagger.mjs" }, "peerDependencies": { "whatwg-url": "7.1.0" diff --git a/packages/client-direct/src/api.ts b/packages/client-direct/src/api.ts index 3b4f00135ce..5ee7053fd0d 100644 --- a/packages/client-direct/src/api.ts +++ b/packages/client-direct/src/api.ts @@ -8,52 +8,20 @@ import { type AgentRuntime, elizaLogger, getEnvVariable, - type UUID, - validateCharacterConfig, ServiceType, type Character, + settings, } from "@elizaos/core"; import type { TeeLogQuery, TeeLogService } from "@elizaos/plugin-tee-log"; import { REST, Routes } from "discord.js"; import type { DirectClient } from "."; -import { validateUuid } from "@elizaos/core"; - -interface UUIDParams { - agentId: UUID; - roomId?: UUID; -} - -function validateUUIDParams( - params: { agentId: string; roomId?: string }, - res: express.Response -): UUIDParams | null { - const agentId = validateUuid(params.agentId); - if (!agentId) { - res.status(400).json({ - error: "Invalid AgentId format. 
Expected to be a UUID: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", - }); - return null; - } - - if (params.roomId) { - const roomId = validateUuid(params.roomId); - if (!roomId) { - res.status(400).json({ - error: "Invalid RoomId format. Expected to be a UUID: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", - }); - return null; - } - return { agentId, roomId }; - } - - return { agentId }; -} +import { validateUUIDParams } from "."; export function createApiRouter( agents: Map, directClient: DirectClient -) { +): express.Router { const router = express.Router(); router.use(cors()); @@ -65,14 +33,6 @@ export function createApiRouter( }) ); - router.get("/", (req, res) => { - res.send("Welcome, this is the REST API!"); - }); - - router.get("/hello", (req, res) => { - res.json({ message: "Hello World!" }); - }); - router.get("/agents", (req, res) => { const agentsList = Array.from(agents.values()).map((agent) => ({ id: agent.agentId, @@ -116,102 +76,6 @@ export function createApiRouter( }); }); - router.delete("/agents/:agentId", async (req, res) => { - const { agentId } = validateUUIDParams(req.params, res) ?? { - agentId: null, - }; - if (!agentId) return; - - const agent: AgentRuntime = agents.get(agentId); - - if (agent) { - agent.stop(); - directClient.unregisterAgent(agent); - res.status(204).json({ success: true }); - } else { - res.status(404).json({ error: "Agent not found" }); - } - }); - - router.post("/agents/:agentId/set", async (req, res) => { - const { agentId } = validateUUIDParams(req.params, res) ?? { - agentId: null, - }; - if (!agentId) return; - - let agent: AgentRuntime = agents.get(agentId); - - // update character - if (agent) { - // stop agent - agent.stop(); - directClient.unregisterAgent(agent); - // if it has a different name, the agentId will change - } - - // stores the json data before it is modified with added data - const characterJson = { ...req.body }; - - // load character from body - const character = req.body; - try { - validateCharacterConfig(character); - } catch (e) { - elizaLogger.error(`Error parsing character: ${e}`); - res.status(400).json({ - success: false, - message: e.message, - }); - return; - } - - // start it up (and register it) - try { - agent = await directClient.startAgent(character); - elizaLogger.log(`${character.name} started`); - } catch (e) { - elizaLogger.error(`Error starting agent: ${e}`); - res.status(500).json({ - success: false, - message: e.message, - }); - return; - } - - if (process.env.USE_CHARACTER_STORAGE === "true") { - try { - const filename = `${agent.agentId}.json`; - const uploadDir = path.join( - process.cwd(), - "data", - "characters" - ); - const filepath = path.join(uploadDir, filename); - await fs.promises.mkdir(uploadDir, { recursive: true }); - await fs.promises.writeFile( - filepath, - JSON.stringify( - { ...characterJson, id: agent.agentId }, - null, - 2 - ) - ); - elizaLogger.info( - `Character stored successfully at ${filepath}` - ); - } catch (error) { - elizaLogger.error( - `Failed to store character: ${error.message}` - ); - } - } - - res.json({ - id: character.id, - character: character, - }); - }); - router.get("/agents/:agentId/channels", async (req, res) => { const { agentId } = validateUUIDParams(req.params, res) ?? 
{ agentId: null, @@ -404,55 +268,5 @@ export function createApiRouter( } ); - router.post("/agent/start", async (req, res) => { - const { characterPath, characterJson } = req.body; - console.log("characterPath:", characterPath); - console.log("characterJson:", characterJson); - try { - let character: Character; - if (characterJson) { - character = await directClient.jsonToCharacter( - characterPath, - characterJson - ); - } else if (characterPath) { - character = - await directClient.loadCharacterTryPath(characterPath); - } else { - throw new Error("No character path or JSON provided"); - } - await directClient.startAgent(character); - elizaLogger.log(`${character.name} started`); - - res.json({ - id: character.id, - character: character, - }); - } catch (e) { - elizaLogger.error(`Error parsing character: ${e}`); - res.status(400).json({ - error: e.message, - }); - return; - } - }); - - router.post("/agents/:agentId/stop", async (req, res) => { - const agentId = req.params.agentId; - console.log("agentId", agentId); - const agent: AgentRuntime = agents.get(agentId); - - // update character - if (agent) { - // stop agent - agent.stop(); - directClient.unregisterAgent(agent); - // if it has a different name, the agentId will change - res.json({ success: true }); - } else { - res.status(404).json({ error: "Agent not found" }); - } - }); - return router; } diff --git a/packages/client-direct/src/auth.ts b/packages/client-direct/src/auth.ts new file mode 100644 index 00000000000..6413ed859ef --- /dev/null +++ b/packages/client-direct/src/auth.ts @@ -0,0 +1,28 @@ +import jwt from 'jsonwebtoken'; +import { v4 as uuidv4 } from "uuid"; +import { settings } from "@elizaos/core"; +import crypto from 'crypto'; + +export function md5(text: any) { + return crypto.createHash('md5').update(text).digest('hex'); +} + +export const signToken = (data: Record, expiresIn: string | number = settings.JWT_EXPIRED): string => { + const _salt = uuidv4(); + return jwt.sign({ ...data, _salt }, settings.JWT_SECRET_KEY, { + expiresIn: expiresIn + }); +}; + +export const verifyToken = (authorization: string): Promise => { + return new Promise((resolve, reject) => { + jwt.verify(authorization, settings.JWT_SECRET_KEY, async (err: any, decode: any) => { + if (err) { + reject(err); + } else { + resolve(decode); + } + }); + }); +}; + diff --git a/packages/client-direct/src/index.ts b/packages/client-direct/src/index.ts index cddb2cb9da5..055871882af 100644 --- a/packages/client-direct/src/index.ts +++ b/packages/client-direct/src/index.ts @@ -21,12 +21,21 @@ import { stringToUuid, settings, type IAgentRuntime, + type TypeDatabaseAdapter, + type UUID, + validateUuid, } from "@elizaos/core"; import { createApiRouter } from "./api.ts"; import * as fs from "fs"; import * as path from "path"; import { createVerifiableLogApiRouter } from "./verifiable-log-api.ts"; +import { createManageApiRouter } from "./manage-api.ts"; import OpenAI from "openai"; +import swaggerUi from 'swagger-ui-express'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); // get the resolved path to the file +const __dirname = path.dirname(__filename); // get the name of the directory const storage = multer.diskStorage({ destination: (req, file, cb) => { @@ -108,6 +117,42 @@ Response format should be formatted in a JSON block like this: \`\`\` `; +interface UUIDParams { + agentId: UUID; + roomId?: UUID; +} + +interface SwaggerRequest extends ExpressRequest { + swaggerDoc?: any; +} + +export function validateUUIDParams( 
+ params: { agentId: string; roomId?: string }, + res: express.Response +): UUIDParams | null { + const agentId = validateUuid(params.agentId); + if (!agentId) { + res.status(400).json({ + error: "Invalid AgentId format. Expected to be a UUID: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + }); + return null; + } + + if (params.roomId) { + const roomId = validateUuid(params.roomId); + if (!roomId) { + res.status(400).json({ + error: "Invalid RoomId format. Expected to be a UUID: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + }); + return null; + } + return { agentId, roomId }; + } + + return { agentId }; +} + + export class DirectClient { public app: express.Application; private agents: Map; // container management @@ -115,15 +160,53 @@ export class DirectClient { public startAgent: Function; // Store startAgent functor public loadCharacterTryPath: Function; // Store loadCharacterTryPath functor public jsonToCharacter: Function; // Store jsonToCharacter functor - + public db: TypeDatabaseAdapter; + public plugins: any[]; + public clients: any[]; + public getTokenForProvider: Function; // store getTokenForProvider functor + constructor() { elizaLogger.log("DirectClient constructor"); this.app = express(); this.app.use(cors()); this.agents = new Map(); - this.app.use(bodyParser.json()); - this.app.use(bodyParser.urlencoded({ extended: true })); + this.app.use(bodyParser.json({ limit: '10mb' })); + this.app.use(bodyParser.urlencoded({ extended: true, limit: '10mb' })); + + try { + const swaggerManageApiPath = path.join(__dirname, '../swagger-manage-api.json'); + elizaLogger.log('Swagger manage api path:', swaggerManageApiPath); + if(fs.existsSync(swaggerManageApiPath)) { + const swaggerDocument = JSON.parse(fs.readFileSync(swaggerManageApiPath, 'utf8')); + elizaLogger.log('Swagger documentation loaded'); + + // Add middleware to dynamically set host based on request + this.app.use('/docs-manage', (req: SwaggerRequest, res, next) => { + // Create a new copy of swagger document for this request + const dynamicSwaggerDoc = { ...swaggerDocument }; + // Set host based on request headers + dynamicSwaggerDoc.host = req.get('host'); + // Attach to request for use in next middleware + req.swaggerDoc = dynamicSwaggerDoc; + next(); + }, swaggerUi.serve, swaggerUi.setup((req: SwaggerRequest) => req.swaggerDoc)); + + } else { + elizaLogger.error('Swagger manage api documentation not found:', swaggerManageApiPath); + } + } catch (error) { + elizaLogger.error('Error generating Swagger documentation:', error.message); + console.error('Error generating Swagger documentation:', error); + } + + this.app.get("/", (req, res) => { + res.send("Welcome, this is the REST API!"); + }); + + this.app.get("/hello", (req, res) => { + res.json({ message: "Hello World!" 
}); + }); // Serve both uploads and generated images this.app.use( @@ -141,6 +224,9 @@ export class DirectClient { const apiLogRouter = createVerifiableLogApiRouter(this.agents); this.app.use(apiLogRouter); + const manageApiRouter = createManageApiRouter(this.agents, this); + this.app.use('/manage', manageApiRouter); + // Define an interface that extends the Express Request interface interface CustomRequest extends ExpressRequest { file?: Express.Multer.File; @@ -266,7 +352,7 @@ export class DirectClient { }; const memory: Memory = { - id: stringToUuid(messageId + "-" + userId), + id: stringToUuid(`request-${messageId}-${userId}`), ...userMessage, agentId: runtime.agentId, userId, @@ -287,12 +373,14 @@ export class DirectClient { template: messageHandlerTemplate, }); + elizaLogger.debug("direct client request:", context); + const response = await generateMessageResponse({ runtime: runtime, context, modelClass: ModelClass.LARGE, }); - + elizaLogger.debug("direct client response:", response); if (!response) { res.status(500).send( "No response from generateMessageResponse" @@ -302,7 +390,7 @@ export class DirectClient { // save response to memory const responseMessage: Memory = { - id: stringToUuid(messageId + "-" + runtime.agentId), + id: stringToUuid(`response-${messageId}-${runtime.agentId}`), ...userMessage, userId: runtime.agentId, content: response, @@ -975,6 +1063,15 @@ export class DirectClient { }); } }); + + // Error handling middleware should be the last middleware to be registered + this.app.use((err: any, req: express.Request, res: express.Response, next: express.NextFunction) => { + elizaLogger.error('Express error:', err); + res.status(500).json({ + error: err.message || 'Internal Server Error', + stack: process.env.NODE_ENV === 'development' ? 
err.stack : undefined + }); + }); } // agent/src/index.ts:startAgent calls this diff --git a/packages/client-direct/src/manage-api.ts b/packages/client-direct/src/manage-api.ts new file mode 100644 index 00000000000..6a4c2e0b3dc --- /dev/null +++ b/packages/client-direct/src/manage-api.ts @@ -0,0 +1,935 @@ +import express, { response } from "express"; +import bodyParser from "body-parser"; +import cors from "cors"; +import path from "path"; +import fs from "fs"; +import { v4 as uuidv4 } from "uuid"; +import os from "os"; + +import { + type AgentRuntime, + elizaLogger, + getEnvVariable, + validateCharacterConfig, + type UUID, + type Character, + type PaginationParams, + type Content, + type Memory, + type Media, + AccountStatus, + stringToUuid, + settings, + ModelProviderName, + ModelClass, + defaultCharacter, + composeContext, + generateMessageResponse, + generateObject, + getEmbeddingZeroVector, + generateText, + parseJSONFromText, + formatCharacterForSave, +} from "@elizaos/core"; + +import type { DirectClient } from "."; +import { validateUUIDParams, messageHandlerTemplate } from "."; +import { md5, signToken, verifyToken } from "./auth"; + +type SystemMetrics = { + pid: number; // process id + hostname: string; // hostname + uptime: number; // uptime in seconds + platform: string; // operating system + nodeVersion: string; // node.js version + memoryUsage: { + rss: number; // resident set size in bytes + heapTotal: number; // total heap size in bytes + heapUsed: number; // used heap size in bytes + external: number; // external memory usage in bytes + arrayBuffers: number; // array buffer memory usage in bytes + heapUsageRatio: number; // heap usage ratio + heapIncrease: number; // heap increase in bytes + totalMemory: number; // total memory in bytes + freeMemory: number; // free memory in bytes + }, + cpuUsage: { + cores: number; // number of cores + model: string; // cpu model + speed: number; // cpu speed in MHz + loadAvg: number[]; // load average in 1, 5, 15 minutes + usage: { + user: number; // user time in milliseconds + system: number; // system time in milliseconds + percentage: number; // process cpu usage percentage + systemPercentage: number; // system cpu usage percentage + } + }, + diskSpace: { + total: number; // total disk space in bytes + free: number; // free disk space in bytes + used: number; // used disk space in bytes + usedPercent: number; // used disk space percentage + } + [key: string]: any; +} + +const { heapUsed } = process.memoryUsage(); +let lastHeapUsed = heapUsed; + +let oldTplRuntimeData: { + modelProvider?: ModelProviderName | null, + token?: string | null, +} = {}; + +// Add these variables at the top level, near where lastHeapUsed is defined +let cpuMetrics = { + usage: process.cpuUsage(), + time: process.hrtime.bigint(), + systemCpus: os.cpus(), + percentage: 0, + systemPercentage: 0, + lastUpdate: Date.now() +}; + +const LOG_BUFFER_SIZE = 100; // Keep last 100 log entries +const logBuffer: string[] = []; + +// Add this function to maintain the log buffer +function addToLogBuffer(logData: string) { + logBuffer.push(logData); + if (logBuffer.length > LOG_BUFFER_SIZE) { + logBuffer.shift(); // Remove oldest entry + } +} + +// Subscribe to logs at the application level to maintain the buffer +elizaLogger.subscribe(addToLogBuffer); + +async function verifyTokenMiddleware(req: any, res: any, next) { + // if JWT is not enabled, skip verification + if (!(settings.JWT_ENABLED && settings.JWT_ENABLED.toLowerCase() === 'true')) { + next(); + return; + } + + 
const url: string = req.url.split('?')[0]; + if (url.indexOf('/login') === 0) { + next(); + } else { + try { + const { authorization } = req.headers; + if (!authorization) throw new Error('no token'); + const token = authorization.startsWith('Bearer ') + ? authorization.split(' ')[1] + : authorization; + const verified = await verifyToken(token); + if (verified) { + next(); + } else { + throw new Error('fail to verify token'); + } + } catch (err: any) { + res.status(401).json({ error: err.message }); + return; + } + } +}; + + +export function createManageApiRouter( + agents: Map, + directClient: DirectClient +): express.Router { + const router = express.Router(); + + router.use(cors()); + router.use(bodyParser.json()); + router.use(bodyParser.urlencoded({ extended: true })); + router.use( + express.json({ + limit: getEnvVariable("EXPRESS_MAX_PAYLOAD") || "100kb", + }) + ); + + router.use(verifyTokenMiddleware); + + const changeAccountStatus = async (accountId: UUID, status: AccountStatus) => { + const account = await directClient.db.getAccountById(accountId); + if(account) { + account.status = status; + await directClient.db.updateAccount(account); + } + } + + const updateAccount = async (accountId: UUID, data: Record) => { + const account = await directClient.db.getAccountById(accountId); + if(account) { + Object.assign(account, data); + await directClient.db.updateAccount(account); + } + } + + router.post("/login", async (req, res) => { + const { username, password } = req.body; + const valid = username === settings.JWT_USERNAME && password === md5(settings.JWT_PASSWORD); + if (valid) { + const token = signToken({ username }); + const verified = await verifyToken(token); + res.json({ success: true, token: token, exp: verified.exp }); + } else { + res.status(400).json({ error: "Invalid username or password" }); + } + }); + + router.get("/accounts", async (req, res, next) => { + try { + const params: PaginationParams = { + page: req.query.page ? Number(req.query.page) : 1, + pageSize: req.query.pageSize ? Number(req.query.pageSize) : 10, + where: req.query.where ? JSON.parse(req.query.where as string) : {}, + order: req.query.order ? JSON.parse(req.query.order as string) : {createdAt: 'DESC'}, + } + const result = await directClient.db.paginate('accounts', params); + if(result.total) { + for (const item of result.list) { + if (typeof item.details === "string") { + item.details = item.details ? 
JSON.parse(item.details) : {}; + } + const agent = agents.get(item.id); + if(!agent && item.status === AccountStatus.ACTIVE) { + item.status = AccountStatus.PAUSED; + await changeAccountStatus(item.id, AccountStatus.PAUSED); + } else if(agent && item.status !== AccountStatus.ACTIVE) { + item.status = AccountStatus.ACTIVE; + await changeAccountStatus(item.id, AccountStatus.ACTIVE); + } + } + } + res.json(result); + } catch (err) { + elizaLogger.error('Error in accounts', err); + res.status(400).json({ + error: err.message, + }); + } + }); + + router.get("/account/:accountId", async (req, res) => { + const accountId = req.params.accountId as UUID; + const account = await directClient.db.getAccountById(accountId); + const agent = agents.get(accountId); + if(!agent && account.status === AccountStatus.ACTIVE) { + account.status = AccountStatus.PAUSED; + await changeAccountStatus(accountId, AccountStatus.PAUSED); + } + res.json(account); + }); + + router.post("/account/update", async (req, res) => { + const character = req.body; + try { + + if(character.id) { + const account = await directClient.db.getAccountById(character.id); + if(!account) { + throw new Error('Account not found'); + } + delete character.id; + Object.assign(account.details, character); + if('name' in character) account.name = character.name; + if('email' in character) account.email = character.email; + if('avatarUrl' in character) account.avatarUrl = character.avatarUrl; + validateCharacterConfig(account.details); + await directClient.db.updateAccount(account); + elizaLogger.log(`${character.name} updated`); + res.json({ + success: true, + action: "update", + data: account, + }); + } else { + const userId = stringToUuid(character.username || character.name || uuidv4()); + let account = await directClient.db.getAccountById(userId); + if(account) { + throw new Error(`Account already exists, username is not unique: ${character.username}`); + } + account = { + id: userId, + name: character.name || character.username, + username: character.username || character.name, + email: character.email || userId, + avatarUrl: character.avatarUrl || "", + status: AccountStatus.PAUSED, + details: character, + } + validateCharacterConfig(account.details); + await directClient.db.createAccount(account); + elizaLogger.log(`${character.name} created`); + res.json({ + success: true, + action: "create", + data: account, + }); + } + + } catch (e) { + elizaLogger.error(`Error parsing character: ${e}`); + res.status(400).json({ + error: e.message, + }); + return; + } + }); + + router.delete("/agents/:agentId", async (req, res) => { + const { agentId } = validateUUIDParams(req.params, res) ?? { + agentId: null, + }; + if (!agentId) return; + + const agent: AgentRuntime = agents.get(agentId); + + if (agent) { + agent.stop(); + directClient.unregisterAgent(agent); + await changeAccountStatus(agentId, AccountStatus.DISABLED); + res.status(204).json({ success: true }); + } else { + res.status(404).json({ error: "Agent not found" }); + } + }); + + router.post("/agents/:agentId/set", async (req, res) => { + const { agentId } = validateUUIDParams(req.params, res) ?? 
{ + agentId: null, + }; + if (!agentId) return; + + let agent: AgentRuntime = agents.get(agentId); + + // update character + if (agent) { + // stop agent + agent.stop(); + directClient.unregisterAgent(agent); + // if it has a different name, the agentId will change + } + + // stores the json data before it is modified with added data + const characterJson = { ...req.body }; + + // load character from body + const character = req.body; + try { + validateCharacterConfig(character); + } catch (e) { + elizaLogger.error(`Error parsing character: ${e}`); + res.status(400).json({ + success: false, + message: e.message, + }); + return; + } + + // start it up (and register it) + try { + agent = await directClient.startAgent(character); + elizaLogger.log(`${character.name} started`); + } catch (e) { + elizaLogger.error(`Error starting agent: ${e}`); + res.status(500).json({ + success: false, + message: e.message, + }); + return; + } + + if (process.env.USE_CHARACTER_STORAGE === "true") { + try { + const filename = `${agent.agentId}.json`; + const uploadDir = path.join( + process.cwd(), + "data", + "characters" + ); + const filepath = path.join(uploadDir, filename); + await fs.promises.mkdir(uploadDir, { recursive: true }); + await fs.promises.writeFile( + filepath, + JSON.stringify( + { ...characterJson, id: agent.agentId }, + null, + 2 + ) + ); + elizaLogger.info( + `Character stored successfully at ${filepath}` + ); + } catch (error) { + elizaLogger.error( + `Failed to store character: ${error.message}` + ); + } + } + + res.json({ + id: character.id, + character: character, + }); + }); + + router.post("/agent/start", async (req, res) => { + const { accountId, characterPath, characterJson } = req.body; + try { + let character: Character; + if(accountId) { + const account = await directClient.db.getAccountById(accountId); + if(account) { + character = account.details as Character; + character.id = accountId; + } + } else if (characterJson) { + character = await directClient.jsonToCharacter( + characterPath, + characterJson + ); + } else if (characterPath) { + character = + await directClient.loadCharacterTryPath(characterPath); + } else { + throw new Error("No character path or JSON provided"); + } + await directClient.startAgent(character); + const details = formatCharacterForSave(character); + await updateAccount(character.id, {status: AccountStatus.ACTIVE, details}); + elizaLogger.log(`${character.name} started`); + + res.json({ + id: character.id, + character: character, + }); + } catch (e) { + elizaLogger.error(`Error parsing character: ${e}`); + res.status(400).json({ + error: e.message, + }); + return; + } + }); + + router.post("/agents/:agentId/stop", async (req, res) => { + const agentId = req.params.agentId as UUID; + console.log("agentId", agentId); + const agent: AgentRuntime = agents.get(agentId); + + // update character + if (agent) { + // stop agent + agent.stop(); + await changeAccountStatus(agentId, AccountStatus.PAUSED); + // if it has a different name, the agentId will change + res.json({ success: true }); + } else { + res.status(404).json({ error: "Agent not found" }); + } + }); + + router.get("/plugins", async (req, res) => { + try { + res.json(directClient.plugins); + } catch (err) { + elizaLogger.error('Error getting plugins:', err); + res.status(500).json({ + error: "Failed to get plugins list", + message: err.message + }); + } + }); + + router.get("/memories", async (req, res, next) => { + try { + const params: PaginationParams = { + page: req.query.page ? 
Number(req.query.page) : 1, + pageSize: req.query.pageSize ? Number(req.query.pageSize) : 10, + where: req.query.where ? JSON.parse(req.query.where as string) : {}, + order: req.query.order ? JSON.parse(req.query.order as string) : {createdAt: 'DESC'}, + } + const result = await directClient.db.paginate('memories', params); + if(result.total) { + result.list = result.list.map((item: any) => { + if (typeof item.content === "string") { + item.content = item.content ? JSON.parse(item.content) : {}; + } + delete item.embedding; + return item; + }); + } + res.json(result); + } catch (err) { + elizaLogger.error('Error in memories', err); + res.status(400).json({ + error: err.message, + }); + } + }); + + router.get("/clients", async (req, res) => { + try { + res.json(directClient.clients); + } catch (err) { + elizaLogger.error('Error getting clients:', err); + res.status(500).json({ + error: "Failed to get clients list", + message: err.message + }); + } + }); + + router.get("/providers", async (req, res) => { + const providers = Object.values(ModelProviderName); + const data = []; + const getKeyForProvider = ( + provider: ModelProviderName, + ) => { + switch (provider) { + // no key needed for llama_local, ollama, lmstudio, gaianet or bedrock + case ModelProviderName.LLAMALOCAL: + return {enabled: true, key: ""}; + case ModelProviderName.OLLAMA: + return {enabled: true, key: ""}; + case ModelProviderName.LMSTUDIO: + return {enabled: true, key: ""}; + case ModelProviderName.GAIANET: + return {enabled: true, key: ""}; + case ModelProviderName.BEDROCK: + return {enabled: true, key: ""}; + case ModelProviderName.OPENAI: + return {enabled: !!settings.OPENAI_API_KEY, key: "OPENAI_API_KEY"}; + case ModelProviderName.ETERNALAI: + return {enabled: !!settings.ETERNALAI_API_KEY, key: "ETERNALAI_API_KEY"}; + case ModelProviderName.NINETEEN_AI: + return {enabled: !!settings.NINETEEN_AI_API_KEY, key: "NINETEEN_AI_API_KEY"}; + case ModelProviderName.LLAMACLOUD: + case ModelProviderName.TOGETHER: + return {enabled: !!settings.LLAMACLOUD_API_KEY || !!settings.TOGETHER_API_KEY || !!settings.OPENAI_API_KEY, key: "LLAMACLOUD_API_KEY||TOGETHER_API_KEY||OPENAI_API_KEY"}; + case ModelProviderName.CLAUDE_VERTEX: + case ModelProviderName.ANTHROPIC: + return {enabled: !!settings.ANTHROPIC_API_KEY, key: "ANTHROPIC_API_KEY"}; + case ModelProviderName.REDPILL: + return {enabled: !!settings.REDPILL_API_KEY, key: "REDPILL_API_KEY"}; + case ModelProviderName.OPENROUTER: + return {enabled: !!settings.OPENROUTER_API_KEY, key: "OPENROUTER_API_KEY"}; + case ModelProviderName.GROK: + return {enabled: !!settings.GROK_API_KEY, key: "GROK_API_KEY"}; + case ModelProviderName.HEURIST: + return {enabled: !!settings.HEURIST_API_KEY, key: "HEURIST_API_KEY"}; + case ModelProviderName.GROQ: + return {enabled: !!settings.GROQ_API_KEY, key: "GROQ_API_KEY"}; + case ModelProviderName.GALADRIEL: + return {enabled: !!settings.GALADRIEL_API_KEY, key: "GALADRIEL_API_KEY"}; + case ModelProviderName.FAL: + return {enabled: !!settings.FAL_API_KEY, key: "FAL_API_KEY"}; + case ModelProviderName.ALI_BAILIAN: + return {enabled: !!settings.ALI_BAILIAN_API_KEY, key: "ALI_BAILIAN_API_KEY"}; + case ModelProviderName.VOLENGINE: + return {enabled: !!settings.VOLENGINE_API_KEY, key: "VOLENGINE_API_KEY"}; + case ModelProviderName.NANOGPT: + return {enabled: !!settings.NANOGPT_API_KEY, key: "NANOGPT_API_KEY"}; + case ModelProviderName.HYPERBOLIC: + return {enabled: !!settings.HYPERBOLIC_API_KEY, key: "HYPERBOLIC_API_KEY"}; + case ModelProviderName.VENICE: + return 
{enabled: !!settings.VENICE_API_KEY, key: "VENICE_API_KEY"}; + case ModelProviderName.ATOMA: + return {enabled: !!settings.ATOMASDK_BEARER_AUTH, key: "ATOMASDK_BEARER_AUTH"}; + case ModelProviderName.NVIDIA: + return {enabled: !!settings.NVIDIA_API_KEY, key: "NVIDIA_API_KEY"}; + case ModelProviderName.AKASH_CHAT_API: + return {enabled: !!settings.AKASH_CHAT_API_KEY, key: "AKASH_CHAT_API_KEY"}; + case ModelProviderName.GOOGLE: + return {enabled: !!settings.GOOGLE_GENERATIVE_AI_API_KEY, key: "GOOGLE_GENERATIVE_AI_API_KEY"}; + case ModelProviderName.MISTRAL: + return {enabled: !!settings.MISTRAL_API_KEY, key: "MISTRAL_API_KEY"}; + case ModelProviderName.LETZAI: + return {enabled: !!settings.LETZAI_API_KEY, key: "LETZAI_API_KEY"}; + case ModelProviderName.INFERA: + return {enabled: !!settings.INFERA_API_KEY, key: "INFERA_API_KEY"}; + case ModelProviderName.DEEPSEEK: + return {enabled: !!settings.DEEPSEEK_API_KEY, key: "DEEPSEEK_API_KEY"}; + case ModelProviderName.LIVEPEER: + return {enabled: !!settings.LIVEPEER_GATEWAY_URL, key: "LIVEPEER_GATEWAY_URL"}; + default: + return {enabled: false, key: ""}; + } + } + for( const provider of providers) { + const key = getKeyForProvider(provider); + data.push({ + provider, + ...key + }); + } + + res.json(data); + }); + + router.get("/logs/stream", (req, res) => { + const clientId = uuidv4(); + let now = new Date().getTime(); + + // Set headers for SSE + res.writeHead(200, { + 'Content-Type': 'text/event-stream', + 'Cache-Control': 'no-cache', + 'Connection': 'keep-alive' + }); + + // Send initial connection message + res.write(`data: {"level":30,"time":${now},"msg":"${clientId} connected"}\n\n`); + + // Send buffered logs to the new client + logBuffer.forEach(logData => { + try { + res.write(`data: ${logData}\n\n`); + } catch (err) { + // Ignore errors when sending buffer + } + }); + + // Setup heartbeat + const heartbeatInterval = setInterval(() => { + try { + now = new Date().getTime(); + res.write(`data: {"level":30,"time":${now},"msg":"heartbeat"}\n\n`); + } catch (err) { + cleanup(); + } + }, 30000); + + // Subscribe to new logs + const unsubscribe = elizaLogger.subscribe((logData) => { + try { + res.write(`data: ${logData}\n\n`); + } catch (err) { + cleanup(); + } + }); + + // Cleanup function + const cleanup = () => { + clearInterval(heartbeatInterval); + unsubscribe(); + elizaLogger.debug(`Log client ${clientId} disconnected`); + }; + + // Handle client disconnect + req.on('close', cleanup); + req.on('error', cleanup); + + // Set connection timeout + const connectionTimeout = setTimeout(() => { + now = new Date().getTime(); + res.write(`data: {"level":30,"time":${now},"msg":"Connection timed out after 4 hours"}\n\n`); + res.end(); + cleanup(); + }, 4 * 60 * 60 * 1000); // 4 hours + + // Cleanup timeout on disconnect + req.on('close', () => { + clearTimeout(connectionTimeout); + }); + + // Log that a new client connected + elizaLogger.debug(`New log client ${clientId} connected`); + }); + + router.get("/system/metrics", async (req, res) => { + // get more detailed CPU info + const getCPUInfo = async() => { + const cpus = os.cpus(); + const currentTime = process.hrtime.bigint(); + const currentUsage = process.cpuUsage(); + const now = Date.now(); + + // Only recalculate if at least 2 seconds have passed since last update + // This prevents excessive calculations on frequent requests + if (now - cpuMetrics.lastUpdate >= 2000) { + const elapsedNs = Number(currentTime - cpuMetrics.time); + const elapsedSeconds = elapsedNs / 1e9; // convert nanoseconds 
to seconds + + // Calculate process CPU usage + const userDiff = currentUsage.user - cpuMetrics.usage.user; + const systemDiff = currentUsage.system - cpuMetrics.usage.system; + const totalDiff = userDiff + systemDiff; + + // Calculate percentage (ensure it's between 0 and 1) + const percentage = Math.max(0, Math.min(1, totalDiff / (elapsedSeconds * os.cpus().length * 1e6))); + + // Calculate system CPU usage + let totalSystemUsage = 0; + let totalSystemTime = 0; + + cpus.forEach((cpu, i) => { + const startCpu = cpuMetrics.systemCpus[i]; + if (startCpu) { + const idleDiff = cpu.times.idle - startCpu.times.idle; + const totalDiff = Object.values(cpu.times).reduce((a, b) => a + b, 0) - + Object.values(startCpu.times).reduce((a, b) => a + b, 0); + + totalSystemUsage += totalDiff - idleDiff; + totalSystemTime += totalDiff; + } + }); + + // Calculate system percentage (ensure it's between 0 and 1) + const systemPercentage = totalSystemTime > 0 ? + Math.max(0, Math.min(1, totalSystemUsage / totalSystemTime)) : 0; + + // Update metrics + cpuMetrics = { + usage: currentUsage, + time: currentTime, + systemCpus: cpus, + percentage, + systemPercentage, + lastUpdate: now + }; + } + + return { + cores: cpus.length, + model: cpus[0].model, + speed: cpus[0].speed, // MHz + loadAvg: os.loadavg(), // 1, 5, 15 minutes average load + usage: { + ...currentUsage, + percentage: cpuMetrics.percentage, + systemPercentage: cpuMetrics.systemPercentage + } + }; + } + + const checkHeapUsageRatio= (usage: any) => { + + // calculate heap usage ratio + const heapUsageRatio = usage.heapUsed / usage.heapTotal; + + // set warning threshold + if (heapUsageRatio > 0.95) { + elizaLogger.warn(`High heap usage: ${(heapUsageRatio * 100).toFixed(2)}%`); + } + + // convert to MB for easier reading + const rssInMB = usage.rss / 1024 / 1024; + // physical memory threshold + const RSS_THRESHOLD_MB = (os.totalmem() / 1024 / 1024) * 0.7; + if (rssInMB > RSS_THRESHOLD_MB) { + elizaLogger.warn(`High memory usage: ${rssInMB.toFixed(2)}MB`); + } + + return heapUsageRatio; + } + + // Fix the memoryHeapIncrease function to avoid NaN + const memoryHeapIncrease = (usage: any) => { + // check if heap memory is growing + let increase = 0; + if (lastHeapUsed > 0) { + increase = usage.heapUsed - lastHeapUsed; + } + // Always update lastHeapUsed with the current value + lastHeapUsed = usage.heapUsed; + return increase; + } + + const memoryInfo = () => { + const usage = process.memoryUsage(); + return { + ...usage, + heapUsageRatio: checkHeapUsageRatio(usage), + heapIncrease: memoryHeapIncrease(usage), + totalMemory: os.totalmem(), + freeMemory: os.freemem(), + }; + } + + const getDiskSpace = async (path: string = '/'): Promise<{ + total: number; + free: number; + used: number; + usedPercent: number; + }> => { + try { + const stats = await fs.promises.statfs(path); + const total = stats.blocks * stats.bsize; + const free = stats.bfree * stats.bsize; + const used = total - free; + const usedPercent = used / total; + + return { + total, + free, + used, + usedPercent: Number(usedPercent.toFixed(2)) + }; + } catch (error) { + elizaLogger.error('Error getting disk space:', error); + throw error; + } + } + + try { + const metrics: SystemMetrics = { + pid: process.pid, + hostname: os.hostname(), + uptime: process.uptime(), + platform: process.platform, + nodeVersion: process.version, + memoryUsage: memoryInfo(), + cpuUsage: await getCPUInfo(), + diskSpace: await getDiskSpace(), + } + res.json(metrics); + } catch (err) { + elizaLogger.error('Error getting 
system info:', err); + res.status(500).json({ + error: "Failed to get system info", + message: err.message + }); + } + }); + + + router.post( + "/tplgen", + async (req: express.Request, res: express.Response) => { + const { modelProvider, description, secrets } = req.body; + if(!modelProvider || !description) { + res.status(400).send({ error: "Model provider and description are required" }); + return; + } + const agentId = stringToUuid('template_generator'); + const roomId = stringToUuid(`default-room-${agentId}`); + const userId = stringToUuid('template_generator'); + + const runtime = agents.get(agentId); + + if (!runtime) { + res.status(404).send({ error: "Agent not found" }); + return; + } + + if (!oldTplRuntimeData || Object.keys(oldTplRuntimeData).length === 0) { + oldTplRuntimeData.modelProvider = runtime.modelProvider; + oldTplRuntimeData.token = runtime.token; + } + + const tpl = await directClient.loadCharacterTryPath('characters/lpmanager.character.json'); + if(!tpl) { + res.status(500).send({ error: "Failed to load template" }); + return; + } + + tpl.modelProvider = modelProvider; + if(secrets) { + tpl.settings.secrets = secrets; + } + + const token = directClient.getTokenForProvider(modelProvider, tpl); + + if(runtime.character.modelProvider !== modelProvider || runtime.token !== token) { + runtime.character.modelProvider = modelProvider; + runtime.modelProvider = modelProvider; + runtime.token = token; + elizaLogger.log(`runtime model provider updated to ${modelProvider}`); + } + + await runtime.ensureConnection( + userId, + roomId, + runtime.character.username, + runtime.character.name, + "direct" + ); + + const text = `According to the user-provided [description] in accordance with the provided json format [template] to generate the user's json content(format should be formatted in a JSON block like template). + [description]: ${description}. 
+ [template]: ${JSON.stringify(tpl)}`; + + const messageId = stringToUuid(Date.now().toString()); + + const attachments: Media[] = []; + + const content: Content = { + text, + attachments, + source: "direct", + inReplyTo: undefined, + }; + + const userMessage = { + content, + userId, + roomId, + agentId: runtime.agentId, + }; + + const memory = { + id: stringToUuid(`request-${messageId}-${userId}`), + ...userMessage, + agentId: runtime.agentId, + userId, + roomId, + content, + createdAt: Date.now(), + }; + + await runtime.messageManager.addEmbeddingToMemory(memory); + await runtime.messageManager.createMemory(memory); + + try { + elizaLogger.log("Generating message response.."); + + const response = await generateText({ + runtime, + context: text, + modelClass: ModelClass.LARGE, + }); + elizaLogger.log("response is:", response); + if (!response) { + res.status(500).send({ error: "No response from generateMessageResponse" }); + return; + } + + // try parsing the response as JSON, if null then try again + const parsedContent = parseJSONFromText(response); + if (!parsedContent) { + elizaLogger.warn("failed to parse response as JSON, response is:", response); + throw new Error("parsedContent is null"); + } + // save response to memory + const responseMessage: Memory = { + id: stringToUuid(`response-${messageId}-${runtime.agentId}`), + ...userMessage, + userId: runtime.agentId, + content: {text: response, user: runtime.character.name, source: "direct", attachments: []}, + embedding: getEmbeddingZeroVector(), + createdAt: Date.now(), + }; + + await runtime.messageManager.createMemory(responseMessage); + + res.json(parsedContent); + } catch (error) { + elizaLogger.error("ERROR:", error); + res.status(500).send({ error: `Error generating message response, ${error.message}` }); + } finally { + runtime.modelProvider = oldTplRuntimeData.modelProvider; + runtime.character.modelProvider = oldTplRuntimeData.modelProvider; + runtime.token = oldTplRuntimeData.token; + } + } + ); + + return router; +} diff --git a/packages/client-direct/src/verifiable-log-api.ts b/packages/client-direct/src/verifiable-log-api.ts index e6dcdb502c5..924cd2bda6a 100644 --- a/packages/client-direct/src/verifiable-log-api.ts +++ b/packages/client-direct/src/verifiable-log-api.ts @@ -10,7 +10,7 @@ import type { export function createVerifiableLogApiRouter( agents: Map -) { +): express.Router { const router = express.Router(); router.use(cors()); router.use(bodyParser.json()); diff --git a/packages/client-direct/swagger.mjs b/packages/client-direct/swagger.mjs new file mode 100644 index 00000000000..e31385828a7 --- /dev/null +++ b/packages/client-direct/swagger.mjs @@ -0,0 +1,54 @@ +import swaggerAutogen from 'swagger-autogen'; +import dotenv from 'dotenv'; +import { fileURLToPath } from 'url'; +import path from 'path'; +import { dirname } from 'path'; + +// Get the directory path of the current module +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); + +// Load .env file from project root +dotenv.config({ path: path.resolve(__dirname, '../../.env') }); + + +const port = process.env.SERVER_PORT ?? 3000; +const srvUrl = process.env.SERVER_URL ?? 
'http://localhost:' + port; +const url = new URL(srvUrl); +const doc = { + info: { + title: 'Eliza Direct API', + description: 'Eliza Direct Management API Documentation' + }, + host: url.host, + basePath: '/manage', + schemes: ['http', 'https'], + securityDefinitions: { + Authorization: { + type: 'apiKey', + in: 'header', + name: 'Authorization' + }, + }, + security: [ + { + Authorization: [], + } + ] +}; + +const outputFile = './swagger-manage-api.json'; +const routes = [ + './src/manage-api.ts', +]; + +async function generateSwagger() { + try { + await swaggerAutogen()(outputFile, routes, doc); + console.log('Swagger documentation generated successfully'); + } catch (error) { + console.error('Error generating swagger documentation:', error); + } +} + +generateSwagger(); \ No newline at end of file diff --git a/packages/client-discord/readme.md b/packages/client-discord/readme.md index 7ea3eb4d764..2990d5fd1fc 100644 --- a/packages/client-discord/readme.md +++ b/packages/client-discord/readme.md @@ -23,7 +23,7 @@ pnpm install The client requires the following environment variables: -```bash +```env # Discord API Credentials DISCORD_APPLICATION_ID=your_application_id DISCORD_API_TOKEN=your_api_token diff --git a/packages/client-eliza-home/README.md b/packages/client-eliza-home/README.md new file mode 100644 index 00000000000..fffcb56eeca --- /dev/null +++ b/packages/client-eliza-home/README.md @@ -0,0 +1,66 @@ +# SmartThings Integration Client for ElizaOS + +## Overview +This project is a SmartThings integration client for ElizaOS, providing a robust interface for home automation control. It enables natural language processing for device control, state management, and automation handling through the SmartThings API. + +## Features +- 🏠 Smart home device control and monitoring +- πŸ—£οΈ Natural language command processing +- πŸ”„ Real-time device state synchronization +- 🎯 Scene and room management +- πŸ€– Automation state monitoring +- ⚑ Support for various device capabilities: + - Switches + - Lights + - Thermostats + - Locks + - Motion Sensors + - Contact Sensors + - Media Players + - and more... + +## Installation +```bash +npm install @elizaos/client-eliza-home +``` + +## Configuration +The client requires a SmartThings API token for authentication. Set up your configuration by providing the following environment variables: + +```env +# Required configuration +SMARTTHINGS_TOKEN=your_smartthings_api_token +``` + +## Usage + +### Basic Setup +```typescript +import { startHome } from '@elizaos/client-eliza-home'; +// Initialize the client +const homeClient = startHome(runtime); +``` + +### Command Handling +The client processes natural language commands through the SmartHomeManager: + +```typescript +// Example command +const result = await homeClient.handleCommand('Turn on the kitchen light', 'user_id'); +``` + + + +## Development + +### Building +```bash +npm run build +npm run dev # Development mode with watch +``` + +### Running Tests +```bash +npm test # Run tests +npm test:watch # Watch mode +npm test:coverage # Generate coverage report diff --git a/packages/client-farcaster/README.md b/packages/client-farcaster/README.md new file mode 100644 index 00000000000..673d572013b --- /dev/null +++ b/packages/client-farcaster/README.md @@ -0,0 +1,57 @@ +# @elizaos/client-farcaster + +A TypeScript-based Farcaster client implementation that provides a robust interface for interacting with the Farcaster protocol through the Neynar API. 
+ +## Features + +- πŸ”„ Automated cast (post) management +- πŸ’¬ Interactive conversation handling +- πŸ€– AI-powered response generation +- πŸ“Š Timeline and mention monitoring +- πŸ” Smart content splitting for long posts +- 🎯 Configurable posting intervals +- πŸ—οΈ Built-in memory management + +## Installation +```bash +npm install @elizaos/client-farcaster +``` + +## Configuration + +The client requires several environment variables to be set. Key configuration options include: + +```env +FARCASTER_DRY_RUN=false # Enable/disable dry run mode +FARCASTER_FID= # Your Farcaster ID +MAX_CAST_LENGTH=320 # Maximum cast length (default: 320) +FARCASTER_POLL_INTERVAL=120 # Poll interval in seconds (default: 120) +FARCASTER_NEYNAR_API_KEY= # Your Neynar API key +FARCASTER_NEYNAR_SIGNER_UUID= # Your Neynar signer UUID +``` + +## Usage + +### Basic Setup + +```typescript +import { FarcasterClientInterface } from '@elizaos/client-farcaster'; +// Initialize the client +const manager = await FarcasterClientInterface.start(runtime); +// Stop the client +await FarcasterClientInterface.stop(runtime); +``` + +## Development + +### Building +```bash +npm run build +npm run dev # Development mode with watch +``` + +### Running Tests +```bash +npm test # Run tests +npm test:watch # Watch mode +npm test:coverage # Generate coverage report diff --git a/packages/client-github/README.md b/packages/client-github/README.md index 17ec51f72b0..75fa37880a4 100644 --- a/packages/client-github/README.md +++ b/packages/client-github/README.md @@ -25,13 +25,13 @@ pnpm add @elizaos/client-github The GitHub client requires the following environment variables: -| Variable | Description | Required | -|-------------------|------------------------------------|----------| -| `GITHUB_OWNER` | Owner of the GitHub repository | Yes | -| `GITHUB_REPO` | Repository name | Yes | -| `GITHUB_BRANCH` | Target branch (default: `main`) | Yes | -| `GITHUB_PATH` | Path to focus on within the repo | Yes | -| `GITHUB_API_TOKEN`| GitHub API token for authentication| Yes | +```env +GITHUB_OWNER=Owner of the GitHub repository +GITHUB_REPO=Repository name +GITHUB_BRANCH=Target branch (default: `main`) +GITHUB_PATH=Path to focus on within the repo +GITHUB_API_TOKEN=GitHub API token for authentication +``` ## Usage diff --git a/packages/client-instagram/README.md b/packages/client-instagram/README.md index 6315f65dec3..f4c26adfab4 100644 --- a/packages/client-instagram/README.md +++ b/packages/client-instagram/README.md @@ -24,7 +24,7 @@ pnpm install The client requires the following environment variables: -```bash +```env # Instagram Credentials INSTAGRAM_USERNAME=your_username INSTAGRAM_PASSWORD=your_password diff --git a/packages/client-lens/README.md b/packages/client-lens/README.md new file mode 100644 index 00000000000..b6acb2834a9 --- /dev/null +++ b/packages/client-lens/README.md @@ -0,0 +1,64 @@ +# @elizaos/client-lens + +A Lens Protocol client integration for ElizaOS, enabling autonomous agents to interact with the Lens social graph. + +## Overview + +This package provides a robust client implementation for interacting with the Lens Protocol, specifically designed for autonomous agents running on ElizaOS. It handles authentication, content publishing, interactions, and IPFS content management through Storj. 
+ +## Features + +- πŸ” Secure authentication with Lens Protocol +- πŸ“ Content publishing (posts, comments, mirrors) +- 🀝 Social interactions management +- πŸ“Š Timeline and feed management +- πŸ’Ύ IPFS content storage via Storj +- 🧠 Autonomous agent integration +- πŸ”„ Memory management for conversation tracking + + +## Configuration + +Required environment variables: + +```env +EVM_PRIVATE_KEY= +LENS_PROFILE_ID= +STORJ_API_USERNAME= +STORJ_API_PASSWORD= +LENS_POLL_INTERVAL=120 # Optional, defaults to 120 seconds +LENS_DRY_RUN=false # Optional, for testing +``` + +## Installation +``` +bash +npm install @elizaos/client-lens +``` + +## Usage + +### Basic Setup +```typescript +import { LensAgentClient } from '@elizaos/client-lens'; +const client = new LensAgentClient({ +runtime: agentRuntime, +}); +// Start the client +await client.start(); +``` + +## Development + +### Building +```bash +npm run build +npm run dev # Development mode with watch +``` + +### Running Tests +```bash +npm test # Run tests +npm test:watch # Watch mode +npm test:coverage # Generate coverage report + diff --git a/packages/client-simsai/README.md b/packages/client-simsai/README.md new file mode 100644 index 00000000000..7eed703deb7 --- /dev/null +++ b/packages/client-simsai/README.md @@ -0,0 +1,60 @@ +# @elizaos/client-simsai + +A SimsAI client implementation for the ElizaOS framework that enables social media interaction capabilities through the Jeeter API. + +## Overview + +This package provides a client interface for automated social media interactions, including posting, searching, and engaging with content on the Jeeter platform. It's designed to work within the ElizaOS ecosystem and implements intelligent social media behavior. + +## Features + +- **Automated Posting**: Scheduled content generation and posting with customizable intervals +- **Intelligent Search**: Context-aware search functionality for relevant content +- **Interactive Engagement**: Automated responses to mentions and comments +- **Rate Limiting**: Built-in rate limiting and request queue management +- **Caching**: Efficient caching system for API responses +- **Error Handling**: Robust error handling with exponential backoff + +## Installation + +```bash +npm install @elizaos/client-simsai +``` + +## Configuration + +The client requires the following environment variables: + +```env +SIMSAI_USERNAME= # Your SimsAI username +SIMSAI_AGENT_ID= # Your SimsAI agent ID +SIMSAI_API_KEY= # Your SimsAI API key +SIMSAI_DRY_RUN= # Optional: Set to "true" for testing (default: false) +``` + +## Usage + +### Basic Implementation + +```typescript +import { JeeterClientInterface } from '@elizaos/client-simsai'; +import { IAgentRuntime } from '@elizaos/core'; + +// Initialize the client +const runtime: IAgentRuntime = /* your runtime implementation */; +const client = await JeeterClientInterface.start(runtime); + +// Stop the client +await JeeterClientInterface.stop(runtime); +``` + +For development: +- TypeScript +- ESLint +- Other development tools as specified in package.json + +## Build + +```bash +npm run build +``` diff --git a/packages/client-slack/README.md b/packages/client-slack/README.md index d1e06673ed2..c158b6ca433 100644 --- a/packages/client-slack/README.md +++ b/packages/client-slack/README.md @@ -94,15 +94,15 @@ Before proceeding to install the app, make sure to verify the event subscription ### Step 3: Configure Environment Variables 1. 
Create or edit `.env` file in your project root: - ```bash - SLACK_APP_ID= # From Basic Information > App Credentials > App ID - SLACK_CLIENT_ID= # From Basic Information > App Credentials > Client ID - SLACK_CLIENT_SECRET= # From Basic Information > App Credentials > Client Secret - SLACK_SIGNING_SECRET= # From Basic Information > App Credentials > Signing Secret - SLACK_BOT_TOKEN= # From OAuth & Permissions > Bot User OAuth Token (starts with xoxb-) - SLACK_VERIFICATION_TOKEN= # From Basic Information > App Credentials > Verification Token - SLACK_SERVER_PORT=3069 # Must match the port you used with ngrok - ``` +```env +SLACK_APP_ID= # From Basic Information > App Credentials > App ID +SLACK_CLIENT_ID= # From Basic Information > App Credentials > Client ID +SLACK_CLIENT_SECRET= # From Basic Information > App Credentials > Client Secret +SLACK_SIGNING_SECRET= # From Basic Information > App Credentials > Signing Secret +SLACK_BOT_TOKEN= # From OAuth & Permissions > Bot User OAuth Token (starts with xoxb-) +SLACK_VERIFICATION_TOKEN= # From Basic Information > App Credentials > Verification Token +SLACK_SERVER_PORT=3069 # Must match the port you used with ngrok +``` ### Step 4: Install the App diff --git a/packages/client-telegram-account/README.md b/packages/client-telegram-account/README.md new file mode 100644 index 00000000000..7438ee6c429 --- /dev/null +++ b/packages/client-telegram-account/README.md @@ -0,0 +1,88 @@ +# @elizaos/client-telegram-account + +A Telegram client implementation for ElizaOS that enables AI-powered Telegram account automation and interaction. + +## Features + +- πŸ€– AI-powered Telegram account automation +- πŸ“± Seamless Telegram account integration +- πŸ’¬ Intelligent message handling and response generation +- πŸ”„ Support for message threading and replies +- πŸ“Ž File and media attachment handling +- βœ‚οΈ Automatic message splitting for long responses +- πŸ”’ Secure session management + +## Installation +```bash +npm install @elizaos/client-telegram-account +``` + +## Prerequisites + +Before using this client, you'll need: + +1. Telegram API credentials (APP_ID and APP_HASH) from https://my.telegram.org +2. A Telegram account phone number +3. 
ElizaOS Core runtime + +## Configuration + +The following environment variables are required: + +```env +TELEGRAM_ACCOUNT_PHONE="+1234567890" +TELEGRAM_ACCOUNT_APP_ID=12345 +TELEGRAM_ACCOUNT_APP_HASH="your-app-hash" +TELEGRAM_ACCOUNT_DEVICE_MODEL="Device Model" +TELEGRAM_ACCOUNT_SYSTEM_VERSION="System Version" +``` + +## Usage + +```typescript +import { TelegramAccountClientInterface } from '@elizaos/client-telegram-account'; +import { IAgentRuntime } from '@elizaos/core'; +// Initialize with ElizaOS runtime +const runtime: IAgentRuntime = / your runtime instance /; +// Start the client +const client = await TelegramAccountClientInterface.start(runtime); +``` + + +## Features in Detail + +### Message Handling + +The client automatically handles: +- Incoming messages +- Reply detection and threading +- Group chat mentions +- Direct messages +- Message formatting with Markdown support + +### AI Integration + +- Seamless integration with ElizaOS AI capabilities +- Context-aware responses +- Character personality maintenance +- Template-based response generation + +### Security + +- Secure session management +- Environment variable validation +- Error handling and logging + +## Development + +### Building +```bash +npm run build +npm run dev # Development mode with watch +``` + +### Running Tests +```bash +npm test # Run tests +npm test:watch # Watch mode +npm test:coverage # Gen diff --git a/packages/client-telegram/README.md b/packages/client-telegram/README.md index bf31180688b..e7534d33f29 100644 --- a/packages/client-telegram/README.md +++ b/packages/client-telegram/README.md @@ -9,8 +9,14 @@ This plugin integrates a Telegram client with ElizaOS, allowing characters in El - **Startup Logging**: Logs successful initialization of the Telegram client for better debugging. - **Future-proof Design**: Provides a basic structure for stopping the client (currently unsupported). + ## Configuration Options +```env +# Required configuration +TELEGRAM_BOT_TOKEN=your_telegram_bot_token +``` + Here are the available configuration options for the `character.json` file: | Key | Type | Default | Description | diff --git a/packages/client-twitter/README.md b/packages/client-twitter/README.md new file mode 100644 index 00000000000..d3c42e04ffa --- /dev/null +++ b/packages/client-twitter/README.md @@ -0,0 +1,85 @@ +# @elizaos/client-twitter + +A Twitter/X client library for ElizaOS that enables autonomous social interactions through various specialized modules. + +## Features + +- **Base Operations**: Handles core Twitter functionality like authentication, timeline management, and caching +- **Post Generation**: Autonomous tweet creation and scheduling +- **Search & Engagement**: Configurable search and response capabilities +- **Interaction Management**: Handles mentions, replies, and user engagement +- **Spaces Support**: Optional Twitter Spaces functionality +- **Approval Workflow**: Optional Discord-based tweet approval system + +## Installation +```bash +npm install @elizaos/client-twitter +``` + + +## Configuration + +The client requires several environment variables for configuration. 
Here are the key settings: + +```env + +TWITTER_USERNAME= +TWITTER_PASSWORD= +TWITTER_EMAIL= + +# optional +TWITTER_2FA_SECRET= + +TWITTER_ENABLED= +TWITTER_DRY_RUN= +MAX_TWEET_LENGTH= +TWITTER_SEARCH_ENABLE= +TWITTER_TARGET_USERS= +POST_INTERVAL_MIN= +POST_INTERVAL_MAX= +TWITTER_RETRY_LIMIT= +TWITTER_POLL_INTERVAL= +TWITTER_SPACES_ENABLE= +ENABLE_TWITTER_POST_GENERATION= +ENABLE_ACTION_PROCESSING= +ACTION_INTERVAL= +POST_IMMEDIATELY= +TWITTER_SPACES_ENABLE= +MAX_ACTIONS_PROCESSING= +ACTION_TIMELINE_TYPE= +TWITTER_APPROVAL_ENABLED= +TWITTER_APPROVAL_DISCORD_BOT_TOKEN= +TWITTER_APPROVAL_DISCORD_CHANNEL_ID= +TWITTER_APPROVAL_CHECK_INTERVAL= +ELEVENLABS_XI_API_KEY= +``` + +## Usage + +### Basic Setup +```typescript +import { TwitterManager } from '@elizaos/client-twitter'; +const manager = new TwitterManager(runtime, twitterConfig); +// Initialize all components +await manager.client.init(); +// Start the posting/interaction loops +await manager.post.start(); +await manager.interaction.start(); +``` + + +## Development + +### Building +```bash +npm run build +npm run dev # Development mode with watch +``` + +### Running Tests +```bash +npm test # Run tests +npm test:watch # Watch mode +npm test:coverage # Gene + + diff --git a/packages/client-twitter/src/base.ts b/packages/client-twitter/src/base.ts index ce5ad336620..5347319f8d2 100644 --- a/packages/client-twitter/src/base.ts +++ b/packages/client-twitter/src/base.ts @@ -92,11 +92,16 @@ export class ClientBase extends EventEmitter { lastCheckedTweetId: bigint | null = null; imageDescriptionService: IImageDescriptionService; temperature = 0.5; + active = true; requestQueue: RequestQueue = new RequestQueue(); profile: TwitterProfile | null; + stop() { + this.active = false; + } + async cacheTweet(tweet: Tweet): Promise { if (!tweet) { console.warn("Tweet is undefined, skipping cache"); @@ -345,6 +350,8 @@ export class ClientBase extends EventEmitter { throw new Error("Failed to load profile"); } + this.active = true; + await this.loadLatestCheckedTweetId(); await this.populateTimeline(); } @@ -617,7 +624,7 @@ export class ClientBase extends EventEmitter { this.runtime.agentId, this.profile.username, this.runtime.character.name, - "twitter" + "twitter", ); // Save the new tweets as memories diff --git a/packages/client-twitter/src/index.ts b/packages/client-twitter/src/index.ts index 52957c1878b..1f7dc47ea50 100644 --- a/packages/client-twitter/src/index.ts +++ b/packages/client-twitter/src/index.ts @@ -46,6 +46,16 @@ class TwitterManager { this.space = new TwitterSpaceClient(this.client, runtime); } } + + stop() { + elizaLogger.log('client-twitter stopping, agentId:', this.client.runtime.agentId); + this.client.stop(); + this.post.stop(); + if(this.search) this.search.stop(); + this.interaction.stop(); + if(this.space) this.space.stop(); + elizaLogger.log('client-twitter stopped, agentId:', this.client.runtime.agentId); + } } export const TwitterClientInterface: Client = { @@ -55,32 +65,33 @@ export const TwitterClientInterface: Client = { elizaLogger.log("Twitter client started"); - const manager = new TwitterManager(runtime, twitterConfig); + this.manager = new TwitterManager(runtime, twitterConfig); // Initialize login/session - await manager.client.init(); + await this.manager.client.init(); // Start the posting loop - await manager.post.start(); + await this.manager.post.start(); // Start the search logic if it exists - if (manager.search) { - await manager.search.start(); + if (this.manager.search) { + await this.manager.search.start(); 
} // Start interactions (mentions, replies) - await manager.interaction.start(); + await this.manager.interaction.start(); // If Spaces are enabled, start the periodic check - if (manager.space) { - manager.space.startPeriodicSpaceCheck(); + if (this.manager.space) { + this.manager.space.startPeriodicSpaceCheck(); } - return manager; + return this.manager; }, async stop(_runtime: IAgentRuntime) { - elizaLogger.warn("Twitter client does not support stopping yet"); + elizaLogger.warn("Twitter client stop..., agentId:", _runtime.agentId); + this.manager.stop(); }, }; diff --git a/packages/client-twitter/src/interactions.ts b/packages/client-twitter/src/interactions.ts index e704a4cb7e0..d375bc137cf 100644 --- a/packages/client-twitter/src/interactions.ts +++ b/packages/client-twitter/src/interactions.ts @@ -97,6 +97,8 @@ export class TwitterInteractionClient { client: ClientBase; runtime: IAgentRuntime; private isDryRun: boolean; + private interactionInterval: NodeJS.Timeout | null = null; + constructor(client: ClientBase, runtime: IAgentRuntime) { this.client = client; this.runtime = runtime; @@ -105,8 +107,10 @@ export class TwitterInteractionClient { async start() { const handleTwitterInteractionsLoop = () => { + if (!this.client.active) return; + this.handleTwitterInteractions(); - setTimeout( + this.interactionInterval = setTimeout( handleTwitterInteractionsLoop, // Defaults to 2 minutes this.client.twitterConfig.TWITTER_POLL_INTERVAL * 1000 @@ -115,7 +119,16 @@ export class TwitterInteractionClient { handleTwitterInteractionsLoop(); } + async stop() { + if (this.interactionInterval) { + clearTimeout(this.interactionInterval); + this.interactionInterval = null; + } + } + async handleTwitterInteractions() { + if (!this.client.active) return; + elizaLogger.log("Checking Twitter interactions"); const twitterUsername = this.client.profile.username; @@ -315,6 +328,8 @@ export class TwitterInteractionClient { message: Memory; thread: Tweet[]; }) { + if (!this.client.active) return; + // Only skip if tweet is from self AND not from a target user if (tweet.userId === this.client.profile.id && !this.client.twitterConfig.TWITTER_TARGET_USERS.includes(tweet.username)) { @@ -564,6 +579,8 @@ export class TwitterInteractionClient { tweet: Tweet, maxReplies = 10 ): Promise { + if (!this.client.active) return []; + const thread: Tweet[] = []; const visited: Set = new Set(); diff --git a/packages/client-twitter/src/post.ts b/packages/client-twitter/src/post.ts index b194caec765..878da61acd4 100644 --- a/packages/client-twitter/src/post.ts +++ b/packages/client-twitter/src/post.ts @@ -105,6 +105,7 @@ export class TwitterPostClient { private approvalRequired = false; private discordApprovalChannelId: string; private approvalCheckInterval: number; + private pendingTweetCheckInterval: NodeJS.Timeout | null = null; constructor(client: ClientBase, runtime: IAgentRuntime) { this.client = client; @@ -237,6 +238,8 @@ export class TwitterPostClient { } const generateNewTweetLoop = async () => { + if(!this.client.active) return; + const lastPost = await this.runtime.cacheManager.get<{ timestamp: number; }>("twitter/" + this.twitterUsername + "/lastPost"); @@ -261,9 +264,11 @@ export class TwitterPostClient { }; const processActionsLoop = async () => { + if(!this.client.active) return; + const actionInterval = this.client.twitterConfig.ACTION_INTERVAL; // Defaults to 5 minutes - while (!this.stopProcessingActions) { + while (!this.stopProcessingActions && this.client.active) { try { const results = await 
this.processTweetActions(); if (results) { @@ -311,7 +316,7 @@ export class TwitterPostClient { } private runPendingTweetCheckLoop() { - setInterval(async () => { + this.pendingTweetCheckInterval = setInterval(async () => { await this.handlePendingTweet(); }, this.approvalCheckInterval); } @@ -498,8 +503,9 @@ export class TwitterPostClient { * Generates and posts a new tweet. If isDryRun is true, only logs what would have been posted. */ async generateNewTweet() { + if (!this.client.active) return; elizaLogger.log("Generating new tweet"); - + try { const roomId = stringToUuid( "twitter_generate_room-" + this.client.profile.username @@ -718,8 +724,8 @@ export class TwitterPostClient { * only simulates and logs actions without making API calls. */ private async processTweetActions() { - if (this.isProcessing) { - elizaLogger.log("Already processing tweet actions, skipping"); + if (!this.client.active || this.isProcessing) { + elizaLogger.log("Already processing tweet actions or client inactive, skipping"); return null; } @@ -1134,6 +1140,8 @@ export class TwitterPostClient { tweetState: any, executedActions: string[] ) { + if (!this.client.active) return; + try { // Build conversation thread for context const thread = await buildConversationThread(tweet, this.client); @@ -1258,6 +1266,12 @@ export class TwitterPostClient { async stop() { this.stopProcessingActions = true; + + // Clear the pending tweet check interval if it exists + if (this.pendingTweetCheckInterval) { + clearInterval(this.pendingTweetCheckInterval); + this.pendingTweetCheckInterval = null; + } } private async sendForApproval( @@ -1265,6 +1279,8 @@ export class TwitterPostClient { roomId: UUID, rawTweetContent: string ): Promise { + if (!this.client.active) return null; + try { const embed = { title: "New Tweet Pending Approval", @@ -1332,6 +1348,8 @@ export class TwitterPostClient { private async checkApprovalStatus( discordMessageId: string ): Promise { + if (!this.client.active) return "PENDING"; + try { // Fetch message and its replies from Discord const channel = await this.discordClientForApproval.channels.fetch( @@ -1406,7 +1424,9 @@ export class TwitterPostClient { } private async handlePendingTweet() { + if (!this.client.active) return; elizaLogger.log("Checking Pending Tweets..."); + const pendingTweetsKey = `twitter/${this.client.profile.username}/pendingTweet`; const pendingTweets = (await this.runtime.cacheManager.get( diff --git a/packages/client-twitter/src/search.ts b/packages/client-twitter/src/search.ts index 01ed6e68277..add1fa73cf3 100644 --- a/packages/client-twitter/src/search.ts +++ b/packages/client-twitter/src/search.ts @@ -47,6 +47,7 @@ export class TwitterSearchClient { runtime: IAgentRuntime; twitterUsername: string; private respondedTweets: Set = new Set(); + private searchInterval: NodeJS.Timeout | null = null; constructor(client: ClientBase, runtime: IAgentRuntime) { this.client = client; @@ -58,19 +59,30 @@ export class TwitterSearchClient { this.engageWithSearchTermsLoop(); } + async stop() { + if (this.searchInterval) { + clearTimeout(this.searchInterval); + this.searchInterval = null; + } + } + private engageWithSearchTermsLoop() { + if (!this.client.active) return; + this.engageWithSearchTerms().then(); const randomMinutes = Math.floor(Math.random() * (120 - 60 + 1)) + 60; elizaLogger.log( `Next twitter search scheduled in ${randomMinutes} minutes` ); - setTimeout( + this.searchInterval = setTimeout( () => this.engageWithSearchTermsLoop(), randomMinutes * 60 * 1000 ); } private async 
engageWithSearchTerms() { + if (!this.client.active) return; + elizaLogger.log("Engaging with search terms"); try { const searchTerm = [...this.runtime.character.topics][ diff --git a/packages/client-twitter/src/spaces.ts b/packages/client-twitter/src/spaces.ts index 7764106c9ee..806a6e72c5a 100644 --- a/packages/client-twitter/src/spaces.ts +++ b/packages/client-twitter/src/spaces.ts @@ -216,6 +216,7 @@ export class TwitterSpaceClient { } private async shouldLaunchSpace(): Promise { + if (!this.client.active) return false; // Random chance const r = Math.random(); if (r > (this.decisionOptions.randomChance ?? 0.3)) { @@ -247,6 +248,12 @@ export class TwitterSpaceClient { } private async generateSpaceConfig(): Promise { + if (!this.client.active) return { + mode: "INTERACTIVE", + title: "Random Tech Chat", + description: "Discussion about Random Tech Chat", + languages: ["en"], + }; if ( !this.decisionOptions.topics || this.decisionOptions.topics.length === 0 @@ -277,6 +284,7 @@ export class TwitterSpaceClient { } public async startSpace(config: SpaceConfig) { + if (!this.client.active) return; elizaLogger.log("[Space] Starting a new Twitter Space..."); try { @@ -397,7 +405,7 @@ export class TwitterSpaceClient { * Periodic management: check durations, remove extras, maybe accept new from queue */ private async manageCurrentSpace() { - if (!this.spaceId || !this.currentSpace) return; + if (!this.client.active || !this.spaceId || !this.currentSpace) return; try { const audioSpace = await this.scraper.getAudioSpaceById( this.spaceId @@ -469,6 +477,7 @@ export class TwitterSpaceClient { * If we have available slots, accept new speakers from the queue */ private async acceptSpeakersFromQueueIfNeeded() { + if (!this.client.active) return; // while queue not empty and activeSpeakers < maxSpeakers, accept next const ms = this.decisionOptions.maxSpeakers ?? 1; while ( @@ -488,7 +497,7 @@ export class TwitterSpaceClient { } private async handleSpeakerRequest(req: SpeakerRequest) { - if (!this.spaceId || !this.currentSpace) return; + if (!this.client.active || !this.spaceId || !this.currentSpace) return; const audioSpace = await this.scraper.getAudioSpaceById(this.spaceId); const janusSpeakers = audioSpace?.participants?.speakers || []; @@ -511,7 +520,7 @@ export class TwitterSpaceClient { } private async acceptSpeaker(req: SpeakerRequest) { - if (!this.currentSpace) return; + if (!this.client.active || !this.currentSpace) return; try { await this.currentSpace.approveSpeaker(req.userId, req.sessionUUID); this.activeSpeakers.push({ @@ -530,7 +539,7 @@ export class TwitterSpaceClient { } private async removeSpeaker(userId: string) { - if (!this.currentSpace) return; + if (!this.client.active || !this.currentSpace) return; try { await this.currentSpace.removeSpeaker(userId); elizaLogger.log(`[Space] Removed speaker userId=${userId}`); @@ -547,7 +556,7 @@ export class TwitterSpaceClient { * Also update activeSpeakers array */ private async kickExtraSpeakers(speakers: any[]) { - if (!this.currentSpace) return; + if (!this.client.active || !this.currentSpace) return; const ms = this.decisionOptions.maxSpeakers ?? 
1; // sort by who joined first if needed, or just slice @@ -585,4 +594,23 @@ export class TwitterSpaceClient { this.speakerQueue = []; } } + + /** + * Stop all space activities and clean up + */ + public async stop() { + // Stop periodic check + this.stopPeriodicCheck(); + + await this.stopSpace(); + + // Clear all states + this.activeSpeakers = []; + this.speakerQueue = []; + this.sttTtsPlugin = undefined; + this.currentSpace = undefined; + this.spaceId = undefined; + this.startedAt = undefined; + this.isSpaceRunning = false; + } } diff --git a/packages/client-xmtp/README.md b/packages/client-xmtp/README.md new file mode 100644 index 00000000000..0d4ba7a3615 --- /dev/null +++ b/packages/client-xmtp/README.md @@ -0,0 +1,46 @@ +# @elizaos/client-xmtp + +A XMTP client implementation for ElizaOS that enables messaging capabilities through the XMTP protocol. + +## Features + +- XMTP protocol integration +- Seamless message handling and response generation +- Support for multiple messaging platforms (Converse, Coinbase Wallet, Farcaster Frame) +- Memory management for message history +- Action processing system +- Environment-based configuration + +## Installation + +```bash +npm install @elizaos/client-xmtp +``` + +## Prerequisites +```env +EVM_PRIVATE_KEY=your_private_key_here +``` + +## Usage +``` +typescript +import { XmtpClientInterface } from '@elizaos/client-xmtp'; +// Initialize the client with your runtime +const runtime = // your ElizaOS runtime +await XmtpClientInterface.start(runtime); +``` + +## Development + +### Building +```bash +npm run build +npm run dev # Development mode with watch +``` + +### Running Tests +```bash +npm test # Run tests +npm test:watch # Watch mode +npm test:coverage # Generate coverage report diff --git a/packages/client-xmtp/src/index.ts b/packages/client-xmtp/src/index.ts index 362d247e683..114fdf5c75e 100644 --- a/packages/client-xmtp/src/index.ts +++ b/packages/client-xmtp/src/index.ts @@ -51,7 +51,7 @@ export const XmtpClientInterface: Client = { elizaRuntime = runtime; xmtp = await xmtpClient({ - walletKey: process.env.EVM_PRIVATE_KEY as string, + walletKey: runtime.getSetting("EVM_PRIVATE_KEY") || process.env.EVM_PRIVATE_KEY as string, onMessage, }); diff --git a/packages/core/README.md b/packages/core/README.md new file mode 100644 index 00000000000..4793e0e436b --- /dev/null +++ b/packages/core/README.md @@ -0,0 +1,129 @@ +# Project Analysis: ElizaOS Core Package + +## 1. Project Overview + +This appears to be a core package for ElizaOS, a system that seems to handle AI agent interactions, memory management, and knowledge processing. The project is written in TypeScript and uses modern ES modules. + +## 2. Directory Structure + +```bash +packages/core/ +β”œβ”€β”€ src/ +β”‚ β”œβ”€β”€ database/ +β”‚ β”‚ └── CircuitBreaker.ts +β”‚ β”œβ”€β”€ models.ts +β”‚ β”œβ”€β”€ runtime.ts +β”‚ β”œβ”€β”€ database.ts +β”‚ β”œβ”€β”€ ragknowledge.ts +β”‚ β”œβ”€β”€ memory.ts +β”‚ β”œβ”€β”€ localembeddingManager.ts +β”‚ β”œβ”€β”€ cache.ts +β”‚ β”œβ”€β”€ types.ts +β”‚ β”œβ”€β”€ config.ts +β”‚ └── test_resources/ +β”‚ └── testSetup.ts +β”œβ”€β”€ vitest.config.ts +β”œβ”€β”€ tsup.config.ts +β”œβ”€β”€ nodemon.json +β”œβ”€β”€ package.json +└── README-TESTS.md +``` + + +## 3. 
Core Components Analysis + Key responsibilities: + - Manages agent state and configuration + - Handles message processing + - Coordinates between different services + - Manages knowledge and memory systems + +## 4.UML Class Diagram +```mermaid +classDiagram + class AgentRuntime { + +UUID agentId + +Character character + +IMemoryManager messageManager + +IRAGKnowledgeManager ragKnowledgeManager + +ModelProviderName modelProvider + +initialize() + +processActions() + +evaluate() + } + + class DatabaseAdapter { + <> + +DB db + +CircuitBreaker circuitBreaker + +init() + +close() + +getMemories() + +createKnowledge() + } + + class MemoryManager { + +IAgentRuntime runtime + +string tableName + +getMemoryById() + +searchMemories() + } + + class RAGKnowledgeManager { + +IAgentRuntime runtime + +string tableName + +string knowledgeRoot + +processFile() + +cleanupDeletedKnowledgeFiles() + } + + AgentRuntime --> MemoryManager + AgentRuntime --> RAGKnowledgeManager + MemoryManager --> DatabaseAdapter + RAGKnowledgeManager --> DatabaseAdapter +``` + +## 5. Configuration System + +The project uses multiple configuration layers: +- Environment variables (via dotenv) +- Runtime configuration +- Model configuration + +Configuration files: + +```1:9:src/config.ts +import dotenv from "dotenv"; +import path from "path"; +import { fileURLToPath } from "url"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// Load environment variables from root .env file +dotenv.config({ path: path.resolve(__dirname, "../../../.env") }); +``` + +## 6. Execution Flow Diagram + +```mermaid +sequenceDiagram + participant Client + participant AgentRuntime + participant RAGKnowledgeManager + participant MemoryManager + participant DatabaseAdapter + + Client->>AgentRuntime: initialize() + AgentRuntime->>RAGKnowledgeManager: processFile() + RAGKnowledgeManager->>DatabaseAdapter: createKnowledge() + + Client->>AgentRuntime: processActions() + AgentRuntime->>MemoryManager: getMemories() + MemoryManager->>DatabaseAdapter: searchMemories() + + AgentRuntime->>AgentRuntime: evaluate() + AgentRuntime->>DatabaseAdapter: getKnowledge() +``` + +This documentation provides a high-level overview of the system's architecture and key components. The project appears to be a sophisticated AI agent system with robust memory management, knowledge processing, and database abstraction layers. 
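The configuration section above lists the layers (environment variables, runtime configuration, model configuration) but does not show how they compose. A minimal, hypothetical sketch of the usual resolution order, character/runtime setting first and the process environment as fallback, mirroring the pattern this changeset applies to the XMTP client's wallet key; `resolveSetting` is illustrative and not an exported core API:

```typescript
import type { IAgentRuntime } from "@elizaos/core";

// Illustrative helper: prefer the runtime/character setting, fall back to process.env.
function resolveSetting(runtime: IAgentRuntime, key: string): string | undefined {
    return runtime.getSetting(key) || process.env[key];
}

// Example: the same fallback used for the XMTP wallet key in this changeset.
// const walletKey = resolveSetting(runtime, "EVM_PRIVATE_KEY");
```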
+ diff --git a/packages/core/__tests__/database.test.ts b/packages/core/__tests__/database.test.ts index d297d8defdb..b3b66893fcf 100644 --- a/packages/core/__tests__/database.test.ts +++ b/packages/core/__tests__/database.test.ts @@ -9,9 +9,34 @@ import { type Participant, type Relationship, type UUID, + AccountStatus, + type PaginationParams, + type PaginationResult, + RAGKnowledgeItem, } from "../src/types.ts"; class MockDatabaseAdapter extends DatabaseAdapter { + getKnowledge(params: { id?: UUID; agentId: UUID; limit?: number; query?: string; conversationContext?: string; }): Promise { + return Promise.resolve([]); + } + searchKnowledge(params: { agentId: UUID; embedding: Float32Array; match_threshold: number; match_count: number; searchText?: string; }): Promise { + throw new Error("Method not implemented."); + } + createKnowledge(knowledge: RAGKnowledgeItem): Promise { + return Promise.resolve(); + } + removeKnowledge(id: UUID): Promise { + return Promise.resolve(); + } + clearKnowledge(agentId: UUID, shared?: boolean): Promise { + return Promise.resolve(); + } + init(): Promise { + return Promise.resolve(); + } + close(): Promise { + return Promise.resolve(); + } getMemoryById(_id: UUID): Promise { throw new Error("Method not implemented."); } @@ -212,6 +237,10 @@ class MockDatabaseAdapter extends DatabaseAdapter { id: userId, username: "testuser", name: "Test Account", + status: AccountStatus.ACTIVE, + details: { summary: "Test Account" }, + email: "test@example.com", + avatarUrl: "https://example.com/avatar.jpg", } as Account; } @@ -220,6 +249,10 @@ class MockDatabaseAdapter extends DatabaseAdapter { return true; } + async updateAccount(_account: Account): Promise { + return Promise.resolve(); + } + async getMemories(params: { roomId: UUID; count?: number; @@ -265,6 +298,16 @@ class MockDatabaseAdapter extends DatabaseAdapter { objectives: [], } as Goal; } + + async paginate(table: string, params: PaginationParams): Promise { + return Promise.resolve({ + list: [], + total: 0, + page: 1, + pageSize: 10, + totalPages: 1 + }); + } } // Now, let’s fix the test suite. 
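The mock's `paginate` stub above mirrors the `IDatabaseAdapter.paginate(table, params)` contract added later in this changeset. A minimal sketch of how a caller might exercise it, reusing the test file's own type imports; the `accounts` table name and the `status`/`createdAt` columns are illustrative assumptions, not part of the contract:

```typescript
import type { IDatabaseAdapter, PaginationParams, PaginationResult } from "../src/types.ts";

// Hypothetical usage of the new paginate() contract.
async function fetchFirstPage(adapter: IDatabaseAdapter): Promise<PaginationResult> {
    const params: PaginationParams = {
        page: 1,                      // 1-based page index
        pageSize: 10,                 // rows per page
        where: { status: "active" },  // optional filter (WhereOptions)
        order: { createdAt: "DESC" }, // optional sort (OrderOptions)
    };
    const result = await adapter.paginate("accounts", params);
    console.log(`page ${result.page}/${result.totalPages}, total rows: ${result.total}`);
    return result;
}
```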
diff --git a/packages/core/__tests__/embedding.test.ts b/packages/core/__tests__/embedding.test.ts index dcca8ad0572..ee5fa358ef1 100644 --- a/packages/core/__tests__/embedding.test.ts +++ b/packages/core/__tests__/embedding.test.ts @@ -5,7 +5,7 @@ import { getEmbeddingType, getEmbeddingZeroVector, } from "../src/embedding.ts"; -import { type IAgentRuntime, ModelProviderName } from "../types.ts"; +import { type IAgentRuntime, ModelProviderName } from "../src/types.ts"; import settings from "../src/settings.ts"; // Mock environment-related settings diff --git a/packages/core/__tests__/runtime.test.ts b/packages/core/__tests__/runtime.test.ts index 5c63277b077..b3813cab3c0 100644 --- a/packages/core/__tests__/runtime.test.ts +++ b/packages/core/__tests__/runtime.test.ts @@ -16,6 +16,7 @@ const mockDatabaseAdapter: IDatabaseAdapter = { close: vi.fn().mockResolvedValue(undefined), getAccountById: vi.fn().mockResolvedValue(null), createAccount: vi.fn().mockResolvedValue(true), + updateAccount: vi.fn().mockResolvedValue(undefined), getMemories: vi.fn().mockResolvedValue([]), getMemoryById: vi.fn().mockResolvedValue(null), getMemoriesByRoomIds: vi.fn().mockResolvedValue([]), @@ -48,6 +49,7 @@ const mockDatabaseAdapter: IDatabaseAdapter = { createRelationship: vi.fn().mockResolvedValue(true), getRelationship: vi.fn().mockResolvedValue(null), getRelationships: vi.fn().mockResolvedValue([]), + paginate: vi.fn().mockResolvedValue({} as any), }; const mockCacheManager = { diff --git a/packages/core/src/database.ts b/packages/core/src/database.ts index 8fa44b03cbc..e2c9db3938f 100644 --- a/packages/core/src/database.ts +++ b/packages/core/src/database.ts @@ -9,6 +9,8 @@ import type { RAGKnowledgeItem, Participant, IDatabaseAdapter, + PaginationParams, + PaginationResult, } from "./types.ts"; import { CircuitBreaker } from "./database/CircuitBreaker"; import { elizaLogger } from "./logger"; @@ -78,6 +80,13 @@ export abstract class DatabaseAdapter implements IDatabaseAdapter { */ abstract createAccount(account: Account): Promise; + /** + * Updates a specific account in the database. + * @param account The account object with updated properties. + * @returns A Promise that resolves when the account has been updated. + */ + abstract updateAccount(account: Account): Promise; + /** * Retrieves memories based on the specified parameters. * @param params An object containing parameters for the memory retrieval. 
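The new abstract `updateAccount` member means every concrete adapter now has to provide an update path alongside `createAccount`. A minimal sketch of what a trivial in-memory adapter might do to satisfy it; the `accounts` Map is an assumption of this sketch and not part of the `DatabaseAdapter` base class:

```typescript
import type { Account, UUID } from "./types.ts";

// Hypothetical in-memory store used only for this sketch.
const accounts = new Map<UUID, Account>();

async function updateAccount(account: Account): Promise<void> {
    const existing = accounts.get(account.id);
    if (!existing) {
        throw new Error(`Account ${account.id} not found`);
    }
    // Overlay the changed fields on the stored record.
    accounts.set(account.id, { ...existing, ...account });
}
```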
@@ -457,4 +466,12 @@ export abstract class DatabaseAdapter implements IDatabaseAdapter { throw error; } } + + /** + * Paginate a table + * @param table The table to paginate + * @param params The pagination parameters + * @returns A Promise that resolves to the paginated results + */ + abstract paginate(table: string, params: PaginationParams): Promise; } diff --git a/packages/core/src/environment.ts b/packages/core/src/environment.ts index dff7e50e8a1..8494e406662 100644 --- a/packages/core/src/environment.ts +++ b/packages/core/src/environment.ts @@ -152,6 +152,9 @@ export const CharacterSchema = z.object({ }) .optional(), extends: z.array(z.string()).optional(), + conversationLength: z.number().optional(), + enabledActions: z.array(z.string()).optional(), + disabledActions: z.array(z.string()).optional(), }); // Type inference diff --git a/packages/core/src/logger.ts b/packages/core/src/logger.ts index a2e55956866..fd733b781bc 100644 --- a/packages/core/src/logger.ts +++ b/packages/core/src/logger.ts @@ -1,9 +1,9 @@ -import pino, { type LogFn } from "pino"; +import pino, { type LogFn, type Logger } from "pino"; import pretty from "pino-pretty"; - +import { EventEmitter } from "events"; import { parseBooleanFromText } from "./parsing.ts"; - +// Define custom log levels with their numerical values const customLevels: Record = { fatal: 60, error: 50, @@ -31,6 +31,10 @@ const createStream = () => { const defaultLevel = process?.env?.DEFAULT_LOG_LEVEL || "info"; +// create a event emitter for log subscription +const logEmitter = new EventEmitter(); + +// modify options, add streamWrite hook const options = { level: defaultLevel, customLevels, @@ -41,17 +45,33 @@ const options = { ): void { const [arg1, ...rest] = inputArgs; + const formatError = (err: Error) => ({ + message: err.message, + stack: err.stack?.split('\n').map(line => line.trim()), + name: err.name, + ...err + }); + if (typeof arg1 === "object") { - const messageParts = rest.map((arg) => - typeof arg === "string" ? arg : JSON.stringify(arg) - ); - const message = messageParts.join(" "); - method.apply(this, [arg1, message]); + if (arg1 instanceof Error) { + method.apply(this, [{ + error: formatError(arg1) + }]); + } else { + const messageParts = rest.map((arg) => + typeof arg === "string" ? arg : JSON.stringify(arg) + ); + const message = messageParts.join(" "); + method.apply(this, [arg1, message]); + } } else { const context = {}; - const messageParts = [arg1, ...rest].map((arg) => - typeof arg === "string" ? arg : arg - ); + const messageParts = [arg1, ...rest].map((arg) => { + if (arg instanceof Error) { + return formatError(arg); + } + return typeof arg === "string" ? 
arg : arg; + }); const message = messageParts .filter((part) => typeof part === "string") .join(" "); @@ -64,9 +84,90 @@ const options = { method.apply(this, [context, message]); } }, + // add streamWrite hook support + streamWrite: (originalData: string) => { + try { + // send log data to all subscribers + logEmitter.emit('log', originalData); + } catch (err) { + console.error('Error in streamWrite hook:', err); + } + return originalData; + } }, }; -export const elizaLogger = pino(options, createStream()); +// define ElizaLogger type, inherit all attributes of Pino Logger +type ElizaLogger = { + logger: Logger; + subscribe: (callback: (logData: string) => void) => () => void; + // explicitly declare all log methods + log: LogFn; + progress: LogFn; + success: LogFn; + fatal: LogFn; + error: LogFn; + warn: LogFn; + info: LogFn; + debug: LogFn; + trace: LogFn; + // Add all other Logger properties + level: string | number; + levels: pino.LevelMapping; + levelVal: number; + useLevelLabels: boolean; + bindings: () => pino.Bindings; + child: (bindings: pino.Bindings, options?: pino.ChildLoggerOptions) => Logger; + isLevelEnabled: (level: pino.LevelWithSilentOrString) => boolean; + version: string; + [key: string]: any; +}; + +// create base logger instance +const baseLogger = pino(options, createStream()); + +// export configurable logger, including subscription mechanism +export const elizaLogger: ElizaLogger = { + ...baseLogger, // expand all Pino logger methods and attributes + logger: baseLogger, + log: baseLogger.log.bind(baseLogger), + progress: baseLogger.progress.bind(baseLogger), + success: baseLogger.success.bind(baseLogger), + fatal: baseLogger.fatal.bind(baseLogger), + error: baseLogger.error.bind(baseLogger), + warn: baseLogger.warn.bind(baseLogger), + info: baseLogger.info.bind(baseLogger), + debug: baseLogger.debug.bind(baseLogger), + trace: baseLogger.trace.bind(baseLogger), + level: baseLogger.level, + levels: baseLogger.levels, + levelVal: baseLogger.levelVal, + useLevelLabels: baseLogger.useLevelLabels, + bindings: baseLogger.bindings.bind(baseLogger), + child: baseLogger.child.bind(baseLogger), + isLevelEnabled: baseLogger.isLevelEnabled.bind(baseLogger), + version: baseLogger.version, + /** + * subscribe to log events + * @param callback handle log data callback function + * @returns unsubscribe function + */ + subscribe: (callback: (logData: string) => void): (() => void) => { + const listener = (data: string) => { + try { + callback(data); + } catch (err) { + console.error('Error in log subscriber:', err); + } + }; + + logEmitter.on('log', listener); + + // return unsubscribe function + return () => { + logEmitter.off('log', listener); + }; + } +}; export default elizaLogger; diff --git a/packages/core/src/parsing.ts b/packages/core/src/parsing.ts index f7393875e28..b2be31f0236 100644 --- a/packages/core/src/parsing.ts +++ b/packages/core/src/parsing.ts @@ -6,7 +6,7 @@ export const messageCompletionFooter = `\nResponse format should be formatted in { "user": "{{agentName}}", "text": "", "action": "" } \`\`\` -The β€œaction” field should be one of the options in [Available Actions] and the "text" field should be the response you want to send. +The "action" field should be one of the options in [Available Actions] and the "text" field should be the response you want to send. `; export const shouldRespondFooter = `The available options are [RESPOND], [IGNORE], or [STOP]. Choose the most appropriate option. 
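The `subscribe` helper added to `elizaLogger` above is what the `/logs/stream` SSE route earlier in this changeset relies on: a consumer attaches a callback and receives each serialized log line emitted through the `streamWrite` hook, and the returned function detaches it again. A minimal usage sketch, assuming only the exported `elizaLogger` API shown in this change:

```typescript
import { elizaLogger } from "@elizaos/core";

// Attach a subscriber; the callback receives each serialized log line.
const unsubscribe = elizaLogger.subscribe((logData: string) => {
    // Forward the line somewhere (an SSE response, a file, a websocket, ...).
    process.stdout.write(`forwarded: ${logData}`);
});

elizaLogger.info("This line is also delivered to the subscriber");

// Later, detach the listener to stop receiving log events.
unsubscribe();
```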
@@ -219,6 +219,50 @@ export function extractAttributes( return Object.entries(attributes).length > 0 ? attributes : null; } +export function parseJSONFromText( + text: string, + strict = true +): Record | null { + const jsonBlockMatch = text.match(jsonBlockPattern); + + if (jsonBlockMatch) { + const parsingText = cleanJsonResponse(text); + try { + return JSON.parse(parsingText); + } catch (e) { + if(strict) { + throw e; + } + console.error("Error parsing JSON:", e); + console.error("Text is not JSON", text); + return extractAttributes(text); + } + } else { + const objectPattern = /{[\s\S]*?}?/; + const objectMatch = text.match(objectPattern); + + if (objectMatch) { + const parsingText = cleanJsonResponse(text); + try { + return JSON.parse(parsingText); + } catch (e) { + if(strict) { + throw e; + } + console.error("Error parsing JSON:", e); + console.error("Text is not JSON", text); + return extractAttributes(text); + } + } + } + console.error("Text is not JSON", text); + if(strict) { + throw new Error("Text is not JSON"); + } + return null; +} + + /** * Normalizes a JSON-like string by correcting formatting issues: * - Removes extra spaces after '{' and before '}'. @@ -344,3 +388,23 @@ export function truncateToCompleteSentence( const hardTruncated = text.slice(0, maxLength - 3).trim(); return hardTruncated + "..."; } + +/** + * Cleans common escape characters from a string + * @param value The string or number to clean + * @returns The cleaned string + */ +export function cleanEscapedForNumber(value: string | number): string { + if (typeof value === 'number') { + return value.toString(); + } + + return value + .replace(/\\./g, '.') // Handle escaped periods + .replace(/\\\\/g, '\\') // Handle escaped backslashes + .replace(/\\n/g, '\n') // Handle escaped newlines + .replace(/\\r/g, '\r') // Handle escaped carriage returns + .replace(/\\t/g, '\t') // Handle escaped tabs + .replace(/\\"/g, '"') // Handle escaped double quotes + .replace(/\\'/g, "'"); // Handle escaped single quotes +} \ No newline at end of file diff --git a/packages/core/src/runtime.ts b/packages/core/src/runtime.ts index b4382e8ef54..4732c9d3d09 100644 --- a/packages/core/src/runtime.ts +++ b/packages/core/src/runtime.ts @@ -52,6 +52,7 @@ import { type Evaluator, type Memory, type DirectoryItem, + AccountStatus, } from "./types.ts"; import { stringToUuid } from "./uuid.ts"; import { glob } from "glob"; @@ -261,7 +262,7 @@ export class AgentRuntime implements IAgentRuntime { this.agentId = opts.character?.id ?? opts?.agentId ?? - stringToUuid(opts.character?.name ?? uuidv4()); + stringToUuid(opts.character?.username ?? opts.character?.name ?? uuidv4()); this.character = opts.character || defaultCharacter; elizaLogger.info(`${this.character.name}(${this.agentId}) - Initializing AgentRuntime with options:`, { @@ -286,9 +287,23 @@ export class AgentRuntime implements IAgentRuntime { `[AgentRuntime] Process knowledgeRoot: ${this.knowledgeRoot}`, ); - this.#conversationLength = - opts.conversationLength ?? this.#conversationLength; - + this.#conversationLength = (() => { + // Get the value from options, character settings, or default + let length = opts.conversationLength ?? this.character.conversationLength ?? 
this.#conversationLength; + + // Ensure minimum value of 2 + length = Math.max(2, length); + + // Make sure it's an even number (multiple of 2) + if (length % 2 !== 0) { + length += 1; + } + + return length; + })(); + + elizaLogger.log(`[AgentRuntime] Conversation length: ${this.#conversationLength}`); + if (!opts.databaseAdapter) { throw new Error("No database adapter provided"); } @@ -301,6 +316,7 @@ export class AgentRuntime implements IAgentRuntime { this.agentId, this.character.username || this.character.name, this.character.name, + this.character?.source || '' ).then(() => { // postgres needs the user to exist before you can add a participant this.ensureParticipantExists(this.agentId, this.agentId); @@ -411,6 +427,7 @@ export class AgentRuntime implements IAgentRuntime { ]; this.plugins.forEach((plugin) => { + elizaLogger.log("agent runtime plugin:", plugin.name, plugin.package); plugin.actions?.forEach((action) => { this.registerAction(action); }); @@ -580,7 +597,12 @@ export class AgentRuntime implements IAgentRuntime { "client stop for", this.character.name, ); - c.stop(); + // it doesn't have a strict interface specification, and some don't have STOP methods + if (c && typeof c === "object" && "stop" in c) { + c.stop(); + } else { + elizaLogger.log("client stop skip,", cStr, this.character.name); + } } // we don't need to unregister with directClient // don't need to worry about knowledge @@ -959,6 +981,14 @@ export class AgentRuntime implements IAgentRuntime { * @param action The action to register. */ registerAction(action: Action) { + if (this.character.disabledActions?.includes(action.name)) { + elizaLogger.log(`${this.character.name}(${this.agentId}) - Action ${action.name} is disabled.`); + return; + } + if (Array.isArray(this.character.enabledActions) && this.character.enabledActions?.length > 0 && !this.character.enabledActions.includes(action.name)) { + elizaLogger.log(`${this.character.name}(${this.agentId}) - Action ${action.name} is not enabled.`); + return; + } elizaLogger.success(`${this.character.name}(${this.agentId}) - Registering action: ${action.name}`); this.actions.push(action); } @@ -1002,20 +1032,49 @@ export class AgentRuntime implements IAgentRuntime { elizaLogger.success(`Normalized action: ${normalizedAction}`); + // First, try to find an exact match by name let action = this.actions.find( (a: { name: string }) => a.name .toLowerCase() - .replace("_", "") - .includes(normalizedAction) || - normalizedAction.includes( - a.name.toLowerCase().replace("_", ""), - ), + .replace("_", "") === normalizedAction ); + // If no exact match, look for partial matches + if (!action) { + action = this.actions.find( + (a: { name: string }) => + a.name + .toLowerCase() + .replace("_", "") + .includes(normalizedAction) || + normalizedAction.includes( + a.name.toLowerCase().replace("_", ""), + ), + ); + } + + // If still no match, check similes if (!action) { elizaLogger.info("Attempting to find action in similes."); for (const _action of this.actions) { + // First try exact simile matches + const exactSimileMatch = _action.similes.find( + (simile) => + simile + .toLowerCase() + .replace("_", "") === normalizedAction + ); + + if (exactSimileMatch) { + action = _action; + elizaLogger.success( + `Action found with exact simile match: ${action.name}`, + ); + break; + } + + // Then try partial simile matches const simileAction = _action.similes.find( (simile) => simile @@ -1029,7 +1088,7 @@ export class AgentRuntime implements IAgentRuntime { if (simileAction) { action = 
_action; elizaLogger.success( - `Action found in similes: ${action.name}`, + `Action found with partial simile match: ${action.name}`, ); break; } @@ -1158,17 +1217,20 @@ export class AgentRuntime implements IAgentRuntime { userId: UUID, userName: string | null, name: string | null, - email?: string | null, source?: string | null, ) { const account = await this.databaseAdapter.getAccountById(userId); if (!account) { + const email = userId !== this.agentId ? userId : this.character.email || userId; await this.databaseAdapter.createAccount({ id: userId, name: name || this.character.name || "Unknown User", username: userName || this.character.username || "Unknown", - email: email || this.character.email || userId, // Temporary - details: this.character || { summary: "" }, + email: email, + details: formatCharacterForSave(this.character) || { summary: "" }, + status: AccountStatus.ACTIVE, + pid: this.agentId === userId ? "" : this.agentId, + source: source || "", }); elizaLogger.success(`User ${userName} created successfully.`); } @@ -1785,3 +1847,15 @@ const formatKnowledge = (knowledge: KnowledgeItem[]) => { return cleanedText; }).join('\n\n'); // Separate distinct pieces with double newlines }; + +export function formatCharacterForSave(character: Character): Character { + const data: any = {...character}; + const plugins: string[] = []; + for (const plugin of data.plugins) { + if (plugin.package && !plugins.includes(plugin.package)) { + plugins.push(plugin.package); + } + } + data.plugins = plugins; + return data; +} \ No newline at end of file diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts index 4b40244193f..c984f5d00b5 100644 --- a/packages/core/src/types.ts +++ b/packages/core/src/types.ts @@ -557,6 +557,15 @@ export interface Account { /** Optional avatar URL */ avatarUrl?: string; + + /** Optional status of the account */ + status?: AccountStatus; + + /** Optional parent ID */ + pid?: string; + + /** Optional source */ + source?: string; } /** @@ -628,6 +637,9 @@ export type Plugin = { /** Plugin description */ description: string; + /** Plugin package name */ + package?: string; + /** Optional actions */ actions?: Action[]; @@ -945,6 +957,17 @@ export type Character = { extends?: string[]; twitterSpaces?: TwitterSpaceDecisionOptions; + + /** Optional number of messages to keep in the conversation history. */ + conversationLength?: number; + + /** Optional list of actions to enable. */ + enabledActions?: string[]; + + /** Optional list of actions to disable. 
*/ + disabledActions?: string[]; + + [key: string]: any; }; export interface TwitterSpaceDecisionOptions { @@ -963,6 +986,29 @@ export interface TwitterSpaceDecisionOptions { speakerMaxDurationMs?: number; } +export interface PaginationParams { + page: number; // current page + pageSize: number; // page size + where?: WhereOptions; // query conditions + order?: OrderOptions; // sort order +} + +export interface WhereOptions { + [key: string]: any; +} + +export interface OrderOptions { + [key: string]: 'ASC' | 'DESC'; // sort direction +} + +export interface PaginationResult { + list: any[]; + total: number; + page: number; + pageSize: number; + totalPages: number; +} + /** * Interface for database operations */ @@ -982,6 +1028,9 @@ export interface IDatabaseAdapter { /** Create new account */ createAccount(account: Account): Promise; + /** Update account */ + updateAccount(account: Account): Promise; + /** Get memories matching criteria */ getMemories(params: { roomId: UUID; @@ -1138,6 +1187,7 @@ export interface IDatabaseAdapter { createKnowledge(knowledge: RAGKnowledgeItem): Promise; removeKnowledge(id: UUID): Promise; clearKnowledge(agentId: UUID, shared?: boolean): Promise; + paginate(table: string, params: PaginationParams): Promise; } export interface IDatabaseCacheAdapter { @@ -1155,6 +1205,8 @@ export interface IDatabaseCacheAdapter { deleteCache(params: { agentId: UUID; key: string }): Promise; } +export type TypeDatabaseAdapter = IDatabaseAdapter & IDatabaseCacheAdapter; + export interface IMemoryManager { runtime: IAgentRuntime; tableName: string; @@ -1666,3 +1718,9 @@ export interface ChunkRow { id: string; // Add other properties if needed } + +export enum AccountStatus { + PAUSED = "paused", + ACTIVE = "active", + DISABLED = "disabled", +} diff --git a/packages/plugin-0g/README.md b/packages/plugin-0g/README.md index e0d2a4922ed..29574b4ec31 100644 --- a/packages/plugin-0g/README.md +++ b/packages/plugin-0g/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-0g The plugin requires the following environment variables to be set: -```typescript +```env ZEROG_INDEXER_RPC=<0G indexer RPC endpoint> ZEROG_EVM_RPC=<0G EVM RPC endpoint> ZEROG_PRIVATE_KEY= diff --git a/packages/plugin-0x/README.md b/packages/plugin-0x/README.md index 42cd84804c2..23494ed3f4c 100644 --- a/packages/plugin-0x/README.md +++ b/packages/plugin-0x/README.md @@ -21,7 +21,16 @@ Set the following environment variables: ```env WALLET_PRIVATE_KEY=your_private_key ZERO_EX_API_KEY=your_0x_api_key -{chain}_RPC_URL=your_rpc_endpoint +ETH_RPC_URL=your_rpc_url +OPTIMISM_RPC_URL=your_rpc_url +BSC_RPC_URL=your_rpc_url +POLYGON_RPC_URL=your_rpc_url +BASE_RPC_URL=your_rpc_url +ARBITRUM_RPC_URL=your_rpc_url +AVALANCHE_RPC_URL=your_rpc_url +LINEA_RPC_URL=your_rpc_url +SCROLL_RPC_URL=your_rpc_url +BLAST_RPC_URL=your_rpc_url ``` ## Installation diff --git a/packages/plugin-3d-generation/README.md b/packages/plugin-3d-generation/README.md index 51e50349a74..52d91059ac1 100644 --- a/packages/plugin-3d-generation/README.md +++ b/packages/plugin-3d-generation/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-3d-generation The plugin requires the following environment variable or runtime setting to be set: -```typescript +```env FAL_API_KEY= ``` diff --git a/packages/plugin-3d-generation/src/index.ts b/packages/plugin-3d-generation/src/index.ts index b6b516bee19..f40d5a8a48b 100644 --- a/packages/plugin-3d-generation/src/index.ts +++ b/packages/plugin-3d-generation/src/index.ts @@ -190,10 +190,12 @@ const 
ThreeDGeneration: Action = { ], } as Action; -export const ThreeDGenerationPlugin: Plugin = { +export const threeDGenerationPlugin: Plugin = { name: "3DGeneration", description: "Generate 3D using Hyper 3D", actions: [ThreeDGeneration], evaluators: [], providers: [], }; + +export default threeDGenerationPlugin; diff --git a/packages/plugin-abstract/README.md b/packages/plugin-abstract/README.md index 865ec94ead2..02fab76949e 100644 --- a/packages/plugin-abstract/README.md +++ b/packages/plugin-abstract/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-abstract The plugin requires the following environment variables to be set: -```typescript +```env ABSTRACT_ADDRESS= ABSTRACT_PRIVATE_KEY= ``` diff --git a/packages/plugin-ankr/README.md b/packages/plugin-ankr/README.md index d4b4fa14b6f..65b0630d410 100644 --- a/packages/plugin-ankr/README.md +++ b/packages/plugin-ankr/README.md @@ -1,11 +1,43 @@ - -## Ankr Plugin Guide -![alt text](assets/ankr.jpg) +# @elizaos/plugin-ankr
-

🔗 Blockchain Data Query Interface

+

🔗 Blockchain Data Query Plugin for ElizaOS

+

A comprehensive interface for querying blockchain data across multiple networks

+## Overview + +The Ankr Plugin provides a robust interface for querying blockchain data through natural language prompts. It supports multiple chains including Ethereum, BSC, Polygon, Avalanche, Optimism, and Base. + +## Features + +- πŸ” Multi-chain support +- πŸ’° Token and NFT queries +- πŸ“Š Blockchain statistics +- πŸ‘› Wallet analysis +- πŸ”„ Transaction tracking +- πŸ’Ž Price tracking + +## Installation + +## Configuration + +### Environment Variables + +```env +ANKR_ENV=production +ANKR_WALLET=your_wallet_address + +# optional +ANKR_MAX_RETRIES=3 +ANKR_RETRY_DELAY=1000 +ANKR_TIMEOUT=5000 +ANKR_GRANULAR_LOG=true +ANKR_LOG_LEVEL=info +ANKR_RUNTIME_CHECK_MODE=false +ANKR_SPASH=false +``` + ### Available Actions The Ankr plugin provides comprehensive blockchain data querying capabilities through natural language prompts. Below are the supported actions and their usage: diff --git a/packages/plugin-apro/README.MD b/packages/plugin-apro/README.MD index 354f70d8d1a..21f2e1c6a44 100644 --- a/packages/plugin-apro/README.MD +++ b/packages/plugin-apro/README.MD @@ -46,11 +46,13 @@ npm install @elizaos/plugin-apro ## Configuration Configure the plugin by setting environment variables or runtime settings: -- APRO_RPC_URL -- APRO_PROXY_ADDRESS -- APRO_PRIVATE_KEY -- APRO_CONVERTER_ADDRESS -- APRO_AUTO_HASH_DATA +```env +APRO_RPC_URL=your_rpc_url +APRO_PROXY_ADDRESS=your_proxy_address +APRO_PRIVATE_KEY=your_private_key +APRO_CONVERTER_ADDRESS=your_converter_address +APRO_AUTO_HASH_DATA=true +``` ## Usage diff --git a/packages/plugin-aptos/README.md b/packages/plugin-aptos/README.md index 15c6ae3668d..05260b2bfb0 100644 --- a/packages/plugin-aptos/README.md +++ b/packages/plugin-aptos/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-aptos The plugin requires the following environment variables to be set: -```typescript +```env APTOS_PRIVATE_KEY= APTOS_NETWORK=<"mainnet" | "testnet"> ``` diff --git a/packages/plugin-arbitrage/README.md b/packages/plugin-arbitrage/README.md new file mode 100644 index 00000000000..c53b0c0a207 --- /dev/null +++ b/packages/plugin-arbitrage/README.md @@ -0,0 +1,102 @@ +# @elizaos/plugin-arbitrage + +A cryptocurrency arbitrage trading plugin for Eliza OS that monitors and executes profitable trading opportunities across multiple DEXs (Decentralized Exchanges). + +## Features + +- ⚑ Real-time market monitoring via WebSocket +- πŸ’Ή Automated arbitrage opportunity detection +- πŸ”„ Cross-DEX trading execution +- πŸ“Š Price impact and trading fee analysis +- πŸ›‘οΈ Flashbots integration for MEV protection +- βš™οΈ Configurable trading parameters +- πŸ” Support for UniswapV2-compatible DEXs + +## Installation + +```bash +npm install @elizaos/plugin-arbitrage +``` + +## Usage + +1. Configure your environment variables: + +```env +ARBITRAGE_EVM_PRIVATE_KEY=YOUR_PRIVATE_KEY_HERE +FLASHBOTS_RELAY_SIGNING_KEY=YOUR_FLASHBOTS_KEY_HERE +BUNDLE_EXECUTOR_ADDRESS=YOUR_EXECUTOR_ADDRESS_HERE +ARBITRAGE_ETHEREUM_WS_URL=YOUR_ETH_WSS_URL +ARBITRAGE_EVM_PROVIDER_URL=YOUR_ETH_RPC_URL +``` + +2. Import and use the plugin in your Eliza character: + +```json +{ + "name": "Trader", + "plugins": [ + "@elizaos/plugin-arbitrage", + "@elizaos/plugin-evm" + ], + "settings": { + "secrets": { + "EVM_PRIVATE_KEY": "YOUR_PRIVATE_KEY_HERE", + "FLASHBOTS_RELAY_SIGNING_KEY": "YOUR_FLASHBOTS_KEY_HERE", + "BUNDLE_EXECUTOR_ADDRESS": "YOUR_EXECUTOR_ADDRESS_HERE" + }, + "arbitrage": { + "ethereumWsUrl": "YOUR_ETH_WSS_URL", + "rpcUrl": "YOUR_ETH_RPC_URL" + } + } +} +``` + +3. 
The plugin provides the following actions: +- `EXECUTE_ARBITRAGE`: Scans and executes profitable arbitrage opportunities +- Market monitoring via WebSocket connection +- Automatic price impact and trading fee calculations + +## Configuration + +Key configuration parameters in `config/thresholds.ts`: + +```typescript +{ + minProfitThreshold: "0.0001 ETH", // Minimum profit to execute trade + maxTradeSize: "1 ETH", // Maximum trade size + gasLimit: 500000, // Gas limit for transactions + minerRewardPercentage: 90 // Flashbots miner reward percentage +} +``` + +## Contributing + +1. Fork the repository +2. Create your feature branch (`git checkout -b feature/AmazingFeature`) +3. Commit your changes (`git commit -m 'Add some AmazingFeature'`) +4. Push to the branch (`git push origin feature/AmazingFeature`) +5. Open a Pull Request + +## Testing + +```bash +# Run tests +npm test + +# Run tests with coverage +npm run test:coverage +``` + +## License + +This project is licensed under the MIT License - see the [LICENSE.md](LICENSE.md) file for details. + +## Acknowledgments + +- Built on Eliza OS platform +- Uses Flashbots for MEV protection +- Supports UniswapV2-compatible DEXs +- Powered by ethers.js for Ethereum interaction + \ No newline at end of file diff --git a/packages/plugin-arthera/README.md b/packages/plugin-arthera/README.md index b634635d469..4499292f98c 100644 --- a/packages/plugin-arthera/README.md +++ b/packages/plugin-arthera/README.md @@ -12,19 +12,9 @@ By default, **Arthera** is enabled. To use it, simply add your private key to th ```env ARTHERA_PRIVATE_KEY=your-private-key-here -``` - -### Custom RPC URLs - -By default, the RPC URL is inferred from the `viem/chains` config. To use a custom RPC URL for a specific chain, add the following to your `.env` file: -```env -ETHEREUM_PROVIDER_=https://your-custom-rpc-url -``` - -**Example usage:** - -```env +# Custom RPC URLs +#ETHEREUM_PROVIDER_=https://your-custom-rpc-url ETHEREUM_PROVIDER_ARTHERA=https://rpc.arthera.net ``` diff --git a/packages/plugin-asterai/README.md b/packages/plugin-asterai/README.md index c84bf32f3a5..6f281e43c43 100644 --- a/packages/plugin-asterai/README.md +++ b/packages/plugin-asterai/README.md @@ -21,9 +21,9 @@ pnpm install @elizaos/plugin-asterai The plugin requires the following environment variables to be set: -```typescript -ASTERAI_AGENT_ID= -ASTERAI_PUBLIC_QUERY_KEY= +```env +ASTERAI_AGENT_ID=your_agent_id +ASTERAI_PUBLIC_QUERY_KEY=your_public_query_key ``` ## Usage diff --git a/packages/plugin-avail/README.md b/packages/plugin-avail/README.md index 8a3f25ba44a..e279a5469b0 100644 --- a/packages/plugin-avail/README.md +++ b/packages/plugin-avail/README.md @@ -2,6 +2,16 @@ This is a plugin for using Eliza to interact with the Avail DA network. Defaults to Turing testnet, but can be customized to use Mainnet by changing the RPC in the `.env` file at `AVAIL_RPC_URL`. +## Configuration + +The plugin requires the following environment variables to be set: + +```env +AVAIL_ADDRESS=your_avail_address +AVAIL_SEED=your_avail_seed +``` + + ## Actions - **transfer**: This action enables the transfer of AVAIL tokens from the agent's wallet (as defined by the keyring generated from `AVAIL_SEED`) to another wallet. To use just mention the transfer of AVAIL tokens to an Avail account. 
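The guard pattern this changeset adds around `BIRDEYE_API_KEY` in the Solana providers applies equally well to the Avail settings above. A minimal sketch, assuming the standard `runtime.getSetting` accessor; the `assertAvailConfig` helper is illustrative and not part of the plugin:

```typescript
import { elizaLogger, type IAgentRuntime } from "@elizaos/core";

// Illustrative helper (not in the plugin): read the Avail settings and fail fast if they are missing.
export function assertAvailConfig(runtime: IAgentRuntime): { address: string; seed: string } {
    const address = runtime.getSetting("AVAIL_ADDRESS");
    const seed = runtime.getSetting("AVAIL_SEED");
    if (!address || !seed) {
        elizaLogger.error("AVAIL_ADDRESS and AVAIL_SEED must be set before using the transfer action");
        throw new Error("Missing Avail configuration");
    }
    return { address, seed };
}
```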
diff --git a/packages/plugin-avalanche/README.md b/packages/plugin-avalanche/README.md index 15cc16d26c7..dcca8e4ea26 100644 --- a/packages/plugin-avalanche/README.md +++ b/packages/plugin-avalanche/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-avalanche The plugin requires the following environment variable: -```typescript +```env AVALANCHE_PRIVATE_KEY= ``` diff --git a/packages/plugin-b2/README.md b/packages/plugin-b2/README.md index 1e5ba623cab..9ec4e0d8fa5 100644 --- a/packages/plugin-b2/README.md +++ b/packages/plugin-b2/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-b2 The plugin requires the following environment variable: -```typescript +```env B2_PRIVATE_KEY= ``` diff --git a/packages/plugin-birdeye/README.md b/packages/plugin-birdeye/README.md index 0ac543844f4..cb0862a5afc 100644 --- a/packages/plugin-birdeye/README.md +++ b/packages/plugin-birdeye/README.md @@ -2,6 +2,15 @@ A powerful plugin for Eliza that integrates with Birdeye's comprehensive DeFi and token analytics API. This plugin provides real-time access to blockchain data, token metrics, and DeFi analytics across multiple networks. +## Configuration + +Set the following environment variables: + +```env +BIRDEYE_WALLET_ADDR=your_wallet_address +``` + + ## Features ### Provider Featurs diff --git a/packages/plugin-coingecko/README.md b/packages/plugin-coingecko/README.md index fcb79d8a558..01d8ebb12f4 100644 --- a/packages/plugin-coingecko/README.md +++ b/packages/plugin-coingecko/README.md @@ -18,10 +18,10 @@ pnpm add @elizaos/plugin-coingecko Set up your environment with the required CoinGecko API key: -| Variable Name | Description | -| ------------------- | ---------------------- | -| `COINGECKO_API_KEY` | Your CoinGecko Pro API key | -| `COINGECKO_PRO_API_KEY` | Your CoinGecko Pro API key | +```env +COINGECKO_API_KEY=your_api_key +COINGECKO_PRO_API_KEY=your_pro_api_key +``` ## Usage diff --git a/packages/plugin-coinmarketcap/README.md b/packages/plugin-coinmarketcap/README.md index 9c042d9e576..4279a0d7093 100644 --- a/packages/plugin-coinmarketcap/README.md +++ b/packages/plugin-coinmarketcap/README.md @@ -22,7 +22,7 @@ npm install @elizaos/plugin-coinmarketcap 2. Set up your environment variables: -```bash +```env COINMARKETCAP_API_KEY=your_api_key ``` diff --git a/packages/plugin-conflux/README.md b/packages/plugin-conflux/README.md index 288ffe3ac1d..d4d1d7fe75a 100644 --- a/packages/plugin-conflux/README.md +++ b/packages/plugin-conflux/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-conflux The plugin requires the following environment variables to be set: -```typescript +```env CONFLUX_CORE_PRIVATE_KEY= CONFLUX_CORE_SPACE_RPC_URL= CONFLUX_MEME_CONTRACT_ADDRESS= diff --git a/packages/plugin-cosmos/README.md b/packages/plugin-cosmos/README.md index 79b00efd505..2f6eb712f4e 100644 --- a/packages/plugin-cosmos/README.md +++ b/packages/plugin-cosmos/README.md @@ -9,7 +9,7 @@ This plugin provides actions and utilities for interacting with Cosmos-compatibl Prepare Eliza according to [README](../../README.md) Add variables required for `@elizaos/plugin-cosmos` : -``` +```env COSMOS_RECOVERY_PHRASE=your recovery phrase words COSMOS_AVAILABLE_CHAINS=chain1,chain2,chain3 ``` diff --git a/packages/plugin-cronos/README.md b/packages/plugin-cronos/README.md index 4d07b0cc5d5..0e70dd813c2 100644 --- a/packages/plugin-cronos/README.md +++ b/packages/plugin-cronos/README.md @@ -234,24 +234,17 @@ CRONOS_PRIVATE_KEY=0x... 
- Use separate keys for mainnet and testnet - Monitor your wallet for unauthorized transactions -### Setup -1. Create `.env` file: -```env -CRONOS_PRIVATE_KEY=0x... # Mainnet -``` - -2. For testnet development, use `.env.local`: -```env -CRONOS_PRIVATE_KEY=0x... # Testnet only -``` +## Development -3. Add to `.gitignore`: -``` -.env -.env.* +### Building +```bash +npm run build +npm run dev # Development mode with watch ``` -## License - -MIT \ No newline at end of file +### Running Tests +```bash +npm test # Run tests +npm test:watch # Watch mode +npm test:coverage # Gene diff --git a/packages/plugin-cronoszkevm/README.md b/packages/plugin-cronoszkevm/README.md index 064e2addd10..de11b735c0e 100644 --- a/packages/plugin-cronoszkevm/README.md +++ b/packages/plugin-cronoszkevm/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-cronoszkevm The plugin requires the following environment variables to be set: -```typescript +```env CRONOSZKEVM_ADDRESS= CRONOSZKEVM_PRIVATE_KEY= ``` diff --git a/packages/plugin-devin/README.md b/packages/plugin-devin/README.md index 1676286192a..5bfd79258ca 100644 --- a/packages/plugin-devin/README.md +++ b/packages/plugin-devin/README.md @@ -12,7 +12,7 @@ pnpm add @elizaos/plugin-devin The plugin requires a Devin API token for authentication. Set the following environment variable: -```bash +```env DEVIN_API_TOKEN=your_api_token_here ``` diff --git a/packages/plugin-dkg/README.md b/packages/plugin-dkg/README.md index 8f20abd9352..63e513eee7f 100644 --- a/packages/plugin-dkg/README.md +++ b/packages/plugin-dkg/README.md @@ -12,6 +12,19 @@ The DKG plugin extends ElizaOS functionality by allowing agents to interact with pnpm install @elizaos/plugin-dkg ``` +## Configuration + +The plugin requires the following environment variables to be set: + +```env +DKG_ENVIRONMENT= +DKG_HOSTNAME= +DKG_PORT= +DKG_BLOCKCHAIN_NAME= +DKG_PUBLIC_KEY= +DKG_PRIVATE_KEY= +``` + ## Features ### 1. DKG Integration diff --git a/packages/plugin-echochambers/README.md b/packages/plugin-echochambers/README.md index c0b270e5b0b..6a083d383ef 100644 --- a/packages/plugin-echochambers/README.md +++ b/packages/plugin-echochambers/README.md @@ -57,7 +57,7 @@ export const character: Character = { The plugin requires the following environment variables: -```plaintext +```env # Required Settings ECHOCHAMBERS_API_URL="http://127.0.0.1:3333" # Base URL for the EchoChambers API ECHOCHAMBERS_API_KEY="your-api-key" # API key for authentication diff --git a/packages/plugin-email-automation/README.md b/packages/plugin-email-automation/README.md index c8e39f2643d..6c64071f563 100644 --- a/packages/plugin-email-automation/README.md +++ b/packages/plugin-email-automation/README.md @@ -19,7 +19,7 @@ AI-powered email automation plugin for Eliza that intelligently detects email-wo ## Configuration ### AI Email Automation Setup -```typescript +```env # Required RESEND_API_KEY= # Your Resend API key DEFAULT_TO_EMAIL= # Default recipient diff --git a/packages/plugin-email/README.md b/packages/plugin-email/README.md index 7fcf66047af..2d591232c61 100644 --- a/packages/plugin-email/README.md +++ b/packages/plugin-email/README.md @@ -6,22 +6,22 @@ Implementation of an EmailClient for Eliza. The following settings will be declared on your environment variable or inside your agent' settings: +```env ## SMTP Section - -- `EMAIL_OUTGOING_SERVICE`: "smtp" | "gmail" -- `EMAIL_OUTGOING_HOST`: SMTP Hostname or IP to connect to. Required only when "smtp" service is configured. 
-- `EMAIL_OUTGOING_PORT`: the port to connect to (defaults to 465 for secure connections, otherwise 587). Required only if "smtp" is configured. -- `EMAIL_SECURE`: if true the connection will use TLS, otherwise TLS will be used if server supports STARTTLS extension. Set to true if port 465 is selected. -- `EMAIL_OUTGOING_USER`: Username -- `EMAIL_OUTGOING_PASS`: Password. If "gmail" selected you will need to provision a dedicated password for the agent [1] +EMAIL_OUTGOING_SERVICE="smtp" | "gmail" +EMAIL_OUTGOING_HOST=SMTP Hostname or IP to connect to. Required only when "smtp" service is configured. +EMAIL_OUTGOING_PORT=the port to connect to (defaults to 465 for secure connections, otherwise 587). Required only if "smtp" is configured. +EMAIL_SECURE=if true the connection will use TLS, otherwise TLS will be used if server supports STARTTLS extension. Set to true if port 465 is selected. +EMAIL_OUTGOING_USER=Username +EMAIL_OUTGOING_PASS=Password. If "gmail" selected you will need to provision a dedicated password for the agent [1] ## IMAP Section - -- `EMAIL_INCOMING_SERVICE`: "imap" -- `EMAIL_INCOMING_HOST`: IMAP Hostname or IP to connect to -- `EMAIL_INCOMING_PORT`: the port to connect to (defaults to 993) -- `EMAIL_INCOMING_USER`: Username -- `EMAIL_INCOMING_PASS`: Password +EMAIL_INCOMING_SERVICE="imap" +EMAIL_INCOMING_HOST=IMAP Hostname or IP to connect to +EMAIL_INCOMING_PORT=the port to connect to (defaults to 993) +EMAIL_INCOMING_USER=Username +EMAIL_INCOMING_PASS=Password +``` [1] https://support.google.com/mail/answer/185833?hl=en diff --git a/packages/plugin-ethstorage/README.md b/packages/plugin-ethstorage/README.md index 1ec5f8e6192..461d8ce8bbd 100644 --- a/packages/plugin-ethstorage/README.md +++ b/packages/plugin-ethstorage/README.md @@ -2,6 +2,16 @@ This plugin allows interaction with the EthStorage decentralized storage network using Eliza. By default, it operates on the beta testnet, but you can switch to other testnets by updating the `ETHSTORAGE_RPC_URL` in the `.env` file. The mainnet is not yet available. +## Configuration + +The plugin requires the following environment variables to be set: + +```env +ETHSTORAGE_ADDRESS=#The entry contract address for storing data on the EthStorage network (default is set to the beta testnet but can be updated if needed). +ETHSTORAGE_RPC_URL=#The RPC endpoint for connecting to the desired EthStorage network (default is set to the beta testnet). +ETHSTORAGE_PRIVATE_KEY=#The private key for the agent’s wallet. +``` + ## Actions - **transfer**: This action enables the transfer of QKC tokens from the agent's wallet (specified via `ETHSTORAGE_PRIVATE_KEY`) to another wallet. To use, just mention the transfer of tokens to an EthStorage account. @@ -18,10 +28,6 @@ This plugin allows interaction with the EthStorage decentralized storage network ## Usage & Testing ### Detailed testing steps -- In the .env file, set the following values: - - ETHSTORAGE_ADDRESS: The entry contract address for storing data on the EthStorage network (default is set to the beta testnet but can be updated if needed). - - ETHSTORAGE_RPC_URL: The RPC endpoint for connecting to the desired EthStorage network (default is set to the beta testnet). - - ETHSTORAGE_PRIVATE_KEY: The private key for the agent’s wallet. - **Transfer Tokens** - To test the transfer function, you need tokens in your EthStorage account. On the testnet, you can use the [EthStorage Faucet](https://qkc-l2-faucet.eth.sep.w3link.io/). 
If you need more tokens, please ping us on [Discord](https://discord.com/invite/xhCwaMp7ps), and we can send them over. - Run the agent and prompt it with: "send QKC to " - e.g. `send 1 QKC to 0x341Cb1a94ef69499F97E93c41707B21326C0Cc87` diff --git a/packages/plugin-flow/README.md b/packages/plugin-flow/README.md index 46a78fb4dc4..70951def0eb 100644 --- a/packages/plugin-flow/README.md +++ b/packages/plugin-flow/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-flow The plugin requires the following environment variables to be set: -```typescript +```env FLOW_ADDRESS= FLOW_PRIVATE_KEY= FLOW_NETWORK= diff --git a/packages/plugin-football/README.md b/packages/plugin-football/README.md index ab2d43ee79b..346ed21f803 100644 --- a/packages/plugin-football/README.md +++ b/packages/plugin-football/README.md @@ -48,9 +48,9 @@ To use this plugin, you need an API key from [Football-Data.org](https://www.foo 1. Register and obtain your API key from Football-Data.org. 2. Add the API key to your `.env` file: - ```env - FOOTBALL_API_KEY=your_api_key_here - ``` +```env +FOOTBALL_API_KEY=your_api_key_here +``` The plugin will use this key to authenticate requests. diff --git a/packages/plugin-form/README.md b/packages/plugin-form/README.md index 23bd1d4026c..94867447bec 100644 --- a/packages/plugin-form/README.md +++ b/packages/plugin-form/README.md @@ -23,7 +23,7 @@ pnpm install @elizaos/plugin-form ## Configuration ### Environment Variables -```bash +```env FORM_PRIVATE_KEY= FORM_TESTNET=true # Optional, defaults to false ``` diff --git a/packages/plugin-fuel/README.md b/packages/plugin-fuel/README.md index 0184f27329c..3b2124432a3 100644 --- a/packages/plugin-fuel/README.md +++ b/packages/plugin-fuel/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-fuel The plugin requires the following environment variables to be set: -```typescript +```env FUEL_PRIVATE_KEY= FUEL_PROVIDER_URL= ``` diff --git a/packages/plugin-gelato/README.md b/packages/plugin-gelato/README.md index 767896a0bff..85ff7970674 100644 --- a/packages/plugin-gelato/README.md +++ b/packages/plugin-gelato/README.md @@ -31,7 +31,7 @@ pnpm install elizaos/plugin-gelato Fill out the `.env` file in the project root with the following variables: -``` +```env GELATO_RELAY_API_KEY= EVM_PROVIDER_URL= EVM_PRIVATE_KEY= diff --git a/packages/plugin-giphy/README.md b/packages/plugin-giphy/README.md index abaa710a60a..7345aac0200 100644 --- a/packages/plugin-giphy/README.md +++ b/packages/plugin-giphy/README.md @@ -16,9 +16,9 @@ pnpm add @elizaos/plugin-giphy Set up your environment with the required Giphy API key: -| Variable Name | Description | -|--------------|-------------| -| `GIPHY_API_KEY` | Giphy API key for authenticating requests | +```env +GIPHY_API_KEY=Giphy API key for authenticating requests +``` You need to obtain an API key by signing up at [Giphy Developers](https://developers.giphy.com/). 
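A minimal sketch of how an action might consume `GIPHY_API_KEY` at runtime; the endpoint is the public Giphy search API, and the actual request shape used by `@elizaos/plugin-giphy` may differ:

```typescript
import type { IAgentRuntime } from "@elizaos/core";

// Illustrative only: fetch a handful of GIFs for a search term with the configured key.
export async function searchGifs(runtime: IAgentRuntime, query: string): Promise<unknown> {
    const apiKey = runtime.getSetting("GIPHY_API_KEY");
    if (!apiKey) {
        throw new Error("GIPHY_API_KEY is not set");
    }
    const url = `https://api.giphy.com/v1/gifs/search?api_key=${apiKey}&q=${encodeURIComponent(query)}&limit=5`;
    const response = await fetch(url);
    if (!response.ok) {
        throw new Error(`Giphy request failed: ${response.status}`);
    }
    return response.json();
}
```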
diff --git a/packages/plugin-gitbook/README.md b/packages/plugin-gitbook/README.md index b301ceb2aa3..84ff521f886 100644 --- a/packages/plugin-gitbook/README.md +++ b/packages/plugin-gitbook/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-gitbook ### Environment Variables -```typescript +```env GITBOOK_SPACE_ID= ``` diff --git a/packages/plugin-gitcoin-passport/README.md b/packages/plugin-gitcoin-passport/README.md index 80de1caedf4..7aad5bfa0aa 100644 --- a/packages/plugin-gitcoin-passport/README.md +++ b/packages/plugin-gitcoin-passport/README.md @@ -17,6 +17,11 @@ Just add it under your character profile in plugins as ## Configuration +```env +PASSPORT_API_KEY=Your API key +PASSPORT_SCORER=Your Scorer ID +``` + Getting Your API Key 1. Log in to the developer portal: Go to developer.passport.xyz and log in to your account by connecting your wallet. diff --git a/packages/plugin-goat/README.md b/packages/plugin-goat/README.md index f3c49a76c77..abed5302781 100644 --- a/packages/plugin-goat/README.md +++ b/packages/plugin-goat/README.md @@ -21,7 +21,7 @@ pnpm install @elizaos/plugin-goat ### Environment Variables -```typescript +```env EVM_PRIVATE_KEY= EVM_PROVIDER_URL= ``` diff --git a/packages/plugin-hyperbolic/readme.md b/packages/plugin-hyperbolic/readme.md index 6a56e03de41..c099acf6bc3 100644 --- a/packages/plugin-hyperbolic/readme.md +++ b/packages/plugin-hyperbolic/readme.md @@ -78,7 +78,7 @@ Terminate the Hyperbolic instance [gpu]puny-clover-basilisk[/gpu] The plugin requires the following environment variables: -```bash +```env HYPERBOLIC_API_KEY=your_api_key_here HYPERBOLIC_ENV=production # or development HYPERBOLIC_GRANULAR_LOG=true # optional, for detailed logging diff --git a/packages/plugin-image-generation/README.MD b/packages/plugin-image-generation/README.MD index f3488fe44f2..4480225fbda 100644 --- a/packages/plugin-image-generation/README.MD +++ b/packages/plugin-image-generation/README.MD @@ -26,11 +26,11 @@ pnpm install plugin-image-generation Ensure the following environment variables are set: -| Variable Name | Description | -| ---------------------- | ----------------------------------- | -| `IMAGE_STORAGE_BUCKET` | Name of the storage bucket. | -| `STORAGE_ACCESS_KEY` | Access key for storage integration. | -| `STORAGE_SECRET_KEY` | Secret key for storage integration. | +```env +IMAGE_STORAGE_BUCKET=your_storage_bucket_name +STORAGE_ACCESS_KEY=your_access_key +STORAGE_SECRET_KEY=your_secret_key +``` ### TypeScript Configuration diff --git a/packages/plugin-initia/README.md b/packages/plugin-initia/README.md index 77677416e5b..0892d72ba80 100644 --- a/packages/plugin-initia/README.md +++ b/packages/plugin-initia/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-initia ## Configuration -```bash +```env INITIA_PRIVATE_KEY=0x1234...abcd INITIA_NODE_URL=https://... INITIA_CHAIN_ID=initiaion-2 diff --git a/packages/plugin-injective/Readme.md b/packages/plugin-injective/Readme.md index d08ae4abf48..41869ea46f3 100644 --- a/packages/plugin-injective/Readme.md +++ b/packages/plugin-injective/Readme.md @@ -2,6 +2,16 @@ A comprehensive plugin for interacting with the Injective chain through ElizaOS. 
+## Configuration + +```env +INJECTIVE_NETWORK= +INJECTIVE_PRIVATE_KEY= +INJECTIVE_PUBLIC_KEY= +EVM_PUBLIC_KEY= + +``` + ## Project Structure ``` diff --git a/packages/plugin-irys/README.md b/packages/plugin-irys/README.md index c2ef9b41cbe..069f6e96483 100644 --- a/packages/plugin-irys/README.md +++ b/packages/plugin-irys/README.md @@ -24,9 +24,9 @@ pnpm add @elizaos/plugin-irys ## Configuration The plugin requires the following environment variables: - -- `EVM_WALLET_PRIVATE_KEY`: Your EVM wallet private key -- `AGENTS_WALLET_PUBLIC_KEYS`: The public keys of the agents that will be used to retrieve the data (string separated by commas) +```env +EVM_WALLET_PRIVATE_KEY= Your EVM wallet private key +``` For this plugin to work, you need to have an EVM (Base network) wallet with a private key and public address. To prevent any security issues, we recommend using a dedicated wallet for this plugin. diff --git a/packages/plugin-lens-network/README.md b/packages/plugin-lens-network/README.md index 3bf8e2e48e6..50fec4d342f 100644 --- a/packages/plugin-lens-network/README.md +++ b/packages/plugin-lens-network/README.md @@ -14,7 +14,7 @@ pnpm install @elizaos/plugin-lensNetwork ## Configuration The plugin requires the following environment variables to be set: -```typescript +```env LENS_ADDRESS= LENS_PRIVATE_KEY= ``` diff --git a/packages/plugin-letzai/README.md b/packages/plugin-letzai/README.md index 2e86b4228a5..5f931196437 100644 --- a/packages/plugin-letzai/README.md +++ b/packages/plugin-letzai/README.md @@ -32,7 +32,7 @@ pnpm install @elizaos/plugin-letzai ## Configuration ### Environment Variables -```typescript +```env LETZAI_API_LEY= LETZAI_MODELS="@hailee, @examplemodel2" ``` diff --git a/packages/plugin-lit/README.md b/packages/plugin-lit/README.md index c273a11f779..b01a3933eeb 100644 --- a/packages/plugin-lit/README.md +++ b/packages/plugin-lit/README.md @@ -17,6 +17,15 @@ A plugin that integrates Lit Protocol functionality into the elizaOS runtime env npm install @elizaos/plugin-lit ``` +## Configuration + +### Environment Variables + +```env +EVM_PRIVATE_KEY= +``` + + ## Setup There are two ways to register the plugin: diff --git a/packages/plugin-massa/readme.md b/packages/plugin-massa/readme.md index 05bb5d0f16f..c6389f789f3 100644 --- a/packages/plugin-massa/readme.md +++ b/packages/plugin-massa/readme.md @@ -1,9 +1,19 @@ -# Massa Plugin +# @elizaos/plugin-massa ## Overview This plugin aims to be the basis of all interactions with the Massa ecosystem. +## Configuration + +### Environment Variables + +```env +MASSA_PRIVATE_KEY= +MASSA_RPC_URL= +``` + + ## Adding a new action Reuse providers and utilities from the existing actions where possible. Add more utilities if you think they will be useful for other actions. 
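A new action generally follows the same shape as the ones touched elsewhere in this diff (`name`, `similes`, `validate`, `description`, `handler`, `examples`). The skeleton below is a sketch only; `GET_MASSA_BALANCE` and its settings lookups are illustrative, not part of the plugin:

```typescript
import {
    elizaLogger,
    type Action,
    type IAgentRuntime,
    type Memory,
} from "@elizaos/core";

// Illustrative skeleton for a new Massa action; reuse existing providers instead of duplicating RPC plumbing.
export const getBalanceAction: Action = {
    name: "GET_MASSA_BALANCE",
    similes: ["CHECK_MASSA_BALANCE", "MASSA_BALANCE"],
    description: "Report the MAS balance of the configured wallet.",
    validate: async (runtime: IAgentRuntime, message: Memory) => {
        // Keep validation cheap; the handler does the real work.
        elizaLogger.log("Validating Massa balance request:", message?.content?.text);
        return Boolean(runtime.getSetting("MASSA_PRIVATE_KEY"));
    },
    handler: async (runtime, _message, _state, _options, callback) => {
        const rpcUrl = runtime.getSetting("MASSA_RPC_URL");
        callback?.({ text: `Would query the balance via ${rpcUrl}` });
        return true;
    },
    examples: [],
};
```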
diff --git a/packages/plugin-mind-network/README.md b/packages/plugin-mind-network/README.md index d5364de62b7..b5093e7e2df 100644 --- a/packages/plugin-mind-network/README.md +++ b/packages/plugin-mind-network/README.md @@ -30,7 +30,7 @@ pnpm install @elizaos/plugin-mind-network Before using the plugin, configure the necessary environment variables: -```bash +```env MIND_HOT_WALLET_PRIVATE_KEY= MIND_COLD_WALLET_ADDRESS= ``` diff --git a/packages/plugin-moralis/README.md b/packages/plugin-moralis/README.md index 13b324c5924..e1147700ba3 100644 --- a/packages/plugin-moralis/README.md +++ b/packages/plugin-moralis/README.md @@ -16,9 +16,9 @@ pnpm install @elizaos/plugin-moralis Set up your environment with the required Moralis API key: -| Variable Name | Description | -| ----------------- | -------------------- | -| `MORALIS_API_KEY` | Your Moralis API key | +```env +MORALIS_API_KEY= +``` ## Usage diff --git a/packages/plugin-movement/readme.md b/packages/plugin-movement/readme.md index cecd6fee599..e8fa64b5c94 100644 --- a/packages/plugin-movement/readme.md +++ b/packages/plugin-movement/readme.md @@ -35,7 +35,7 @@ pnpm add @elizaos/plugin-movement Set up your environment variables in the `.env` file: -```bash +```env MOVEMENT_PRIVATE_KEY=your_private_key_here MOVEMENT_NETWORK=bardock ``` diff --git a/packages/plugin-near/src/actions/swap.ts b/packages/plugin-near/src/actions/swap.ts index 5d87a8b6b00..1ed91fc661e 100644 --- a/packages/plugin-near/src/actions/swap.ts +++ b/packages/plugin-near/src/actions/swap.ts @@ -213,7 +213,7 @@ export const executeSwap: Action = { "EXCHANGE_TOKENS_NEAR", ], validate: async (_runtime: IAgentRuntime, message: Memory) => { - elizaLogger.log("Message:", message); + elizaLogger.log("Message:", message?.content?.text); return true; }, description: "Perform a token swap using Ref Finance.", diff --git a/packages/plugin-obsidian/README.md b/packages/plugin-obsidian/README.md index d4b6ac3971a..f9162a795bf 100644 --- a/packages/plugin-obsidian/README.md +++ b/packages/plugin-obsidian/README.md @@ -137,17 +137,10 @@ pnpm add @elizaos/plugin-obsidian The plugin requires the following character secret settings: -```json -{ - "settings": { - "secrets": { - "OBSIDIAN_API_TOKEN": "your-obsidian-api-token", - "OBSIDIAN_API_PORT": "your-obsidian-api-port", // Optional (default: 27123) - "OBSIDIAN_API_URL": "https://your-obsidian-api-url" , // Optional (default: "http://127.0.0.1:27123") - }, - // other settings... - } -} +```env +OBSIDIAN_API_TOKEN=your-obsidian-api-token +OBSIDIAN_API_PORT=your-obsidian-api-port // Optional (default: 27123) +OBSIDIAN_API_URL=https://your-obsidian-api-url // Optional (default: "http://127.0.0.1:27123") ``` ## Usage diff --git a/packages/plugin-open-weather/README.md b/packages/plugin-open-weather/README.md index 7a99efece73..01dfcf185c7 100644 --- a/packages/plugin-open-weather/README.md +++ b/packages/plugin-open-weather/README.md @@ -19,7 +19,7 @@ npm install @elizaos/plugin-open-weather 2. 
Set up your environment variables: -```bash +```env OPEN_WEATHER_API_KEY=your_api_key ``` diff --git a/packages/plugin-primus/README.md b/packages/plugin-primus/README.md index fa570a11eec..09d484af5f2 100644 --- a/packages/plugin-primus/README.md +++ b/packages/plugin-primus/README.md @@ -164,7 +164,7 @@ pnpm add @elizaos/plugin-primus Add the following environment variables to your .env file: -``` +```env PRIMUS_APP_ID=your_app_id PRIMUS_APP_SECRET=your_app_secret VERIFIABLE_INFERENCE_ENABLED=true diff --git a/packages/plugin-pyth-data/readme.md b/packages/plugin-pyth-data/readme.md index 6c8d2c7c9da..ab9a7508d0d 100644 --- a/packages/plugin-pyth-data/readme.md +++ b/packages/plugin-pyth-data/readme.md @@ -55,7 +55,6 @@ Show me available forex feeds Create a `.env` file in your project root with the following variables: -### Network Configuration ```env # Network Environment (mainnet or testnet) PYTH_NETWORK_ENV=mainnet @@ -73,19 +72,14 @@ PYTH_TESTNET_WSS_URL=wss://hermes.pyth.network/ws PYTH_TESTNET_PYTHNET_URL=https://pythnet.rpcpool.com PYTH_TESTNET_CONTRACT_REGISTRY=https://pyth.network/developers/price-feed-ids#testnet PYTH_TESTNET_PROGRAM_KEY=FsJ3A3u2vn5cTVofAjvy6y5kwABJAqYWpe4975bi2epH -``` -### Connection Settings -```env +# Connection Settings PYTH_MAX_RETRIES=3 PYTH_RETRY_DELAY=1000 PYTH_TIMEOUT=5000 PYTH_GRANULAR_LOG=true PYTH_LOG_LEVEL=info -``` -### Runtime Settings -```env # General runtime configuration RUNTIME_CHECK_MODE=false diff --git a/packages/plugin-quick-intel/README.md b/packages/plugin-quick-intel/README.md index ae0040a6e37..94e4dbc0955 100644 --- a/packages/plugin-quick-intel/README.md +++ b/packages/plugin-quick-intel/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-quickintel ### Environment Variables -```typescript +```env QUICKINTEL_API_KEY= ``` diff --git a/packages/plugin-rabbi-trader/readme.md b/packages/plugin-rabbi-trader/readme.md index 8e9eeadf6a4..802f8e51590 100644 --- a/packages/plugin-rabbi-trader/readme.md +++ b/packages/plugin-rabbi-trader/readme.md @@ -22,15 +22,16 @@ npm install @elizaos/plugin-rabbi-trader ## Prerequisites The following environment variables need to be configured: - -- `WALLET_PRIVATE_KEY`: Your Solana wallet private key -- `WALLET_PUBLIC_KEY`: Your Solana wallet public address -- `SOLANA_RPC_URL`: Solana RPC endpoint (defaults to mainnet) -- `BIRDEYE_API_KEY`: API key for Birdeye data provider -- `TWITTER_ENABLED`: Enable/disable Twitter notifications -- `TWITTER_USERNAME`: Twitter username for notifications -- `DEXSCREENER_WATCHLIST_ID`: DexScreener watchlist identifier -- `COINGECKO_API_KEY`: CoinGecko API key for additional market data +```env +WALLET_PRIVATE_KEY= Your Solana wallet private key +WALLET_PUBLIC_KEY= Your Solana wallet public address +SOLANA_RPC_URL= Solana RPC endpoint (defaults to mainnet) +BIRDEYE_API_KEY= API key for Birdeye data provider +TWITTER_ENABLED= Enable/disable Twitter notifications +TWITTER_USERNAME= Twitter username for notifications +DEXSCREENER_WATCHLIST_ID= DexScreener watchlist identifier +COINGECKO_API_KEY= CoinGecko API key for additional market data +``` ## Usage diff --git a/packages/plugin-router-nitro/README.md b/packages/plugin-router-nitro/README.md index b2b5e8a90a8..e5ce4000e0d 100644 --- a/packages/plugin-router-nitro/README.md +++ b/packages/plugin-router-nitro/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-router-nitro The plugin requires the following environment variables to be set: -```typescript +```env ROUTER_NITRO_EVM_PRIVATE_KEY= 
ROUTER_NITRO_EVM_ADDRESS= ``` diff --git a/packages/plugin-solana-agent-kit/src/actions/swap.ts b/packages/plugin-solana-agent-kit/src/actions/swap.ts index 1aaf77c8cfa..5a4590acc8a 100644 --- a/packages/plugin-solana-agent-kit/src/actions/swap.ts +++ b/packages/plugin-solana-agent-kit/src/actions/swap.ts @@ -61,7 +61,7 @@ export default { similes: TRADE_ACTION.similes, validate: async (runtime: IAgentRuntime, message: Memory) => { // Check if the necessary parameters are provided in the message - elizaLogger.log("Message:", message); + elizaLogger.log("Message:", message?.content?.text); return true; }, description: TRADE_ACTION.description, diff --git a/packages/plugin-solana-v2/README.md b/packages/plugin-solana-v2/README.md index f356851bdca..de8e7beaec9 100644 --- a/packages/plugin-solana-v2/README.md +++ b/packages/plugin-solana-v2/README.md @@ -44,10 +44,12 @@ The `Utils` class provides shared functionality across the plugin, offering flex #### 1. Set up your environment variables - In the root of the repositorty, copy `.env.example` to `.env` - Fill in the following parameters: - - `SOLANA_PRIVATE_KEY` - - `SOLANA_PUBLIC_KEY` - - `SOLANA_RPC_URL` - - `OPENAI_API_KEY` +```env +SOLANA_PRIVATE_KEY=solana_private_key +SOLANA_PUBLIC_KEY=solana_public_key +SOLANA_RPC_URL=solana_rpc_url +OPENAI_API_KEY=openai_api_key +``` #### 2. RPC requirements Most often, free-tier RPC URLs are not sufficient for this plugin. diff --git a/packages/plugin-solana/README.MD b/packages/plugin-solana/README.MD index c0c3e79d8c8..15f818d70db 100644 --- a/packages/plugin-solana/README.MD +++ b/packages/plugin-solana/README.MD @@ -62,17 +62,16 @@ npm install @elizaos/plugin-solana Configure the plugin by setting the following environment variables: -```typescript -const solanaEnvSchema = { - WALLET_SECRET_SALT: string(optional), - WALLET_SECRET_KEY: string, - WALLET_PUBLIC_KEY: string, - SOL_ADDRESS: string, - SLIPPAGE: string, - SOLANA_RPC_URL: string, - HELIUS_API_KEY: string, - BIRDEYE_API_KEY: string, -}; +```env +WALLET_SECRET_SALT= string(optional), # only used when TEE_MODE is enabled +WALLET_SECRET_KEY= string, # only used when TEE_MODE is disabled +WALLET_PUBLIC_KEY= string, +SOL_ADDRESS= string, +SLIPPAGE= string, +SOLANA_RPC_URL= string, +HELIUS_API_KEY= string, +BIRDEYE_API_KEY= string, +SOLANA_PRIVATE_KEY= string(or WALLET_PRIVATE_KEY), # only used when TEE_MODE is disabled ``` ## Usage diff --git a/packages/plugin-solana/src/actions/swap.ts b/packages/plugin-solana/src/actions/swap.ts index aa0123aba9f..4dfc73b7dd6 100644 --- a/packages/plugin-solana/src/actions/swap.ts +++ b/packages/plugin-solana/src/actions/swap.ts @@ -175,7 +175,7 @@ export const executeSwap: Action = { similes: ["SWAP_TOKENS", "TOKEN_SWAP", "TRADE_TOKENS", "EXCHANGE_TOKENS"], validate: async (runtime: IAgentRuntime, message: Memory) => { // Check if the necessary parameters are provided in the message - elizaLogger.log("Message:", message); + elizaLogger.log("Message:", message?.content?.text); return true; }, description: "Perform a token swap.", diff --git a/packages/plugin-solana/src/actions/swapDao.ts b/packages/plugin-solana/src/actions/swapDao.ts index a2ce02fabbe..b18ba4b1862 100644 --- a/packages/plugin-solana/src/actions/swapDao.ts +++ b/packages/plugin-solana/src/actions/swapDao.ts @@ -54,7 +54,7 @@ export const executeSwapForDAO: Action = { name: "EXECUTE_SWAP_DAO", similes: ["SWAP_TOKENS_DAO", "TOKEN_SWAP_DAO"], validate: async (runtime: IAgentRuntime, message: Memory) => { - elizaLogger.log("Message:", message); 
+ elizaLogger.log("Message:", message?.content?.text); return true; }, description: "Perform a DAO token swap using execute_invoke.", diff --git a/packages/plugin-solana/src/actions/transfer.ts b/packages/plugin-solana/src/actions/transfer.ts index 2da6952b756..1b08e76d69e 100644 --- a/packages/plugin-solana/src/actions/transfer.ts +++ b/packages/plugin-solana/src/actions/transfer.ts @@ -68,7 +68,7 @@ export default { similes: ["TRANSFER_TOKEN", "TRANSFER_TOKENS", "SEND_TOKENS", "PAY_TOKEN", "PAY_TOKENS", "PAY"], validate: async (runtime: IAgentRuntime, message: Memory) => { // Always return true for token transfers, letting the handler deal with specifics - elizaLogger.log("Validating token transfer from user:", message.userId); + elizaLogger.log("Validating token transfer from user:", message.userId, message.content); return true; }, description: "Transfer SPL tokens from agent's wallet to another address", diff --git a/packages/plugin-solana/src/actions/transfer_sol.ts b/packages/plugin-solana/src/actions/transfer_sol.ts index 1021e80424f..60543aaaa39 100644 --- a/packages/plugin-solana/src/actions/transfer_sol.ts +++ b/packages/plugin-solana/src/actions/transfer_sol.ts @@ -18,7 +18,7 @@ import { } from "@elizaos/core"; import { composeContext } from "@elizaos/core"; import { getWalletKey } from "../keypairUtils"; -import { generateObjectDeprecated } from "@elizaos/core"; +import { generateObjectDeprecated, cleanEscapedForNumber } from "@elizaos/core"; interface SolTransferContent extends Content { recipient: string; @@ -28,6 +28,13 @@ interface SolTransferContent extends Content { function isSolTransferContent( content: any ): content is SolTransferContent { + if (typeof content.amount === "string") { + content.amount = cleanEscapedForNumber(content.amount); + const parsedAmount = Number(content.amount); + if (!Number.isNaN(parsedAmount)) { + content.amount = parsedAmount; + } + } return ( typeof content.recipient === "string" && typeof content.amount === "number" @@ -48,7 +55,7 @@ Example response: Extract the following information about the requested SOL transfer: - Recipient wallet address -- Amount of SOL to transfer +- Amount of SOL to transfer (as a number) `; export default { @@ -56,7 +63,7 @@ export default { similes: ["TRANSFER_SOL", "PAY_SOL", "TRANSACT_SOL"], validate: async (runtime: IAgentRuntime, message: Memory) => { // Always return true for SOL transfers, letting the handler deal with specifics - elizaLogger.log("Validating SOL transfer from user:", message.userId); + elizaLogger.log("Validating SOL transfer from user:", message.userId, message.content); return true; }, description: "Transfer native SOL from agent's wallet to specified address", @@ -79,13 +86,15 @@ export default { state, template: solTransferTemplate, }); - + elizaLogger.debug("Transfer request context:", transferContext); const content = await generateObjectDeprecated({ runtime, context: transferContext, modelClass: ModelClass.LARGE, }); + // elizaLogger.debug("Received content:", content); + if (!isSolTransferContent(content)) { if (callback) { callback({ diff --git a/packages/plugin-solana/src/evaluators/trust.ts b/packages/plugin-solana/src/evaluators/trust.ts index 29d219ea403..7d4ea446791 100644 --- a/packages/plugin-solana/src/evaluators/trust.ts +++ b/packages/plugin-solana/src/evaluators/trust.ts @@ -239,14 +239,13 @@ async function handler(runtime: IAgentRuntime, message: Memory) { // - from here we just need to make sure code is right // buy, dont buy, sell, dont sell - - const buyAmounts = 
await tokenProvider.calculateBuyAmounts(); - - let buyAmount = buyAmounts[rec.conviction.toLowerCase().trim()]; - if (!buyAmount) { - // handle annoying cases - // for now just put in 10 sol - buyAmount = 10; + let buyAmount = 0; + try { + const buyAmounts = await tokenProvider.calculateBuyAmounts(); + buyAmount = buyAmounts[rec.conviction.toLowerCase().trim()]; + } catch (error) { + elizaLogger.warn("Error calculating buy amounts", error); + continue; } // TODO: is this is a buy, sell, dont buy, or dont sell? @@ -267,7 +266,7 @@ async function handler(runtime: IAgentRuntime, message: Memory) { rec.contractAddress, userId, { - buy_amount: rec.buyAmount, + buy_amount: buyAmount, is_simulation: true, } ); diff --git a/packages/plugin-solana/src/keypairUtils.ts b/packages/plugin-solana/src/keypairUtils.ts index 80e22684845..d8a4cd2dd7b 100644 --- a/packages/plugin-solana/src/keypairUtils.ts +++ b/packages/plugin-solana/src/keypairUtils.ts @@ -19,7 +19,7 @@ export async function getWalletKey( requirePrivateKey = true ): Promise { const teeMode = runtime.getSetting("TEE_MODE") || TEEMode.OFF; - + elizaLogger.log("getWalletKey TEE mode:", teeMode); if (teeMode !== TEEMode.OFF) { const walletSecretSalt = runtime.getSetting("WALLET_SECRET_SALT"); if (!walletSecretSalt) { @@ -49,7 +49,7 @@ export async function getWalletKey( if (!privateKeyString) { throw new Error("Private key not found in settings"); } - + try { // First try base58 const secretKey = bs58.decode(privateKeyString); diff --git a/packages/plugin-solana/src/providers/token.ts b/packages/plugin-solana/src/providers/token.ts index d98533ce50f..7fcd23b8329 100644 --- a/packages/plugin-solana/src/providers/token.ts +++ b/packages/plugin-solana/src/providers/token.ts @@ -100,6 +100,9 @@ export class TokenProvider { url: string, options: RequestInit = {} ): Promise { + if(!settings.BIRDEYE_API_KEY) { + throw new Error("BIRDEYE_API_KEY is not set"); + } let lastError: Error; for (let i = 0; i < PROVIDER_CONFIG.MAX_RETRIES; i++) { @@ -291,7 +294,7 @@ export class TokenProvider { this.setCachedData(cacheKey, prices); return prices; } catch (error) { - elizaLogger.error("Error fetching prices:", error); + elizaLogger.error("Error fetching prices:", error.message); throw error; } } diff --git a/packages/plugin-solana/src/providers/wallet.ts b/packages/plugin-solana/src/providers/wallet.ts index cf2290004dc..fc045c6d261 100644 --- a/packages/plugin-solana/src/providers/wallet.ts +++ b/packages/plugin-solana/src/providers/wallet.ts @@ -72,6 +72,9 @@ export class WalletProvider { url: string, options: RequestInit = {} ): Promise { + if(!runtime.getSetting("BIRDEYE_API_KEY")) { + throw new Error("BIRDEYE_API_KEY is not set"); + } let lastError: Error; for (let i = 0; i < PROVIDER_CONFIG.MAX_RETRIES; i++) { @@ -347,7 +350,7 @@ export class WalletProvider { this.cache.set(cacheKey, prices); return prices; } catch (error) { - elizaLogger.error("Error fetching prices:", error); + elizaLogger.error("Error fetching prices:", error.message); throw error; } } diff --git a/packages/plugin-squid-router/README.md b/packages/plugin-squid-router/README.md index 95150f6d649..a34689c331e 100644 --- a/packages/plugin-squid-router/README.md +++ b/packages/plugin-squid-router/README.md @@ -7,7 +7,7 @@ For supported chains and tokens, please refer to the [Squid Router documentation ## Configuration The plugin requires the following configuration: -``` +```env # Squid Router SQUID_SDK_URL=https://apiplus.squidrouter.com # Default: https://apiplus.squidrouter.com 
SQUID_INTEGRATOR_ID= # get integrator id through https://docs.squidrouter.com/ diff --git a/packages/plugin-stargaze/README.md b/packages/plugin-stargaze/README.md index 878e546f520..a3a84855635 100644 --- a/packages/plugin-stargaze/README.md +++ b/packages/plugin-stargaze/README.md @@ -16,9 +16,9 @@ pnpm add @elizaos/plugin-stargaze Set up your environment with the required Stargaze API endpoint, currently Stargaze offers https://graphql.mainnet.stargaze-apis.com/graphql publicly. -| Variable Name | Description | -| ------------- | ----------- | -| `STARGAZE_ENDPOINT` | Stargaze GraphQL API endpoint | +```env +STARGAZE_ENDPOINT= Stargaze GraphQL API endpoint +``` ## Usage diff --git a/packages/plugin-starknet/README.md b/packages/plugin-starknet/README.md index bc344c9afd8..d254682d525 100644 --- a/packages/plugin-starknet/README.md +++ b/packages/plugin-starknet/README.md @@ -33,7 +33,7 @@ The Starknet plugin serves as a foundational component of Eliza OS, bridging Sta The plugin requires the following environment variables: -```typescript +```env STARKNET_ADDRESS = your_starknet_address; STARKNET_PRIVATE_KEY = your_private_key; STARKNET_RPC_URL = your_rpc_url; diff --git a/packages/plugin-story/README.md b/packages/plugin-story/README.md new file mode 100644 index 00000000000..c2d7bbf7ca6 --- /dev/null +++ b/packages/plugin-story/README.md @@ -0,0 +1,153 @@ +# @elizaos/plugin-story + +A Story Protocol integration plugin for ElizaOS, providing functionality to interact with Story Protocol's IP management and licensing features. + +## Features + +- IP Asset Registration +- License Management +- IP Details Retrieval +- License Terms Management +- IPFS Integration + +## Installation + +```bash +npm install @elizaos/plugin-story +``` + +## Configuration + +The plugin requires the following environment variables: +```env +STORY_API_BASE_URL= Story Protocol API base URL +PINATA_JWT= Pinata JWT for IPFS integration +STORY_API_KEY= Story Protocol API key +STORY_PRIVATE_KEY= Story Protocol private key +``` + +## Usage + +### Initialize the Plugin + +```typescript +import { storyPlugin } from '@elizaos/plugin-story'; +// Add to your ElizaOS configuration +const config = { + plugins: [storyPlugin], +// ... other config +}; +``` + +### Available Actions + +#### 1. Register IP Asset + +Register a new IP asset with metadata on Story Protocol. + +```typescript +const response = await registerIP({ +title: "My IP Asset", +description: "Description of my IP asset", +ipType: "character" // Optional +}); +``` + +#### 2. License IP Asset + +Create a license for an existing IP asset. + +```typescript +const response = await licenseIP({ +licensorIpId: "0x...", // IP Asset address +licenseTermsId: "1", // License terms ID +amount: 1 // Optional: Number of licenses to mint +}); +``` + +#### 3. Get IP Details + +Retrieve details about an IP asset. + +```typescript +const details = await getIPDetails({ +ipId: "0x..." // IP Asset address +}); +``` + +#### 4. Get Available Licenses + +Fetch available licenses for an IP asset. + +```typescript +const licenses = await getAvailableLicenses({ +ipId: "0x..." // IP Asset address +}); +``` + +#### 5. Attach License Terms + +Attach license terms to an IP asset. 
+ +```typescript +const response = await attachTerms({ +ipId: "0x...", // IP Asset address +mintingFee: 1.0, // Optional: Fee to mint license +commercialUse: true, // Optional: Allow commercial use +commercialRevShare: 10 // Optional: Revenue share percentage +}); +``` + +## Development + +### Project Structure + +- `/src` + - `/actions` - Action implementations + - `/functions` - Utility functions + - `/lib` - Core library code + - `/providers` - Service providers + - `/templates` - Template definitions + - `/types` - TypeScript type definitions + +### Build + +```bash +npm run build +``` + +### Test + +```bash +npm run test +``` + +### Lint + +```bash +npm run lint +npm run lint:fix +``` + +### Format + +```bash +npm run format +npm run format:fix +``` + +## Dependencies + +- `@elizaos/core` - ElizaOS core framework +- `@pinata/sdk` - Pinata IPFS integration +- `@story-protocol/core-sdk` - Story Protocol SDK +- `viem` - Ethereum interactions +- `whatwg-url` - URL parsing + +## License + +This project is licensed under the terms specified in the package.json file. + +## Contributing + +Contributions are welcome! Please ensure you follow the project's code style and include appropriate tests with any pull requests. \ No newline at end of file diff --git a/packages/plugin-suno/README.md b/packages/plugin-suno/README.md index f78e3c8aaf0..62ff10a59c9 100644 --- a/packages/plugin-suno/README.md +++ b/packages/plugin-suno/README.md @@ -27,6 +27,10 @@ QUICK START 2. Configure the Suno provider with your API credentials: +```env +SUNO_API_KEY=your-suno-api-key +``` + import { sunoProvider } from '@elizaos/plugin-suno'; sunoProvider.configure({ diff --git a/packages/plugin-tee-marlin/README.md b/packages/plugin-tee-marlin/README.md index b26e3f9a8d2..675314a79d4 100644 --- a/packages/plugin-tee-marlin/README.md +++ b/packages/plugin-tee-marlin/README.md @@ -18,7 +18,7 @@ The `REMOTE_ATTESTATION` action fetches a remote attestation from an attestation #### Configuration The agent fetches the remote attestation from an attestation server whose URL can be configured in the `.env` file: -``` +```env # Optional, default is http://127.0.0.1:1350 TEE_MARLIN_ATTESTATION_ENDPOINT="http://127.0.0.1:1350" ``` diff --git a/packages/plugin-tee-verifiable-log/README.md b/packages/plugin-tee-verifiable-log/README.md index a94e6be1db9..fddadb9140f 100644 --- a/packages/plugin-tee-verifiable-log/README.md +++ b/packages/plugin-tee-verifiable-log/README.md @@ -16,7 +16,7 @@ docker run --rm -p 8090:8090 phalanetwork/tappd-simulator:latest ``` When using the provider through the runtime environment, ensure the following settings are configured: -```shell +```env # Optional, for simulator purposes if testing on mac or windows. Leave empty for Linux x86 machines. 
TEE_MODE="LOCAL" # LOCAL | DOCKER | PRODUCTION WALLET_SECRET_SALT= "" # ONLY define if you want to use TEE Plugin, otherwise it will throw errors diff --git a/packages/plugin-ton/README.md b/packages/plugin-ton/README.md index a85809193c8..495ec4b50f3 100644 --- a/packages/plugin-ton/README.md +++ b/packages/plugin-ton/README.md @@ -46,6 +46,10 @@ The plugin requires the following environment variables: TON_PRIVATE_KEY=your_mnemonic_phrase # Required - wallet mnemonic words TON_RPC_URL=your_rpc_endpoint # Optional - defaults to mainnet RPC TON_RPC_API_KEY= + +# Optional, for NFT operations +TON_NFT_IMAGES_FOLDER= +TON_NFT_METADATA_FOLDER= ``` ## Usage diff --git a/packages/plugin-trikon/readme.md b/packages/plugin-trikon/readme.md index 750e762471f..a7a8ab0399e 100644 --- a/packages/plugin-trikon/readme.md +++ b/packages/plugin-trikon/readme.md @@ -12,9 +12,10 @@ pnpm add @elizaos/plugin-trikon ## Configuration The plugin requires the following environment variables: - -- `TRIKON_WALLET_ADDRESS`: Your Trikon wallet address (must be a valid 64-character hex string starting with '0x') -- `TRIKON_INITIAL_BALANCE`: (Optional) The initial balance for the wallet. Defaults to "0" if not provided. +```env +TRIKON_WALLET_ADDRESS= Your Trikon wallet address (must be a valid 64-character hex string starting with '0x') +TRIKON_INITIAL_BALANCE= (Optional) The initial balance for the wallet. Defaults to "0" if not provided. +``` ## Usage diff --git a/packages/plugin-tts/README.md b/packages/plugin-tts/README.md index 1dc1bdb951b..1b510317721 100644 --- a/packages/plugin-tts/README.md +++ b/packages/plugin-tts/README.md @@ -16,7 +16,7 @@ pnpm install @elizaos/plugin-tts The plugin requires the following environment variable or runtime setting to be set: -```typescript +```env FAL_API_KEY= ``` diff --git a/packages/plugin-udio/README.md b/packages/plugin-udio/README.md index af749443f44..49fb134659b 100644 --- a/packages/plugin-udio/README.md +++ b/packages/plugin-udio/README.md @@ -24,8 +24,9 @@ QUICK START eliza.registerPlugin(udioPlugin); 2. Configure your Udio authentication token in your environment: - - UDIO_AUTH_TOKEN=your-udio-auth-token +```env +UDIO_AUTH_TOKEN=your-udio-auth-token +``` FEATURES diff --git a/packages/plugin-zerion/README.md b/packages/plugin-zerion/README.md index 4d82790f7e3..a2fab1427cb 100644 --- a/packages/plugin-zerion/README.md +++ b/packages/plugin-zerion/README.md @@ -23,7 +23,7 @@ npm install @elizaos/plugin-zerion 2. Set up your environment variables: -```bash +```env ZERION_API_KEY=your_api_key ``` diff --git a/packages/plugin-zilliqa/README.md b/packages/plugin-zilliqa/README.md index cadde319a1c..0b8504cf215 100644 --- a/packages/plugin-zilliqa/README.md +++ b/packages/plugin-zilliqa/README.md @@ -21,7 +21,7 @@ pnpm install @elizaos/plugin-zilliqa ### Environment Variables -```typescript +```env EVM_PRIVATE_KEY= EVM_PROVIDER_URL= ENABLE_ZILLIQA=1 diff --git a/scripts/restart-agent.sh b/scripts/restart-agent.sh new file mode 100755 index 00000000000..55a63ee67a8 --- /dev/null +++ b/scripts/restart-agent.sh @@ -0,0 +1,77 @@ +#!/bin/bash + +# switch to project root directory +cd "$(dirname "$0")/.." + +# Default action is restart +ACTION=${1:-restart} + +# load service port configurations from .env file +if [ -f .env ]; then + echo "Loading service port configurations from .env file..." + export $(cat .env | grep "SERVER_PORT" | grep -v '^#' | xargs) +else + echo "Warning: .env file not found!" +fi + +# get project name prefix +namePrefix=$(basename $(pwd)) +if [ ! 
-z "$APP_PREFIX" ]; then + namePrefix=$APP_PREFIX +fi + +cleanup_port() { + # if get port failed, return directly + if [ -z "$SERVER_PORT" ]; then + echo "Skipping port cleanup for $service due to port not found" + return + fi + + echo "Checking port $SERVER_PORT" + + # find process using this port + # pid can only get the first line of losf value + local pid=$(lsof -ti:$SERVER_PORT | head -n 1) + if [ ! -z "$pid" ]; then + echo "Found process $pid using port $SERVER_PORT, killing it..." + kill -9 $pid + sleep 1 + fi +} + +stop_agent() { + echo "Stopping agent..." + # Check if the PM2 process exists + if pm2 list | grep -q "${namePrefix}-agent"; then + pm2 stop "${namePrefix}-agent" | grep "${namePrefix}-agent" + sleep 2 + fi + cleanup_port +} + +start_agent() { + echo "Starting agent..." + # If the process doesn't exist, start with ecosystem.config.js + if pm2 list | grep -q "${namePrefix}-agent"; then + pm2 start "${namePrefix}-agent" + else + pm2 start ecosystem.config.js + fi +} + +case "$ACTION" in + start) + start_agent + ;; + stop) + stop_agent + ;; + restart) + stop_agent + start_agent + ;; + *) + echo "Usage: $0 {start|stop|restart}" + exit 1 + ;; +esac