diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 23aca531b45d7..605fa56da1819 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -10,8 +10,8 @@ }, "privileged": true, "runArgs": [ - "-p=3000:3000", // Add port for server api - "-p=5173:5173", // Add port for client + "-p 3000:3000", // Add port for server api + "-p 5173:5173", // Add port for client //"--volume=/usr/lib/wsl:/usr/lib/wsl", // uncomment for WSL //"--volume=/mnt/wslg:/mnt/wslg", // uncomment for WSL "--gpus=all", // ! uncomment for vGPU @@ -35,4 +35,4 @@ } }, "features": {} -} \ No newline at end of file +} diff --git a/.env.example b/.env.example index 200d21bd52cd4..bb47fdacc4493 100644 --- a/.env.example +++ b/.env.example @@ -4,10 +4,12 @@ # Cache Configs CACHE_STORE=database # Defaults to database. Other available cache store: redis and filesystem +CACHE_DIR=./data/cache # Directory to store the cache files if using filesystem cache REDIS_URL= # Redis URL - could be a local redis instance or cloud hosted redis. Also support rediss:// URLs PGLITE_DATA_DIR= #../pgLite/ if selecting a directory --- or memory:// if selecting in memory -# Eliza Port Config +# Eliza Server & Port Config +SERVER_URL=http://localhost SERVER_PORT=3000 # Supabase Configuration @@ -350,6 +352,22 @@ SMALL_LMSTUDIO_MODEL= # Default: hermes-3-llama-3.1-8b MEDIUM_LMSTUDIO_MODEL= # Default: hermes-3-llama-3.1-8b LARGE_LMSTUDIO_MODEL= # Default: hermes-3-llama-3.1-8b +# Secret AI Configuration +SECRET_AI_API_KEY= # Secret AI API Key +SECRET_AI_URL= # Default: https://ai1.scrtlabs.com:21434 +SMALL_SECRET_AI_MODEL= # Default: deepseek-r1:70b +MEDIUM_SECRET_AI_MODEL= # Default: deepseek-r1:70b +LARGE_SECRET_AI_MODEL= # Default: deepseek-r1:70b + +# NEAR AI (https://near.ai) Configuration +NEARAI_API_URL= # Default: https://api.near.ai/v1 +NEARAI_API_KEY= # NEAR AI API Key (optional). If you have run `nearai login` with NEAR AI CLI, the API key will be parsed from `~/.nearai/config.json`: https://docs.near.ai/agents/quickstart/ +NEARAI_MODEL= +SMALL_NEARAI_MODEL= # Default: fireworks::accounts/fireworks/models/llama-v3p2-3b-instruct +MEDIUM_NEARAI_MODEL= # Default: fireworks::accounts/fireworks/models/llama-v3p1-70b-instruct +LARGE_NEARAI_MODEL= # Default: fireworks::accounts/fireworks/models/llama-v3p1-405b-instruct +IMAGE_NEARAI_MODEL= # Default: fireworks::accounts/fireworks/models/playground-v2-5-1024px-aesthetic + # Remaining Provider Configurations GOOGLE_GENERATIVE_AI_API_KEY= # Gemini API key ALI_BAILIAN_API_KEY= # Ali Bailian API Key @@ -374,7 +392,7 @@ COINGECKO_PRO_API_KEY= MORALIS_API_KEY= # EVM -EVM_PRIVATE_KEY= +EVM_PRIVATE_KEY= # Add the "0x" prefix infront of your private key string EVM_PROVIDER_URL= # Zilliqa @@ -421,7 +439,7 @@ ABSTRACT_RPC_URL=https://api.testnet.abs.xyz # Starknet Configuration STARKNET_ADDRESS= STARKNET_PRIVATE_KEY= -STARKNET_RPC_URL= +STARKNET_RPC_URL=https://rpc.starknet-testnet.lava.build # Lens Network Configuration LENS_ADDRESS= @@ -553,7 +571,7 @@ NEAR_WALLET_SECRET_KEY= # NEAR Wallet Secret Key NEAR_WALLET_PUBLIC_KEY= # NEAR Wallet Public Key NEAR_ADDRESS= NEAR_SLIPPAGE=1 -NEAR_RPC_URL=https://rpc.testnet.near.org +NEAR_RPC_URL=https://near-testnet.lava.build NEAR_NETWORK=testnet # or mainnet # ZKsync Era Configuration @@ -968,3 +986,16 @@ BUNDLE_EXECUTOR_ADDRESS= # Address of the bundle executor contract DESK_EXCHANGE_PRIVATE_KEY= # Required for trading and cancelling orders DESK_EXCHANGE_NETWORK= # "mainnet" or "testnet +# Compass plugin configuration +COMPASS_WALLET_PRIVATE_KEY= +COMPASS_ARBITRUM_RPC_URL= +COMPASS_ETHEREUM_RPC_URL= +COMPASS_BASE_RPC_URL= + +# d.a.t.a Plugin Configurations +DATA_API_KEY= # Your d.a.t.a API key +DATA_AUTH_TOKEN= # Your d.a.t.a auth token + +# NKN Configuration +NKN_CLIENT_PRIVATE_KEY= # Required, NKN client private key +NKN_CLIENT_ID= # Optional, NKN client ID \ No newline at end of file diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 71701239963f8..ea99cc31aeff1 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -78,7 +78,7 @@ None: Automated tests are acceptable. ## Deployment instructions --> - + B[Character System] - A --> C[Memory Manager] - A --> D[Action System] - B --> E[Model Provider] - C --> F[Database] - D --> G[Platform Clients] +### Automated Start + +```bash +git clone https://github.com/elizaos/eliza-starter.git +cd eliza-starter +cp .env.example .env +pnpm i && pnpm build && pnpm start +``` + +OR + +```bash +git clone https://github.com/elizaos/eliza +cd eliza +sh scripts/start.sh ``` +For detailed instructions on using the start script, including character management and troubleshooting, see our [Quickstart Guide](./quickstart). + +> **Note**: The start script handles all dependencies, environment setup, and character management automatically. + +--- + ## Community and Support Eliza is backed by an active community of developers and users: -- **Open Source**: Contribute to the project on [GitHub](https://github.com/elizaos/eliza) -- **Documentation**: Comprehensive guides and API references -- **Examples**: Ready-to-use character templates and implementations -- **Support**: Active community for troubleshooting and discussion +- [**Open Source**](https://github.com/elizaos/eliza): Contribute to the project on GitHub +- [**Examples**](https://github.com/elizaos/characters): Ready-to-use character templates and implementations +- [**Support**](https://discord.gg/elizaos): Active communityfor troubleshooting and discussion + +Join us in building the future of autonomous AI agents with Eliza! ## Next Steps - [Create Your First Agent](../quickstart) - [Understand Core Concepts](../core/agents) - [Explore Advanced Features](./guides/advanced.md) - -Join us in building the future of autonomous AI agents with Eliza! diff --git a/docs/docs/packages/agents.md b/docs/docs/packages/agents.md deleted file mode 100644 index 33f7675f9d097..0000000000000 --- a/docs/docs/packages/agents.md +++ /dev/null @@ -1,409 +0,0 @@ -# 🤖 Agent Package - -## Overview - -The Agent Package (`@eliza/agent`) provides the high-level orchestration layer for Eliza, managing agent lifecycles, character loading, client initialization, and runtime coordination. - -## Installation - -```bash -pnpm add @eliza/agent -``` - -## Quick Start - -```typescript -import { startAgents, loadCharacters } from "@eliza/agent"; - -// Start agents with default or custom characters -const args = parseArguments(); -const characters = await loadCharacters(args.characters); - -// Initialize agents -await startAgents(); -``` - -## Core Components - -### Agent Creation - -```typescript -export async function createAgent( - character: Character, - db: IDatabaseAdapter, - token: string, -): Promise { - return new AgentRuntime({ - databaseAdapter: db, - token, - modelProvider: character.modelProvider, - character, - plugins: [ - bootstrapPlugin, - nodePlugin, - // Conditional plugins - character.settings.secrets.WALLET_PUBLIC_KEY ? solanaPlugin : null, - ].filter(Boolean), - providers: [], - actions: [], - services: [], - managers: [], - }); -} -``` - -### Character Loading - -```typescript -export async function loadCharacters( - charactersArg: string, -): Promise { - // Parse character paths - let characterPaths = charactersArg - ?.split(",") - .map((path) => path.trim()) - .map((path) => normalizePath(path)); - - const loadedCharacters = []; - - // Load each character file - for (const path of characterPaths) { - try { - const character = JSON.parse(fs.readFileSync(path, "utf8")); - - // Load plugins if specified - if (character.plugins) { - character.plugins = await loadPlugins(character.plugins); - } - - loadedCharacters.push(character); - } catch (error) { - console.error(`Error loading character from ${path}: ${error}`); - } - } - - // Fall back to default character if none loaded - if (loadedCharacters.length === 0) { - loadedCharacters.push(defaultCharacter); - } - - return loadedCharacters; -} -``` - -### Client Initialization - -```typescript -export async function initializeClients( - character: Character, - runtime: IAgentRuntime, -) { - const clients = []; - const clientTypes = - character.clients?.map((str) => str.toLowerCase()) || []; - - if (clientTypes.includes(Clients.DISCORD)) { - clients.push(await DiscordClientInterface.start(runtime)); - } - if (clientTypes.includes(Clients.TELEGRAM)) { - clients.push(await TelegramClientInterface.start(runtime)); - } - if (clientTypes.includes(Clients.TWITTER)) { - clients.push(await TwitterClientInterface.start(runtime)); - } - if (clientTypes.includes(Clients.DIRECT)) { - clients.push(await AutoClientInterface.start(runtime)); - } - - return clients; -} -``` - -## Database Management - -```typescript -function initializeDatabase(): IDatabaseAdapter { - // Use PostgreSQL if URL provided - if (process.env.POSTGRES_URL) { - return new PostgresDatabaseAdapter({ - connectionString: process.env.POSTGRES_URL, - }); - } - - // Fall back to SQLite - return new SqliteDatabaseAdapter(new Database("./db.sqlite")); -} -``` - -## Token Management - -```typescript -export function getTokenForProvider( - provider: ModelProviderName, - character: Character, -) { - switch (provider) { - case ModelProviderName.OPENAI: - return ( - character.settings?.secrets?.OPENAI_API_KEY || - settings.OPENAI_API_KEY - ); - - case ModelProviderName.ANTHROPIC: - return ( - character.settings?.secrets?.ANTHROPIC_API_KEY || - character.settings?.secrets?.CLAUDE_API_KEY || - settings.ANTHROPIC_API_KEY - ); - - // Handle other providers... - } -} -``` - -## Agent Lifecycle Management - -### Starting Agents - -```typescript -async function startAgent(character: Character, directClient: any) { - try { - // Get provider token - const token = getTokenForProvider(character.modelProvider, character); - - // Initialize database - const db = initializeDatabase(); - - // Create runtime - const runtime = await createAgent(character, db, token); - - // Initialize clients - const clients = await initializeClients(character, runtime); - - // Register with direct client - directClient.registerAgent(runtime); - - return clients; - } catch (error) { - console.error( - `Error starting agent for character ${character.name}:`, - error, - ); - throw error; - } -} -``` - -### Shell Interface - -```typescript -const rl = readline.createInterface({ - input: process.stdin, - output: process.stdout, -}); - -async function handleUserInput(input, agentId) { - if (input.toLowerCase() === "exit") { - rl.close(); - return; - } - - try { - const response = await fetch( - `http://localhost:${serverPort}/${agentId}/message`, - { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify({ - text: input, - userId: "user", - userName: "User", - }), - }, - ); - - const data = await response.json(); - data.forEach((message) => console.log(`Agent: ${message.text}`)); - } catch (error) { - console.error("Error:", error); - } -} -``` - -## Advanced Features - -### Plugin Management - -```typescript -async function loadPlugins(pluginPaths: string[]) { - return await Promise.all( - pluginPaths.map(async (plugin) => { - const importedPlugin = await import(plugin); - return importedPlugin; - }), - ); -} -``` - -### Character Hot Reloading - -```typescript -async function reloadCharacter(runtime: IAgentRuntime, characterPath: string) { - // Load new character - const character = JSON.parse(fs.readFileSync(characterPath, "utf8")); - - // Update runtime - runtime.character = character; - - // Reload plugins - if (character.plugins) { - const plugins = await loadPlugins(character.plugins); - runtime.registerPlugins(plugins); - } -} -``` - -### Multi-Agent Coordination - -```typescript -class AgentCoordinator { - private agents: Map; - - async broadcast(message: Memory) { - const responses = await Promise.all( - Array.from(this.agents.values()).map((agent) => - agent.processMessage(message), - ), - ); - return responses; - } - - async coordinate(agents: string[], task: Task) { - // Coordinate multiple agents on a task - const selectedAgents = agents.map((id) => this.agents.get(id)); - - return await this.executeCoordinatedTask(selectedAgents, task); - } -} -``` - -## Best Practices - -### Character Management - -```typescript -// Validate character before loading -function validateCharacter(character: Character) { - if (!character.name) { - throw new Error("Character must have a name"); - } - - if (!character.modelProvider) { - throw new Error("Model provider must be specified"); - } -} - -// Use character versioning -const character = { - name: "Agent", - version: "1.0.0", - // ... -}; -``` - -### Error Handling - -```typescript -async function handleAgentError(error: Error, character: Character) { - // Log error with context - console.error(`Agent ${character.name} error:`, error); - - // Attempt recovery - if (error.code === "TOKEN_EXPIRED") { - await refreshToken(character); - } - - // Notify monitoring - await notify({ - level: "error", - character: character.name, - error, - }); -} -``` - -### Resource Management - -```typescript -class ResourceManager { - async cleanup() { - // Close database connections - await this.db.close(); - - // Shutdown clients - await Promise.all(this.clients.map((client) => client.stop())); - - // Clear caches - this.cache.clear(); - } - - async monitor() { - // Monitor resource usage - const usage = process.memoryUsage(); - if (usage.heapUsed > threshold) { - await this.cleanup(); - } - } -} -``` - -## Troubleshooting - -### Common Issues - -1. **Character Loading Failures** - -```typescript -try { - await loadCharacters(charactersArg); -} catch (error) { - if (error.code === "ENOENT") { - console.error("Character file not found"); - } else if (error instanceof SyntaxError) { - console.error("Invalid character JSON"); - } -} -``` - -2. **Client Initialization Errors** - -```typescript -async function handleClientError(error: Error) { - if (error.message.includes("rate limit")) { - await wait(exponentialBackoff()); - } else if (error.message.includes("auth")) { - await refreshAuth(); - } -} -``` - -3. **Database Connection Issues** - -```typescript -async function handleDbError(error: Error) { - if (error.message.includes("connection")) { - await reconnectDb(); - } else if (error.message.includes("locked")) { - await waitForLock(); - } -} -``` - -## Related Resources - -- [Character Creation Guide](#) -- [Client Configuration](#) -- [Plugin Development](#) -- [Multi-Agent Setup](../packages/agents) diff --git a/docs/docs/packages/packages.md b/docs/docs/packages/packages.md deleted file mode 100644 index b1e6b61648050..0000000000000 --- a/docs/docs/packages/packages.md +++ /dev/null @@ -1,51 +0,0 @@ ---- -sidebar_position: 1 ---- - -# 📖 Package Overview - -## Core Components - -- **@elizaos/core**: Central framework and shared functionality -- **@elizaos/agent**: Agent runtime and management -- **@elizaos/adapters**: Database implementations (PostgreSQL, SQLite, etc.) -- **@elizaos/clients**: Platform integrations (Discord, Telegram, etc.) -- **@elizaos/plugins**: Extension modules for additional functionality - -## Package Architecture - -The Eliza framework is built on a modular architecture where each package serves a specific purpose: - -1. **Core Package**: Provides the fundamental building blocks -2. **Agent Package**: Handles agent lifecycle and runtime -3. **Adapters**: Enable different storage backends -4. **Clients**: Connect to various platforms -5. **Plugins**: Add specialized capabilities - -## Package Dependencies - -```mermaid -graph TD - A[Core Package] --> B[Agent Package] - A --> C[Database Adapters] - A --> D[Client Packages] - A --> E[Plugin System] - B --> C - B --> D - B --> E -``` - -## Getting Started - -``` -# Install core package -pnpm add @elizaos/core - -# Install specific adapters -pnpm add @elizaos/adapter-postgres -pnpm add @elizaos/adapter-sqlite - -# Install clients -pnpm add @elizaos/client-discord -pnpm add @elizaos/client-Telegram -``` diff --git a/docs/docs/packages/plugins.md b/docs/docs/packages/plugins.md index d23a4f928edb2..d29edef7d4362 100644 --- a/docs/docs/packages/plugins.md +++ b/docs/docs/packages/plugins.md @@ -785,7 +785,7 @@ The Form chain plugin enables interaction with Form blockchain's unique SocialFi - **Inputs**: - `subject`: Address to buy curves for - `amount`: Number of curves tokens to buy (defaults to 1) - - `formula`: Curves formula type ("QUADRATIC" or "LOGRITHMIC") + - `formula`: Curves formula type ("QUADRATIC" or "LOGARITHMIC") - **Example**: ```json { @@ -799,48 +799,48 @@ The Form chain plugin enables interaction with Form blockchain's unique SocialFi - **Inputs**: - `subject`: Address whose curves to sell - `amount`: Number of curves tokens to sell (defaults to 1) - - `formula`: Curves formula type ("QUADRATIC" or "LOGRITHMIC") + - `formula`: Curves formula type ("QUADRATIC" or "LOGARITHMIC") 3. `WITHDRAW_CURVES_TOKEN` - Convert curves tokens to their ERC20 equivalent - **Inputs**: - `subject`: Address whose curves to withdraw - `amount`: Number of curves tokens to withdraw (integer values only) - - `formula`: Curves formula type ("QUADRATIC" or "LOGRITHMIC") + - `formula`: Curves formula type ("QUADRATIC" or "LOGARITHMIC") 4. `DEPOSIT_CURVES_TOKEN` - Convert ERC20 tokens back to curves tokens - **Inputs**: - `subject`: Address whose ERC20 to convert - `amount`: Amount in ERC20 decimals (18 decimals precision) - - `formula`: Curves formula type ("QUADRATIC" or "LOGRITHMIC") + - `formula`: Curves formula type ("QUADRATIC" or "LOGARITHMIC") 5. `MINT_CURVES_ERC20` - Mint new ERC20 token for curves holdings - **Inputs**: - `name`: Token name (1-32 characters) - `symbol`: Token symbol (1-8 characters, uppercase) - - `formula`: Curves formula type ("QUADRATIC" or "LOGRITHMIC") + - `formula`: Curves formula type ("QUADRATIC" or "LOGARITHMIC") 6. `GET_CURVES_BALANCE` - Check curves token balance - **Inputs**: - `subject`: Address to check balance for - `owner`: Optional owner address (defaults to connected wallet) - - `formula`: Curves formula type ("QUADRATIC" or "LOGRITHMIC") + - `formula`: Curves formula type ("QUADRATIC" or "LOGARITHMIC") 7. `GET_CURVES_BUY_PRICE` - Get price quote for buying curves - **Inputs**: - `subject`: Address to check price for - `amount`: Number of curves tokens (defaults to 1) - - `formula`: Curves formula type ("QUADRATIC" or "LOGRITHMIC") + - `formula`: Curves formula type ("QUADRATIC" or "LOGARITHMIC") 8. `GET_CURVES_SELL_PRICE` - Get price quote for selling curves - **Inputs**: - `subject`: Address to check price for - `amount`: Number of curves tokens (defaults to 1) - - `formula`: Curves formula type ("QUADRATIC" or "LOGRITHMIC") + - `formula`: Curves formula type ("QUADRATIC" or "LOGARITHMIC") 8. `GET_CURVES_ERC20_DETAILS` - Get curves token respective ERC20 details - **Inputs**: - `subject`: Address to check ERC20 token for - - `formula`: Curves formula type ("QUADRATIC" or "LOGRITHMIC") + - `formula`: Curves formula type ("QUADRATIC" or "LOGARITHMIC") **Providers:** - `curvesFormulaProvider` - Provides context about available curves formulas and their use cases @@ -866,13 +866,13 @@ The Form chain plugin enables interaction with Form blockchain's unique SocialFi **Formula Types:** - `QUADRATIC`: Standard bonding curve for regular use cases -- `LOGRITHMIC`: Optimized for high-volume trading and price stability +- `LOGARITHMIC`: Optimized for high-volume trading and price stability **Best Practices:** - Always check token balances before selling or withdrawing - Use price quotes before executing trades -- For large-scale operations, use the LOGRITHMIC formula +- For large-scale operations, use the LOGARITHMIC formula - Keep track of ERC20 token addresses after minting - Validate token names and symbols before minting - Consider gas costs when executing transactions @@ -1004,3 +1004,16 @@ To contribute a new plugin: 5. Update the plugin registry For detailed API documentation and examples, see the [API Reference](/api). + +--- + +## FAQ + +### How do I add and configure plugins? +Add plugin names to the plugins array in character.json, run `pnpm build`, and configure any required settings in `.env` or character file. + +### Can plugins interact with each other? +Yes, plugins can share functionality and data. For example, one plugin can use another's image generation capabilities. + +### How do I create custom functionality? +Create a new plugin in the packages directory, implement desired features, and add it to your character's plugin configuration. diff --git a/docs/docs/quickstart.md b/docs/docs/quickstart.md index 0842910581bfc..5572c7e00ae17 100644 --- a/docs/docs/quickstart.md +++ b/docs/docs/quickstart.md @@ -14,34 +14,103 @@ Before getting started with Eliza, ensure you have: - A code editor ([VS Code](https://code.visualstudio.com/), [Cursor](https://cursor.com/) or [VSCodium](https://vscodium.com) recommended) - [CUDA Toolkit](https://developer.nvidia.com/cuda-toolkit) (optional, for GPU acceleration) -## Installation +--- + +## Automated Installation + +1. Use the [start script](https://howieduhzit.best/start-sh/) + - 🔍 Auto OS Detection | 🛠️ Zero Config | 🎭 Character Management | 🔄 One-click Updates | ⚙️ Guided Setup + +```bash +# Linux/macOS +./scripts/start.sh +``` + +
+Troubleshooting +```bash +# On Windows? Setup WSL2 first +wsl --install -d Ubuntu +# Open Ubuntu, set up user, update: +sudo apt update && sudo apt upgrade -y +``` + +### Usage +``` +start.sh [-v|--verbose] [--skip-nvm] +``` + +### Common Error +- "characters not found": Check working directory +- `./scripts/start.sh -v` Run with logging +- Check console output +- [Open an issue](https://github.com/elizaOS/eliza/issues) + +### Permission Issues +``` +sudo chmod +x scripts/start.sh # Linux/macOS +Set-ExecutionPolicy RemoteSigned -Scope CurrentUser # Windows +``` + +### Package Issues +> Note: Always verify scripts before running it +``` +## Linux +sudo apt update + +## MacOS +/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" +brew update + +## Windows +# Run as admin +``` + +### Node.js Issues +- Required: Node.js 22+ +- Use `--skip-nvm` for system Node +- Check PATH configuration + +**Notes** +- Temporary files: `/tmp/eliza_*` +- Config location: `./config` +- Characters: `./characters/*.json` + +
-Clone the repository + +2. Using https://github.com/elizaOS/eliza-starter ```bash -git clone https://github.com/elizaOS/eliza.git +git clone git@github.com:elizaos/eliza-starter.git +cd eliza-starter +cp .env.example .env +pnpm i && pnpm build && pnpm start ``` -Enter directory +## Manual Installation + +After installing the prerequisites, clone the repository and enter the directory ```bash +git clone git@github.com:elizaOS/eliza.git cd eliza ``` -Switch to latest [stable version tag](https://github.com/elizaOS/eliza/tags) +Switch to the latest [stable version tag](https://github.com/elizaOS/eliza/tags) +This project moves quick, checkout the latest release known to work: ```bash -# This project moves quickly, check out the latest release known to work git checkout $(git describe --tags --abbrev=0) ``` -Install dependencies +Install the dependencies ```bash pnpm install --no-frozen-lockfile ``` -**Note:** Please only use the `--no-frozen-lockfile` option when you're initially instantiating the repo or are bumping the version of a package or adding a new package to your package.json. This practice helps maintain consistency in your project's dependencies and prevents unintended changes to the lockfile. +> **Note:** Please only use the `--no-frozen-lockfile` option when you're initially instantiating the repo or are bumping the version of a package or adding a new package to your package.json. This practice helps maintain consistency in your project's dependencies and prevents unintended changes to the lockfile. Build the local libraries @@ -49,7 +118,9 @@ Build the local libraries pnpm build ``` -## **Configure Environment** +--- + +## Configure Environment Copy example environment file @@ -87,8 +158,6 @@ Eliza supports multiple AI models and you set which model to use inside the char - **OpenAI**: Set `XAI_MODEL=gpt-4o-mini` or `gpt-4o` - **Livepeer**: Set `SMALL_LIVEPEER_MODEL`,`MEDIUM_LIVEPEER_MODEL`,`LARGE_LIVEPEER_MODEL` and `IMAGE_LIVEPEER_MODEL` to your desired models listed [here](https://livepeer-eliza.com/). -## Local inference - ### For llama_local inference: - The system will automatically download the model from Hugging Face @@ -278,6 +347,14 @@ NODE_MODULE_VERSION 131. This version of Node.js requires NODE_MODULE_VERSION 127. Please try re-compiling or re-installing ``` +or + +``` +Error: Could not locate the bindings file. Tried: +.../better_sqlite3.node +... +``` + You can try this, which will attempt to rebuild better-sqlite3. ```bash @@ -296,6 +373,41 @@ Then reinstall the requirements pnpm i ``` +You can also add a postinstall script in your `package.json` if you want to automate this: +```json +scripts: { + "postinstall": "npm rebuild better-sqlite3" +} +``` + +--- + +## FAQ + +### How do I install and set up ElizaOS? +Clone the repository, run `pnpm install --no-frozen-lockfile`, then `pnpm build`. Requires Node.js version 23.3.0. + +### Which Node.js version should I use? +Use Node.js version 23+ (specifically 23.3.0 is recommended) and pnpm v9.x for optimal compatibility. You can use nvm to manage Node versions with `nvm install 23` and `nvm use 23`. + +### How do I run multiple agents? +Create separate projects with unique character files and run in separate terminals, or use `pnpm start --characters="characters/agent1.json,characters/agent2.json"`. + +### What's the difference between eliza and eliza-starter? +Eliza-starter is a lightweight version for simpler setups, while the main eliza repository includes all advanced features and plugins. + +### How do I fix build/installation issues? +Use Node v23.3.0, run `pnpm clean`, then `pnpm install --no-frozen-lockfile`, followed by `pnpm build`. If issues persist, checkout the latest stable tag. + +### What are the minimum system requirements? +8GB RAM recommended for build process. For deployment, a t2.large instance on AWS with 20GB storage running Ubuntu is the minimum tested configuration. + +### How do I fix "Exit Status 1" errors? +If you see `triggerUncaughtException` errors, try: +1. Add dependencies to workspace root +2. Add dependencies to specific packages +3. Clean and rebuild + ## Next Steps Once you have your agent running, explore: @@ -305,6 +417,4 @@ Once you have your agent running, explore: 3. ⚡ [Add Custom Actions](./core/actions.md) 4. 🔧 [Advanced Configuration](./guides/configuration.md) -For detailed API documentation, troubleshooting, and advanced features, check out our [full documentation](https://elizaos.github.io/eliza/). - Join our [Discord community](https://discord.gg/ai16z) for support and updates! diff --git a/docs/docs/tutorials/.nader_tutorial_15min.md.swp b/docs/docs/tutorials/.nader_tutorial_15min.md.swp new file mode 100644 index 0000000000000..94fe147dc3f37 Binary files /dev/null and b/docs/docs/tutorials/.nader_tutorial_15min.md.swp differ diff --git a/docs/docs/tutorials/index.md b/docs/docs/tutorials/index.md new file mode 100644 index 0000000000000..377a9189966c0 --- /dev/null +++ b/docs/docs/tutorials/index.md @@ -0,0 +1,41 @@ +--- +Title: AI Agent Dev School +slug: / +--- + +# AI Agent Dev School + +Welcome to the AI Agent Dev School series, a comprehensive guide to building intelligent agents using the Eliza framework. Over the course of three in-depth sessions, we cover everything from the basics of TypeScript and plugins to advanced topics like providers, evaluators, and dynamic agent behaviors. + +## [Part 1: Introduction and Foundations](./part1.md) + +In the first session, we start from the very beginning, assuming no prior knowledge of TypeScript, Git, or AI agent development. We cover: + +- Historical context and the evolution of JavaScript and TypeScript +- Setting up your development environment +- Key concepts in Eliza: embedding models, characters, and chat clients +- Basics of working with Git and GitHub + +By the end of part 1, you'll have a solid foundation for diving into agent development with Eliza. + +## [Part 2: Deep Dive into Actions, Providers, and Evaluators](./part2.md) + +The second session focuses on the core building blocks of agent behavior in Eliza: + +- Actions: The tasks and responses that agents can perform +- Providers: Modules that provide information and state to the agent's context +- Evaluators: Modules that analyze situations and agent actions, triggering further actions or modifications + +We explore each of these in detail, walking through code examples and common use cases. We also cover how to package actions, providers and evaluators into reusable plugins. + +## [Part 3: Building a User Data Extraction Agent](./part3.md) + +In the final session, we apply the concepts from parts 1 and 2 to build a practical agentic application - a user data extraction flow. We cover: + +- The provider-evaluator loop for gathering information and triggering actions +- Leveraging Eliza's cache manager for efficient storage +- Using AI assistants to aid in code development +- Testing and debugging agent flows +- Adding dynamic behaviors based on completion state + +By the end of part 3, you'll have the skills to build sophisticated, stateful agents that can interact naturally with users to accomplish complex tasks. diff --git a/docs/docs/tutorials/nader_tutorial_10min.md b/docs/docs/tutorials/nader_tutorial_10min.md new file mode 100644 index 0000000000000..770e6c5f4bf62 --- /dev/null +++ b/docs/docs/tutorials/nader_tutorial_10min.md @@ -0,0 +1,97 @@ +--- +sidebar_position: 2 +--- + +# Creating an AI Agent with Your Own Personality + +In this tutorial, we'll explore how to create an AI agent that embodies your own personality using data from your Twitter archive, videos, markdown files, and PDFs. We'll leverage the [Characterfile](https://github.com/ai16z/characterfile) repo and [Eliza framework](https://github.com/elizaOS/eliza) to generate and integrate the character data. + +Video: https://youtu.be/uouSdtcWXTQ?si=cm13L4T7DQUMXd0C + +## Prerequisites + +- Twitter Developer account +- Anthropic API key +- Your Twitter archive (download instructions below) +- (Optional) Videos, markdown files, PDFs about you + +## Generating Your Character File + +### From Twitter Archive + +1. Request your Twitter archive: + + - Go to your Twitter settings + - Click "Download an archive of your data" + - Wait to receive the archive (timing depends on your account age/activity) + +2. Clone the Characterfile repo: + + ```bash + git clone https://github.com/ai16z/characterfile.git + ``` + +3. Run the `tweets-to-character` script: + + ```bash + npx tweets-to-character path/to/archive.zip + ``` + + - Select model (e.g. Claude) + - (Optional) Add any additional user information + +4. Script will generate a `character.json` file from your Tweets + +### From Other Files + +1. Put videos, PDFs, text, markdown, images in a folder + +2. Run the `folder-to-knowledge` script: + + ```bash + npx folder-to-knowledge path/to/folder + ``` + +3. Run `knowledge-to-character` to add knowledge to your character file + +## Setting Up the Agent + +1. Clone Eliza repo and check out latest version: + + ```bash + git clone https://github.com/elizaOS/eliza.git + git checkout + ``` + +2. Install dependencies: + + ```bash + pnpm install + pnpm build + ``` + +3. Add your character JSON file to `characters/` + +4. Modify character file: + + - Add `clients`, `modelProvider`, `plugins` fields + - Remove `voice` field + +5. Set up `.env` with Twitter and Anthropic credentials + +## Running the Agent + +1. Start agent with your character file: + + ```bash + pnpm start --character characters/yourcharacter.json + ``` + +2. Agent will log in and post an initial tweet + +3. Check your Twitter profile to see the agent in action! + +## Next Steps + +- Implement dynamic prompting to enhance agent interactions +- Extend agent with additional plugins and integrations diff --git a/docs/docs/tutorials/nader_tutorial_15min.md b/docs/docs/tutorials/nader_tutorial_15min.md new file mode 100644 index 0000000000000..ce76a23e64d55 --- /dev/null +++ b/docs/docs/tutorials/nader_tutorial_15min.md @@ -0,0 +1,105 @@ +--- +sidebar_position: 1 +--- + +# Building a Social AI Agent in 15 Minutes + +In this tutorial, you'll learn how to quickly build your own social media AI agent that can autonomously post tweets, respond to interactions, and maintain its own unique personality. We'll be using the [Eliza framework](https://ai16z.github.io/eliza/) by a16z and TypeScript. + +Video: https://youtu.be/6PZVwNTl5hI?si=0zB3OvYU4KiRQTxI + +## Prerequisites + +- Basic TypeScript knowledge +- Twitter Developer account +- (Optional) Anthropic API key + +## Project Setup + +1. Clone the Eliza repo and check out the latest version: + + ```bash + git clone https://github.com/elizaOS/eliza.git + cd eliza + git checkout + ``` + +2. Install dependencies: + + ```bash + pnpm install + pnpm build + ``` + +## Environment Variables + +1. Copy `.env.example` to `.env`: + + ```bash + cp .env.example .env + ``` + +2. Open `.env` and set your Twitter credentials. You can use username/password or cookies. + +3. (Optional) Set your Anthropic API key for the Claude model. + +4. For Gaia, set: + + ``` + MODEL_LLM_API_URL=https://modelserverurl/ + MODEL_EMBEDDING_MODEL=embeddingmodel + MODEL_EMBEDDING_ENABLED=true + ``` + +## Customizing Your Character + +1. Create `agent/mainCharacter.ts`: + + ```typescript + import { DefaultCharacter } from "./defaultCharacter"; + import { clients } from "../globalClients"; + + export const mainCharacter = { + ...DefaultCharacter, + clients: { twitter: clients.twitter }, + modelProvider: modelProviders.anthropic, + }; + ``` + +2. Extend the character by overriding properties like `name`, `bio`, `systemPrompt` etc. + +3. In `src/index.ts`, import `mainCharacter` and replace instances of `DefaultCharacter` with it. + +## Running the Agent + +1. Run `pnpm start` + +2. The agent will post a tweet and start listening for replies. Test it out by replying to the tweet. + +## Gaia Model Setup + +1. In `mainCharacter.ts`, change the model provider: + + ```typescript + modelProvider: modelProviders.gaiaNet; + ``` + +2. Customize the `systemPrompt` and `bio` for the new personality. + +3. Delete the SQLite DB at `data/sqlite.db` to reset tweet history. + +4. Run `pnpm start` again to see the updated agent in action! + +## Next Steps + +- Try integrating other extensions like databases, Discord, Telegram +- Add on-chain capabilities with EVM, Solana, StarkNet adapters +- Chat with your agent directly in the terminal + +## Resources + +- [Code Repo](https://github.com/dabit3/ai-agent-cognitivedriftt) +- [Eliza Docs](https://ai16z.github.io/eliza/) +- [Example Character File](https://github.com/ai16z/characterfile/blob/main/examples/example.character.json) +- [Default Character](https://github.com/elizaOS/eliza/blob/8f4e2643dcb1a5aafb25267e80d22e7e12fd044a/packages/core/src/defaultCharacter.ts#L4) +- [Environment Variables](https://gist.github.com/dabit3/7602e97f3abe0a93bdd84dc250f23021) diff --git a/docs/docs/tutorials/nader_tutorial_35min.md b/docs/docs/tutorials/nader_tutorial_35min.md new file mode 100644 index 0000000000000..0cc201f7f4313 --- /dev/null +++ b/docs/docs/tutorials/nader_tutorial_35min.md @@ -0,0 +1,327 @@ +# How to Build an API Plugin + +This guide walks you through creating a custom plugin for the Eliza AI framework that integrates with NASA's API to fetch space photos. You'll learn how to set up the project structure, implement the required components, and test your plugin across different interfaces. + +## Video Tutorial + + +Code: https://github.com/dabit3/eliza-nasa-plugin + + +**Key Timestamps** + +- **0:00** - Introduction to Eliza plugins and their importance +- **3:36** - Overview of the NASA API plugin we'll be building +- **6:40** - Setting up the project structure +- **12:26** - Creating the basic plugin files +- **18:64** - Understanding plugin components +- **32:84** - Implementing the NASA API service +- **43:22** - Setting up environment variables +- **59:12** - Testing the plugin in web interface +- **1:15:00** - Testing the plugin with Twitter integration + +## Why Build Plugins? + +Plugins are powerful extensions to the Eliza framework that allow you to: +- Integrate custom functionality into agent workflows +- Share reusable components with other developers +- Expand the capabilities of your AI agents +- Distribute your software products to developers +- Take advantage of growing opportunities in the agent space + +## Development Approaches + +You have two options for developing an Eliza plugin: + +### Option 1: Using the Starter Template + +:::warning +Untested in over a month, this might not work! +::: + +``` +git clone https://github.com/elizaOS/eliza-plugin-starter.git +cd eliza-plugin-starter +pnpm install +pnpm tsc +pnpm mock-eliza --characters=./characters/eternalai.character.json +``` + +### Option 2: Building from Scratch + +If you prefer to understand every component by building from scratch (as shown in the video tutorial), follow the manual setup process below. + +### Project Structure + +For building from scratch, your project structure will look like this: + +``` +plugin-name/ +├── package.json +├── tsconfig.json +├── tsup.config.ts +└── src/ + ├── index.ts # Main plugin entry + ├── types.ts # Type definitions + ├── environment.ts # Environment config + ├── services/ # API services + ├── actions/ # Plugin actions + └── examples/ # Usage examples +``` + +> When using the starter template, you'll find additional directories like `common/` for shared utilities and mocked client capabilities for testing. + + +## Setup Steps + +1. **Create and Initialize Project** +```bash +# Create project directory +mkdir eliza-plugin-nasa +cd eliza-plugin-nasa + +# Clone Eliza repository +git clone git@github.com:elizaOS/eliza.git +cd eliza +git checkout $(git describe --tags --abbrev=0) +``` + +2. **Create Project Directory** +```bash +cd packages +mkdir eliza-plugin-nasa +cd eliza-plugin-nasa +``` + +3. **Create Base Configuration Files** + +Create `package.json`: +```json +{ + "name": "@elizaos/plugin-nasa", + "version": "1.0.0", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "dependencies": { + "@elizaos/core": "latest" + }, + "peerDependencies": { + "@elizaos/core": "^1.0.0" + } +} +``` + +Create `tsconfig.json`: +```json +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src"] +} +``` + +Create `tsup.config.ts`: +```typescript +import { defineConfig } from 'tsup' + +export default defineConfig({ + entry: ['src/index.ts'], + format: ['cjs', 'esm'], + dts: true, + splitting: false, + sourcemap: true, + clean: true, +}) +``` + +4. **Create Project Structure** + +```bash +# Create directories +mkdir src +mkdir src/actions + +# Create essential files +touch package.json tsconfig.json tsup.config.ts +touch src/index.ts src/types.ts src/examples.ts +touch src/services.ts src/environment.ts +touch src/actions/getMarsRoverPhoto.ts src/actions/getApod.ts +``` + +4. **Configure Character File** + +Create `src/characters/natter.character.ts`: + +```typescript +import { ModelProviderName, Clients } from "@elizaos/core"; +import { nasaPlugin } from '@elizaos/plugin-nasa' + +export const mainCharacter = { + name: "sound_craft_", + clients: [Clients.TWITTER], + modelProvider: ModelProviderName.HYPERBOLIC, + plugins: [nasaPlugin], + // ... rest of character configuration +}; +``` + +See example: https://github.com/dabit3/eliza-nasa-plugin/blob/main/agent/src/nader.character.ts + +--- + +## Core Components + +### Types + +Source: `src/types.ts` + +```typescript +interface ApodResponse { + url: string; + title: string; + explanation: string; + date: string; +} + +interface MarsRoverResponse { + photos: Array<{ + img_src: string; + earth_date: string; + camera: { + name: string; + } + }>; +} +``` + +### Plugin Entry + +Source: `src/index.ts` + +```typescript +import type { Plugin } from "@elizaos/core"; +import { getMarsRoverPhoto } from './actions/getMarsRoverPhoto'; +import { getApod } from './actions/getApod'; + +export const nasaPlugin: Plugin = { + name: "nasa-plugin", + description: "NASA API integration for space photos", + actions: [getMarsRoverPhoto, getApod] +}; +``` + +### Actions +Actions define how your plugin responds to messages: + +```typescript +import { Action, IAgentRuntime } from "@elizaos/core"; + +export const getMarsRoverPhoto: Action = { + name: "NASA_GET_MARS_PHOTO", + similes: ["SHOW_MARS_PICTURE"], + description: "Fetches a photo from Mars rovers", + + validate: async (runtime: IAgentRuntime) => { + return validateNasaConfig(runtime); + }, + + handler: async (runtime: IAgentRuntime, state: any, callback: any) => { + const data = await getNasaService(runtime).getMarsRoverPhoto(); + await callback(`Here's a photo from Mars rover ${data.rover}...`); + return true; + } +}; +``` + +Source: `src/actions/getMarsRoverPhoto.ts` + +### Services +Services handle API interactions: + +```typescript +const nasaService = (config: NasaConfig) => ({ + getMarsRoverPhoto: async () => { + const response = await fetch( + `https://api.nasa.gov/mars-photos/api/v1/rovers/curiosity/photos?api_key=${config.apiKey}` + ); + return response.json(); + } +}); +``` + +### Environment Configuration + +Create `.env` in the root directory: +```bash +NASA_API_KEY=your_api_key_here +TWITTER_USERNAME=your_twitter_username +TWITTER_PASSWORD=your_twitter_password +TWITTER_EMAIL=your_twitter_email +``` + +```typescript +const validateNasaConfig = (runtime: IAgentRuntime) => { + const config = { + apiKey: runtime.getSetting("NASA_API_KEY") + }; + if (!config.apiKey) { + throw new Error("NASA API key not configured"); + } + return config; +}; +``` + +## Testing Your Plugin + +> See 00:12:39 in the video + +### Development Testing +```bash +# Using mock client +pnpm mock-eliza --characters=./characters/eternalai.character.json +``` + +### Production Testing +```bash +# Web interface +pnpm start client +# Visit localhost:5173 + +# Twitter integration +# Ensure Twitter credentials are configured in .env +pnpm start +``` + + +--- + +## FAQ + +### How should I handle errors in my plugin? +Validate environment variables before making API calls and provide meaningful error messages. Implement retry logic for failed requests to improve reliability. + +### What's the best way to ensure type safety? +Define interfaces for API responses and use TypeScript throughout your plugin to maintain type consistency and get better development experience. + +### How should I organize my plugin code? +Separate concerns into distinct files, follow consistent naming conventions, and thoroughly document your code for maintainability. + +### Why isn't my plugin loading? +Verify your package.json configuration, check that the plugin is properly registered in the character file, and ensure all dependencies are installed correctly. + +### Why isn't my action triggering? +Review your action examples for accuracy, check the validate function logic, and verify that the action is properly registered in your plugin. + +### What should I do if I have API integration issues? +Confirm your API key is properly configured, verify the API endpoint URLs are correct, and check that responses are being handled appropriately. diff --git a/docs/docs/tutorials/part1.md b/docs/docs/tutorials/part1.md new file mode 100644 index 0000000000000..3ff0e3eaa1683 --- /dev/null +++ b/docs/docs/tutorials/part1.md @@ -0,0 +1,81 @@ +--- +Title: AI Agent Dev School Part 1 +description: "Introduction and Foundations" +--- + +# Part 1: Introduction and Foundations + +In this first session of the AI Agent Dev School, we dive into the fundamentals of AI agent development using the Eliza framework. The session covers the history and evolution of JavaScript, TypeScript, and the Node.js ecosystem, providing a solid foundation for understanding the tools and technologies used in building AI agents with Eliza. + +## Origins and Ecosystem + +### JavaScript and Its Evolution + +- JavaScript was initially created as a simple scripting language for web browsers in 1995 by Brendan Eich. +- It has since evolved into a versatile language capable of running on servers with the introduction of Node.js, which leverages the V8 JavaScript engine. + +### TypeScript for Type Safety + +- TypeScript is a superset of JavaScript that introduces optional static typing, providing compile-time type checking and improved developer experience. +- It addresses JavaScript's lack of type safety while maintaining flexibility and compatibility with existing JavaScript code. + +### The Power of npm (Node Package Manager) + +- npm is a vast ecosystem of pre-built JavaScript packages that facilitate rapid development and code reuse. +- With millions of packages available, developers can easily incorporate external libraries into their projects using the `npm install` command. +- The open-source nature of the npm ecosystem allows developers to leverage the collective efforts of the community and build upon existing code. + +### Monorepos in Eliza Development + +- Eliza utilizes a monorepo structure, where multiple packages or projects are contained within a single repository. +- Monorepos offer advantages such as simplified management, easier collaboration, and the ability to share code between packages. + +### Git and GitHub for Collaboration + +- Git is a distributed version control system that enables collaborative software development by tracking changes in code. +- GitHub is a web-based hosting service built on top of Git, providing features like issue tracking, pull requests, and wikis for effective collaboration and project management. + +## Characters, Embeddings, and Discord Integration + +### Embedding Models + +- Embedding models play a crucial role in converting words or concepts into numerical vectors, capturing semantic meaning and enabling tasks like semantic search and comparison. +- These models transform textual data into multi-dimensional vectors, allowing for efficient representation and analysis of language. + +### Creating Custom Characters in Eliza + +- Eliza allows developers to create custom AI characters with distinct personalities and behaviors. +- Character definitions are specified using JSON files, which include details like the character's bio, example dialogue, and configuration options. +- The flexibility of character customization enables tailoring agents for specific platforms and use cases. + +### Integrating Discord Clients + +- Eliza provides seamless integration with Discord, allowing AI characters to interact with users on the popular communication platform. +- Setting up a Discord client involves configuring API keys, managing server permissions, and defining the character's behavior within the Discord environment. + +### Key Concepts in Eliza + +- System Directives: Special instructions that guide the agent's overall behavior and decision-making process. +- Message Examples: Sample dialogues that demonstrate the desired communication style and tone of the AI character. +- Style Directions: Additional instructions that influence the agent's personality, vocabulary, and interaction style. + +## Database, Clients, and Templates + +### Eliza's Database and Memory Management + +- Eliza utilizes a database system to store and manage data related to the AI agents, their interactions, and user information. +- The default database file is located within the Eliza project structure, but alternative database systems can be configured based on specific requirements. + +### Clients in Eliza + +- Clients in Eliza refer to the various platforms and communication channels through which AI agents can interact with users. +- Existing clients include Discord, Twitter, and Telegram, each with its own set of features and integration requirements. +- Developers can create custom clients to extend Eliza's capabilities and support additional platforms or services. + +### Eliza's Template System + +- Eliza employs a template system to structure and generate agent responses dynamically. +- Templates allow for the incorporation of variables, conditional logic, and other dynamic elements to create more engaging and context-aware interactions. +- The template system enables developers to define reusable patterns and customize agent responses based on various factors like user input, context, and character traits. + +By understanding these foundational concepts and components of the Eliza framework, developers can begin their journey into building sophisticated and interactive AI agents. The subsequent sessions of the AI Agent Dev School will delve deeper into advanced topics and practical implementation techniques. diff --git a/docs/docs/tutorials/part2.md b/docs/docs/tutorials/part2.md new file mode 100644 index 0000000000000..e6b529ce91e18 --- /dev/null +++ b/docs/docs/tutorials/part2.md @@ -0,0 +1,107 @@ +--- +Title: AI Agent Dev School Part 2 +description: "Deep Dive into Actions, Providers, and Evaluators" +--- + +# Part 2: Deep Dive into Actions, Providers, and Evaluators + +In this second session of the AI Agent Dev School series, we take a deep dive into the key abstractions in the Eliza framework that enable developers to create powerful AI agents: + +- **Actions**: The tasks and responses that agents can perform. +- **Providers**: Modules that provide information and state to the agent's context. +- **Evaluators**: Modules that analyze situations and agent actions, often triggering further actions or modifications. + +We explore each of these in detail, walking through code examples and common use cases. We also cover how to package up actions, providers and evaluators into reusable plugins. + +# Key Sections + +- [**00:03:33** - Shift in focus from characters (Dev School Part 1) to agent capabilities](https://www.youtube.com/watch?v=XenGeAcPAQo&t=213) +- [**00:07:09** - Deep dive into providers, actions, and evaluators, the core building blocks of Eliza](https://www.youtube.com/watch?v=XenGeAcPAQo&t=429) +- [**00:07:28** - Discussion about actions vs. tools, favoring decoupled intent and action execution](https://www.youtube.com/watch?v=XenGeAcPAQo&t=448) +- [**00:18:02** - Explanation of providers and their function as information sources for agents](https://www.youtube.com/watch?v=XenGeAcPAQo&t=1082) +- [**00:20:15** - Introduction to evaluators and their role in agent reflection and state analysis](https://www.youtube.com/watch?v=XenGeAcPAQo&t=1215) +- [**00:29:22** - Brief overview of clients as connectors to external platforms](https://www.youtube.com/watch?v=XenGeAcPAQo&t=1762) +- [**00:31:02** - Description of adapters and their function in database interactions](https://www.youtube.com/watch?v=XenGeAcPAQo&t=1862) +- [**00:34:02** - Discussion about plugins as bundles of core components, examples, and recommendations](https://www.youtube.com/watch?v=XenGeAcPAQo&t=2042) +- [**00:40:31** - Live Coding Demo begins: Creating a new plugin from scratch (DevSchoolExamplePlugin)](https://www.youtube.com/watch?v=XenGeAcPAQo&t=2431) +- [**00:47:54** - Implementing the simple HelloWorldAction](https://www.youtube.com/watch?v=XenGeAcPAQo&t=2791) +- [**01:00:26** - Implementing the CurrentNewsAction (fetching and formatting news data)](https://www.youtube.com/watch?v=XenGeAcPAQo&t=3626) +- [**01:22:09** - Demonstrating the Eliza Client for interacting with agents locally](https://www.youtube.com/watch?v=XenGeAcPAQo&t=4929) +- [**01:23:54** - Q&A: Plugin usage in character files, installation, Eliza vs. Eliza Starter](https://www.youtube.com/watch?v=XenGeAcPAQo&t=5034) +- [**01:36:17** - Saving agent responses as memories in the database](https://www.youtube.com/watch?v=XenGeAcPAQo&t=5777) +- [**01:43:06** - Using prompts for data extraction within actions](https://www.youtube.com/watch?v=XenGeAcPAQo&t=6186) +- [**01:51:54** - Importance of deleting the database during development to avoid context issues](https://www.youtube.com/watch?v=XenGeAcPAQo&t=6714) +- [**01:57:04** - Viewing agent context via console logs to understand model inputs](https://www.youtube.com/watch?v=XenGeAcPAQo&t=7024) +- [**02:07:07** - Explanation of memory management with knowledge, facts, and lore](https://www.youtube.com/watch?v=XenGeAcPAQo&t=7627) +- [**02:16:53** - Q&A: Prompt engineering opportunities, knowledge chunking and retrieval](https://www.youtube.com/watch?v=XenGeAcPAQo&t=8213) +- [**02:22:57** - Call for contributions: Encouraging viewers to create their own actions and plugins](https://www.youtube.com/watch?v=XenGeAcPAQo&t=8577) +- [**02:26:31** - Closing remarks and future DevSchool session announcements](https://www.youtube.com/watch?v=XenGeAcPAQo&t=8791) + +# Working with Actions + +Actions represent the core capabilities of an AI agent - the things it can actually do. In Eliza, an action is defined by: + +- **Name**: The unique name used to reference the action +- **Description**: Used to inform the agent when this action should be invoked +- **Handler**: The code that actually executes the action logic +- **Validator**: Determines if the action is valid to be called given the current context + +Some key points about actions in Eliza: + +- The agent decides which action to call based on the name and description. It does not have insight into the actual action code. +- The handler receives the agent runtime, the triggering message, the current state, and a callback function to send messages back to the user. +- The validate function allows for complex logic to determine action availability based on context and state. + +# Providers: Injecting State and Context + +Providers allow developers to dynamically inject relevant information into the agent's context. This could be real-time data, user information, results of previous conversations, or any other state the agent may need. + +Key aspects of providers: + +- Defined by a single `get` function that returns relevant state +- Called before each agent execution to hydrate the context +- Can conditionally provide state based on the current context + +Common provider examples include current time, user preferences, conversation history, and external API data. + +# Evaluators: Reflection and Analysis + +Evaluators run after each agent action, allowing the agent to reflect on what happened and potentially trigger additional actions. They are a key component in creating agents that can learn and adapt. + +Some common use cases for evaluators: + +- Extracting and storing facts from a conversation for future reference +- Analyzing user sentiment to measure trust and relationship +- Identifying key intents and entities to inform future actions +- Implementing feedback loops for agent improvement + +Evaluators work in close conjunction with providers - often an evaluator will extract some insight that a provider will then inject into future context. + +# Packaging Plugins + +The plugin system in Eliza allows developers to package up related actions, providers and evaluators into reusable modules. A plugin is defined by: + +- `package.json`: Metadata about the plugin +- `tsconfig.json`: TypeScript configuration +- `index.ts`: Registers the plugin's actions, providers and evaluators +- `src` directory: Contains the actual action, provider and evaluator code + +Plugins can be published to npm and then easily imported into any Eliza agent. This enables a powerful ecosystem of reusable agent capabilities. + +# Examples + +The session walks through several code examples to illustrate these concepts: + +1. Defining a simple "Hello World" action +2. Creating a "Current News" action that retrieves news headlines +3. Implementing a provider that injects a random emotion into the context +4. Registering actions and providers in a plugin + +# Key Takeaways + +- Actions, providers and evaluators are the core building blocks of agent behavior in Eliza +- Actions define what agents can do, providers manage context and state, and evaluators allow for reflection and adaptation +- The plugin system enables reusable packaging of agent capabilities +- Effective prompt engineering around the composition of the agent context is a key area for optimization + +With a solid understanding of these abstractions, developers have immense power and flexibility to create agent behaviors in Eliza. The next session will dive into an end-to-end example. diff --git a/docs/docs/tutorials/part3.md b/docs/docs/tutorials/part3.md new file mode 100644 index 0000000000000..4d58c2e91c6d6 --- /dev/null +++ b/docs/docs/tutorials/part3.md @@ -0,0 +1,82 @@ +--- +Title: AI Agent Dev School Part 3 +description: "Building a User Data Extraction Agent" +--- + +# Part 3: Building a User Data Extraction Agent + +In this third session of the AI Agent Dev School series, we dive into a practical application of providers and evaluators in the Eliza framework - building an agent that can extract key user data (name, location, job) through natural conversation. + +We explore: + +- The provider-evaluator loop for gathering information and triggering actions +- Deep dive into evaluators and their role in agent self-reflection +- Code walkthrough of real-world evaluators and providers +- Building a user data extraction flow from scratch +- Dynamic providers based on completion state +- Q&A on advanced topics and use cases + +# Key Sections + +- [**00:00:00** - Intro & Housekeeping](https://www.youtube.com/watch?v=Y1DiqSVy4aU&t=0) +- [**00:08:05** - Building a Form-Filling Agent](https://www.youtube.com/watch?v=Y1DiqSVy4aU&t=485) +- [**00:16:15** - Deep Dive into Evaluators](https://www.youtube.com/watch?v=Y1DiqSVy4aU&t=975) +- [**00:27:45** - Code walkthrough of the "Fact Evaluator"](https://www.youtube.com/watch?v=Y1DiqSVy4aU&t=1675) +- [**00:36:07** - Building a User Data Evaluator](https://www.youtube.com/watch?v=Y1DiqSVy4aU&t=2167) +- [**00:51:50** - Exploring Eliza's Cache Manager](https://www.youtube.com/watch?v=Y1DiqSVy4aU&t=3110) +- [**01:06:01** - Using Claude AI for Code Generation](https://www.youtube.com/watch?v=Y1DiqSVy4aU&t=3961) +- [**01:21:18** - Testing the User Data Flow](https://www.youtube.com/watch?v=Y1DiqSVy4aU&t=4878) +- [**01:30:27** - Adding a Dynamic Provider Based on Completion](https://www.youtube.com/watch?v=Y1DiqSVy4aU&t=5427) +- [**01:37:16** - Q&A with the Audience](https://www.youtube.com/watch?v=Y1DiqSVy4aU&t=5836) +- [**01:47:31** - Outro and Next Steps](https://www.youtube.com/watch?v=Y1DiqSVy4aU&t=6451) + +# The Provider-Evaluator Loop + +A key concept introduced in this session is the provider-evaluator loop for gathering information and triggering actions: + +1. The provider checks the cache/database for information we already have +2. If information is missing, the provider indicates to the agent what it needs to extract +3. The evaluator extracts new information from user messages and stores it +4. Once all required information is gathered, the evaluator triggers a completion action + +This loop allows agents to dynamically gather required data through natural conversation, enabling powerful form-filling and user profiling applications. + +# Deep Dive into Evaluators + +Evaluators in Eliza run after each agent action, allowing the agent to reflect on what happened and potentially trigger additional actions. Some key aspects of evaluators: + +- Defined by `validate` and `handler` functions +- `validate` determines if the evaluator should run based on the current context +- `handler` contains the core evaluator logic - state updates, data extraction, triggering actions, etc. +- Evaluators work in close conjunction with providers to extract insights and inject them into future context + +Common use cases include extracting conversation facts, analyzing sentiment, identifying intents, and implementing feedback loops. + +# Building the User Data Extraction Flow + +The hands-on portion of the session focuses on building a user data extraction flow from scratch. Key steps include: + +1. Creating a basic `UserDataEvaluator` and `UserDataProvider` +2. Registering them directly in the agent (without a plugin) +3. Leveraging Eliza's `CacheManager` for efficient key-value storage +4. Iteratively developing the extraction logic with the help of Claude AI +5. Testing the flow by interacting with the agent and inspecting logs/context +6. Adding a dynamic provider that triggers only after data collection is complete + +Through this process, we see how providers and evaluators work together to enable complex, stateful agent behaviors. + +# Using AI Assistants in Development + +A notable aspect of the session is the use of Claude AI to aid in code development. By providing clear instructions and iterating based on the generated code, complex logic can be developed rapidly. + +This showcases the potential for AI pair programming and how future developers might interact with their AI counterparts to build sophisticated applications. + +# Key Takeaways + +- Providers and evaluators are the key to stateful, dynamic agent behaviors +- The provider-evaluator loop is a powerful pattern for gathering information and triggering actions +- Evaluators enable agent self-reflection and adaptation based on conversation context +- AI assistants can significantly accelerate development by generating and refining code +- The potential for provider-evaluator based applications is immense - form-filling, user profiling, dynamic content unlocking, and more + +With these tools in hand, developers have a solid foundation for building highly interactive, personalized agentic applications. The next frontier is to explore advanced use cases and further push the boundaries of what's possible with Eliza. diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index c3d6b60aabdca..38c709de4fd88 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -143,6 +143,22 @@ const config = { ], ], themeConfig: { + mermaid: { + theme: { + light: 'default', + dark: 'dark' + }, + options: { + fontSize: 16, + flowchart: { + htmlLabels: true, + padding: 20, + nodeSpacing: 50, + rankSpacing: 50, + curve: 'cardinal' + } + } + }, colorMode: { defaultMode: "dark", disableSwitch: false, @@ -181,6 +197,11 @@ const config = { label: "Community", docId: "index", }, + { + to: 'showcase', + label: 'Showcase', + position: 'left' + }, { href: "https://github.com/elizaos/eliza", label: "GitHub", diff --git a/docs/docs/packages/adapters.md b/docs/notes/adapters.md similarity index 100% rename from docs/docs/packages/adapters.md rename to docs/notes/adapters.md diff --git a/docs/docs/packages/agent.md b/docs/notes/agent.md similarity index 96% rename from docs/docs/packages/agent.md rename to docs/notes/agent.md index 043813e809d59..2085a72b0d0eb 100644 --- a/docs/docs/packages/agent.md +++ b/docs/notes/agent.md @@ -4,7 +4,7 @@ sidebar_position: 1 # 🤖 Agent Package -The Agent Package (`@eliza/agent`) provides the high-level orchestration layer for Eliza, managing agent lifecycles, character loading, client initialization, and runtime coordination. +The Agent Package (`@elizaos/agent`) provides the high-level orchestration layer for Eliza, managing agent lifecycles, character loading, client initialization, and runtime coordination. ## Architecture Overview diff --git a/docs/docs/packages/core.md b/docs/notes/core.md similarity index 100% rename from docs/docs/packages/core.md rename to docs/notes/core.md diff --git a/docs/package-lock.json b/docs/package-lock.json index 4df54679e30be..145fcd250bf5d 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -20932,7 +20932,7 @@ "opener": "^1.5.2", "picocolors": "^1.0.0", "sirv": "^2.0.3", - "ws": "^7.3.1" + "ws": "8.18.0" }, "bin": { "webpack-bundle-analyzer": "lib/bin/analyzer.js" @@ -21038,7 +21038,7 @@ "sockjs": "^0.3.24", "spdy": "^4.0.2", "webpack-dev-middleware": "^5.3.4", - "ws": "^8.13.0" + "ws": "8.18.0" }, "bin": { "webpack-dev-server": "bin/webpack-dev-server.js" diff --git a/docs/package.json b/docs/package.json index 8f091c7f9fda2..ece968d156e42 100644 --- a/docs/package.json +++ b/docs/package.json @@ -1,12 +1,13 @@ { "name": "eliza-docs", - "version": "0.25.6-alpha.1", + "version": "0.25.8", "private": true, "packageManager": "pnpm@9.4.0", "scripts": { "docusaurus": "docusaurus", "start": "docusaurus start --no-open", "dev": "docusaurus start --port 3002 --no-open", + "update-registry": "node scripts/update-registry.js", "build": "docusaurus build", "swizzle": "docusaurus swizzle", "deploy": "docusaurus deploy", @@ -19,15 +20,18 @@ "@docusaurus/core": "3.7.0", "@docusaurus/plugin-content-blog": "3.7.0", "@docusaurus/plugin-content-docs": "3.7.0", - "@docusaurus/plugin-ideal-image": "3.7.0", + "@docusaurus/plugin-ideal-image": "^3.7.0", "@docusaurus/preset-classic": "3.7.0", - "@docusaurus/theme-mermaid": "3.7.0", "@docusaurus/theme-common": "3.7.0", + "@docusaurus/theme-mermaid": "3.7.0", + "@docusaurus/plugin-ideal-image": "^3.0.0", + "clsx": "^2.0.0", "@mdx-js/react": "3.0.1", - "clsx": "2.1.1", + "clsx": "^2.1.1", "docusaurus-lunr-search": "3.5.0", - "lunr": "2.3.9", "dotenv": "^16.4.7", + "lodash": "^4.17.21", + "lunr": "2.3.9", "prism-react-renderer": "2.3.1", "react": "18.3.1", "react-dom": "18.3.1", diff --git a/docs/scripts/update-registry.js b/docs/scripts/update-registry.js new file mode 100644 index 0000000000000..aafb72790ce13 --- /dev/null +++ b/docs/scripts/update-registry.js @@ -0,0 +1,75 @@ +const fs = require('fs'); +const path = require('path'); +const https = require('https'); + +const REGISTRY_URL = 'https://raw.githubusercontent.com/elizaos-plugins/registry/refs/heads/main/index.json'; +const OUTPUT_FILE = path.join(__dirname, '../src/data/registry-users.tsx'); + +function getGithubPreviewUrl(repoPath) { + return `https://opengraph.githubassets.com/1/${repoPath}`; +} + +function transformRegistryToUsers(registryData) { + return Object.entries(registryData).map(([name, repoUrl]) => { + const repoPath = repoUrl.replace('github:', ''); + + const displayName = name + .replace('@elizaos-plugins/plugin-', '') + .replace('@elizaos-plugins/client-', '') + .replace('@elizaos-plugins/adapter-', '') + .replace(/-/g, ' ') + .split(' ') + .map(word => word.charAt(0).toUpperCase() + word.slice(1)) + .join(' '); + + const type = name.includes('client-') ? 'client' : + name.includes('adapter-') ? 'adapter' : + 'plugin'; + + return { + title: displayName, + description: `${type.charAt(0).toUpperCase() + type.slice(1)} for ${displayName}`, + preview: getGithubPreviewUrl(repoPath), + website: `https://github.com/${repoPath}`, + source: `https://github.com/${repoPath}`, + tags: [type] + }; + }); +} + +function fetchRegistry() { + return new Promise((resolve, reject) => { + https.get(REGISTRY_URL, (res) => { + let data = ''; + res.on('data', chunk => data += chunk); + res.on('end', () => { + try { + resolve(JSON.parse(data)); + } catch (e) { + reject(e); + } + }); + }).on('error', reject); + }); +} + +async function generateUsersFile() { + try { + const registryData = await fetchRegistry(); + const users = transformRegistryToUsers(registryData); + + const fileContent = `// This file is auto-generated. Do not edit directly. +import {type User} from './users'; + +export const registryUsers: User[] = ${JSON.stringify(users, null, 2)}; +`; + + fs.writeFileSync(OUTPUT_FILE, fileContent); + console.log('Successfully updated registry users data!'); + } catch (error) { + console.error('Failed to update registry:', error); + process.exit(1); + } +} + +generateUsersFile(); diff --git a/docs/sidebars.js b/docs/sidebars.js index 93cc9719f9a33..12029b6907c62 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -20,7 +20,56 @@ const sidebars = { id: "faq", label: "❓ FAQ", }, - ], + { + type: "category", + label: "Tutorials", + items: [ + { + type: "category", + label: "AI Agent Dev School", + items: [ + { + type: "doc", + id: "tutorials/index", + label: "Overview", + }, + { + type: "doc", + id: "tutorials/part1", + label: "Part 1", + }, + { + type: "doc", + id: "tutorials/part2", + label: "Part 2", + }, + { + type: "doc", + id: "tutorials/part3", + label: "Part 3", + }, + ], + collapsed: true, // Expand by default + }, + { + type: "doc", + id: "tutorials/nader_tutorial_10min", + label: "Clone Yourself in 10min", + }, + { + type: "doc", + id: "tutorials/nader_tutorial_15min", + label: "Build Social Agents in 15min", + }, + { + type: "doc", + id: "tutorials/nader_tutorial_35min", + label: "Build a Plugin in 35min", + }, + ], + collapsed: true, // Expand by default + }, + ], collapsed: false, }, { @@ -28,15 +77,25 @@ const sidebars = { label: "🧠 Core Concepts", collapsed: false, items: [ + { + type: "doc", + id: "core/overview", + label: "Overview", + }, { type: "doc", id: "core/characterfile", label: "Character Files", }, + { + type: "doc", + id: "core/clients", + label: "Clients", + }, { type: "doc", id: "core/agents", - label: "Agents", + label: "Agent Runtime", }, { type: "doc", @@ -53,6 +112,11 @@ const sidebars = { id: "core/evaluators", label: "Evaluators", }, + { + type: "doc", + id: "core/database", + label: "Database Adapters", + }, ], }, { @@ -65,6 +129,21 @@ const sidebars = { id: "guides/configuration", label: "Configuration", }, + { + type: "doc", + id: "guides/docker-setup", + label: "Docker Setup", + }, + { + type: "doc", + id: "guides/remote-deployment", + label: "Deployment", + }, + { + type: "doc", + id: "guides/fine-tuning", + label: "Fine-tuning", + }, { type: "doc", id: "guides/advanced", @@ -75,6 +154,11 @@ const sidebars = { id: "guides/secrets-management", label: "Secrets Management", }, + { + type: "doc", + id: "guides/memory-management", + label: "Memory Management", + }, { type: "doc", id: "guides/local-development", @@ -92,11 +176,6 @@ const sidebars = { label: "🎓 Advanced Topics", collapsed: false, items: [ - { - type: "doc", - id: "advanced/fine-tuning", - label: "Fine-tuning", - }, { type: "doc", id: "advanced/infrastructure", @@ -129,31 +208,6 @@ const sidebars = { label: "📦 Packages", collapsed: false, items: [ - { - type: "doc", - id: "packages/packages", - label: "Overview", - }, - { - type: "doc", - id: "packages/core", - label: "Core Package", - }, - { - type: "doc", - id: "packages/adapters", - label: "Database Adapters", - }, - { - type: "doc", - id: "packages/clients", - label: "Client Packages", - }, - { - type: "doc", - id: "packages/agent", - label: "Agent Package", - }, { type: "doc", id: "packages/plugins", diff --git a/docs/src/data/registry-users.tsx b/docs/src/data/registry-users.tsx new file mode 100644 index 0000000000000..b889980d45c93 --- /dev/null +++ b/docs/src/data/registry-users.tsx @@ -0,0 +1,1015 @@ +// This file is auto-generated. Do not edit directly. +import {type User} from './users'; + +export const registryUsers: User[] = [ + { + "title": "Mongodb", + "description": "Adapter for Mongodb", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/adapter-mongodb", + "website": "https://github.com/elizaos-plugins/adapter-mongodb", + "source": "https://github.com/elizaos-plugins/adapter-mongodb", + "tags": [ + "adapter" + ] + }, + { + "title": "Postgres", + "description": "Adapter for Postgres", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/adapter-postgres", + "website": "https://github.com/elizaos-plugins/adapter-postgres", + "source": "https://github.com/elizaos-plugins/adapter-postgres", + "tags": [ + "adapter" + ] + }, + { + "title": "Pglite", + "description": "Adapter for Pglite", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/adapter-pglite", + "website": "https://github.com/elizaos-plugins/adapter-pglite", + "source": "https://github.com/elizaos-plugins/adapter-pglite", + "tags": [ + "adapter" + ] + }, + { + "title": "Qdrant", + "description": "Adapter for Qdrant", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/adapter-qdrant", + "website": "https://github.com/elizaos-plugins/adapter-qdrant", + "source": "https://github.com/elizaos-plugins/adapter-qdrant", + "tags": [ + "adapter" + ] + }, + { + "title": "Sqljs", + "description": "Adapter for Sqljs", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/adapter-sqljs", + "website": "https://github.com/elizaos-plugins/adapter-sqljs", + "source": "https://github.com/elizaos-plugins/adapter-sqljs", + "tags": [ + "adapter" + ] + }, + { + "title": "Sqlite", + "description": "Adapter for Sqlite", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/adapter-sqlite", + "website": "https://github.com/elizaos-plugins/adapter-sqlite", + "source": "https://github.com/elizaos-plugins/adapter-sqlite", + "tags": [ + "adapter" + ] + }, + { + "title": "Supabase", + "description": "Adapter for Supabase", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/adapter-supabase", + "website": "https://github.com/elizaos-plugins/adapter-supabase", + "source": "https://github.com/elizaos-plugins/adapter-supabase", + "tags": [ + "adapter" + ] + }, + { + "title": "Auto", + "description": "Client for Auto", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/client-auto", + "website": "https://github.com/elizaos-plugins/client-auto", + "source": "https://github.com/elizaos-plugins/client-auto", + "tags": [ + "client" + ] + }, + { + "title": "Discord", + "description": "Client for Discord", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/client-discord", + "website": "https://github.com/elizaos-plugins/client-discord", + "source": "https://github.com/elizaos-plugins/client-discord", + "tags": [ + "client" + ] + }, + { + "title": "Farcaster", + "description": "Client for Farcaster", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/client-farcaster", + "website": "https://github.com/elizaos-plugins/client-farcaster", + "source": "https://github.com/elizaos-plugins/client-farcaster", + "tags": [ + "client" + ] + }, + { + "title": "Github", + "description": "Client for Github", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/client-github", + "website": "https://github.com/elizaos-plugins/client-github", + "source": "https://github.com/elizaos-plugins/client-github", + "tags": [ + "client" + ] + }, + { + "title": "Lens", + "description": "Client for Lens", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/client-lens", + "website": "https://github.com/elizaos-plugins/client-lens", + "source": "https://github.com/elizaos-plugins/client-lens", + "tags": [ + "client" + ] + }, + { + "title": "Slack", + "description": "Client for Slack", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/client-slack", + "website": "https://github.com/elizaos-plugins/client-slack", + "source": "https://github.com/elizaos-plugins/client-slack", + "tags": [ + "client" + ] + }, + { + "title": "Telegram", + "description": "Client for Telegram", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/client-telegram", + "website": "https://github.com/elizaos-plugins/client-telegram", + "source": "https://github.com/elizaos-plugins/client-telegram", + "tags": [ + "client" + ] + }, + { + "title": "Twitter", + "description": "Client for Twitter", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/client-twitter", + "website": "https://github.com/elizaos-plugins/client-twitter", + "source": "https://github.com/elizaos-plugins/client-twitter", + "tags": [ + "client" + ] + }, + { + "title": "0g", + "description": "Plugin for 0g", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-0g", + "website": "https://github.com/elizaos-plugins/plugin-0g", + "source": "https://github.com/elizaos-plugins/plugin-0g", + "tags": [ + "plugin" + ] + }, + { + "title": "3d Generation", + "description": "Plugin for 3d Generation", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-3d-generation", + "website": "https://github.com/elizaos-plugins/plugin-3d-generation", + "source": "https://github.com/elizaos-plugins/plugin-3d-generation", + "tags": [ + "plugin" + ] + }, + { + "title": "Abstract", + "description": "Plugin for Abstract", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-abstract", + "website": "https://github.com/elizaos-plugins/plugin-abstract", + "source": "https://github.com/elizaos-plugins/plugin-abstract", + "tags": [ + "plugin" + ] + }, + { + "title": "Akash", + "description": "Plugin for Akash", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-akash", + "website": "https://github.com/elizaos-plugins/plugin-akash", + "source": "https://github.com/elizaos-plugins/plugin-akash", + "tags": [ + "plugin" + ] + }, + { + "title": "Allora", + "description": "Plugin for Allora", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-allora", + "website": "https://github.com/elizaos-plugins/plugin-allora", + "source": "https://github.com/elizaos-plugins/plugin-allora", + "tags": [ + "plugin" + ] + }, + { + "title": "Anyone", + "description": "Plugin for Anyone", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-anyone", + "website": "https://github.com/elizaos-plugins/plugin-anyone", + "source": "https://github.com/elizaos-plugins/plugin-anyone", + "tags": [ + "plugin" + ] + }, + { + "title": "Aptos", + "description": "Plugin for Aptos", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-aptos", + "website": "https://github.com/elizaos-plugins/plugin-aptos", + "source": "https://github.com/elizaos-plugins/plugin-aptos", + "tags": [ + "plugin" + ] + }, + { + "title": "Arthera", + "description": "Plugin for Arthera", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-arthera", + "website": "https://github.com/elizaos-plugins/plugin-arthera", + "source": "https://github.com/elizaos-plugins/plugin-arthera", + "tags": [ + "plugin" + ] + }, + { + "title": "Asterai", + "description": "Plugin for Asterai", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-asterai", + "website": "https://github.com/elizaos-plugins/plugin-asterai", + "source": "https://github.com/elizaos-plugins/plugin-asterai", + "tags": [ + "plugin" + ] + }, + { + "title": "Autonome", + "description": "Plugin for Autonome", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-autonome", + "website": "https://github.com/elizaos-plugins/plugin-autonome", + "source": "https://github.com/elizaos-plugins/plugin-autonome", + "tags": [ + "plugin" + ] + }, + { + "title": "Avail", + "description": "Plugin for Avail", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-avail", + "website": "https://github.com/elizaos-plugins/plugin-avail", + "source": "https://github.com/elizaos-plugins/plugin-avail", + "tags": [ + "plugin" + ] + }, + { + "title": "Avalanche", + "description": "Plugin for Avalanche", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-avalanche", + "website": "https://github.com/elizaos-plugins/plugin-avalanche", + "source": "https://github.com/elizaos-plugins/plugin-avalanche", + "tags": [ + "plugin" + ] + }, + { + "title": "Binance", + "description": "Plugin for Binance", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-binance", + "website": "https://github.com/elizaos-plugins/plugin-binance", + "source": "https://github.com/elizaos-plugins/plugin-binance", + "tags": [ + "plugin" + ] + }, + { + "title": "Coinbase", + "description": "Plugin for Coinbase", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-coinbase", + "website": "https://github.com/elizaos-plugins/plugin-coinbase", + "source": "https://github.com/elizaos-plugins/plugin-coinbase", + "tags": [ + "plugin" + ] + }, + { + "title": "Coingecko", + "description": "Plugin for Coingecko", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-coingecko", + "website": "https://github.com/elizaos-plugins/plugin-coingecko", + "source": "https://github.com/elizaos-plugins/plugin-coingecko", + "tags": [ + "plugin" + ] + }, + { + "title": "Coinmarketcap", + "description": "Plugin for Coinmarketcap", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-coinmarketcap", + "website": "https://github.com/elizaos-plugins/plugin-coinmarketcap", + "source": "https://github.com/elizaos-plugins/plugin-coinmarketcap", + "tags": [ + "plugin" + ] + }, + { + "title": "Compass", + "description": "Plugin for Compass", + "preview": "https://opengraph.githubassets.com/1/CompassLabs/plugin-compass", + "website": "https://github.com/CompassLabs/plugin-compass", + "source": "https://github.com/CompassLabs/plugin-compass", + "tags": [ + "plugin" + ] + }, + { + "title": "Conflux", + "description": "Plugin for Conflux", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-conflux", + "website": "https://github.com/elizaos-plugins/plugin-conflux", + "source": "https://github.com/elizaos-plugins/plugin-conflux", + "tags": [ + "plugin" + ] + }, + { + "title": "Cosmos", + "description": "Plugin for Cosmos", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-cosmos", + "website": "https://github.com/elizaos-plugins/plugin-cosmos", + "source": "https://github.com/elizaos-plugins/plugin-cosmos", + "tags": [ + "plugin" + ] + }, + { + "title": "Cronoszkevm", + "description": "Plugin for Cronoszkevm", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-cronoszkevm", + "website": "https://github.com/elizaos-plugins/plugin-cronoszkevm", + "source": "https://github.com/elizaos-plugins/plugin-cronoszkevm", + "tags": [ + "plugin" + ] + }, + { + "title": "D.a.t.a", + "description": "Plugin for D.a.t.a", + "preview": "https://opengraph.githubassets.com/1/carv-protocol/plugin-d.a.t.a", + "website": "https://github.com/carv-protocol/plugin-d.a.t.a", + "source": "https://github.com/carv-protocol/plugin-d.a.t.a", + "tags": [ + "plugin" + ] + }, + { + "title": "Depin", + "description": "Plugin for Depin", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-depin", + "website": "https://github.com/elizaos-plugins/plugin-depin", + "source": "https://github.com/elizaos-plugins/plugin-depin", + "tags": [ + "plugin" + ] + }, + { + "title": "Di", + "description": "Plugin for Di", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-di", + "website": "https://github.com/elizaos-plugins/plugin-di", + "source": "https://github.com/elizaos-plugins/plugin-di", + "tags": [ + "plugin" + ] + }, + { + "title": "Echochambers", + "description": "Plugin for Echochambers", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-echochambers", + "website": "https://github.com/elizaos-plugins/plugin-echochambers", + "source": "https://github.com/elizaos-plugins/plugin-echochambers", + "tags": [ + "plugin" + ] + }, + { + "title": "Edwin", + "description": "Plugin for Edwin", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-edwin", + "website": "https://github.com/elizaos-plugins/plugin-edwin", + "source": "https://github.com/elizaos-plugins/plugin-edwin", + "tags": [ + "plugin" + ] + }, + { + "title": "Evm", + "description": "Plugin for Evm", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-evm", + "website": "https://github.com/elizaos-plugins/plugin-evm", + "source": "https://github.com/elizaos-plugins/plugin-evm", + "tags": [ + "plugin" + ] + }, + { + "title": "FerePro", + "description": "Plugin for FerePro", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-ferePro", + "website": "https://github.com/elizaos-plugins/plugin-ferePro", + "source": "https://github.com/elizaos-plugins/plugin-ferePro", + "tags": [ + "plugin" + ] + }, + { + "title": "Flow", + "description": "Plugin for Flow", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-flow", + "website": "https://github.com/elizaos-plugins/plugin-flow", + "source": "https://github.com/elizaos-plugins/plugin-flow", + "tags": [ + "plugin" + ] + }, + { + "title": "Fuel", + "description": "Plugin for Fuel", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-fuel", + "website": "https://github.com/elizaos-plugins/plugin-fuel", + "source": "https://github.com/elizaos-plugins/plugin-fuel", + "tags": [ + "plugin" + ] + }, + { + "title": "Genlayer", + "description": "Plugin for Genlayer", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-genlayer", + "website": "https://github.com/elizaos-plugins/plugin-genlayer", + "source": "https://github.com/elizaos-plugins/plugin-genlayer", + "tags": [ + "plugin" + ] + }, + { + "title": "Giphy", + "description": "Plugin for Giphy", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-giphy", + "website": "https://github.com/elizaos-plugins/plugin-giphy", + "source": "https://github.com/elizaos-plugins/plugin-giphy", + "tags": [ + "plugin" + ] + }, + { + "title": "Gitbook", + "description": "Plugin for Gitbook", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-gitbook", + "website": "https://github.com/elizaos-plugins/plugin-gitbook", + "source": "https://github.com/elizaos-plugins/plugin-gitbook", + "tags": [ + "plugin" + ] + }, + { + "title": "Goat", + "description": "Plugin for Goat", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-goat", + "website": "https://github.com/elizaos-plugins/plugin-goat", + "source": "https://github.com/elizaos-plugins/plugin-goat", + "tags": [ + "plugin" + ] + }, + { + "title": "Goplus", + "description": "Plugin for Goplus", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-goplus", + "website": "https://github.com/elizaos-plugins/plugin-goplus", + "source": "https://github.com/elizaos-plugins/plugin-goplus", + "tags": [ + "plugin" + ] + }, + { + "title": "Hyperliquid", + "description": "Plugin for Hyperliquid", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-hyperliquid", + "website": "https://github.com/elizaos-plugins/plugin-hyperliquid", + "source": "https://github.com/elizaos-plugins/plugin-hyperliquid", + "tags": [ + "plugin" + ] + }, + { + "title": "Icp", + "description": "Plugin for Icp", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-icp", + "website": "https://github.com/elizaos-plugins/plugin-icp", + "source": "https://github.com/elizaos-plugins/plugin-icp", + "tags": [ + "plugin" + ] + }, + { + "title": "Image Generation", + "description": "Plugin for Image Generation", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-image-generation", + "website": "https://github.com/elizaos-plugins/plugin-image-generation", + "source": "https://github.com/elizaos-plugins/plugin-image-generation", + "tags": [ + "plugin" + ] + }, + { + "title": "Intiface", + "description": "Plugin for Intiface", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-intiface", + "website": "https://github.com/elizaos-plugins/plugin-intiface", + "source": "https://github.com/elizaos-plugins/plugin-intiface", + "tags": [ + "plugin" + ] + }, + { + "title": "Irys", + "description": "Plugin for Irys", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-irys", + "website": "https://github.com/elizaos-plugins/plugin-irys", + "source": "https://github.com/elizaos-plugins/plugin-irys", + "tags": [ + "plugin" + ] + }, + { + "title": "LensNetwork", + "description": "Plugin for LensNetwork", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-lensNetwork", + "website": "https://github.com/elizaos-plugins/plugin-lensNetwork", + "source": "https://github.com/elizaos-plugins/plugin-lensNetwork", + "tags": [ + "plugin" + ] + }, + { + "title": "Letzai", + "description": "Plugin for Letzai", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-letzai", + "website": "https://github.com/elizaos-plugins/plugin-letzai", + "source": "https://github.com/elizaos-plugins/plugin-letzai", + "tags": [ + "plugin" + ] + }, + { + "title": "Massa", + "description": "Plugin for Massa", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-massa", + "website": "https://github.com/elizaos-plugins/plugin-massa", + "source": "https://github.com/elizaos-plugins/plugin-massa", + "tags": [ + "plugin" + ] + }, + { + "title": "Merkle", + "description": "Plugin for Merkle", + "preview": "https://opengraph.githubassets.com/1/merkle-trade/merkle-eliza-plugin", + "website": "https://github.com/merkle-trade/merkle-eliza-plugin", + "source": "https://github.com/merkle-trade/merkle-eliza-plugin", + "tags": [ + "plugin" + ] + }, + { + "title": "Messari Ai Toolkit", + "description": "Plugin for Messari Ai Toolkit", + "preview": "https://opengraph.githubassets.com/1/messari/plugin-messari-ai-toolkit", + "website": "https://github.com/messari/plugin-messari-ai-toolkit", + "source": "https://github.com/messari/plugin-messari-ai-toolkit", + "tags": [ + "plugin" + ] + }, + { + "title": "Movement", + "description": "Plugin for Movement", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-movement", + "website": "https://github.com/elizaos-plugins/plugin-movement", + "source": "https://github.com/elizaos-plugins/plugin-movement", + "tags": [ + "plugin" + ] + }, + { + "title": "Multiversx", + "description": "Plugin for Multiversx", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-multiversx", + "website": "https://github.com/elizaos-plugins/plugin-multiversx", + "source": "https://github.com/elizaos-plugins/plugin-multiversx", + "tags": [ + "plugin" + ] + }, + { + "title": "Near", + "description": "Plugin for Near", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-near", + "website": "https://github.com/elizaos-plugins/plugin-near", + "source": "https://github.com/elizaos-plugins/plugin-near", + "tags": [ + "plugin" + ] + }, + { + "title": "Nft Generation", + "description": "Plugin for Nft Generation", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-nft-generation", + "website": "https://github.com/elizaos-plugins/plugin-nft-generation", + "source": "https://github.com/elizaos-plugins/plugin-nft-generation", + "tags": [ + "plugin" + ] + }, + { + "title": "Node", + "description": "Plugin for Node", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-node", + "website": "https://github.com/elizaos-plugins/plugin-node", + "source": "https://github.com/elizaos-plugins/plugin-node", + "tags": [ + "plugin" + ] + }, + { + "title": "Obsidian", + "description": "Plugin for Obsidian", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-obsidian", + "website": "https://github.com/elizaos-plugins/plugin-obsidian", + "source": "https://github.com/elizaos-plugins/plugin-obsidian", + "tags": [ + "plugin" + ] + }, + { + "title": "Opacity", + "description": "Plugin for Opacity", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-opacity", + "website": "https://github.com/elizaos-plugins/plugin-opacity", + "source": "https://github.com/elizaos-plugins/plugin-opacity", + "tags": [ + "plugin" + ] + }, + { + "title": "Open Weather", + "description": "Plugin for Open Weather", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-open-weather", + "website": "https://github.com/elizaos-plugins/plugin-open-weather", + "source": "https://github.com/elizaos-plugins/plugin-open-weather", + "tags": [ + "plugin" + ] + }, + { + "title": "Primus", + "description": "Plugin for Primus", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-primus", + "website": "https://github.com/elizaos-plugins/plugin-primus", + "source": "https://github.com/elizaos-plugins/plugin-primus", + "tags": [ + "plugin" + ] + }, + { + "title": "Quai", + "description": "Plugin for Quai", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-quai", + "website": "https://github.com/elizaos-plugins/plugin-quai", + "source": "https://github.com/elizaos-plugins/plugin-quai", + "tags": [ + "plugin" + ] + }, + { + "title": "Rabbi Trader", + "description": "Plugin for Rabbi Trader", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-rabbi-trader", + "website": "https://github.com/elizaos-plugins/plugin-rabbi-trader", + "source": "https://github.com/elizaos-plugins/plugin-rabbi-trader", + "tags": [ + "plugin" + ] + }, + { + "title": "Sei", + "description": "Plugin for Sei", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-sei", + "website": "https://github.com/elizaos-plugins/plugin-sei", + "source": "https://github.com/elizaos-plugins/plugin-sei", + "tags": [ + "plugin" + ] + }, + { + "title": "Sgx", + "description": "Plugin for Sgx", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-sgx", + "website": "https://github.com/elizaos-plugins/plugin-sgx", + "source": "https://github.com/elizaos-plugins/plugin-sgx", + "tags": [ + "plugin" + ] + }, + { + "title": "Solana", + "description": "Plugin for Solana", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-solana", + "website": "https://github.com/elizaos-plugins/plugin-solana", + "source": "https://github.com/elizaos-plugins/plugin-solana", + "tags": [ + "plugin" + ] + }, + { + "title": "Solana Agent Kit", + "description": "Plugin for Solana Agent Kit", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-solana-agent-kit", + "website": "https://github.com/elizaos-plugins/plugin-solana-agent-kit", + "source": "https://github.com/elizaos-plugins/plugin-solana-agent-kit", + "tags": [ + "plugin" + ] + }, + { + "title": "Spheron", + "description": "Plugin for Spheron", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-spheron", + "website": "https://github.com/elizaos-plugins/plugin-spheron", + "source": "https://github.com/elizaos-plugins/plugin-spheron", + "tags": [ + "plugin" + ] + }, + { + "title": "Stargaze", + "description": "Plugin for Stargaze", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-stargaze", + "website": "https://github.com/elizaos-plugins/plugin-stargaze", + "source": "https://github.com/elizaos-plugins/plugin-stargaze", + "tags": [ + "plugin" + ] + }, + { + "title": "Starknet", + "description": "Plugin for Starknet", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-starknet", + "website": "https://github.com/elizaos-plugins/plugin-starknet", + "source": "https://github.com/elizaos-plugins/plugin-starknet", + "tags": [ + "plugin" + ] + }, + { + "title": "Story", + "description": "Plugin for Story", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-story", + "website": "https://github.com/elizaos-plugins/plugin-story", + "source": "https://github.com/elizaos-plugins/plugin-story", + "tags": [ + "plugin" + ] + }, + { + "title": "Sui", + "description": "Plugin for Sui", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-sui", + "website": "https://github.com/elizaos-plugins/plugin-sui", + "source": "https://github.com/elizaos-plugins/plugin-sui", + "tags": [ + "plugin" + ] + }, + { + "title": "Tee", + "description": "Plugin for Tee", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-tee", + "website": "https://github.com/elizaos-plugins/plugin-tee", + "source": "https://github.com/elizaos-plugins/plugin-tee", + "tags": [ + "plugin" + ] + }, + { + "title": "Tee Log", + "description": "Plugin for Tee Log", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-tee-log", + "website": "https://github.com/elizaos-plugins/plugin-tee-log", + "source": "https://github.com/elizaos-plugins/plugin-tee-log", + "tags": [ + "plugin" + ] + }, + { + "title": "Tee Marlin", + "description": "Plugin for Tee Marlin", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-tee-marlin", + "website": "https://github.com/elizaos-plugins/plugin-tee-marlin", + "source": "https://github.com/elizaos-plugins/plugin-tee-marlin", + "tags": [ + "plugin" + ] + }, + { + "title": "Thirdweb", + "description": "Plugin for Thirdweb", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-thirdweb", + "website": "https://github.com/elizaos-plugins/plugin-thirdweb", + "source": "https://github.com/elizaos-plugins/plugin-thirdweb", + "tags": [ + "plugin" + ] + }, + { + "title": "Ton", + "description": "Plugin for Ton", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-ton", + "website": "https://github.com/elizaos-plugins/plugin-ton", + "source": "https://github.com/elizaos-plugins/plugin-ton", + "tags": [ + "plugin" + ] + }, + { + "title": "Trustdb", + "description": "Plugin for Trustdb", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-trustdb", + "website": "https://github.com/elizaos-plugins/plugin-trustdb", + "source": "https://github.com/elizaos-plugins/plugin-trustdb", + "tags": [ + "plugin" + ] + }, + { + "title": "Tts", + "description": "Plugin for Tts", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-tts", + "website": "https://github.com/elizaos-plugins/plugin-tts", + "source": "https://github.com/elizaos-plugins/plugin-tts", + "tags": [ + "plugin" + ] + }, + { + "title": "Twitter", + "description": "Plugin for Twitter", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-twitter", + "website": "https://github.com/elizaos-plugins/plugin-twitter", + "source": "https://github.com/elizaos-plugins/plugin-twitter", + "tags": [ + "plugin" + ] + }, + { + "title": "Video Generation", + "description": "Plugin for Video Generation", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-video-generation", + "website": "https://github.com/elizaos-plugins/plugin-video-generation", + "source": "https://github.com/elizaos-plugins/plugin-video-generation", + "tags": [ + "plugin" + ] + }, + { + "title": "Web Search", + "description": "Plugin for Web Search", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-web-search", + "website": "https://github.com/elizaos-plugins/plugin-web-search", + "source": "https://github.com/elizaos-plugins/plugin-web-search", + "tags": [ + "plugin" + ] + }, + { + "title": "Whatsapp", + "description": "Plugin for Whatsapp", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-whatsapp", + "website": "https://github.com/elizaos-plugins/plugin-whatsapp", + "source": "https://github.com/elizaos-plugins/plugin-whatsapp", + "tags": [ + "plugin" + ] + }, + { + "title": "Zerion", + "description": "Plugin for Zerion", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-zerion", + "website": "https://github.com/elizaos-plugins/plugin-zerion", + "source": "https://github.com/elizaos-plugins/plugin-zerion", + "tags": [ + "plugin" + ] + }, + { + "title": "Zksync Era", + "description": "Plugin for Zksync Era", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-zksync-era", + "website": "https://github.com/elizaos-plugins/plugin-zksync-era", + "source": "https://github.com/elizaos-plugins/plugin-zksync-era", + "tags": [ + "plugin" + ] + }, + { + "title": "Omniflix", + "description": "Plugin for Omniflix", + "preview": "https://opengraph.githubassets.com/1/elizaos-plugins/plugin-omniflix", + "website": "https://github.com/elizaos-plugins/plugin-omniflix", + "source": "https://github.com/elizaos-plugins/plugin-omniflix", + "tags": [ + "plugin" + ] + }, + { + "title": "Ccxt", + "description": "Plugin for Ccxt", + "preview": "https://opengraph.githubassets.com/1/pranavjadhav1363/plugin-ccxt", + "website": "https://github.com/pranavjadhav1363/plugin-ccxt", + "source": "https://github.com/pranavjadhav1363/plugin-ccxt", + "tags": [ + "plugin" + ] + }, + { + "title": "@toddli/plugin Trustgo", + "description": "Plugin for @toddli/plugin Trustgo", + "preview": "https://opengraph.githubassets.com/1/TrustaLabs/plugin-trustgo", + "website": "https://github.com/TrustaLabs/plugin-trustgo", + "source": "https://github.com/TrustaLabs/plugin-trustgo", + "tags": [ + "plugin" + ] + }, + { + "title": "Firecrawl", + "description": "Plugin for Firecrawl", + "preview": "https://opengraph.githubassets.com/1/tobySolutions/plugin-firecrawl", + "website": "https://github.com/tobySolutions/plugin-firecrawl", + "source": "https://github.com/tobySolutions/plugin-firecrawl", + "tags": [ + "plugin" + ] + }, + { + "title": "ATTPs", + "description": "Plugin for ATTPs", + "preview": "https://opengraph.githubassets.com/1/APRO-com/plugin-ATTPs", + "website": "https://github.com/APRO-com/plugin-ATTPs", + "source": "https://github.com/APRO-com/plugin-ATTPs", + "tags": [ + "plugin" + ] + }, + { + "title": "Youtube To Text", + "description": "Plugin for Youtube To Text", + "preview": "https://opengraph.githubassets.com/1/wellaios/plugin-youtube-to-text", + "website": "https://github.com/wellaios/plugin-youtube-to-text", + "source": "https://github.com/wellaios/plugin-youtube-to-text", + "tags": [ + "plugin" + ] + }, + { + "title": "Nkn", + "description": "Plugin for Nkn", + "preview": "https://opengraph.githubassets.com/1/nknorg/eliza-plugin-nkn", + "website": "https://github.com/nknorg/eliza-plugin-nkn", + "source": "https://github.com/nknorg/eliza-plugin-nkn", + "tags": [ + "plugin" + ] + }, + { + "title": "Viction", + "description": "Plugin for Viction", + "preview": "https://opengraph.githubassets.com/1/BuildOnViction/plugin-viction", + "website": "https://github.com/BuildOnViction/plugin-viction", + "source": "https://github.com/BuildOnViction/plugin-viction", + "tags": [ + "plugin" + ] + }, + { + "title": "Grix", + "description": "Plugin for Grix", + "preview": "https://opengraph.githubassets.com/1/grixprotocol/plugin-grix", + "website": "https://github.com/grixprotocol/plugin-grix", + "source": "https://github.com/grixprotocol/plugin-grix", + "tags": [ + "plugin" + ] + } +]; diff --git a/docs/src/data/users.tsx b/docs/src/data/users.tsx new file mode 100644 index 0000000000000..37dfc3dfaec77 --- /dev/null +++ b/docs/src/data/users.tsx @@ -0,0 +1,71 @@ +import {sortBy} from 'lodash'; +import {registryUsers} from './registry-users'; + +export type TagType = + | 'favorite' + | 'opensource' + | 'adapter' + | 'client' + | 'plugin'; + +export type User = { + title: string; + description: string; + preview: string | null; + website: string; + source: string | null; + tags: TagType[]; +}; + +export type Tag = { + label: string; + description: string; + color: string; +}; + +export const Tags: {[type in TagType]: Tag} = { + favorite: { + label: 'Favorite', + description: 'Our favorite projects that you must check out!', + color: '#e9669e', + }, + opensource: { + label: 'Open Source', + description: 'Open source projects can be useful for inspiration!', + color: '#39ca30', + }, + adapter: { + label: 'Adapter', + description: 'Database and storage adapters', + color: '#bf4040', + }, + client: { + label: 'Client', + description: 'Platform and service clients', + color: '#4040bf', + }, + plugin: { + label: 'Plugin', + description: 'Feature and integration plugins', + color: '#40bf40', + } +}; + +export const TagList = Object.keys(Tags) as TagType[]; + +// Static entries (optional - you can add manual entries here) +const staticUsers: User[] = []; + +// Combine static and registry users +export const Users: User[] = [...staticUsers, ...registryUsers]; + +function sortUsers() { + let result = Users; + // Sort by site name + result = sortBy(result, (user) => user.title.toLowerCase()); + // Sort by favorite tag, favorites first + result = sortBy(result, (user) => !user.tags.includes('favorite')); + return result; +} + +export const sortedUsers = sortUsers(); diff --git a/docs/src/data/users2.tsx b/docs/src/data/users2.tsx new file mode 100644 index 0000000000000..0b6cf100c88a2 --- /dev/null +++ b/docs/src/data/users2.tsx @@ -0,0 +1,91 @@ +import {sortBy} from 'lodash'; + +export type TagType = + | 'favorite' + | 'opensource' + | 'product' + | 'design' + | 'large' + | 'personal'; + +export type User = { + title: string; + description: string; + preview: string | null; + website: string; + source: string | null; + tags: TagType[]; +}; + +export type Tag = { + label: string; + description: string; + color: string; +}; + +export const Tags: {[type in TagType]: Tag} = { + favorite: { + label: 'Favorite', + description: 'Our favorite projects that you must check out!', + color: '#e9669e', + }, + opensource: { + label: 'Open Source', + description: 'Open source projects can be useful for inspiration!', + color: '#39ca30', + }, + product: { + label: 'Product', + description: 'Projects related to commercial products!', + color: '#dfd545', + }, + design: { + label: 'Design', + description: 'Beautiful sites with custom designs!', + color: '#a44fb7', + }, + large: { + label: 'Large', + description: 'Large sites with lots of content!', + color: '#8c2f00', + }, + personal: { + label: 'Personal', + description: 'Personal websites and portfolios', + color: '#14cfc3', + } +}; + +export const TagList = Object.keys(Tags) as TagType[]; + +// Add your showcase sites here +const Users: User[] = [ + { + title: 'Example Project', + description: 'An example project built with our framework', + preview: null, // Add image path here + website: 'https://example.com', + source: 'https://github.com/example/project', + tags: ['opensource', 'personal'], + }, + { + title: 'My Project', + description: 'A cool project built with our framework', + preview: 'img/showcase/my-project.png', + website: 'https://myproject.com', + source: 'https://github.com/myproject', + tags: ['opensource', 'product'], + }, + // Add more sites here +]; + +function sortUsers() { + let result = Users; + // Sort by site name + result = sortBy(result, (user) => user.title.toLowerCase()); + // Sort by favorite tag, favorites first + result = sortBy(result, (user) => !user.tags.includes('favorite')); + return result; +} + +export const sortedUsers = sortUsers(); diff --git a/docs/src/pages/showcase/_components/ShowcaseCard/index.tsx b/docs/src/pages/showcase/_components/ShowcaseCard/index.tsx new file mode 100644 index 0000000000000..958c7390cc9f4 --- /dev/null +++ b/docs/src/pages/showcase/_components/ShowcaseCard/index.tsx @@ -0,0 +1,76 @@ +import React from 'react'; +import clsx from 'clsx'; +import Link from '@docusaurus/Link'; +import {Tags, type TagType, type User} from '../../../../data/users'; +import styles from './styles.module.css'; + +function TagIcon({label, color}: {label: string; color: string}) { + return ( + + ); +} + +function ShowcaseCardTag({tags}: {tags: TagType[]}) { + return ( + <> + {tags.map((tag) => { + const {label, color} = Tags[tag]; + return ( +
  • + + {label.toLowerCase()} +
  • + ); + })} + + ); +} + +export default function ShowcaseCard({user}: {user: User}) { + return ( +
  • +
    + {user.preview && ( + {user.title} + )} +
    +
    +
    +

    + + {user.title} + +

    + {user.source && ( + + source + + )} +
    +

    {user.description}

    +
    +
      + tag !== 'opensource')} /> +
    +
  • + ); +} diff --git a/docs/src/pages/showcase/_components/ShowcaseCard/styles.module.css b/docs/src/pages/showcase/_components/ShowcaseCard/styles.module.css new file mode 100644 index 0000000000000..6d124ff40ffaf --- /dev/null +++ b/docs/src/pages/showcase/_components/ShowcaseCard/styles.module.css @@ -0,0 +1,83 @@ +.showcaseCardImage { + overflow: hidden; + height: 100%; + border-bottom: 2px solid var(--ifm-color-emphasis-200); +} + +.cardImage { + width: 100%; + height: 100%; + object-fit: cover; +} + +.showcaseCardHeader { + display: flex; + align-items: center; + margin-bottom: 12px; +} + +.showcaseCardTitle { + margin-bottom: 0; + flex: 1 1 auto; + margin-right: 8px; +} + +.showcaseCardTitle a { + text-decoration: none; + background: linear-gradient( + var(--ifm-color-primary), + var(--ifm-color-primary) + ) + 0% 100% / 0% 1px no-repeat; + transition: background-size ease-out 200ms; +} + +.showcaseCardTitle a:not(:focus):hover { + background-size: 100% 1px; +} + +.showcaseCardSrcBtn { + margin-left: 6px; + padding-left: 12px; + padding-right: 12px; + border: none; +} + +.showcaseCardSrcBtn:focus-visible { + background-color: var(--ifm-color-secondary-dark); +} + +[data-theme='dark'] .showcaseCardSrcBtn { + background-color: var(--ifm-color-emphasis-200) !important; + color: inherit; +} + +[data-theme='dark'] .showcaseCardSrcBtn:hover { + background-color: var(--ifm-color-emphasis-300) !important; +} + +.showcaseCardBody { + font-size: smaller; + line-height: 1.66; +} + +.cardFooter { + display: flex; + flex-wrap: wrap; +} + +.tag { + font-size: 0.675rem; + border: 1px solid var(--ifm-color-secondary-darkest); + cursor: default; + margin-right: 6px; + margin-bottom: 6px; + border-radius: 12px; + display: inline-flex; + align-items: center; + padding: 2px 10px; +} + +.textLabel { + margin-right: 4px; +} diff --git a/docs/src/pages/showcase/_components/ShowcaseCards/index.tsx b/docs/src/pages/showcase/_components/ShowcaseCards/index.tsx new file mode 100644 index 0000000000000..5ded87dca1cb4 --- /dev/null +++ b/docs/src/pages/showcase/_components/ShowcaseCards/index.tsx @@ -0,0 +1,34 @@ +import React from 'react'; +import clsx from 'clsx'; +import { type User } from '@site/src/data/users'; +import ShowcaseCard from '../ShowcaseCard'; +import styles from './styles.module.css'; + +export default function ShowcaseCards({users}: {users: User[]}): JSX.Element { + // Keep only unique entries by title + const uniqueUsers = users.filter((user, index, self) => + index === self.findIndex((u) => u.title === user.title) + ); + + if (uniqueUsers.length === 0) { + return ( +
    +

    No results found

    +

    Try adjusting your search or filter criteria.

    +
    + ); + } + + return ( +
    +
    +

    {uniqueUsers.length} {uniqueUsers.length === 1 ? 'Site' : 'Sites'}

    +
    +
      + {uniqueUsers.map((user) => ( + + ))} +
    +
    + ); +} diff --git a/docs/src/pages/showcase/_components/ShowcaseCards/styles.module.css b/docs/src/pages/showcase/_components/ShowcaseCards/styles.module.css new file mode 100644 index 0000000000000..3303197f9eec5 --- /dev/null +++ b/docs/src/pages/showcase/_components/ShowcaseCards/styles.module.css @@ -0,0 +1,5 @@ +.showcaseList { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(280px, 1fr)); + gap: 24px; +} diff --git a/docs/src/pages/showcase/_components/ShowcaseFilters/index.tsx b/docs/src/pages/showcase/_components/ShowcaseFilters/index.tsx new file mode 100644 index 0000000000000..235aee22780f7 --- /dev/null +++ b/docs/src/pages/showcase/_components/ShowcaseFilters/index.tsx @@ -0,0 +1,86 @@ +import React, {useState} from 'react'; +import clsx from 'clsx'; +import {TagList, Tags, type TagType} from '../../../../data/users'; +import styles from './styles.module.css'; + +function ShowcaseTagSelect({ + tag, + label, + color, + description, + selected, + onToggle, +}: { + tag: TagType; + label: string; + color: string; + description: string; + selected: boolean; + onToggle: () => void; +}): JSX.Element { + return ( +
  • + +
  • + ); +} + +export default function ShowcaseFilters({ + selectedTags, + toggleTag, + operator, + toggleOperator, +}: { + selectedTags: TagType[]; + toggleTag: (tag: TagType) => void; + operator: 'AND' | 'OR'; + toggleOperator: () => void; +}): JSX.Element { + return ( +
    +
    +
    +

    + Filters + +

    +
    +
    +
      + {TagList.filter(tag => tag !== 'favorite').map((tag) => { + const {label, description, color} = Tags[tag]; + return ( + toggleTag(tag)} + /> + ); + })} +
    +
    + ); +} diff --git a/docs/src/pages/showcase/_components/ShowcaseFilters/styles.module.css b/docs/src/pages/showcase/_components/ShowcaseFilters/styles.module.css new file mode 100644 index 0000000000000..09bb1b4465dae --- /dev/null +++ b/docs/src/pages/showcase/_components/ShowcaseFilters/styles.module.css @@ -0,0 +1,57 @@ +.filterHeader { + display: flex; + align-items: center; + justify-content: space-between; + margin-bottom: 20px; +} + +.filterTitle { + display: flex; + align-items: center; + gap: 8px; +} + +.operatorButton { + background-color: var(--ifm-color-primary); + color: var(--ifm-color-white); + border: none; + border-radius: 20px; + padding: 4px 12px; + font-size: 0.8rem; + margin-left: 12px; + cursor: pointer; + transition: background-color 200ms ease-out; +} + +.operatorButton:hover { + background-color: var(--ifm-color-primary-dark); +} + +.tagList { + display: flex; + align-items: center; + flex-wrap: wrap; + gap: 8px; +} + +.tagListItem { + user-select: none; + white-space: nowrap; +} + +.tagButton { + display: inline-flex; + align-items: center; + padding: 6px 16px; + border: 2px solid; + border-radius: 20px; + font-size: 0.875rem; + font-weight: 500; + line-height: 1.2; + cursor: pointer; + transition: all 200ms ease-out; +} + +.tagButton:hover { + opacity: 0.8; +} diff --git a/docs/src/pages/showcase/_components/ShowcaseSearchBar/index.tsx b/docs/src/pages/showcase/_components/ShowcaseSearchBar/index.tsx new file mode 100644 index 0000000000000..1062a7c565bf9 --- /dev/null +++ b/docs/src/pages/showcase/_components/ShowcaseSearchBar/index.tsx @@ -0,0 +1,31 @@ +import React, { useCallback } from 'react'; +import { debounce } from 'lodash'; +import styles from './styles.module.css'; + +export default function ShowcaseSearchBar({ + onChange, + value +}: { + onChange: (value: string) => void; + value: string; +}): JSX.Element { + // Debounce the onChange callback + const debouncedOnChange = useCallback( + debounce((newValue: string) => { + onChange(newValue); + }, 200), + [onChange] + ); + + return ( +
    + debouncedOnChange(e.target.value)} + /> +
    + ); +} diff --git a/docs/src/pages/showcase/_components/ShowcaseSearchBar/styles.module.css b/docs/src/pages/showcase/_components/ShowcaseSearchBar/styles.module.css new file mode 100644 index 0000000000000..607e01cdbcdc8 --- /dev/null +++ b/docs/src/pages/showcase/_components/ShowcaseSearchBar/styles.module.css @@ -0,0 +1,27 @@ +.searchContainer { + margin-bottom: 24px; + width: 100%; + max-width: 400px; + padding: 0 var(--ifm-spacing-horizontal); +} + +.searchInput { + width: 100%; + height: 2.5rem; + padding: 0.5rem 1rem; + border-radius: 20px; + border: 2px solid var(--ifm-color-emphasis-300); + background: var(--ifm-background-color); + color: var(--ifm-font-color-base); + font-size: 0.9rem; + transition: border-color var(--ifm-transition-fast); +} + +.searchInput:focus { + outline: none; + border-color: var(--ifm-color-primary); +} + +.searchInput::placeholder { + color: var(--ifm-color-emphasis-500); +} diff --git a/docs/src/pages/showcase/_utils.tsx b/docs/src/pages/showcase/_utils.tsx new file mode 100644 index 0000000000000..850e2b570989a --- /dev/null +++ b/docs/src/pages/showcase/_utils.tsx @@ -0,0 +1,39 @@ +import {useState, useCallback} from 'react'; +import {type TagType, type User, sortedUsers} from '../../data/users'; + +// Hook for managing selected tags +export function useFilteredUsers() { + const [selectedTags, setSelectedTags] = useState([]); + const [operator, setOperator] = useState<'AND' | 'OR'>('OR'); + + const toggleTag = useCallback((tag: TagType) => { + setSelectedTags(tags => + tags.includes(tag) + ? tags.filter(t => t !== tag) + : [...tags, tag] + ); + }, []); + + const toggleOperator = useCallback(() => { + setOperator(op => op === 'OR' ? 'AND' : 'OR'); + }, []); + + // Filter users based on selected tags + const filteredUsers = selectedTags.length === 0 + ? sortedUsers + : sortedUsers.filter(user => { + if (operator === 'AND') { + return selectedTags.every(tag => user.tags.includes(tag)); + } else { + return selectedTags.some(tag => user.tags.includes(tag)); + } + }); + + return { + selectedTags, + toggleTag, + operator, + toggleOperator, + filteredUsers, + }; +} diff --git a/docs/src/pages/showcase/index.tsx b/docs/src/pages/showcase/index.tsx new file mode 100644 index 0000000000000..ccbc6b1643425 --- /dev/null +++ b/docs/src/pages/showcase/index.tsx @@ -0,0 +1,89 @@ +import React, { useState, useMemo } from 'react'; +import Layout from '@theme/Layout'; +import { type User, sortedUsers } from '@site/src/data/users'; +import ShowcaseSearchBar from './_components/ShowcaseSearchBar'; +import ShowcaseCards from './_components/ShowcaseCards'; +import ShowcaseFilters from './_components/ShowcaseFilters'; + +const TITLE = 'Plugin Showcase'; +const DESCRIPTION = 'Discover the awesome plugins in our ecosystem'; + +function ShowcaseHeader() { + return ( +
    +

    {TITLE}

    +

    {DESCRIPTION}

    +
    + ); +} + +function filterUsers(users: User[], search: string, selectedTags: string[], operator: 'OR' | 'AND') { + // First deduplicate the input array + const uniqueUsers = users.filter((user, index, self) => + index === self.findIndex((u) => u.title === user.title) + ); + + return uniqueUsers.filter(user => { + // Search filter + if (search) { + const searchValue = search.toLowerCase().trim(); + if (!user.title.toLowerCase().includes(searchValue) && + !user.description.toLowerCase().includes(searchValue)) { + return false; + } + } + + // Tags filter + if (selectedTags.length === 0) { + return true; + } + + if (operator === 'AND') { + return selectedTags.every(tag => user.tags.includes(tag)); + } + return selectedTags.some(tag => user.tags.includes(tag)); + }); +} + +export default function Showcase(): JSX.Element { + const [selectedTags, setSelectedTags] = useState([]); + const [operator, setOperator] = useState<'OR' | 'AND'>('OR'); + const [searchValue, setSearchValue] = useState(''); + + const toggleTag = (tag: string) => { + setSelectedTags(tags => + tags.includes(tag) + ? tags.filter(t => t !== tag) + : [...tags, tag] + ); + }; + + const toggleOperator = () => { + setOperator(op => op === 'OR' ? 'AND' : 'OR'); + }; + + const filteredUsers = useMemo(() => { + return filterUsers(sortedUsers, searchValue, selectedTags, operator); + }, [searchValue, selectedTags, operator]); + + return ( + +
    + + +
    + +
    + +
    +
    + ); +} diff --git a/docs/static/img/eliza-architecture.jpg b/docs/static/img/eliza-architecture.jpg new file mode 100644 index 0000000000000..36f89fe5a0301 Binary files /dev/null and b/docs/static/img/eliza-architecture.jpg differ diff --git a/docs/static/img/favicon.ico b/docs/static/img/favicon.ico index d12212c7daf80..ddb0bdca9fd13 100644 Binary files a/docs/static/img/favicon.ico and b/docs/static/img/favicon.ico differ diff --git a/i18n/readme/README_AR.md b/i18n/readme/README_AR.md index 7748533aa533e..9608b95aba8a6 100644 --- a/i18n/readme/README_AR.md +++ b/i18n/readme/README_AR.md @@ -1,7 +1,7 @@ # Eliza 🤖
    - Eliza Banner + Eliza Banner
    diff --git a/i18n/readme/README_CN.md b/i18n/readme/README_CN.md index 9929f083a1291..f27b846048af4 100644 --- a/i18n/readme/README_CN.md +++ b/i18n/readme/README_CN.md @@ -1,7 +1,7 @@ # Eliza 🤖
    - Eliza Banner + Eliza Banner
    @@ -17,7 +17,7 @@ ## 🚩 概述
    - Eliza Diagram + Eliza Diagram
    ## ✨ 功能 diff --git a/i18n/readme/README_DE.md b/i18n/readme/README_DE.md index 1a1590d4b575d..9b9c94d823aeb 100644 --- a/i18n/readme/README_DE.md +++ b/i18n/readme/README_DE.md @@ -1,7 +1,7 @@ # Eliza 🤖
    - Eliza Banner + Eliza Banner
    diff --git a/i18n/readme/README_ES.md b/i18n/readme/README_ES.md index 99354f4bbc7dd..552c65bbac910 100644 --- a/i18n/readme/README_ES.md +++ b/i18n/readme/README_ES.md @@ -1,6 +1,6 @@ # Eliza -Banner de Eliza +Banner de Eliza ## Funcionalidades diff --git a/i18n/readme/README_FA.md b/i18n/readme/README_FA.md index cfc386f4cd399..271dab0686744 100644 --- a/i18n/readme/README_FA.md +++ b/i18n/readme/README_FA.md @@ -1,7 +1,7 @@ # الیزا 🤖
    - الیزا بنر + الیزا بنر
    @@ -13,7 +13,7 @@ ## 🚩 معرفی کلی
    - نمودار الیزا + نمودار الیزا
    ## ✨ ویژگی‌ها diff --git a/i18n/readme/README_FR.md b/i18n/readme/README_FR.md index 5fa897ed1e126..66f91f7992246 100644 --- a/i18n/readme/README_FR.md +++ b/i18n/readme/README_FR.md @@ -1,7 +1,7 @@ # Eliza
    - Eliza Banner + Eliza Banner
    diff --git a/i18n/readme/README_GR.md b/i18n/readme/README_GR.md index 6898498b81295..91ddfcfd4720e 100644 --- a/i18n/readme/README_GR.md +++ b/i18n/readme/README_GR.md @@ -1,7 +1,7 @@ # Eliza 🤖
    - Eliza Banner + Eliza Banner
    @@ -15,7 +15,7 @@ ## 🚩 Επισκόπηση
    - Eliza Diagram + Eliza Diagram
    ## ✨ Χαρακτηριστικά diff --git a/i18n/readme/README_HE.md b/i18n/readme/README_HE.md index c25f8e17bea86..171143aab2b78 100644 --- a/i18n/readme/README_HE.md +++ b/i18n/readme/README_HE.md @@ -3,7 +3,7 @@ # אלייזה 🤖
    - אלייזה באנר + אלייזה באנר
    diff --git a/i18n/readme/README_HU.md b/i18n/readme/README_HU.md index 12b9dfeeb7f8a..089d4d5177693 100644 --- a/i18n/readme/README_HU.md +++ b/i18n/readme/README_HU.md @@ -1,7 +1,7 @@ # Eliza 🤖
    - Eliza Banner + Eliza Banner
    diff --git a/i18n/readme/README_IT.md b/i18n/readme/README_IT.md index 06bd7365fb579..de8ed4ef13588 100644 --- a/i18n/readme/README_IT.md +++ b/i18n/readme/README_IT.md @@ -1,7 +1,7 @@ # Eliza 🤖
    - Eliza Banner + Eliza Banner
    ## ✨ Caratteristiche diff --git a/i18n/readme/README_JA.md b/i18n/readme/README_JA.md index a24330edb8a6d..1591aff9b5d40 100644 --- a/i18n/readme/README_JA.md +++ b/i18n/readme/README_JA.md @@ -1,6 +1,6 @@ # Eliza -Eliza Banner +Eliza Banner ## 機能 diff --git a/i18n/readme/README_KOR.md b/i18n/readme/README_KOR.md index fa560890f85db..143d6c00e45b6 100644 --- a/i18n/readme/README_KOR.md +++ b/i18n/readme/README_KOR.md @@ -1,7 +1,7 @@ # Eliza 🤖
    - Eliza Banner + Eliza Banner
    @@ -13,7 +13,7 @@ ## 🚩 구조
    - Eliza Diagram + Eliza Diagram
    ## ✨ 기능 diff --git a/i18n/readme/README_NL.md b/i18n/readme/README_NL.md index 5693f09696b40..714f7f77a5647 100644 --- a/i18n/readme/README_NL.md +++ b/i18n/readme/README_NL.md @@ -1,7 +1,7 @@ # Eliza 🤖
    - Eliza Banner + Eliza Banner
    @@ -17,7 +17,7 @@ ## 🚩 Overzicht
    - Eliza Diagram + Eliza Diagram
    ## ✨ Functies @@ -158,4 +158,4 @@ pnpm install --include=optional sharp ## Ster Historie -[![Star History Chart](https://api.star-history.com/svg?repos=elizaos/eliza&type=Date)](https://star-history.com/#elizaos/eliza&Date) \ No newline at end of file +[![Star History Chart](https://api.star-history.com/svg?repos=elizaos/eliza&type=Date)](https://star-history.com/#elizaos/eliza&Date) diff --git a/i18n/readme/README_PL.md b/i18n/readme/README_PL.md index 9cd164b1e12c4..d97d9185f02a0 100644 --- a/i18n/readme/README_PL.md +++ b/i18n/readme/README_PL.md @@ -1,7 +1,7 @@ # Eliza 🤖
    - Eliza Banner + Eliza Banner
    diff --git a/i18n/readme/README_PTBR.md b/i18n/readme/README_PTBR.md index a6f6cb86d60d8..2c862d5a71530 100644 --- a/i18n/readme/README_PTBR.md +++ b/i18n/readme/README_PTBR.md @@ -1,6 +1,6 @@ # Eliza -Eliza Banner +Eliza Banner ## Funcionalidades diff --git a/i18n/readme/README_RO.md b/i18n/readme/README_RO.md index 8f64b4f60b15b..750ae8987f40e 100644 --- a/i18n/readme/README_RO.md +++ b/i18n/readme/README_RO.md @@ -1,6 +1,6 @@ # Eliza -Eliza Banner +Eliza Banner ## Funcționalități diff --git a/i18n/readme/README_RS.md b/i18n/readme/README_RS.md index c4d6abea2203e..9d72a1599cba7 100644 --- a/i18n/readme/README_RS.md +++ b/i18n/readme/README_RS.md @@ -1,6 +1,6 @@ # Eliza -Baner Eliza +Baner Eliza ## Funkcionalnosti diff --git a/i18n/readme/README_RU.md b/i18n/readme/README_RU.md index c48dab77b4dc4..fc6ddfa412fef 100644 --- a/i18n/readme/README_RU.md +++ b/i18n/readme/README_RU.md @@ -1,7 +1,7 @@ # Eliza 🤖
    - Eliza Banner + Eliza Banner
    diff --git a/i18n/readme/README_TG.md b/i18n/readme/README_TG.md index ca223a8bf1bbf..f23e432aebf3c 100644 --- a/i18n/readme/README_TG.md +++ b/i18n/readme/README_TG.md @@ -1,7 +1,7 @@ # Eliza 🤖
    - Eliza Banner + Eliza Banner
    diff --git a/i18n/readme/README_TH.md b/i18n/readme/README_TH.md index fc853bd768187..46349bf182ea1 100644 --- a/i18n/readme/README_TH.md +++ b/i18n/readme/README_TH.md @@ -1,7 +1,7 @@ # Eliza (อีไลซ่า) 🤖
    - Eliza Banner + Eliza Banner
    diff --git a/i18n/readme/README_TR.md b/i18n/readme/README_TR.md index 89f46a8e153da..bd8d698ff2c0b 100644 --- a/i18n/readme/README_TR.md +++ b/i18n/readme/README_TR.md @@ -1,7 +1,7 @@ # Eliza 🤖
    - Eliza Banner + Eliza Banner
    diff --git a/i18n/readme/README_UA.md b/i18n/readme/README_UA.md new file mode 100644 index 0000000000000..89741480b223e --- /dev/null +++ b/i18n/readme/README_UA.md @@ -0,0 +1,187 @@ +# Eliza 🤖 + +
    + Eliza Banner +
    + +
    + +📖 [Документація](https://elizaos.github.io/eliza/) | 🎯 [Приклади](https://github.com/thejoven/awesome-eliza) + +
    + +## ✨ Особливості + +- 🛠 Повноцінні коннектори для Discord, Twitter та Telegram +- 👥 Підтримка кількох агентів та кімнат +- 📚 Просте додавання та взаємодія з вашими документами +- 💾 Запам'ятовування контексту та зберігання документів +- 🚀 Висока масштабованість - створюйте свої власні дії та клієнти для розширення можливостей +- ☁️ Підтримує багато моделей, включаючи локальні Llama, OpenAI, Anthropic, Groq та інші +- 📦 Простота у використанні! + +## 🎯 Для чого це можна використовувати? + +- 🤖 Чат-боти +- 🕵️ Автономні агенти +- 📈 Обробка бізнес-процесів +- 🎮 NPC у відеоіграх +- 🧠 Торгівля + +## 🌍 Переклади + +
    +Доступні мови + +- [中文说明](./README_CN.md) +- [日本語の説明](./README_JA.md) +- [한국어 설명](./README_KOR.md) +- [Instructions en français](./README_FR.md) +- [Instruções em português](./README_PTBR.md) +- [Інструкція російською](./README_RU.md) + +

    + +# 🚀 Початок роботи + +**Необхідні умови (ОБОВ'ЯЗКОВО):** + +- [Python 2.7+](https://www.python.org/downloads/) +- [Node.js 23.3+](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm) +- [pnpm](https://pnpm.io/installation) + > **Для користувачів Windows:** потрібен WSL + +### Редагування файлу .env + +- Скопіюйте `.env.example` в `.env` та заповніть відповідними значеннями + +```bash +cp .env.example .env +``` + +### Редагування файлу персонажа + +1. Відкрийте `packages/core/src/defaultCharacter.ts`, щоб змінити персонажа за замовчуванням. + +2. Для завантаження користувацьких персонажів: + - Використовуйте команду `pnpm start --characters="path/to/your/character.json"` + - Можна завантажувати кілька файлів персонажів одночасно. + +### Запуск Eliza + +Після налаштування файлу `.env` та файлу персонажа ви можете запустити бота за допомогою наступної команди: + +```bash +pnpm i +pnpm build +pnpm start + +# Проект швидко розвивається, іноді потрібно очищати проект, якщо ви повертаєтесь до нього через деякий час +pnpm clean +``` + +#### Додаткові вимоги + +Можливо, буде потрібно встановити Sharp. Якщо при запуску виникне помилка, спробуйте встановити його за допомогою наступної команди: + +```bash +pnpm install --include=optional sharp +``` + +# Налаштування середовища + +Вам потрібно додати змінні середовища у файл `.env` для підключення до різних платформ: + +``` +# Обов'язкові змінні середовища +DISCORD_APPLICATION_ID= +DISCORD_API_TOKEN= # Токен бота +OPENAI_API_KEY=sk-* # API-ключ OpenAI, що починається з sk- +ELEVENLABS_XI_API_KEY= # API-ключ від elevenlabs +GOOGLE_GENERATIVE_AI_API_KEY= # API-ключ Gemini + +# НАЛАШТУВАННЯ ELEVENLABS +ELEVENLABS_MODEL_ID=eleven_multilingual_v2 +ELEVENLABS_VOICE_ID=21m00Tcm4TlvDq8ikWAM +ELEVENLABS_VOICE_STABILITY=0.5 +ELEVENLABS_VOICE_SIMILARITY_BOOST=0.9 +ELEVENLABS_VOICE_STYLE=0.66 +ELEVENLABS_VOICE_USE_SPEAKER_BOOST=false +ELEVENLABS_OPTIMIZE_STREAMING_LATENCY=4 +ELEVENLABS_OUTPUT_FORMAT=pcm_16000 + +TWITTER_DRY_RUN=false +TWITTER_USERNAME= # Ім'я користувача акаунта +TWITTER_PASSWORD= # Пароль акаунта +TWITTER_EMAIL= # Email акаунта + +XAI_API_KEY= +XAI_MODEL= + + +# Для запитів до Claude +ANTHROPIC_API_KEY= + +# EVM +EVM_PRIVATE_KEY=EXAMPLE_WALLET_PRIVATE_KEY + +# Solana +SOLANA_PRIVATE_KEY=EXAMPLE_WALLET_PRIVATE_KEY +SOLANA_PUBLIC_KEY=EXAMPLE_WALLET_PUBLIC_KEY + +# Fallback Wallet Configuration (deprecated) +WALLET_PRIVATE_KEY=EXAMPLE_WALLET_PRIVATE_KEY +WALLET_PUBLIC_KEY=EXAMPLE_WALLET_PUBLIC_KEY + +BIRDEYE_API_KEY= # API-ключ для BirdEye + +SOL_ADDRESS=So11111111111111111111111111111111111111112 +SLIPPAGE=1 +SOLANA_RPC_URL=https://api.mainnet-beta.solana.com +HELIUS_API_KEY= # API-ключ Helius + + +## Telegram +TELEGRAM_BOT_TOKEN= # Токен бота Telegram + +TOGETHER_API_KEY= + +``` + +# Локальне налаштування середовища + +### Налаштування CUDA + +Якщо у вас є NVIDIA GPU, ви можете встановити CUDA для значного прискорення локального інференсу. + +```bash +pnpm install +npx --no node-llama-cpp source download --gpu cuda +``` + +Переконайтеся, що ви встановили CUDA Toolkit, включаючи cuDNN та cuBLAS. + +### Локальний запуск + +Додайте `XAI_MODEL` та встановіть його в одне з вищезгаданих значень з Запуску з Llama. Ви можете залишити `XAI_API_KEY` порожнім — модель буде завантажена з huggingface та оброблена локально. + +# Клієнти + +## Бот для Discord + +Для отримання допомоги по налаштуванню бота Discord ознайомтесь з інструкцією: [Налаштування додатку бота](https://discordjs.guide/preparations/setting-up-a-bot-application.html). + +### Спільнота та контакти + +- [GitHub Issues](https://github.com/elizaos/eliza/issues). Найкраще підходить для: повідомлень про помилки при використанні Eliza та пропозицій нових функцій. +- [Discord](https://discord.gg/ai16z). Найкраще підходить для: обміну своїми додатками та спілкування з спільнотою. + +## Контриб'ютори + + + + + +## Історія зірок + +[![Графік історії зірок](https://api.star-history.com/svg?repos=elizaos/eliza&type=Date)](https://star-history.com/#elizaos/eliza&Date) diff --git a/i18n/readme/README_VI.md b/i18n/readme/README_VI.md index 0622da99b7077..be88f8dfe9d29 100644 --- a/i18n/readme/README_VI.md +++ b/i18n/readme/README_VI.md @@ -1,7 +1,7 @@ # Eliza 🤖
    - Eliza Banner + Eliza Banner
    diff --git a/lerna.json b/lerna.json index 2e0829e156dc1..760a84f70046e 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "0.25.6-alpha.1", + "version": "0.25.8", "packages": [ "packages/*", "docs", diff --git a/package.json b/package.json index 62cce98f4b968..d2b68a22b147a 100644 --- a/package.json +++ b/package.json @@ -29,13 +29,15 @@ "@commitlint/cli": "18.6.1", "@commitlint/config-conventional": "18.6.3", "@types/jest": "^29.5.11", + "cli": "workspace:*", "concurrently": "9.1.0", "cross-env": "7.0.3", "husky": "9.1.7", "jest": "^29.7.0", "lerna": "8.1.5", + "nodemon": "3.1.7", "only-allow": "1.2.1", - "turbo": "2.3.3", + "turbo": "2.4.2", "typedoc": "0.26.11", "typescript": "5.6.3", "viem": "2.21.58", @@ -58,10 +60,11 @@ "@ai-sdk/provider-utils": "2.1.6", "cookie": "0.7.0", "bs58": "5.0.0", - "@coral-xyz/anchor": "0.28.0" - }, - "patchedDependencies": { - "@solana-developers/helpers": "patches/@solana-developers__helpers.patch" + "@coral-xyz/anchor": "0.28.0", + "axios@>=0.8.1 <0.28.0": ">=0.28.0", + "undici@>=6.0.0 <6.21.1": ">=6.21.1", + "path-to-regexp@<0.1.12": ">=0.1.12", + "secp256k1": "5.0.1" } }, "engines": { @@ -71,6 +74,7 @@ "@0glabs/0g-ts-sdk": "0.2.1", "@coinbase/coinbase-sdk": "0.10.0", "@deepgram/sdk": "^3.9.0", + "@elizaos-plugins/adapter-sqlite": "workspace:*", "@injectivelabs/sdk-ts": "^1.14.33", "@vitest/eslint-plugin": "1.0.1", "amqplib": "0.10.5", @@ -81,6 +85,7 @@ "optional": "0.1.4", "pnpm": "9.15.0", "sharp": "0.33.5", + "ws": "8.18.0", "zod": "3.24.1" }, "packageManager": "pnpm@9.15.0", diff --git a/packages/_examples/plugin-with-di/.npmignore b/packages/_examples/plugin-with-di/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/_examples/plugin-with-di/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/_examples/plugin-with-di/README.md b/packages/_examples/plugin-with-di/README.md deleted file mode 100644 index 3d396d51a841d..0000000000000 --- a/packages/_examples/plugin-with-di/README.md +++ /dev/null @@ -1,39 +0,0 @@ -# Sample Plugin for Eliza - -The Sample Plugin for Eliza extends the functionality of the Eliza platform by providing additional actions, providers, evaluators, and more. This plugin is designed to be easily extendable and customizable to fit various use cases. - -## Description - -The Sample Plugin offers a set of features that can be integrated into the Eliza platform to enhance its capabilities. Below is a high-level overview of the different components available in this plugin. - -## Actions - -- **createResourceAction**: This action enables the creation and management of generic resources. It can be customized to handle different types of resources and integrate with various data sources. - -## Providers - -- **sampleProvider**: This provider offers a mechanism to supply data or services to the plugin. It can be extended to include additional providers as needed. - -## Evaluators - -- **sampleEvaluator**: This evaluator provides a way to assess or analyze data within the plugin. It can be extended to include additional evaluators as needed. - -## Services - -- **[ServiceName]**: Description of the service and its functionality. This can be extended to include additional services as needed. - -## Clients - -- **[ClientName]**: Description of the client and its functionality. This can be extended to include additional clients as needed. - -## How to Extend - -To extend the Sample Plugin, you can add new actions, providers, evaluators, services, and clients by following the structure provided in the plugin. Each component can be customized to fit your specific requirements. - -1. **Actions**: Add new actions by defining them in the `actions` array. -2. **Providers**: Add new providers by defining them in the `providers` array. -3. **Evaluators**: Add new evaluators by defining them in the `evaluators` array. -4. **Services**: Add new services by defining them in the `services` array. -5. **Clients**: Add new clients by defining them in the `clients` array. - -For more detailed information on how to extend the plugin, refer to the documentation provided in the Eliza platform. diff --git a/packages/_examples/plugin-with-di/package.json b/packages/_examples/plugin-with-di/package.json deleted file mode 100644 index 61717121e89fa..0000000000000 --- a/packages/_examples/plugin-with-di/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "@elizaos/plugin-sample-with-di", - "version": "0.1.9-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "@elizaos/plugin-di": "workspace:*" - }, - "devDependencies": { - "tsup": "8.3.5", - "@types/node": "^20.0.0" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch" - } -} diff --git a/packages/_examples/plugin-with-di/src/actions/sampleAction.ts b/packages/_examples/plugin-with-di/src/actions/sampleAction.ts deleted file mode 100644 index a5505b226ed9a..0000000000000 --- a/packages/_examples/plugin-with-di/src/actions/sampleAction.ts +++ /dev/null @@ -1,168 +0,0 @@ -import { z } from "zod"; -import { inject, injectable } from "inversify"; -import { - type IAgentRuntime, - type Memory, - type HandlerCallback, - type State, - elizaLogger, -} from "@elizaos/core"; -import { - property, - globalContainer, - BaseInjectableAction, - type ActionOptions -} from "@elizaos/plugin-di"; - -import { SampleProvider } from "../providers/sampleProvider"; - -/** - * The content class for the action - */ -export class CreateResourceContent { - @property({ - description: "Name of the resource", - schema: z.string(), - }) - name: string; - - @property({ - description: "Type of resource (document, image, video)", - schema: z.string(), - }) - type: string; - - @property({ - description: "Description of the resource", - schema: z.string(), - }) - description: string; - - @property({ - description: "Array of tags to categorize the resource", - schema: z.array(z.string()), - }) - tags: string[]; -} - -/** - * Options for the CreateResource action - */ -const options: ActionOptions = { - name: "CREATE_RESOURCE", - similes: [], - description: "Create a new resource with the specified details", - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Create a new resource with the name 'Resource1' and type 'TypeA'", - }, - }, - { - user: "{{agentName}}", - content: { - text: `Resource created successfully: -- Name: Resource1 -- Type: TypeA`, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Create a new resource with the name 'Resource2' and type 'TypeB'", - }, - }, - { - user: "{{agentName}}", - content: { - text: `Resource created successfully: -- Name: Resource2 -- Type: TypeB`, - }, - }, - ], - ], - contentClass: CreateResourceContent, -}; - -/** - * CreateResourceAction - */ -@injectable() -export class CreateResourceAction extends BaseInjectableAction { - constructor( - @inject(SampleProvider) - private readonly sampleProvider: SampleProvider - ) { - super(options); - } - - async validate( - runtime: IAgentRuntime, - _message: Memory, - _state?: State - ): Promise { - return !!runtime.character.settings.secrets?.API_KEY; - } - - async execute( - content: CreateResourceContent | null, - runtime: IAgentRuntime, - message: Memory, - state: State, - callback?: HandlerCallback - ): Promise { - if (!content) { - const error = "No content provided for the action."; - elizaLogger.warn(error); - await callback?.({ text: error }, []); - return; - } - - // Call injected provider to do some work - try { - const result = await this.sampleProvider.get( - runtime, - message, - state - ); - if (!result) { - elizaLogger.warn("Provider did not return a result."); - } else { - elizaLogger.info("Privder result:", result); - } - // Use result in callback - } catch (error) { - elizaLogger.error("Provider error:", error); - } - - // persist relevant data if needed to memory/knowledge - // const memory = { - // type: "resource", - // content: resourceDetails.object, - // timestamp: new Date().toISOString() - // }; - - // await runtime.storeMemory(memory); - - callback?.( - { - text: `Resource created successfully: -- Name: ${content.name} -- Type: ${content.type} -- Description: ${content.description} -- Tags: ${content.tags.join(", ")} - -Resource has been stored in memory.`, - }, - [] - ); - } -} - -// Register the action with the global container -globalContainer.bind(CreateResourceAction).toSelf().inRequestScope(); diff --git a/packages/_examples/plugin-with-di/src/evaluators/sampleEvaluator.ts b/packages/_examples/plugin-with-di/src/evaluators/sampleEvaluator.ts deleted file mode 100644 index 4138fe81cd229..0000000000000 --- a/packages/_examples/plugin-with-di/src/evaluators/sampleEvaluator.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { type IAgentRuntime, type Memory, elizaLogger } from "@elizaos/core"; -import { injectable } from "inversify"; -import { - globalContainer, - BaseInjectableEvaluator, - type EvaluatorOptions -} from "@elizaos/plugin-di"; - -const options: EvaluatorOptions = { - alwaysRun: false, - name: "sampleEvaluator", - description: "Sample evaluator for checking important content in memory", - similes: ["content checker", "memory evaluator"], - examples: [ - { - context: "Checking if memory contains important content", - messages: [ - { - user: "{{user1}}", - content: { - text: "This is an important message", - }, - }, - ], - outcome: `\`\`\`json -[ - { - "score": 1, - "reason": "Memory contains important content." - } -] -\`\`\``, - }, - ], -}; - -@injectable() -export class SampleEvaluator extends BaseInjectableEvaluator { - constructor() { - super(options); - } - - async handler(runtime: IAgentRuntime, memory: Memory) { - // Evaluation logic for the evaluator - elizaLogger.log("Evaluating data in sampleEvaluator..."); - try { - if (!memory.content || typeof memory.content.text !== "string") { - return { - score: 0, - reason: "Invalid memory content structure", - }; - } - - if (memory.content.text.includes("important")) { - elizaLogger.log("Important content found in memory."); - return { - score: 1, - reason: "Memory contains important content.", - }; - } else { - elizaLogger.log("No important content found in memory."); - return { - score: 0, - reason: "Memory does not contain important content.", - }; - } - } catch (error) { - elizaLogger.error("Error in sampleEvaluator:", error); - throw error; - } - } -} - -// Register the sample evaluator with the global container -globalContainer.bind(SampleEvaluator).toSelf().inRequestScope(); diff --git a/packages/_examples/plugin-with-di/src/index.ts b/packages/_examples/plugin-with-di/src/index.ts deleted file mode 100644 index 10aba3a2706e5..0000000000000 --- a/packages/_examples/plugin-with-di/src/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { samplePlugin } from "./samplePlugin"; - -export * from "./samplePlugin"; - -export default samplePlugin; diff --git a/packages/_examples/plugin-with-di/src/providers/sampleProvider.ts b/packages/_examples/plugin-with-di/src/providers/sampleProvider.ts deleted file mode 100644 index e126ba818fef6..0000000000000 --- a/packages/_examples/plugin-with-di/src/providers/sampleProvider.ts +++ /dev/null @@ -1,59 +0,0 @@ -import { inject, injectable } from "inversify"; -import { - type Provider, - type IAgentRuntime, - type Memory, - type State, - elizaLogger, -} from "@elizaos/core"; -import { globalContainer, type InjectableProvider } from "@elizaos/plugin-di"; - -// Dynamic Data Provider - -globalContainer - .bind>("DYNAMIC_DATA") - .toDynamicValue(async () => { - return Promise.resolve({ key: "value" }); - }); - -/** - * Sample Provider - */ -@injectable() -export class SampleProvider - implements InjectableProvider>, Provider -{ - private _sharedInstance: Record; - - constructor( - @inject("DYNAMIC_DATA") - private readonly dynamicData: Record - ) {} - - // ---- Implementing the InjectableProvider interface ---- - - async getInstance( - _runtime: IAgentRuntime - ): Promise> { - if (!this._sharedInstance) { - this._sharedInstance = {}; - } - return this._sharedInstance; - } - - // ---- Implementing the Provider interface ---- - - async get( - _runtime: IAgentRuntime, - _message: Memory, - _state?: State - ): Promise { - elizaLogger.log("Retrieving data in sampleProvider..."); - return `Shared instance data: ${JSON.stringify(this._sharedInstance)} -Dynamic data: ${JSON.stringify(this.dynamicData)} -`; - } -} - -// Register the provider with the global container -globalContainer.bind(SampleProvider).toSelf().inSingletonScope(); diff --git a/packages/_examples/plugin-with-di/src/samplePlugin.ts b/packages/_examples/plugin-with-di/src/samplePlugin.ts deleted file mode 100644 index 806c74ca125fc..0000000000000 --- a/packages/_examples/plugin-with-di/src/samplePlugin.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type { PluginOptions } from '@elizaos/plugin-di'; -import { CreateResourceAction } from "./actions/sampleAction"; -import { SampleProvider } from "./providers/sampleProvider"; -import { SampleEvaluator } from "./evaluators/sampleEvaluator"; -import { SampleService } from './services/sampleService'; - -export const samplePlugin: PluginOptions = { - name: "sample", - description: "Enables creation and management of generic resources", - actions: [CreateResourceAction], - providers: [SampleProvider], - evaluators: [SampleEvaluator], - services: [SampleService], - clients: [], -}; - -export default samplePlugin; diff --git a/packages/_examples/plugin-with-di/src/services/sampleService.ts b/packages/_examples/plugin-with-di/src/services/sampleService.ts deleted file mode 100644 index 9a8f0408bde23..0000000000000 --- a/packages/_examples/plugin-with-di/src/services/sampleService.ts +++ /dev/null @@ -1,154 +0,0 @@ -import { inject, injectable } from "inversify"; -import { - Service, - ServiceType, - type IAgentRuntime, - elizaLogger, - stringToUuid, - Memory, - State, -} from "@elizaos/core"; -import { globalContainer } from "@elizaos/plugin-di"; - -import { SampleProvider } from "../providers/sampleProvider"; - -// Add SAMPLE to ServiceType enum in types.ts -declare module "@elizaos/core" { - export enum ServiceType { - SAMPLE = "sample", - } -} - -// The SampleService is a simple service that logs "Hello world" every 15 minutes. -@injectable() -export class SampleService extends Service { - private runtime: IAgentRuntime | null = null; - private intervalId: NodeJS.Timeout | null = null; - private readonly DEFAULT_INTERVAL = 15 * 60 * 1000; // 15 minutes in milliseconds - - constructor( - @inject(SampleProvider) - private readonly sampleProvider: SampleProvider - ) { - super(); - } - - static get serviceType(): ServiceType { - return "sample" as ServiceType.SAMPLE; - } - - private static isInitialized = false; - - async initialize(runtime: IAgentRuntime): Promise { - // Verify if the service is already initialized - if (SampleService.isInitialized) { - return; - } - - this.runtime = runtime; - - // Start the periodic task - this.startPeriodicTask(); - SampleService.isInitialized = true; - elizaLogger.info("SampleService initialized and started periodic task"); - } - - private static activeTaskCount = 0; - - private startPeriodicTask(): void { - // Verify if a task is already active - if (SampleService.activeTaskCount > 0) { - elizaLogger.warn( - "SampleService: Periodic task already running, skipping" - ); - return; - } - - // Clear any existing interval - if (this.intervalId) { - clearInterval(this.intervalId); - } - - SampleService.activeTaskCount++; - elizaLogger.info( - `SampleService: Starting periodic task (active tasks: ${SampleService.activeTaskCount})` - ); - - // Initial call immediately - this.fetchSample(); - - // Set up periodic calls - this.intervalId = setInterval(() => { - this.fetchSample(); - }, this.DEFAULT_INTERVAL); - } - - private async fetchSample(): Promise { - if (!this.runtime) { - elizaLogger.error("SampleService: Runtime not initialized"); - return; - } - - try { - // Example of using the sampleProvider - // Create dummy memory and state objects for the provider - const dummyMemory: Memory = { - id: stringToUuid("sample-service-trigger"), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - roomId: this.runtime.agentId, - content: { text: "Periodic sample fetch" }, - createdAt: Date.now(), - }; - - const dummyState: State = { - userId: this.runtime.agentId, - bio: "", - lore: "", - messageDirections: "", - postDirections: "", - roomId: this.runtime.agentId, - actors: "", - recentMessages: "", - recentMessagesData: [], - }; - await this.sampleProvider.get( - this.runtime, - dummyMemory, - dummyState - ); - - // hello world log example - elizaLogger.info("SampleService: Hello world"); - - elizaLogger.info( - "SampleService: Successfully fetched and processed sample" - ); - } catch (error) { - elizaLogger.error("SampleService: Error fetching sample:", error); - } - } - - // Method to stop the service - stop(): void { - if (this.intervalId) { - clearInterval(this.intervalId); - this.intervalId = null; - SampleService.activeTaskCount--; - elizaLogger.info( - `SampleService stopped (active tasks: ${SampleService.activeTaskCount})` - ); - } - SampleService.isInitialized = false; - } - - // Method to manually trigger a sample fetch (for testing) - async forceFetch(): Promise { - await this.fetchSample(); - } -} - -// Register the provider with the global container -globalContainer.bind(SampleService).toSelf().inSingletonScope(); - -export default SampleService; diff --git a/packages/_examples/plugin-with-di/tsconfig.json b/packages/_examples/plugin-with-di/tsconfig.json deleted file mode 100644 index a4134313dc69c..0000000000000 --- a/packages/_examples/plugin-with-di/tsconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "extends": "../../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "types": ["node"], - "experimentalDecorators": true, - "emitDecoratorMetadata": true - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/_examples/plugin-with-di/tsup.config.ts b/packages/_examples/plugin-with-di/tsup.config.ts deleted file mode 100644 index 1a96f24afa1eb..0000000000000 --- a/packages/_examples/plugin-with-di/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "safe-buffer", - // Add other modules you want to externalize - ], -}); diff --git a/packages/_examples/plugin/.npmignore b/packages/_examples/plugin/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/_examples/plugin/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/_examples/plugin/README.md b/packages/_examples/plugin/README.md deleted file mode 100644 index 3d396d51a841d..0000000000000 --- a/packages/_examples/plugin/README.md +++ /dev/null @@ -1,39 +0,0 @@ -# Sample Plugin for Eliza - -The Sample Plugin for Eliza extends the functionality of the Eliza platform by providing additional actions, providers, evaluators, and more. This plugin is designed to be easily extendable and customizable to fit various use cases. - -## Description - -The Sample Plugin offers a set of features that can be integrated into the Eliza platform to enhance its capabilities. Below is a high-level overview of the different components available in this plugin. - -## Actions - -- **createResourceAction**: This action enables the creation and management of generic resources. It can be customized to handle different types of resources and integrate with various data sources. - -## Providers - -- **sampleProvider**: This provider offers a mechanism to supply data or services to the plugin. It can be extended to include additional providers as needed. - -## Evaluators - -- **sampleEvaluator**: This evaluator provides a way to assess or analyze data within the plugin. It can be extended to include additional evaluators as needed. - -## Services - -- **[ServiceName]**: Description of the service and its functionality. This can be extended to include additional services as needed. - -## Clients - -- **[ClientName]**: Description of the client and its functionality. This can be extended to include additional clients as needed. - -## How to Extend - -To extend the Sample Plugin, you can add new actions, providers, evaluators, services, and clients by following the structure provided in the plugin. Each component can be customized to fit your specific requirements. - -1. **Actions**: Add new actions by defining them in the `actions` array. -2. **Providers**: Add new providers by defining them in the `providers` array. -3. **Evaluators**: Add new evaluators by defining them in the `evaluators` array. -4. **Services**: Add new services by defining them in the `services` array. -5. **Clients**: Add new clients by defining them in the `clients` array. - -For more detailed information on how to extend the plugin, refer to the documentation provided in the Eliza platform. diff --git a/packages/_examples/plugin/package.json b/packages/_examples/plugin/package.json deleted file mode 100644 index dff917e5080f0..0000000000000 --- a/packages/_examples/plugin/package.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "name": "@elizaos/plugin-sample", - "version": "0.1.5-alpha.5", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*" - }, - "devDependencies": { - "tsup": "8.3.5", - "@types/node": "^20.0.0" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch" - } -} diff --git a/packages/_examples/plugin/src/actions/sampleAction.ts b/packages/_examples/plugin/src/actions/sampleAction.ts deleted file mode 100644 index 0e9e12f949bd5..0000000000000 --- a/packages/_examples/plugin/src/actions/sampleAction.ts +++ /dev/null @@ -1,111 +0,0 @@ -import { - type Action, - type IAgentRuntime, - type Memory, - type HandlerCallback, - type State, - composeContext, - generateObject, - ModelClass, - elizaLogger, -} from "@elizaos/core"; - -import { CreateResourceSchema, isCreateResourceContent } from "../types"; - -import { createResourceTemplate } from "../templates"; - -export const createResourceAction: Action = { - name: "CREATE_RESOURCE", - description: "Create a new resource with the specified details", - validate: async (runtime: IAgentRuntime, _message: Memory) => { - return !!runtime.character.settings.secrets?.API_KEY; - }, - handler: async ( - runtime: IAgentRuntime, - _message: Memory, - state: State, - _options: object, - callback: HandlerCallback - ) => { - try { - const context = composeContext({ - state, - template: createResourceTemplate, - }); - - const resourceDetails = await generateObject({ - runtime, - context, - modelClass: ModelClass.SMALL, - schema: CreateResourceSchema, - }); - - if (!isCreateResourceContent(resourceDetails.object)) { - callback({ text: "Invalid resource details provided." }, []); - return; - } - - // persist relevant data if needed to memory/knowledge - // const memory = { - // type: "resource", - // content: resourceDetails.object, - // timestamp: new Date().toISOString() - // }; - - // await runtime.storeMemory(memory); - - callback( - { - text: `Resource created successfully: -- Name: ${resourceDetails.object.name} -- Type: ${resourceDetails.object.type} -- Description: ${resourceDetails.object.description} -- Tags: ${resourceDetails.object.tags.join(", ")} - -Resource has been stored in memory.`, - }, - [] - ); - } catch (error) { - elizaLogger.error("Error creating resource:", error); - callback( - { text: "Failed to create resource. Please check the logs." }, - [] - ); - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Create a new resource with the name 'Resource1' and type 'TypeA'", - }, - }, - { - user: "{{agentName}}", - content: { - text: `Resource created successfully: -- Name: Resource1 -- Type: TypeA`, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Create a new resource with the name 'Resource2' and type 'TypeB'", - }, - }, - { - user: "{{agentName}}", - content: { - text: `Resource created successfully: -- Name: Resource2 -- Type: TypeB`, - }, - }, - ], - ], -}; diff --git a/packages/_examples/plugin/src/evaluators/sampleEvalutor.ts b/packages/_examples/plugin/src/evaluators/sampleEvalutor.ts deleted file mode 100644 index 511ca5ba1cf60..0000000000000 --- a/packages/_examples/plugin/src/evaluators/sampleEvalutor.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { - type Evaluator, - type IAgentRuntime, - type Memory, - type State, - elizaLogger, -} from "@elizaos/core"; - -export const sampleEvaluator: Evaluator = { - alwaysRun: false, - description: "Sample evaluator for checking important content in memory", - similes: ["content checker", "memory evaluator"], - examples: [ - { - context: "Checking if memory contains important content", - messages: [ - { - action: "evaluate", - input: "This is an important message", - output: { - score: 1, - reason: "Memory contains important content.", - }, - }, - ], - outcome: "Memory should be evaluated as important", - }, - ], - handler: async (runtime: IAgentRuntime, memory: Memory, state: State) => { - // Evaluation logic for the evaluator - elizaLogger.log("Evaluating data in sampleEvaluator..."); - - // Example evaluation logic - if (memory.content?.includes("important")) { - elizaLogger.log("Important content found in memory."); - return { - score: 1, - reason: "Memory contains important content.", - }; - } - elizaLogger.log("No important content found in memory."); - return { - score: 0, - reason: "Memory does not contain important content.", - }; - }, - name: "sampleEvaluator", - validate: async (runtime: IAgentRuntime, memory: Memory, state: State) => { - // Validation logic for the evaluator - return true; - }, -}; diff --git a/packages/_examples/plugin/src/index.ts b/packages/_examples/plugin/src/index.ts deleted file mode 100644 index b16dd06142745..0000000000000 --- a/packages/_examples/plugin/src/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { samplePlugin } from "./plugins/samplePlugin"; - -export * from "./plugins/samplePlugin"; - -export default samplePlugin; diff --git a/packages/_examples/plugin/src/plugins/samplePlugin.ts b/packages/_examples/plugin/src/plugins/samplePlugin.ts deleted file mode 100644 index afaefca6a3b30..0000000000000 --- a/packages/_examples/plugin/src/plugins/samplePlugin.ts +++ /dev/null @@ -1,16 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { createResourceAction } from "../actions/sampleAction"; -import { sampleProvider } from "../providers/sampleProvider"; -import { sampleEvaluator } from "../evaluators/sampleEvalutor"; -import SampleService from "../services/sampleService"; - -export const samplePlugin: Plugin = { - name: "sample", - description: "Enables creation and management of generic resources", - actions: [createResourceAction], - providers: [sampleProvider], - evaluators: [sampleEvaluator], - // separate examples will be added for services and clients - services: [new SampleService()], - clients: [], -}; diff --git a/packages/_examples/plugin/src/providers/sampleProvider.ts b/packages/_examples/plugin/src/providers/sampleProvider.ts deleted file mode 100644 index 086eb4804b72e..0000000000000 --- a/packages/_examples/plugin/src/providers/sampleProvider.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { - type Provider, - type IAgentRuntime, - type Memory, - type State, - elizaLogger, -} from "@elizaos/core"; - -export const sampleProvider: Provider = { - // biome-ignore lint: 'runtime' is intentionally unused - get: async (runtime: IAgentRuntime, message: Memory, state: State) => { - // Data retrieval logic for the provider - elizaLogger.log("Retrieving data in sampleProvider..."); - }, -}; diff --git a/packages/_examples/plugin/src/services/sampleService.ts b/packages/_examples/plugin/src/services/sampleService.ts deleted file mode 100644 index 07b65cef83138..0000000000000 --- a/packages/_examples/plugin/src/services/sampleService.ts +++ /dev/null @@ -1,136 +0,0 @@ -import { - Service, - ServiceType, - type IAgentRuntime, - // Memory, - // State, - elizaLogger, - // stringToUuid, -} from "@elizaos/core"; -// import { sampleProvider } from "../providers/sampleProvider"; // TODO: Uncomment this line to use the sampleProvider - -// Add SAMPLE to ServiceType enum in types.ts -declare module "@elizaos/core" { - export enum ServiceType { - SAMPLE = "sample", - } -} - -// The SampleService is a simple service that logs "Hello world" every 15 minutes. -export class SampleService extends Service { - private runtime: IAgentRuntime | null = null; - private intervalId: NodeJS.Timeout | null = null; - private readonly DEFAULT_INTERVAL = 15 * 60 * 1000; // 15 minutes in milliseconds - - static get serviceType(): ServiceType { - return "sample" as ServiceType.SAMPLE; - } - - private static isInitialized = false; - - async initialize(runtime: IAgentRuntime): Promise { - // Verify if the service is already initialized - if (SampleService.isInitialized) { - return; - } - - this.runtime = runtime; - - // Start the periodic task - this.startPeriodicTask(); - SampleService.isInitialized = true; - elizaLogger.info("SampleService initialized and started periodic task"); - } - - private static activeTaskCount = 0; - - private startPeriodicTask(): void { - // Verify if a task is already active - if (SampleService.activeTaskCount > 0) { - elizaLogger.warn( - "SampleService: Periodic task already running, skipping" - ); - return; - } - - // Clear any existing interval - if (this.intervalId) { - clearInterval(this.intervalId); - } - - SampleService.activeTaskCount++; - elizaLogger.info( - `SampleService: Starting periodic task (active tasks: ${SampleService.activeTaskCount})` - ); - - // Initial call immediately - this.fetchSample(); - - // Set up periodic calls - this.intervalId = setInterval(() => { - this.fetchSample(); - }, this.DEFAULT_INTERVAL); - } - - private async fetchSample(): Promise { - if (!this.runtime) { - elizaLogger.error("SampleService: Runtime not initialized"); - return; - } - - try { - // Example of using the sampleProvider - // Create dummy memory and state objects for the provider - // const dummyMemory: Memory = { - // id: stringToUuid("sample-service-trigger"), - // userId: this.runtime.agentId, - // agentId: this.runtime.agentId, - // roomId: this.runtime.agentId, - // content: { text: "Periodic sample fetch" }, - // createdAt: Date.now(), - // }; - - // const dummyState: State = { - // userId: this.runtime.agentId, - // bio: "", - // lore: "", - // messageDirections: "", - // postDirections: "", - // roomId: this.runtime.agentId, - // actors: "", - // recentMessages: "", - // recentMessagesData: [], - // }; - // await sampleProvider.get(this.runtime, dummyMemory, dummyState); - - // hello world log example - elizaLogger.info("SampleService: Hello world"); - - elizaLogger.info( - "SampleService: Successfully fetched and processed sample" - ); - } catch (error) { - elizaLogger.error("SampleService: Error fetching sample:", error); - } - } - - // Method to stop the service - stop(): void { - if (this.intervalId) { - clearInterval(this.intervalId); - this.intervalId = null; - SampleService.activeTaskCount--; - elizaLogger.info( - `SampleService stopped (active tasks: ${SampleService.activeTaskCount})` - ); - } - SampleService.isInitialized = false; - } - - // Method to manually trigger a sample fetch (for testing) - async forceFetch(): Promise { - await this.fetchSample(); - } -} - -export default SampleService; diff --git a/packages/_examples/plugin/src/templates.ts b/packages/_examples/plugin/src/templates.ts deleted file mode 100644 index f9c0d965917a9..0000000000000 --- a/packages/_examples/plugin/src/templates.ts +++ /dev/null @@ -1,60 +0,0 @@ -export const createResourceTemplate = ` -Extract the following details to create a new resource: -- **name** (string): Name of the resource -- **type** (string): Type of resource (document, image, video) -- **description** (string): Description of the resource -- **tags** (array): Array of tags to categorize the resource - -Provide the values in the following JSON format: - -\`\`\`json -{ - "name": "", - "type": "", - "description": "", - "tags": ["", ""] -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} -`; - -export const readResourceTemplate = ` -Extract the following details to read a resource: -- **id** (string): Unique identifier of the resource -- **fields** (array): Specific fields to retrieve (optional) - -Provide the values in the following JSON format: - -\`\`\`json -{ - "id": "", - "fields": ["", ""] -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} -`; - -export const updateResourceTemplate = ` -Extract the following details to update a resource: -- **id** (string): Unique identifier of the resource -- **updates** (object): Key-value pairs of fields to update - -Provide the values in the following JSON format: - -\`\`\`json -{ - "id": "", - "updates": { - "": "", - "": "" - } -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} -`; diff --git a/packages/_examples/plugin/src/types.ts b/packages/_examples/plugin/src/types.ts deleted file mode 100644 index f8f958e2c6614..0000000000000 --- a/packages/_examples/plugin/src/types.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { z } from "zod"; - -// Base resource schema -export const ResourceSchema = z.object({ - id: z.string().optional(), - name: z.string().min(1), - type: z.enum(["document", "image", "video"]), - description: z.string(), - tags: z.array(z.string()), -}); - -// Create resource schema -export const CreateResourceSchema = ResourceSchema.omit({ id: true }); - -// Read resource schema -export const ReadResourceSchema = z.object({ - id: z.string(), - fields: z.array(z.string()).optional(), -}); - -// Update resource schema -export const UpdateResourceSchema = z.object({ - id: z.string(), - updates: z.record(z.string(), z.any()), -}); - -// Type definitions -export type Resource = z.infer; -export type CreateResourceContent = z.infer; -export type ReadResourceContent = z.infer; -export type UpdateResourceContent = z.infer; - -// Type guards -export const isCreateResourceContent = ( - obj: object -): obj is CreateResourceContent => { - return CreateResourceSchema.safeParse(obj).success; -}; - -export const isReadResourceContent = (obj: object): obj is ReadResourceContent => { - return ReadResourceSchema.safeParse(obj).success; -}; - -export const isUpdateResourceContent = ( - obj: object -): obj is UpdateResourceContent => { - return UpdateResourceSchema.safeParse(obj).success; -}; - -// Plugin configuration type -export interface ExamplePluginConfig { - apiKey: string; - apiSecret: string; - endpoint?: string; -} diff --git a/packages/_examples/plugin/tsconfig.json b/packages/_examples/plugin/tsconfig.json deleted file mode 100644 index 8c77b755f7b5c..0000000000000 --- a/packages/_examples/plugin/tsconfig.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "extends": "../../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "types": ["node"] - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/_examples/plugin/tsup.config.ts b/packages/_examples/plugin/tsup.config.ts deleted file mode 100644 index 1a96f24afa1eb..0000000000000 --- a/packages/_examples/plugin/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "safe-buffer", - // Add other modules you want to externalize - ], -}); diff --git a/packages/adapter-mongodb/.npmignore b/packages/adapter-mongodb/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/adapter-mongodb/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/adapter-mongodb/eslint.config.mjs b/packages/adapter-mongodb/eslint.config.mjs deleted file mode 100644 index 92fe5bbebefad..0000000000000 --- a/packages/adapter-mongodb/eslint.config.mjs +++ /dev/null @@ -1,3 +0,0 @@ -import eslintGlobalConfig from "../../eslint.config.mjs"; - -export default [...eslintGlobalConfig]; diff --git a/packages/adapter-mongodb/package.json b/packages/adapter-mongodb/package.json deleted file mode 100644 index 7c0acccd80c2c..0000000000000 --- a/packages/adapter-mongodb/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "@elizaos/adapter-mongodb", - "version": "0.25.6-alpha.1", - "description": "MongoDB adapter for ElizaOS", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "mongodb": "^6.3.0", - "uuid": "^9.0.1" - }, - "devDependencies": { - "@types/jest": "^29.5.11", - "@types/node": "^20.11.5", - "@types/uuid": "^9.0.7", - "jest": "^29.7.0", - "ts-jest": "^29.1.1", - "tsup": "^8.0.1", - "typescript": "^5.3.3" - }, - "scripts": { - "build": "tsup", - "dev": "tsup --format esm --dts --watch", - "lint": "eslint --fix --cache .", - "test": "cd src/__tests__ && ./run_tests.sh", - "test:watch": "jest --watch" - }, - "engines": { - "node": ">=16.0.0" - }, - "jest": { - "preset": "ts-jest", - "testEnvironment": "node", - "testMatch": [ - "/src/__tests__/**/*.test.ts" - ] - } -} diff --git a/packages/adapter-mongodb/src/__tests__/docker-compose.test.yml b/packages/adapter-mongodb/src/__tests__/docker-compose.test.yml deleted file mode 100644 index 4c3f03a6f0266..0000000000000 --- a/packages/adapter-mongodb/src/__tests__/docker-compose.test.yml +++ /dev/null @@ -1,16 +0,0 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/compose-spec/compose-spec/master/schema/compose-spec.json -version: '3.8' -services: - mongodb-test: - image: mongo:latest - environment: - MONGO_INITDB_ROOT_USERNAME: mongodb - MONGO_INITDB_ROOT_PASSWORD: mongodb - MONGO_INITDB_DATABASE: eliza_test - ports: - - "27018:27017" - healthcheck: - test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')"] - interval: 5s - timeout: 5s - retries: 5 \ No newline at end of file diff --git a/packages/adapter-mongodb/src/__tests__/mongodb-adapter.test.ts b/packages/adapter-mongodb/src/__tests__/mongodb-adapter.test.ts deleted file mode 100644 index 8dea76723fda9..0000000000000 --- a/packages/adapter-mongodb/src/__tests__/mongodb-adapter.test.ts +++ /dev/null @@ -1,134 +0,0 @@ -import { MongoClient } from 'mongodb'; -import { MongoDBDatabaseAdapter } from '../index'; -import { v4 as uuidv4 } from 'uuid'; -import { RAGKnowledgeItem, type UUID } from '@elizaos/core'; - -describe('MongoDBAdapter', () => { - let adapter: MongoDBDatabaseAdapter; - const testUrl = 'mongodb://mongodb:mongodb@localhost:27018'; - const dbName = 'eliza_test'; - - beforeAll(async () => { - adapter = new MongoDBDatabaseAdapter(new MongoClient(testUrl), dbName); - await adapter.init(); - }); - - afterAll(async () => { - await adapter.close(); - }); - - beforeEach(async () => { - // Clear the collections before each test - const client = new MongoClient(testUrl); - await client.connect(); - const db = client.db(dbName); - await db.collection('knowledge').deleteMany({}); - await client.close(); - }); - - describe('Knowledge Management', () => { - it('should create and retrieve knowledge', async () => { - const testKnowledge: RAGKnowledgeItem = { - id: uuidv4() as UUID, - agentId: uuidv4() as UUID, - content: { - text: 'Test knowledge content', - metadata: { - isShared: false, - isMain: true - } - }, - embedding: new Float32Array([0.1, 0.2, 0.3]), - createdAt: Date.now() - }; - - await adapter.createKnowledge(testKnowledge); - - const retrieved = await adapter.getKnowledge({ id: testKnowledge.id, agentId: testKnowledge.agentId }); - expect(retrieved).toHaveLength(1); - expect(retrieved[0].id).toBe(testKnowledge.id); - expect(retrieved[0].content.text).toBe(testKnowledge.content.text); - }); - - it('should search knowledge by embedding', async () => { - const testKnowledge1: RAGKnowledgeItem = { - id: uuidv4() as UUID, - agentId: uuidv4() as UUID, - content: { - text: 'First test knowledge', - metadata: { isShared: false } - }, - embedding: new Float32Array([0.1, 0.2, 0.3]), - createdAt: Date.now() - }; - - const testKnowledge2: RAGKnowledgeItem = { - id: uuidv4() as UUID, - agentId: uuidv4() as UUID, - content: { - text: 'Second test knowledge', - metadata: { isShared: false } - }, - embedding: new Float32Array([0.4, 0.5, 0.6]), - createdAt: Date.now() - }; - - await adapter.createKnowledge(testKnowledge1); - await adapter.createKnowledge(testKnowledge2); - - const searchResults = await adapter.searchKnowledge({ - agentId: testKnowledge1.agentId, - embedding: new Float32Array([0.1, 0.2, 0.3]), - match_count: 1, - match_threshold: 0.8 - }); - - expect(searchResults).toHaveLength(1); - expect(searchResults[0].id).toBe(testKnowledge1.id); - }); - - it('should remove knowledge', async () => { - const testKnowledge: RAGKnowledgeItem = { - id: uuidv4() as UUID, - agentId: uuidv4() as UUID, - content: { - text: 'Test knowledge to remove', - metadata: { isShared: false } - }, - embedding: new Float32Array([0.1, 0.2, 0.3]), - createdAt: Date.now() - }; - - await adapter.createKnowledge(testKnowledge); - await adapter.removeKnowledge(testKnowledge.id); - - const retrieved = await adapter.getKnowledge({ id: testKnowledge.id, agentId: testKnowledge.agentId }); - expect(retrieved).toHaveLength(0); - }); - }); - - describe('Cache Operations', () => { - it('should set and get cache', async () => { - const agentId = uuidv4(); - const key = 'test-key'; - const value = 'test-value'; - - await adapter.setCache({ key, agentId: agentId as UUID, value }); - const retrieved = await adapter.getCache({ key, agentId: agentId as UUID }); - - expect(retrieved).toBe(value); - }); - - it('should delete cache', async () => { - const agentId = uuidv4(); - const key = 'test-key'; - const value = 'test-value'; - - await adapter.setCache({ key, agentId: agentId as UUID, value }); - await adapter.deleteCache({ key, agentId: agentId as UUID }); - - const retrieved = await adapter.getCache({ key, agentId: agentId as UUID }); - expect(retrieved).toBeUndefined(); - }); - }); -}); \ No newline at end of file diff --git a/packages/adapter-mongodb/src/__tests__/run_tests.sh b/packages/adapter-mongodb/src/__tests__/run_tests.sh deleted file mode 100755 index 9902d970331c8..0000000000000 --- a/packages/adapter-mongodb/src/__tests__/run_tests.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash - -# Function to cleanup resources -cleanup() { - echo "Cleaning up resources..." - docker-compose -f docker-compose.test.yml down - exit 0 -} - -# Trap SIGINT and SIGTERM signals and cleanup -trap cleanup SIGINT SIGTERM - -# Start MongoDB container -echo "Starting MongoDB container..." -docker-compose -f docker-compose.test.yml up -d - -# Wait for MongoDB to be ready -echo "Waiting for MongoDB to be ready..." -until docker-compose -f docker-compose.test.yml exec -T mongodb-test mongosh --eval "db.adminCommand('ping')" > /dev/null 2>&1; do - echo "MongoDB is not ready yet..." - sleep 1 -done - -echo "MongoDB is ready!" - -# Run tests -echo "Running tests..." -jest --runInBand --forceExit - -# Cleanup after tests -cleanup \ No newline at end of file diff --git a/packages/adapter-mongodb/src/index.ts b/packages/adapter-mongodb/src/index.ts deleted file mode 100644 index 53a3b35561dbf..0000000000000 --- a/packages/adapter-mongodb/src/index.ts +++ /dev/null @@ -1,1445 +0,0 @@ -import { MongoClient } from 'mongodb'; -import { - DatabaseAdapter, - RAGKnowledgeItem, - IDatabaseCacheAdapter, - Account, - Actor, - GoalStatus, - Participant, - type Goal, - type Memory, - type Relationship, - type UUID, elizaLogger, -} from "@elizaos/core"; -import { v4 } from "uuid"; - -interface KnowledgeDocument { - id: UUID; - agentId: UUID; - content: string | { - text: string; - metadata?: { - isShared?: boolean; - isMain?: boolean; - isChunk?: boolean; - originalId?: string; - chunkIndex?: number; - }; - }; - embedding: number[] | null; - createdAt: Date | number; - isMain: boolean; - originalId: string | null; - chunkIndex: number | null; - isShared: boolean; -} - -export class MongoDBDatabaseAdapter - extends DatabaseAdapter - implements IDatabaseCacheAdapter -{ - private database: any; - private databaseName: string; - private hasVectorSearch: boolean; - private isConnected: boolean = false; - private isVectorSearchIndexComputable: boolean; - public db: MongoClient; - - constructor(client: MongoClient, databaseName: string) { - super(); - this.db = client; - this.databaseName = databaseName; - this.hasVectorSearch = false; - this.isConnected = false; - this.isVectorSearchIndexComputable = true; - } - - private async initializeCollections(): Promise { - const collections = [ - 'memories', - 'participants', - 'cache', - 'knowledge', - 'rooms', - 'accounts', - 'goals', - 'logs', - 'relationships' - ]; - - for (const collectionName of collections) { - try { - await this.database.createCollection(collectionName); - console.log(`Collection ${collectionName} created or already exists`); - } catch (error) { - if ((error as any).code !== 48) { // 48 is "collection already exists" - console.error(`Error creating collection ${collectionName}:`, error); - } - } - } - } - - private async initializeStandardIndexes(): Promise { - const collectionsWithIndexes = [ - { - collectionName: 'memories', - indexes: [ - { key: { type: 1, roomId: 1, agentId: 1, createdAt: -1 } }, - { key: { content: "text" }, options: { weights: { content: 10 } } } - ] - }, - { - collectionName: 'participants', - indexes: [ - { key: { userId: 1, roomId: 1 }, options: { unique: true } } - ] - }, - { - collectionName: 'cache', - indexes: [ - { key: { expiresAt: 1 }, options: { expireAfterSeconds: 0 } } - ] - }, - { - collectionName: 'knowledge', - indexes: [ - { key: { agentId: 1 } }, - { key: { isShared: 1 } }, - { key: { id: 1 }, options: { unique: true } }, - { key: { "content.text": "text" } } - ] - } - ]; - - await Promise.all(collectionsWithIndexes.map(async ({ collectionName, indexes }) => { - const collection = this.database.collection(collectionName); - const existingIndexes = await collection.listIndexes().toArray(); - - for (const index of indexes) { - const indexExists = existingIndexes.some(existingIndex => - JSON.stringify(existingIndex.key) === JSON.stringify(index.key) - ); - - if (!indexExists) { - console.log(`Creating index for ${collectionName}:`, index.key); - await collection.createIndex(index.key, index.options || {}); - } else { - console.log(`Index already exists for ${collectionName}:`, index.key); - } - } - })); - } - - private async initializeVectorSearch(): Promise { - try { - // Check if vector search is supported - const dbStatus = await this.database.admin().serverStatus(); - const vectorSearchSupported = dbStatus.vectorSearch?.supported === true; - - if (vectorSearchSupported && this.isVectorSearchIndexComputable) { - const vectorSearchConfig = { - name: "vector_index", - definition: { - vectorSearchConfig: { - dimensions: 1536, - similarity: "cosine", - numLists: 100, - efConstruction: 128 - } - } - }; - - try { - // Create vector search indexes for both collections - for (const collection of ['memories', 'knowledge']) { - await this.database.collection(collection).createIndex( - { embedding: "vectorSearch" }, - vectorSearchConfig - ); - } - - this.hasVectorSearch = true; - console.log("Vector search capabilities are available and enabled"); - - // Check sharding status - const dbInfo = await this.database.admin().command({ listDatabases: 1, nameOnly: true }); - const memoriesStats = await this.database.collection('memories').stats(); - - if (dbInfo?.sharded && memoriesStats?.sharded) { - this.isVectorSearchIndexComputable = false; - this.hasVectorSearch = false; - await this.createStandardEmbeddingIndexes(); - } - } catch (error) { - console.log("Vector search initialization failed, falling back to standard search", error); - this.isVectorSearchIndexComputable = false; - this.hasVectorSearch = false; - await this.createStandardEmbeddingIndexes(); - } - } else { - console.log("Vector search not supported, using standard search"); - this.isVectorSearchIndexComputable = false; - this.hasVectorSearch = false; - await this.createStandardEmbeddingIndexes(); - } - } catch (error) { - console.log("Error checking vector search capability, defaulting to standard search", error); - this.isVectorSearchIndexComputable = false; - this.hasVectorSearch = false; - await this.createStandardEmbeddingIndexes(); - } - } - - private async createStandardEmbeddingIndexes(): Promise { - try { - for (const collection of ['memories', 'knowledge']) { - await this.database.collection(collection).createIndex({ embedding: 1 }); - } - console.log("Standard embedding indexes created successfully"); - } catch (error) { - console.error("Failed to create standard embedding indexes:", error); - } - } - - async init() { - if (this.isConnected) { - return; - } - - try { - await this.db.connect(); - this.database = this.db.db(this.databaseName); - - await this.initializeCollections(); - await this.initializeStandardIndexes(); - await this.initializeVectorSearch(); - - try { - // Enable sharding for better performance - await this.database.command({ - enableSharding: this.database.databaseName - }); - await this.database.command({ - shardCollection: `${this.database.databaseName}.memories`, - key: { roomId: "hashed" } - }); - } catch (error) { - console.log("Sharding may already be enabled or insufficient permissions", error); - } - - this.isConnected = true; - - } catch (error) { - this.isConnected = false; - this.isVectorSearchIndexComputable = false; - console.error("Failed to initialize MongoDB connection:", error); - throw error; - } - } - - async close() { - if (this.isConnected) { - await this.db.close(); - this.isConnected = false; - } - } - - private async ensureConnection() { - if (!this.isConnected) { - await this.init(); - } - } - - // Updated database operation methods with connection checks - async getRoom(roomId: UUID): Promise { - await this.ensureConnection(); - const room = await this.database.collection('rooms').findOne({ id: roomId }); - return room ? room.id : null; - } - - async getParticipantsForAccount(userId: UUID): Promise { - await this.ensureConnection(); - return await this.database.collection('participants') - .find({ userId }) - .toArray(); - } - - async getParticipantsForRoom(roomId: UUID): Promise { - await this.ensureConnection(); - const participants = await this.database.collection('participants') - .find({ roomId }) - .toArray(); - return participants.map(p => p.userId); - } - - async getParticipantUserState( - roomId: UUID, - userId: UUID - ): Promise<"FOLLOWED" | "MUTED" | null> { - await this.ensureConnection(); - const participant = await this.database.collection('participants') - .findOne({ roomId, userId }); - return participant?.userState ?? null; - } - - async setParticipantUserState( - roomId: UUID, - userId: UUID, - state: "FOLLOWED" | "MUTED" | null - ): Promise { - await this.ensureConnection(); - await this.database.collection('participants').updateOne( - { roomId, userId }, - { $set: { userState: state } } - ); - } - - async getAccountById(userId: UUID): Promise { - await this.ensureConnection(); - const account = await this.database.collection('accounts').findOne({ id: userId }); - if (!account) return null; - return { - ...account, - details: typeof account.details === 'string' ? - JSON.parse(account.details) : account.details - }; - } - - async createAccount(account: Account): Promise { - await this.ensureConnection(); - try { - await this.database.collection('accounts').insertOne({ - ...account, - id: account.id ?? v4(), - details: JSON.stringify(account.details), - createdAt: new Date() - }); - return true; - } catch (error) { - console.error("Error creating account:", error); - return false; - } - } - - async getActorDetails(params: { roomId: UUID }): Promise { - await this.ensureConnection(); - const actors = await this.database.collection('participants') - .aggregate([ - { $match: { roomId: params.roomId } }, - { - $lookup: { - from: 'accounts', - localField: 'userId', - foreignField: 'id', - as: 'account' - } - }, - { $unwind: '$account' }, - { - $project: { - id: '$account.id', - name: '$account.name', - username: '$account.username', - details: '$account.details' - } - } - ]).toArray(); - - return actors - .map(actor => ({ - ...actor, - details: typeof actor.details === 'string' ? - JSON.parse(actor.details) : actor.details - })) - .filter((actor): actor is Actor => actor !== null); - } - - async getMemoriesByRoomIds(params: { - agentId: UUID; - roomIds: UUID[]; - tableName: string; - }): Promise { - await this.ensureConnection(); - if (!params.tableName) { - params.tableName = "messages"; - } - - const memories = await this.database.collection('memories') - .find({ - type: params.tableName, - agentId: params.agentId, - roomId: { $in: params.roomIds } - }) - .toArray(); - - return memories.map(memory => ({ - ...memory, - content: typeof memory.content === 'string' ? - JSON.parse(memory.content) : memory.content - })); - } - - async getMemoryById(memoryId: UUID): Promise { - await this.ensureConnection(); - const memory = await this.database.collection('memories').findOne({ id: memoryId }); - if (!memory) return null; - - return { - ...memory, - content: typeof memory.content === 'string' ? - JSON.parse(memory.content) : memory.content - }; - } - - async createMemory(memory: Memory, tableName: string): Promise { - - await this.ensureConnection(); - try { - let isUnique = true; - - if (memory.embedding) { - const similarMemories = await this.searchMemories( - { - tableName, - roomId: memory.roomId, - agentId: memory.agentId, - embedding: memory.embedding, - match_threshold: 0.95, - match_count: 1, - unique: isUnique - } - ) - // const similarMemories = await this.searchMemoriesByEmbedding( - // memory.embedding, - // { - // tableName, - // agentId: memory.agentId, - // roomId: memory.roomId, - // match_threshold: 0.95, - // count: 1 - // } - // ); - isUnique = similarMemories.length === 0; - } - - - const content = JSON.stringify(memory.content); - const createdAt = memory.createdAt ?? Date.now(); - - await this.database.collection('memories').insertOne({ - id: memory.id ?? v4(), - type: tableName, - content, - embedding: memory.embedding ? Array.from(memory.embedding) : null, - userId: memory.userId, - roomId: memory.roomId, - agentId: memory.agentId, - unique: isUnique, - createdAt: new Date(createdAt) - }); - }catch (e) { - elizaLogger.error(e); - } - } - - private async searchMemoriesFallback(params: { - embedding: number[]; - query: any; - limit?: number; - }): Promise { - await this.ensureConnection(); - // Implement a basic similarity search using standard MongoDB operations - const memories = await this.database.collection('memories') - .find(params.query) - .limit(params.limit || 10) - .toArray(); - - // Sort by cosine similarity computed in application - return memories - .map(memory => ({ - ...memory, - similarity: this.cosineSimilarity(params.embedding, memory.embedding) - })) - .sort((a, b) => b.similarity - a.similarity) - .map(memory => ({ - ...memory, - createdAt: typeof memory.createdAt === "string" ? - Date.parse(memory.createdAt) : memory.createdAt, - content: typeof memory.content === 'string' ? - JSON.parse(memory.content) : memory.content - })); - } - - private cosineSimilarity(a: Float32Array | number[], b: Float32Array | number[]): number { - const aArr = Array.from(a); - const bArr = Array.from(b); - const dotProduct = aArr.reduce((sum, val, i) => sum + val * bArr[i], 0); - const magnitudeA = Math.sqrt(aArr.reduce((sum, val) => sum + val * val, 0)); - const magnitudeB = Math.sqrt(bArr.reduce((sum, val) => sum + val * val, 0)); - return dotProduct / (magnitudeA * magnitudeB); - } - - async searchMemories(params: { - tableName: string; - roomId: UUID; - agentId?: UUID; - embedding: number[]; - match_threshold: number; - match_count: number; - unique: boolean; - }): Promise { - await this.ensureConnection(); - const query = { - type: params.tableName, - roomId: params.roomId, - ...(params.unique && { unique: true }), - ...(params.agentId && { agentId: params.agentId }) - }; - - if (this.hasVectorSearch) { - const pipeline = [ - { - $search: { - vectorSearch: { - queryVector: new Float32Array(params.embedding), - path: "embedding", - numCandidates: params.match_count * 2, - limit: params.match_count, - index: "vector_index", - } - } - }, - { $match: query } - ]; - - try { - const memories = await this.database.collection('memories') - .aggregate(pipeline) - .toArray(); - - return memories.map(memory => ({ - ...memory, - createdAt: typeof memory.createdAt === "string" ? - Date.parse(memory.createdAt) : memory.createdAt, - content: typeof memory.content === 'string' ? - JSON.parse(memory.content) : memory.content - })); - } catch (error) { - console.log("Vector search failed, falling back to standard search", error); - return this.searchMemoriesFallback({ - embedding: params.embedding, - query, - limit: params.match_count - }); - } - } - - return this.searchMemoriesFallback({ - embedding: params.embedding, - query, - limit: params.match_count - }); - } - - - - async searchMemoriesByEmbedding( - embedding: number[], - params: { - match_threshold?: number; - count?: number; - roomId?: UUID; - agentId: UUID; - unique?: boolean; - tableName: string; - } - ): Promise { - await this.ensureConnection(); - const pipeline = [ - { - $search: { - vectorSearch: { - queryVector: Array.from(embedding), - path: "embedding", - numCandidates: (params.count ?? 10) * 2, - limit: params.count, - index: "vector_index" - } - } - }, - { - $match: { - type: params.tableName, - agentId: params.agentId, - ...(params.unique && { unique: true }), - ...(params.roomId && { roomId: params.roomId }) - } - } - ]; - - const memories = await this.database.collection('memories') - .aggregate(pipeline) - .toArray(); - - return memories.map(memory => ({ - ...memory, - createdAt: typeof memory.createdAt === "string" ? - Date.parse(memory.createdAt) : memory.createdAt, - content: typeof memory.content === 'string' ? - JSON.parse(memory.content) : memory.content - })); - } - - async getCachedEmbeddings(opts: { - query_table_name: string; - query_threshold: number; - query_input: string; - query_field_name: string; - query_field_sub_name: string; - query_match_count: number; - }): Promise<{ embedding: number[]; levenshtein_score: number }[]> { - await this.ensureConnection(); - const BATCH_SIZE = 1000; // Process in chunks of 1000 documents - let results: { embedding: number[]; levenshtein_score: number }[] = []; - - try { - // Get total count for progress tracking - const totalCount = await this.database.collection('memories').countDocuments({ - type: opts.query_table_name, - [`content.${opts.query_field_name}.${opts.query_field_sub_name}`]: { $exists: true } - }); - - let processed = 0; - - while (processed < totalCount) { - // Fetch batch of documents - const memories = await this.database.collection('memories') - .find({ - type: opts.query_table_name, - [`content.${opts.query_field_name}.${opts.query_field_sub_name}`]: { $exists: true } - }) - .skip(processed) - .limit(BATCH_SIZE) - .toArray(); - - // Process batch - const batchResults = memories - .map(memory => { - try { - const content = memory.content[opts.query_field_name][opts.query_field_sub_name]; - if (!content || typeof content !== 'string') { - return null; - } - - return { - embedding: Array.from(memory.embedding), - levenshtein_score: this.calculateLevenshteinDistanceOptimized( - content.toLowerCase(), - opts.query_input.toLowerCase() - ) - }; - } catch (error) { - console.warn(`Error processing memory document: ${error}`); - return null; - } - }) - .filter((result): result is { embedding: number[]; levenshtein_score: number } => - result !== null); - - // Merge batch results - results = this.mergeAndSortResults(results, batchResults, opts.query_match_count); - processed += memories.length; - - // Log progress for long operations - if (totalCount > BATCH_SIZE) { - console.log(`Processed ${processed}/${totalCount} documents`); - } - } - - return results; - - } catch (error) { - console.error("Error in getCachedEmbeddings:", error); - if (results.length > 0) { - console.log("Returning partial results"); - return results; - } - return []; - } - } - - /** - * Optimized Levenshtein distance calculation with early termination - * and matrix reuse for better performance - */ - private calculateLevenshteinDistanceOptimized(str1: string, str2: string): number { - // Early termination for identical strings - if (str1 === str2) return 0; - - // Early termination for empty strings - if (str1.length === 0) return str2.length; - if (str2.length === 0) return str1.length; - - // Use shorter string as inner loop for better performance - if (str1.length > str2.length) { - [str1, str2] = [str2, str1]; - } - - // Reuse matrix to avoid garbage collection - const matrix = this.getLevenshteinMatrix(str1.length + 1, str2.length + 1); - - // Initialize first row and column - for (let i = 0; i <= str1.length; i++) matrix[i][0] = i; - for (let j = 0; j <= str2.length; j++) matrix[0][j] = j; - - // Calculate minimum edit distance - for (let i = 1; i <= str1.length; i++) { - for (let j = 1; j <= str2.length; j++) { - if (str1[i-1] === str2[j-1]) { - matrix[i][j] = matrix[i-1][j-1]; - } else { - matrix[i][j] = Math.min( - matrix[i-1][j-1] + 1, // substitution - matrix[i][j-1] + 1, // insertion - matrix[i-1][j] + 1 // deletion - ); - } - } - } - - return matrix[str1.length][str2.length]; - } - -// Cache for reusing Levenshtein distance matrix - private levenshteinMatrix: number[][] = []; - private maxMatrixSize = 0; - - private getLevenshteinMatrix(rows: number, cols: number): number[][] { - const size = rows * cols; - if (size > this.maxMatrixSize) { - this.levenshteinMatrix = Array(rows).fill(null) - .map(() => Array(cols).fill(0)); - this.maxMatrixSize = size; - } - return this.levenshteinMatrix; - } - - /** - * Efficiently merge and sort two arrays of results while maintaining top K items - */ - private mergeAndSortResults( - existing: { embedding: number[]; levenshtein_score: number }[], - newResults: { embedding: number[]; levenshtein_score: number }[], - limit: number - ): { embedding: number[]; levenshtein_score: number }[] { - const merged = [...existing, ...newResults]; - - // Use quick select algorithm if array is large - if (merged.length > 1000) { - return this.quickSelectTopK(merged, limit); - } - - // Use simple sort for smaller arrays - return merged - .sort((a, b) => a.levenshtein_score - b.levenshtein_score) - .slice(0, limit); - } - - /** - * Quick select algorithm to efficiently find top K items - */ - private quickSelectTopK( - arr: { embedding: number[]; levenshtein_score: number }[], - k: number - ): { embedding: number[]; levenshtein_score: number }[] { - if (arr.length <= k) return arr.sort((a, b) => a.levenshtein_score - b.levenshtein_score); - - const pivot = arr[Math.floor(Math.random() * arr.length)].levenshtein_score; - const left = arr.filter(x => x.levenshtein_score < pivot); - const equal = arr.filter(x => x.levenshtein_score === pivot); - const right = arr.filter(x => x.levenshtein_score > pivot); - - if (k <= left.length) { - return this.quickSelectTopK(left, k); - } - if (k <= left.length + equal.length) { - return [...left, ...equal.slice(0, k - left.length)] - .sort((a, b) => a.levenshtein_score - b.levenshtein_score); - } - return [...left, ...equal, ...this.quickSelectTopK(right, k - left.length - equal.length)] - .sort((a, b) => a.levenshtein_score - b.levenshtein_score); - } - - async updateGoalStatus(params: { - goalId: UUID; - status: GoalStatus; - }): Promise { - await this.ensureConnection(); - await this.database.collection('goals').updateOne( - { id: params.goalId }, - { $set: { status: params.status } } - ); - } - - async log(params: { - body: { [key: string]: unknown }; - userId: UUID; - roomId: UUID; - type: string; - }): Promise { - await this.ensureConnection(); - await this.database.collection('logs').insertOne({ - id: v4(), - body: JSON.stringify(params.body), - userId: params.userId, - roomId: params.roomId, - type: params.type, - createdAt: new Date() - }); - } - - async getMemories(params: { - roomId: UUID; - count?: number; - unique?: boolean; - tableName: string; - agentId: UUID; - start?: number; - end?: number; - }): Promise { - await this.ensureConnection(); - if (!params.tableName) { - throw new Error("tableName is required"); - } - if (!params.roomId) { - throw new Error("roomId is required"); - } - - const query: any = { - type: params.tableName, - agentId: params.agentId, - roomId: params.roomId - }; - - if (params.unique) { - query.unique = true; - } - - if (params.start || params.end) { - query.createdAt = {}; - if (params.start) query.createdAt.$gte = new Date(params.start); - if (params.end) query.createdAt.$lte = new Date(params.end); - } - - const memories = await this.database.collection('memories') - .find(query) - .sort({ createdAt: -1 }) - .limit(params.count || 0) - .toArray(); - - return memories.map(memory => ({ - ...memory, - createdAt: new Date(memory.createdAt).getTime(), - content: typeof memory.content === 'string' ? - JSON.parse(memory.content) : memory.content - })); - } - - async removeMemory(memoryId: UUID, tableName: string): Promise { - await this.ensureConnection(); - await this.database.collection('memories').deleteOne({ - id: memoryId, - type: tableName - }); - } - - async removeAllMemories(roomId: UUID, tableName: string): Promise { - await this.ensureConnection(); - await this.database.collection('memories').deleteMany({ - roomId, - type: tableName - }); - } - - async countMemories( - roomId: UUID, - unique = true, - tableName = "" - ): Promise { - await this.ensureConnection(); - if (!tableName) { - throw new Error("tableName is required"); - } - - const query: any = { - type: tableName, - roomId - }; - - if (unique) { - query.unique = true; - } - - return await this.database.collection('memories').countDocuments(query); - } - - async getGoals(params: { - roomId: UUID; - userId?: UUID | null; - onlyInProgress?: boolean; - count?: number; - }): Promise { - await this.ensureConnection(); - const query: any = { roomId: params.roomId }; - - if (params.userId) { - query.userId = params.userId; - } - - if (params.onlyInProgress) { - query.status = 'IN_PROGRESS'; - } - - const goals = await this.database.collection('goals') - .find(query) - .limit(params.count || 0) - .toArray(); - - return goals.map(goal => ({ - ...goal, - objectives: typeof goal.objectives === 'string' ? - JSON.parse(goal.objectives) : goal.objectives - })); - } - - async updateGoal(goal: Goal): Promise { - await this.ensureConnection(); - await this.database.collection('goals').updateOne( - { id: goal.id }, - { - $set: { - name: goal.name, - status: goal.status, - objectives: JSON.stringify(goal.objectives) - } - } - ); - } - - async createGoal(goal: Goal): Promise { - await this.ensureConnection(); - await this.database.collection('goals').insertOne({ - ...goal, - id: goal.id ?? v4(), - objectives: JSON.stringify(goal.objectives), - createdAt: new Date() - }); - } - - async removeGoal(goalId: UUID): Promise { - await this.ensureConnection(); - await this.database.collection('goals').deleteOne({ id: goalId }); - } - - async removeAllGoals(roomId: UUID): Promise { - await this.ensureConnection(); - await this.database.collection('goals').deleteMany({ roomId }); - } - - async createRoom(roomId?: UUID): Promise { - await this.ensureConnection(); - const newRoomId = roomId || v4() as UUID; - try { - await this.database.collection('rooms').insertOne({ - id: newRoomId, - createdAt: new Date() - }); - return newRoomId; - } catch (error) { - console.error("Error creating room:", error); - throw error; // Throw error instead of silently continuing - } - } - - async removeRoom(roomId: UUID): Promise { - await this.ensureConnection(); - try { - await this.database.collection('rooms').deleteOne({ id: roomId }); - } catch (error) { - console.error("Error removing room:", error); - throw error; - } - } - - async getRoomsForParticipant(userId: UUID): Promise { - await this.ensureConnection(); - const rooms = await this.database.collection('participants') - .find({ userId }) - .project({ roomId: 1 }) - .toArray(); - return rooms.map(r => r.roomId); - } - - async getRoomsForParticipants(userIds: UUID[]): Promise { - await this.ensureConnection(); - const rooms = await this.database.collection('participants') - .distinct('roomId', { userId: { $in: userIds } }); - return rooms; - } - - async addParticipant(userId: UUID, roomId: UUID): Promise { - await this.ensureConnection(); - try { - await this.database.collection('participants').insertOne({ - id: v4(), - userId, - roomId, - createdAt: new Date() - }); - return true; - } catch (error) { - console.log("Error adding participant", error); - return false; - } - } - - async removeParticipant(userId: UUID, roomId: UUID): Promise { - await this.ensureConnection(); - try { - await this.database.collection('participants').deleteOne({ - userId, - roomId - }); - return true; - } catch (error) { - console.log("Error removing participant", error); - return false; - } - } - - async createRelationship(params: { - userA: UUID; - userB: UUID; - }): Promise { - await this.ensureConnection(); - if (!params.userA || !params.userB) { - throw new Error("userA and userB are required"); - } - - try { - await this.database.collection('relationships').insertOne({ - id: v4(), - userA: params.userA, - userB: params.userB, - userId: params.userA, - createdAt: new Date() - }); - return true; - } catch (error) { - console.log("Error creating relationship", error); - return false; - } - } - - async getRelationship(params: { - userA: UUID; - userB: UUID; - }): Promise { - await this.ensureConnection(); - return await this.database.collection('relationships').findOne({ - $or: [ - { userA: params.userA, userB: params.userB }, - { userA: params.userB, userB: params.userA } - ] - }); - } - - async getRelationships(params: { userId: UUID }): Promise { - await this.ensureConnection(); - return await this.database.collection('relationships') - .find({ - $or: [ - { userA: params.userId }, - { userB: params.userId } - ] - }) - .toArray(); - } - - async getCache(params: { - key: string; - agentId: UUID; - }): Promise { - await this.ensureConnection(); - const cached = await this.database.collection('cache') - .findOne({ - key: params.key, - agentId: params.agentId, - expiresAt: { $gt: new Date() } - }); - return cached?.value; - } - - async setCache(params: { - key: string; - agentId: UUID; - value: string; - }): Promise { - await this.ensureConnection(); - try { - await this.database.collection('cache').updateOne( - { key: params.key, agentId: params.agentId }, - { - $set: { - value: params.value, - createdAt: new Date(), - expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000) // 24 hours expiry - } - }, - { upsert: true } - ); - return true; - } catch (error) { - console.log("Error setting cache", error); - return false; - } - } - - async deleteCache(params: { - key: string; - agentId: UUID; - }): Promise { - await this.ensureConnection(); - try { - await this.database.collection('cache').deleteOne({ - key: params.key, - agentId: params.agentId - }); - return true; - } catch (error) { - console.log("Error removing cache", error); - return false; - } - } - - async getKnowledge(params: { - id?: UUID; - agentId: UUID; - limit?: number; - query?: string; - }): Promise { - await this.ensureConnection(); - - const query: any = { - $or: [ - { agentId: params.agentId }, - { isShared: true } - ] - }; - - if (params.id) { - query.id = params.id; - } - - const knowledge = await this.database.collection('knowledge') - .find(query) - .limit(params.limit || 0) - .toArray(); - - return knowledge.map(item => ({ - id: item.id, - agentId: item.agentId, - content: typeof item.content === 'string' ? JSON.parse(item.content) : item.content, - embedding: item.embedding ? new Float32Array(item.embedding) : undefined, - createdAt: typeof item.createdAt === "string" ? Date.parse(item.createdAt) : item.createdAt - })); - } - - async searchKnowledge(params: { - agentId: UUID; - embedding: Float32Array; - match_threshold: number; - match_count: number; - searchText?: string; - }): Promise { - await this.ensureConnection(); - - const cacheKey = `embedding_${params.agentId}_${params.searchText}`; - const cachedResult = await this.getCache({ - key: cacheKey, - agentId: params.agentId - }); - - if (cachedResult) { - return JSON.parse(cachedResult); - } - - try { - let results: KnowledgeDocument[]; - - if (this.hasVectorSearch) { - try { - results = await this.vectorSearchKnowledge(params); - } catch (error) { - console.log("Vector search failed, falling back to standard search", error); - results = await this.fallbackSearchKnowledge(params); - } - } else { - results = await this.fallbackSearchKnowledge(params); - } - - const mappedResults = results.map(item => ({ - id: item.id, - agentId: item.agentId, // This will always be UUID - content: typeof item.content === 'string' ? JSON.parse(item.content) : item.content, - embedding: item.embedding ? new Float32Array(item.embedding) : undefined, - createdAt: typeof item.createdAt === "string" ? Date.parse(item.createdAt) : item.createdAt, - similarity: (item as any).combinedScore || 0 - })) as RAGKnowledgeItem[]; - - await this.setCache({ - key: cacheKey, - agentId: params.agentId, - value: JSON.stringify(mappedResults) - }); - - return mappedResults; - } catch (error) { - console.error("Error in searchKnowledge:", error); - throw error; - } - } - - private async vectorSearchKnowledge(params: { - agentId: UUID; - embedding: Float32Array; - match_threshold: number; - match_count: number; - searchText?: string; - }): Promise { - const pipeline = [ - { - $search: { - vectorSearch: { - queryVector: Array.from(params.embedding), - path: "embedding", - numCandidates: params.match_count * 2, - limit: params.match_count * 2, - index: "vector_index" - } - } - }, - ...this.getKnowledgeSearchPipeline(params) - ]; - - return await this.database.collection('knowledge') - .aggregate(pipeline) - .toArray(); - } - - private async fallbackSearchKnowledge(params: { - agentId: UUID; - embedding: Float32Array; - match_threshold: number; - match_count: number; - searchText?: string; - }): Promise { - const pipeline = [ - { - $match: { - $or: [ - { agentId: params.agentId }, - { isShared: true, agentId: null } - ] - } - }, - ...this.getKnowledgeSearchPipeline(params) - ]; - - return await this.database.collection('knowledge') - .aggregate(pipeline) - .toArray(); - } - - private getKnowledgeSearchPipeline(params: { - agentId: UUID; - embedding: Float32Array; - match_threshold: number; - searchText?: string; - }): object[] { - return [ - { - $addFields: { - vectorScore: this.hasVectorSearch ? - { $meta: "vectorSearchScore" } : - { - $let: { - vars: { - embedding: { $ifNull: ["$embedding", []] } - }, - in: { - $cond: [ - { $eq: [{ $size: "$$embedding" }, 0] }, - 0, - { - $divide: [ - 1, - { $add: [1, { $function: { - body: this.cosineSimilarity.toString(), - args: [params.embedding, "$$embedding"], - lang: "js" - }}] } - ] - } - ] - } - } - }, - keywordScore: this.calculateKeywordScore(params.searchText) - } - }, - { - $addFields: { - combinedScore: { $multiply: ["$vectorScore", "$keywordScore"] } - } - }, - { - $match: { - $or: [ - { vectorScore: { $gte: params.match_threshold } }, - { - $and: [ - { keywordScore: { $gt: 1.0 } }, - { vectorScore: { $gte: 0.3 } } - ] - } - ] - } - }, - { $sort: { combinedScore: -1 } } - ]; - } - - private calculateKeywordScore(searchText?: string): object { - return { - $multiply: [ - { - $cond: [ - searchText ? { - $regexMatch: { - input: { $toLower: "$content.text" }, - regex: new RegExp(searchText.toLowerCase()) - } - } : false, - 3.0, - 1.0 - ] - }, - { - $cond: [ - { $eq: ["$content.metadata.isChunk", true] }, - 1.5, - { - $cond: [ - { $eq: ["$content.metadata.isMain", true] }, - 1.2, - 1.0 - ] - } - ] - } - ] - }; - } - - // Update error handling in createKnowledge - async createKnowledge(knowledge: RAGKnowledgeItem): Promise { - await this.ensureConnection(); - - try { - const metadata = knowledge.content.metadata || {}; - const isShared = metadata.isShared || false; - - const doc = { - id: knowledge.id, - agentId: knowledge.agentId, - content: typeof knowledge.content === 'string' ? - knowledge.content : - JSON.stringify(knowledge.content), - embedding: knowledge.embedding ? Array.from(knowledge.embedding) : null, - createdAt: knowledge.createdAt || Date.now(), - isMain: metadata.isMain || false, - originalId: metadata.originalId || null, - chunkIndex: metadata.chunkIndex || null, - isShared - }; - - await this.database.collection('knowledge').updateOne( - { id: knowledge.id }, - { $setOnInsert: doc }, - { upsert: true } - ); - } catch (err) { - if (err instanceof Error) { - const error = err as Error & { code?: number }; - const isShared = knowledge.content.metadata?.isShared; - - if (isShared && error.code === 11000) { - console.info(`Shared knowledge ${knowledge.id} already exists, skipping`); - return; - } - - console.error(`Error creating knowledge ${knowledge.id}:`, error); - throw error; - } - throw err; - } - } - - async removeKnowledge(id: UUID): Promise { - await this.ensureConnection(); - await this.database.collection('knowledge').deleteOne({ id }); - } - - async clearKnowledge(agentId: UUID, shared?: boolean): Promise { - await this.ensureConnection(); - const query = shared ? - { $or: [{ agentId }, { isShared: true }] } : - { agentId }; - - try { - await this.database.collection('knowledge').deleteMany(query); - } catch (error) { - console.error(`Error clearing knowledge for agent ${agentId}:`, error); - throw error; - } - } - - async getMemoriesByIds(memoryIds: UUID[], tableName?: string): Promise { - await this.ensureConnection(); - const collection = tableName || 'memories'; - - try { - const memories = await this.database.collection(collection) - .find({ id: { $in: memoryIds } }) - .toArray(); - - return memories.map(memory => ({ - id: memory.id, - roomId: memory.roomId, - agentId: memory.agentId, - type: memory.type, - content: memory.content, - embedding: memory.embedding, - createdAt: memory.createdAt instanceof Date ? memory.createdAt.getTime() : memory.createdAt, - metadata: memory.metadata || {} - })); - } catch (error) { - elizaLogger.error('Failed to get memories by IDs:', error); - return []; - } - } - -} - diff --git a/packages/adapter-mongodb/tsconfig.json b/packages/adapter-mongodb/tsconfig.json deleted file mode 100644 index 9033fbf9b4942..0000000000000 --- a/packages/adapter-mongodb/tsconfig.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "module": "ESNext", - "outDir": "dist", - "rootDir": "src", - "strict": true, - "esModuleInterop": true, - "skipLibCheck": true, - "forceConsistentCasingInFileNames": true, - "moduleResolution": "bundler", - "resolveJsonModule": true, - "isolatedModules": true, - "lib": ["ES2021", "DOM"], - "target": "ES2021" - }, - "include": [ - "src/**/*.ts" - ], - "exclude": [ - "node_modules", - "dist" - ] -} diff --git a/packages/adapter-mongodb/tsup.config.ts b/packages/adapter-mongodb/tsup.config.ts deleted file mode 100644 index fe2cf357e1d4d..0000000000000 --- a/packages/adapter-mongodb/tsup.config.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], - dts: true, - target: "node16", - external: [ - "mongodb", - "uuid", - "@ai16z/eliza", - "dotenv", - "fs", - "path", - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "@anush008/tokenizers" - ], - esbuildOptions(options) { - options.conditions = ["module"] - }, -}); diff --git a/packages/adapter-pglite/.npmignore b/packages/adapter-pglite/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/adapter-pglite/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/adapter-pglite/package.json b/packages/adapter-pglite/package.json deleted file mode 100644 index 8e52bbf8b3720..0000000000000 --- a/packages/adapter-pglite/package.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "name": "@elizaos/adapter-pglite", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@electric-sql/pglite": "^0.2.15", - "@elizaos/core": "workspace:*" - }, - "devDependencies": { - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch" - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/adapter-pglite/schema.sql b/packages/adapter-pglite/schema.sql deleted file mode 100644 index 4a0f7c6f1dd98..0000000000000 --- a/packages/adapter-pglite/schema.sql +++ /dev/null @@ -1,140 +0,0 @@ --- Enable pgvector extension - --- -- Drop existing tables and extensions --- DROP EXTENSION IF EXISTS vector CASCADE; --- DROP TABLE IF EXISTS relationships CASCADE; --- DROP TABLE IF EXISTS participants CASCADE; --- DROP TABLE IF EXISTS logs CASCADE; --- DROP TABLE IF EXISTS goals CASCADE; --- DROP TABLE IF EXISTS memories CASCADE; --- DROP TABLE IF EXISTS rooms CASCADE; --- DROP TABLE IF EXISTS accounts CASCADE; - - -CREATE EXTENSION IF NOT EXISTS vector; -CREATE EXTENSION IF NOT EXISTS fuzzystrmatch; - --- Create a function to determine vector dimension -CREATE OR REPLACE FUNCTION get_embedding_dimension() -RETURNS INTEGER AS $$ -BEGIN - -- Check for OpenAI first - IF current_setting('app.use_openai_embedding', TRUE) = 'true' THEN - RETURN 1536; -- OpenAI dimension - -- Then check for Ollama - ELSIF current_setting('app.use_ollama_embedding', TRUE) = 'true' THEN - RETURN 1024; -- Ollama mxbai-embed-large dimension - -- Then check for GAIANET - ELSIF current_setting('app.use_gaianet_embedding', TRUE) = 'true' THEN - RETURN 768; -- Gaianet nomic-embed dimension - ELSE - RETURN 384; -- BGE/Other embedding dimension - END IF; -END; -$$ LANGUAGE plpgsql; - -BEGIN; - -CREATE TABLE IF NOT EXISTS accounts ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "name" TEXT, - "username" TEXT, - "email" TEXT NOT NULL, - "avatarUrl" TEXT, - "details" JSONB DEFAULT '{}'::jsonb -); - -CREATE TABLE IF NOT EXISTS rooms ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP -); - -DO $$ -DECLARE - vector_dim INTEGER; -BEGIN - vector_dim := get_embedding_dimension(); - - EXECUTE format(' - CREATE TABLE IF NOT EXISTS memories ( - "id" UUID PRIMARY KEY, - "type" TEXT NOT NULL, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "content" JSONB NOT NULL, - "embedding" vector(%s), - "userId" UUID REFERENCES accounts("id"), - "agentId" UUID REFERENCES accounts("id"), - "roomId" UUID REFERENCES rooms("id"), - "unique" BOOLEAN DEFAULT true NOT NULL, - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE, - CONSTRAINT fk_agent FOREIGN KEY ("agentId") REFERENCES accounts("id") ON DELETE CASCADE - )', vector_dim); -END $$; - -CREATE TABLE IF NOT EXISTS goals ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "userId" UUID REFERENCES accounts("id"), - "name" TEXT, - "status" TEXT, - "description" TEXT, - "roomId" UUID REFERENCES rooms("id"), - "objectives" JSONB DEFAULT '[]'::jsonb NOT NULL, - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE IF NOT EXISTS logs ( - "id" UUID PRIMARY KEY DEFAULT gen_random_uuid(), - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "userId" UUID NOT NULL REFERENCES accounts("id"), - "body" JSONB NOT NULL, - "type" TEXT NOT NULL, - "roomId" UUID NOT NULL REFERENCES rooms("id"), - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE IF NOT EXISTS participants ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "userId" UUID REFERENCES accounts("id"), - "roomId" UUID REFERENCES rooms("id"), - "userState" TEXT, - "last_message_read" TEXT, - UNIQUE("userId", "roomId"), - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE IF NOT EXISTS relationships ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "userA" UUID NOT NULL REFERENCES accounts("id"), - "userB" UUID NOT NULL REFERENCES accounts("id"), - "status" TEXT, - "userId" UUID NOT NULL REFERENCES accounts("id"), - CONSTRAINT fk_user_a FOREIGN KEY ("userA") REFERENCES accounts("id") ON DELETE CASCADE, - CONSTRAINT fk_user_b FOREIGN KEY ("userB") REFERENCES accounts("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE IF NOT EXISTS cache ( - "key" TEXT NOT NULL, - "agentId" TEXT NOT NULL, - "value" JSONB DEFAULT '{}'::jsonb, - "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - "expiresAt" TIMESTAMP, - PRIMARY KEY ("key", "agentId") -); - --- Indexes -CREATE INDEX IF NOT EXISTS idx_memories_embedding ON memories USING hnsw ("embedding" vector_cosine_ops); -CREATE INDEX IF NOT EXISTS idx_memories_type_room ON memories("type", "roomId"); -CREATE INDEX IF NOT EXISTS idx_participants_user ON participants("userId"); -CREATE INDEX IF NOT EXISTS idx_participants_room ON participants("roomId"); -CREATE INDEX IF NOT EXISTS idx_relationships_users ON relationships("userA", "userB"); - -COMMIT; diff --git a/packages/adapter-pglite/src/index.ts b/packages/adapter-pglite/src/index.ts deleted file mode 100644 index 71e821b2c6be0..0000000000000 --- a/packages/adapter-pglite/src/index.ts +++ /dev/null @@ -1,1563 +0,0 @@ -import { v4 } from "uuid"; - -import { - type Account, - type Actor, - type GoalStatus, - type Goal, - type Memory, - type Relationship, - type UUID, - type IDatabaseCacheAdapter, - type Participant, - elizaLogger, - getEmbeddingConfig, - DatabaseAdapter, - EmbeddingProvider, - type RAGKnowledgeItem, -} from "@elizaos/core"; -import fs from "fs"; -import { fileURLToPath } from "url"; -import path from "path"; -import { - PGlite, - type PGliteOptions, - type Results, - type Transaction, -} from "@electric-sql/pglite"; -import { vector } from "@electric-sql/pglite/vector"; -import { fuzzystrmatch } from "@electric-sql/pglite/contrib/fuzzystrmatch"; - -const __filename = fileURLToPath(import.meta.url); // get the resolved path to the file -const __dirname = path.dirname(__filename); // get the name of the directory - -export class PGLiteDatabaseAdapter - extends DatabaseAdapter - implements IDatabaseCacheAdapter -{ - constructor(options: PGliteOptions) { - super(); - this.db = new PGlite({ - ...options, - // Add the vector and fuzzystrmatch extensions - extensions: { - ...(options.extensions ?? {}), - vector, - fuzzystrmatch, - }, - }); - } - - async init() { - await this.db.waitReady; - - await this.withTransaction(async (tx) => { - // Set application settings for embedding dimension - const embeddingConfig = getEmbeddingConfig(); - if (embeddingConfig.provider === EmbeddingProvider.OpenAI) { - await tx.query("SET app.use_openai_embedding = 'true'"); - await tx.query("SET app.use_ollama_embedding = 'false'"); - await tx.query("SET app.use_gaianet_embedding = 'false'"); - } else if (embeddingConfig.provider === EmbeddingProvider.Ollama) { - await tx.query("SET app.use_openai_embedding = 'false'"); - await tx.query("SET app.use_ollama_embedding = 'true'"); - await tx.query("SET app.use_gaianet_embedding = 'false'"); - } else if (embeddingConfig.provider === EmbeddingProvider.GaiaNet) { - await tx.query("SET app.use_openai_embedding = 'false'"); - await tx.query("SET app.use_ollama_embedding = 'false'"); - await tx.query("SET app.use_gaianet_embedding = 'true'"); - } else { - await tx.query("SET app.use_openai_embedding = 'false'"); - await tx.query("SET app.use_ollama_embedding = 'false'"); - await tx.query("SET app.use_gaianet_embedding = 'false'"); - } - - const schema = fs.readFileSync( - path.resolve(__dirname, "../schema.sql"), - "utf8" - ); - await tx.exec(schema); - }, "init"); - } - - async close() { - await this.db.close(); - } - - private async withDatabase( - operation: () => Promise, - context: string - ): Promise { - return this.withCircuitBreaker(async () => { - return operation(); - }, context); - } - - private async withTransaction( - operation: (tx: Transaction) => Promise, - context: string - ): Promise { - return this.withCircuitBreaker(async () => { - return this.db.transaction(operation); - }, context); - } - - async query( - queryTextOrConfig: string, - values?: unknown[] - ): Promise> { - return this.withDatabase(async () => { - return await this.db.query(queryTextOrConfig, values); - }, "query"); - } - - async getRoom(roomId: UUID): Promise { - return this.withDatabase(async () => { - const { rows } = await this.query<{ id: UUID }>( - "SELECT id FROM rooms WHERE id = $1", - [roomId] - ); - return rows.length > 0 ? rows[0].id : null; - }, "getRoom"); - } - - async getParticipantsForAccount(userId: UUID): Promise { - return this.withDatabase(async () => { - const { rows } = await this.query( - `SELECT id, "userId", "roomId", "last_message_read" - FROM participants - WHERE "userId" = $1`, - [userId] - ); - return rows; - }, "getParticipantsForAccount"); - } - - async getParticipantUserState( - roomId: UUID, - userId: UUID - ): Promise<"FOLLOWED" | "MUTED" | null> { - return this.withDatabase(async () => { - const { rows } = await this.query<{ - userState: "FOLLOWED" | "MUTED"; - }>( - `SELECT "userState" FROM participants WHERE "roomId" = $1 AND "userId" = $2`, - [roomId, userId] - ); - return rows.length > 0 ? rows[0].userState : null; - }, "getParticipantUserState"); - } - - async getMemoriesByRoomIds(params: { - roomIds: UUID[]; - agentId?: UUID; - tableName: string; - limit?: number; - }): Promise { - return this.withDatabase(async () => { - if (params.roomIds.length === 0) return []; - const placeholders = params.roomIds - .map((_, i) => `$${i + 2}`) - .join(", "); - - let query = `SELECT * FROM memories WHERE type = $1 AND "roomId" IN (${placeholders})`; - let queryParams = [params.tableName, ...params.roomIds]; - - if (params.agentId) { - query += ` AND "agentId" = $${params.roomIds.length + 2}`; - queryParams = [...queryParams, params.agentId]; - } - - // Add ordering and limit - query += ` ORDER BY "createdAt" DESC`; - if (params.limit) { - query += ` LIMIT $${queryParams.length + 1}`; - queryParams.push(params.limit.toString()); - } - - const { rows } = await this.query(query, queryParams); - return rows.map((row) => ({ - ...row, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - })); - }, "getMemoriesByRoomIds"); - } - - async setParticipantUserState( - roomId: UUID, - userId: UUID, - state: "FOLLOWED" | "MUTED" | null - ): Promise { - return this.withDatabase(async () => { - await this.query( - `UPDATE participants SET "userState" = $1 WHERE "roomId" = $2 AND "userId" = $3`, - [state, roomId, userId] - ); - }, "setParticipantUserState"); - } - - async getParticipantsForRoom(roomId: UUID): Promise { - return this.withDatabase(async () => { - const { rows } = await this.query<{ userId: UUID }>( - 'SELECT "userId" FROM participants WHERE "roomId" = $1', - [roomId] - ); - return rows.map((row) => row.userId); - }, "getParticipantsForRoom"); - } - - async getAccountById(userId: UUID): Promise { - return this.withDatabase(async () => { - const { rows } = await this.query( - "SELECT * FROM accounts WHERE id = $1", - [userId] - ); - if (rows.length === 0) { - elizaLogger.debug("Account not found:", { userId }); - return null; - } - - const account = rows[0]; - // elizaLogger.debug("Account retrieved:", { - // userId, - // hasDetails: !!account.details, - // }); - - return { - ...account, - details: - typeof account.details === "string" - ? JSON.parse(account.details) - : account.details, - }; - }, "getAccountById"); - } - - async createAccount(account: Account): Promise { - return this.withDatabase(async () => { - try { - const accountId = account.id ?? v4(); - await this.query( - `INSERT INTO accounts (id, name, username, email, "avatarUrl", details) - VALUES ($1, $2, $3, $4, $5, $6)`, - [ - accountId, - account.name, - account.username || "", - account.email || "", - account.avatarUrl || "", - JSON.stringify(account.details), - ] - ); - elizaLogger.debug("Account created successfully:", { - accountId, - }); - return true; - } catch (error) { - elizaLogger.error("Error creating account:", { - error: - error instanceof Error ? error.message : String(error), - accountId: account.id, - name: account.name, // Only log non-sensitive fields - }); - return false; // Return false instead of throwing to maintain existing behavior - } - }, "createAccount"); - } - - async getActorById(params: { roomId: UUID }): Promise { - return this.withDatabase(async () => { - const { rows } = await this.query( - `SELECT a.id, a.name, a.username, a.details - FROM participants p - LEFT JOIN accounts a ON p."userId" = a.id - WHERE p."roomId" = $1`, - [params.roomId] - ); - - elizaLogger.debug("Retrieved actors:", { - roomId: params.roomId, - actorCount: rows.length, - }); - - return rows.map((row) => { - try { - return { - ...row, - details: - typeof row.details === "string" - ? JSON.parse(row.details) - : row.details, - }; - } catch (error) { - elizaLogger.warn("Failed to parse actor details:", { - actorId: row.id, - error: - error instanceof Error - ? error.message - : String(error), - }); - return { - ...row, - details: {}, // Provide default empty details on parse error - }; - } - }); - }, "getActorById").catch((error) => { - elizaLogger.error("Failed to get actors:", { - roomId: params.roomId, - error: error.message, - }); - throw error; // Re-throw to let caller handle database errors - }); - } - - async getMemoryById(id: UUID): Promise { - return this.withDatabase(async () => { - const { rows } = await this.query( - "SELECT * FROM memories WHERE id = $1", - [id] - ); - if (rows.length === 0) return null; - - return { - ...rows[0], - content: - typeof rows[0].content === "string" - ? JSON.parse(rows[0].content) - : rows[0].content, - }; - }, "getMemoryById"); - } - - async getMemoriesByIds( - memoryIds: UUID[], - tableName?: string - ): Promise { - return this.withDatabase(async () => { - if (memoryIds.length === 0) return []; - const placeholders = memoryIds.map((_, i) => `$${i + 1}`).join(","); - let sql = `SELECT * FROM memories WHERE id IN (${placeholders})`; - const queryParams: any[] = [...memoryIds]; - - if (tableName) { - sql += ` AND type = $${memoryIds.length + 1}`; - queryParams.push(tableName); - } - - const { rows } = await this.query(sql, queryParams); - - return rows.map((row) => ({ - ...row, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - })); - }, "getMemoriesByIds"); - } - - async createMemory(memory: Memory, tableName: string): Promise { - return this.withDatabase(async () => { - elizaLogger.debug("PostgresAdapter createMemory:", { - memoryId: memory.id, - embeddingLength: memory.embedding?.length, - contentLength: memory.content?.text?.length, - }); - - let isUnique = true; - if (memory.embedding) { - const similarMemories = await this.searchMemoriesByEmbedding( - memory.embedding, - { - tableName, - roomId: memory.roomId, - match_threshold: 0.95, - count: 1, - } - ); - isUnique = similarMemories.length === 0; - } - - await this.query( - `INSERT INTO memories ( - id, type, content, embedding, "userId", "roomId", "agentId", "unique", "createdAt" - ) VALUES ($1, $2, $3, $4, $5::uuid, $6::uuid, $7::uuid, $8, to_timestamp($9/1000.0))`, - [ - memory.id ?? v4(), - tableName, - JSON.stringify(memory.content), - memory.embedding ? `[${memory.embedding.join(",")}]` : null, - memory.userId, - memory.roomId, - memory.agentId, - memory.unique ?? isUnique, - Date.now(), - ] - ); - }, "createMemory"); - } - - async searchMemories(params: { - tableName: string; - agentId: UUID; - roomId: UUID; - embedding: number[]; - match_threshold: number; - match_count: number; - unique: boolean; - }): Promise { - return await this.searchMemoriesByEmbedding(params.embedding, { - match_threshold: params.match_threshold, - count: params.match_count, - agentId: params.agentId, - roomId: params.roomId, - unique: params.unique, - tableName: params.tableName, - }); - } - - async getMemories(params: { - roomId: UUID; - count?: number; - unique?: boolean; - tableName: string; - agentId?: UUID; - start?: number; - end?: number; - }): Promise { - // Parameter validation - if (!params.tableName) throw new Error("tableName is required"); - if (!params.roomId) throw new Error("roomId is required"); - - return this.withDatabase(async () => { - // Build query - let sql = `SELECT * FROM memories WHERE type = $1 AND "roomId" = $2`; - const values: unknown[] = [params.tableName, params.roomId]; - let paramCount = 2; - - // Add time range filters - if (params.start) { - paramCount++; - sql += ` AND "createdAt" >= to_timestamp($${paramCount})`; - values.push(params.start / 1000); - } - - if (params.end) { - paramCount++; - sql += ` AND "createdAt" <= to_timestamp($${paramCount})`; - values.push(params.end / 1000); - } - - // Add other filters - if (params.unique) { - sql += ` AND "unique" = true`; - } - - if (params.agentId) { - paramCount++; - sql += ` AND "agentId" = $${paramCount}`; - values.push(params.agentId); - } - - // Add ordering and limit - sql += ' ORDER BY "createdAt" DESC'; - - if (params.count) { - paramCount++; - sql += ` LIMIT $${paramCount}`; - values.push(params.count); - } - - elizaLogger.debug("Fetching memories:", { - roomId: params.roomId, - tableName: params.tableName, - unique: params.unique, - agentId: params.agentId, - timeRange: - params.start || params.end - ? { - start: params.start - ? new Date(params.start).toISOString() - : undefined, - end: params.end - ? new Date(params.end).toISOString() - : undefined, - } - : undefined, - limit: params.count, - }); - - const { rows } = await this.query(sql, values); - return rows.map((row) => ({ - ...row, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - })); - }, "getMemories"); - } - - async getGoals(params: { - roomId: UUID; - userId?: UUID | null; - onlyInProgress?: boolean; - count?: number; - }): Promise { - return this.withDatabase(async () => { - let sql = `SELECT * FROM goals WHERE "roomId" = $1`; - const values: unknown[] = [params.roomId]; - let paramCount = 1; - - if (params.userId) { - paramCount++; - sql += ` AND "userId" = $${paramCount}`; - values.push(params.userId); - } - - if (params.onlyInProgress) { - sql += " AND status = 'IN_PROGRESS'"; - } - - if (params.count) { - paramCount++; - sql += ` LIMIT $${paramCount}`; - values.push(params.count); - } - - const { rows } = await this.query(sql, values); - return rows.map((row) => ({ - ...row, - objectives: - typeof row.objectives === "string" - ? JSON.parse(row.objectives) - : row.objectives, - })); - }, "getGoals"); - } - - async updateGoal(goal: Goal): Promise { - return this.withDatabase(async () => { - try { - await this.query( - `UPDATE goals SET name = $1, status = $2, objectives = $3 WHERE id = $4`, - [ - goal.name, - goal.status, - JSON.stringify(goal.objectives), - goal.id, - ] - ); - } catch (error) { - elizaLogger.error("Failed to update goal:", { - goalId: goal.id, - error: - error instanceof Error ? error.message : String(error), - status: goal.status, - }); - throw error; - } - }, "updateGoal"); - } - - async createGoal(goal: Goal): Promise { - return this.withDatabase(async () => { - await this.query( - `INSERT INTO goals (id, "roomId", "userId", name, status, objectives) - VALUES ($1, $2, $3, $4, $5, $6)`, - [ - goal.id ?? v4(), - goal.roomId, - goal.userId, - goal.name, - goal.status, - JSON.stringify(goal.objectives), - ] - ); - }, "createGoal"); - } - - async removeGoal(goalId: UUID): Promise { - if (!goalId) throw new Error("Goal ID is required"); - - return this.withDatabase(async () => { - try { - const result = await this.query( - "DELETE FROM goals WHERE id = $1 RETURNING id", - [goalId] - ); - - elizaLogger.debug("Goal removal attempt:", { - goalId, - removed: result?.affectedRows ?? 0 > 0, - }); - } catch (error) { - elizaLogger.error("Failed to remove goal:", { - goalId, - error: - error instanceof Error ? error.message : String(error), - }); - throw error; - } - }, "removeGoal"); - } - - async createRoom(roomId?: UUID): Promise { - return this.withDatabase(async () => { - const newRoomId = roomId || v4(); - await this.query("INSERT INTO rooms (id) VALUES ($1)", [newRoomId]); - return newRoomId as UUID; - }, "createRoom"); - } - - async removeRoom(roomId: UUID): Promise { - if (!roomId) throw new Error("Room ID is required"); - - return this.withTransaction(async (tx) => { - try { - // First check if room exists - const checkResult = await tx.query( - "SELECT id FROM rooms WHERE id = $1", - [roomId] - ); - - if (checkResult.rows.length === 0) { - elizaLogger.warn("No room found to remove:", { - roomId, - }); - throw new Error(`Room not found: ${roomId}`); - } - - // Remove related data first (if not using CASCADE) - await tx.query('DELETE FROM memories WHERE "roomId" = $1', [ - roomId, - ]); - await tx.query('DELETE FROM participants WHERE "roomId" = $1', [ - roomId, - ]); - await tx.query('DELETE FROM goals WHERE "roomId" = $1', [ - roomId, - ]); - - // Finally remove the room - const result = await tx.query( - "DELETE FROM rooms WHERE id = $1 RETURNING id", - [roomId] - ); - - elizaLogger.debug( - "Room and related data removed successfully:", - { - roomId, - removed: result?.affectedRows ?? 0 > 0, - } - ); - } catch (error) { - elizaLogger.error("Failed to remove room:", { - roomId, - error: - error instanceof Error ? error.message : String(error), - }); - throw error; - } - }, "removeRoom"); - } - - async createRelationship(params: { - userA: UUID; - userB: UUID; - }): Promise { - // Input validation - if (!params.userA || !params.userB) { - throw new Error("userA and userB are required"); - } - - return this.withDatabase(async () => { - try { - const relationshipId = v4(); - await this.query( - `INSERT INTO relationships (id, "userA", "userB", "userId") - VALUES ($1, $2, $3, $4) - RETURNING id`, - [relationshipId, params.userA, params.userB, params.userA] - ); - - elizaLogger.debug("Relationship created successfully:", { - relationshipId, - userA: params.userA, - userB: params.userB, - }); - - return true; - } catch (error) { - // Check for unique constraint violation or other specific errors - if ((error as { code?: string }).code === "23505") { - // Unique violation - elizaLogger.warn("Relationship already exists:", { - userA: params.userA, - userB: params.userB, - error: - error instanceof Error - ? error.message - : String(error), - }); - } else { - elizaLogger.error("Failed to create relationship:", { - userA: params.userA, - userB: params.userB, - error: - error instanceof Error - ? error.message - : String(error), - }); - } - return false; - } - }, "createRelationship"); - } - - async getRelationship(params: { - userA: UUID; - userB: UUID; - }): Promise { - if (!params.userA || !params.userB) { - throw new Error("userA and userB are required"); - } - - return this.withDatabase(async () => { - try { - const { rows } = await this.query( - `SELECT * FROM relationships - WHERE ("userA" = $1 AND "userB" = $2) - OR ("userA" = $2 AND "userB" = $1)`, - [params.userA, params.userB] - ); - - if (rows.length > 0) { - elizaLogger.debug("Relationship found:", { - relationshipId: rows[0].id, - userA: params.userA, - userB: params.userB, - }); - return rows[0]; - } - - elizaLogger.debug("No relationship found between users:", { - userA: params.userA, - userB: params.userB, - }); - return null; - } catch (error) { - elizaLogger.error("Error fetching relationship:", { - userA: params.userA, - userB: params.userB, - error: - error instanceof Error ? error.message : String(error), - }); - throw error; - } - }, "getRelationship"); - } - - async getRelationships(params: { userId: UUID }): Promise { - if (!params.userId) { - throw new Error("userId is required"); - } - - return this.withDatabase(async () => { - try { - const { rows } = await this.query( - `SELECT * FROM relationships - WHERE "userA" = $1 OR "userB" = $1 - ORDER BY "createdAt" DESC`, // Add ordering if you have this field - [params.userId] - ); - - elizaLogger.debug("Retrieved relationships:", { - userId: params.userId, - count: rows.length, - }); - - return rows; - } catch (error) { - elizaLogger.error("Failed to fetch relationships:", { - userId: params.userId, - error: - error instanceof Error ? error.message : String(error), - }); - throw error; - } - }, "getRelationships"); - } - - async getCachedEmbeddings(opts: { - query_table_name: string; - query_threshold: number; - query_input: string; - query_field_name: string; - query_field_sub_name: string; - query_match_count: number; - }): Promise<{ embedding: number[]; levenshtein_score: number }[]> { - // Input validation - if (!opts.query_table_name) - throw new Error("query_table_name is required"); - if (!opts.query_input) throw new Error("query_input is required"); - if (!opts.query_field_name) - throw new Error("query_field_name is required"); - if (!opts.query_field_sub_name) - throw new Error("query_field_sub_name is required"); - if (opts.query_match_count <= 0) - throw new Error("query_match_count must be positive"); - - return this.withDatabase(async () => { - try { - elizaLogger.debug("Fetching cached embeddings:", { - tableName: opts.query_table_name, - fieldName: opts.query_field_name, - subFieldName: opts.query_field_sub_name, - matchCount: opts.query_match_count, - inputLength: opts.query_input.length, - }); - - const sql = ` - WITH content_text AS ( - SELECT - embedding, - COALESCE( - content->>$2, - '' - ) as content_text - FROM memories - WHERE type = $3 - AND content->>$2 IS NOT NULL - ) - SELECT - embedding, - levenshtein( - $1, - content_text - ) as levenshtein_score - FROM content_text - WHERE levenshtein( - $1, - content_text - ) <= $5 -- Add threshold check - ORDER BY levenshtein_score - LIMIT $4 - `; - - const { rows } = await this.query<{ - embedding: number[]; - levenshtein_score: number; - }>(sql, [ - opts.query_input, - opts.query_field_sub_name, - opts.query_table_name, - opts.query_match_count, - opts.query_threshold, - ]); - - elizaLogger.debug("Retrieved cached embeddings:", { - count: rows.length, - tableName: opts.query_table_name, - matchCount: opts.query_match_count, - }); - - return rows - .map( - ( - row - ): { - embedding: number[]; - levenshtein_score: number; - } | null => { - if (!Array.isArray(row.embedding)) return null; - return { - embedding: row.embedding, - levenshtein_score: Number( - row.levenshtein_score - ), - }; - } - ) - .filter( - ( - row - ): row is { - embedding: number[]; - levenshtein_score: number; - } => row !== null - ); - } catch (error) { - elizaLogger.error("Error in getCachedEmbeddings:", { - error: - error instanceof Error ? error.message : String(error), - tableName: opts.query_table_name, - fieldName: opts.query_field_name, - }); - throw error; - } - }, "getCachedEmbeddings"); - } - - async log(params: { - body: { [key: string]: unknown }; - userId: UUID; - roomId: UUID; - type: string; - }): Promise { - // Input validation - if (!params.userId) throw new Error("userId is required"); - if (!params.roomId) throw new Error("roomId is required"); - if (!params.type) throw new Error("type is required"); - if (!params.body || typeof params.body !== "object") { - throw new Error("body must be a valid object"); - } - - return this.withDatabase(async () => { - try { - const logId = v4(); // Generate ID for tracking - await this.query( - `INSERT INTO logs ( - id, - body, - "userId", - "roomId", - type, - "createdAt" - ) VALUES ($1, $2, $3, $4, $5, NOW()) - RETURNING id`, - [ - logId, - JSON.stringify(params.body), // Ensure body is stringified - params.userId, - params.roomId, - params.type, - ] - ); - - elizaLogger.debug("Log entry created:", { - logId, - type: params.type, - roomId: params.roomId, - userId: params.userId, - bodyKeys: Object.keys(params.body), - }); - } catch (error) { - elizaLogger.error("Failed to create log entry:", { - error: - error instanceof Error ? error.message : String(error), - type: params.type, - roomId: params.roomId, - userId: params.userId, - }); - throw error; - } - }, "log"); - } - - async searchMemoriesByEmbedding( - embedding: number[], - params: { - match_threshold?: number; - count?: number; - agentId?: UUID; - roomId?: UUID; - unique?: boolean; - tableName: string; - } - ): Promise { - return this.withDatabase(async () => { - elizaLogger.debug("Incoming vector:", { - length: embedding.length, - sample: embedding.slice(0, 5), - isArray: Array.isArray(embedding), - allNumbers: embedding.every((n) => typeof n === "number"), - }); - - // Validate embedding dimension - if (embedding.length !== getEmbeddingConfig().dimensions) { - throw new Error( - `Invalid embedding dimension: expected ${getEmbeddingConfig().dimensions}, got ${embedding.length}` - ); - } - - // Ensure vector is properly formatted - const cleanVector = embedding.map((n) => { - if (!Number.isFinite(n)) return 0; - // Limit precision to avoid floating point issues - return Number(n.toFixed(6)); - }); - - // Format for Postgres pgvector - const vectorStr = `[${cleanVector.join(",")}]`; - - elizaLogger.debug("Vector debug:", { - originalLength: embedding.length, - cleanLength: cleanVector.length, - sampleStr: vectorStr.slice(0, 100), - }); - - let sql = ` - SELECT *, - 1 - (embedding <-> $1::vector(${getEmbeddingConfig().dimensions})) as similarity - FROM memories - WHERE type = $2 - `; - - const values: unknown[] = [vectorStr, params.tableName]; - - // Log the query for debugging - elizaLogger.debug("Query debug:", { - sql: sql.slice(0, 200), - paramTypes: values.map((v) => typeof v), - vectorStrLength: vectorStr.length, - }); - - let paramCount = 2; - - if (params.unique) { - sql += ` AND "unique" = true`; - } - - if (params.agentId) { - paramCount++; - sql += ` AND "agentId" = $${paramCount}`; - values.push(params.agentId); - } - - if (params.roomId) { - paramCount++; - sql += ` AND "roomId" = $${paramCount}::uuid`; - values.push(params.roomId); - } - - if (params.match_threshold) { - paramCount++; - sql += ` AND 1 - (embedding <-> $1::vector) >= $${paramCount}`; - values.push(params.match_threshold); - } - - sql += ` ORDER BY embedding <-> $1::vector`; - - if (params.count) { - paramCount++; - sql += ` LIMIT $${paramCount}`; - values.push(params.count); - } - - const { rows } = await this.query(sql, values); - return rows.map((row) => ({ - ...row, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - similarity: row.similarity, - })); - }, "searchMemoriesByEmbedding"); - } - - async addParticipant(userId: UUID, roomId: UUID): Promise { - return this.withDatabase(async () => { - try { - await this.query( - `INSERT INTO participants (id, "userId", "roomId") - VALUES ($1, $2, $3)`, - [v4(), userId, roomId] - ); - return true; - } catch (error) { - console.log("Error adding participant", error); - return false; - } - }, "addParticpant"); - } - - async removeParticipant(userId: UUID, roomId: UUID): Promise { - return this.withDatabase(async () => { - try { - await this.query( - `DELETE FROM participants WHERE "userId" = $1 AND "roomId" = $2`, - [userId, roomId] - ); - return true; - } catch (error) { - console.log("Error removing participant", error); - return false; - } - }, "removeParticipant"); - } - - async updateGoalStatus(params: { - goalId: UUID; - status: GoalStatus; - }): Promise { - return this.withDatabase(async () => { - await this.query("UPDATE goals SET status = $1 WHERE id = $2", [ - params.status, - params.goalId, - ]); - }, "updateGoalStatus"); - } - - async removeMemory(memoryId: UUID, tableName: string): Promise { - return this.withDatabase(async () => { - await this.query( - "DELETE FROM memories WHERE type = $1 AND id = $2", - [tableName, memoryId] - ); - }, "removeMemory"); - } - - async removeAllMemories(roomId: UUID, tableName: string): Promise { - return this.withDatabase(async () => { - await this.query( - `DELETE FROM memories WHERE type = $1 AND "roomId" = $2`, - [tableName, roomId] - ); - }, "removeAllMemories"); - } - - async countMemories( - roomId: UUID, - unique = true, - tableName = "" - ): Promise { - if (!tableName) throw new Error("tableName is required"); - - return this.withDatabase(async () => { - let sql = `SELECT COUNT(*) as count FROM memories WHERE type = $1 AND "roomId" = $2`; - if (unique) { - sql += ` AND "unique" = true`; - } - - const { rows } = await this.query<{ count: number }>(sql, [ - tableName, - roomId, - ]); - return rows[0].count; - }, "countMemories"); - } - - async removeAllGoals(roomId: UUID): Promise { - return this.withDatabase(async () => { - await this.query(`DELETE FROM goals WHERE "roomId" = $1`, [roomId]); - }, "removeAllGoals"); - } - - async getRoomsForParticipant(userId: UUID): Promise { - return this.withDatabase(async () => { - const { rows } = await this.query<{ roomId: UUID }>( - `SELECT "roomId" FROM participants WHERE "userId" = $1`, - [userId] - ); - return rows.map((row) => row.roomId); - }, "getRoomsForParticipant"); - } - - async getRoomsForParticipants(userIds: UUID[]): Promise { - return this.withDatabase(async () => { - const placeholders = userIds.map((_, i) => `$${i + 1}`).join(", "); - const { rows } = await this.query<{ roomId: UUID }>( - `SELECT DISTINCT "roomId" FROM participants WHERE "userId" IN (${placeholders})`, - userIds - ); - return rows.map((row) => row.roomId); - }, "getRoomsForParticipants"); - } - - async getActorDetails(params: { roomId: string }): Promise { - if (!params.roomId) { - throw new Error("roomId is required"); - } - - return this.withDatabase(async () => { - try { - const sql = ` - SELECT - a.id, - a.name, - a.username, - a."avatarUrl", - COALESCE(a.details::jsonb, '{}'::jsonb) as details - FROM participants p - LEFT JOIN accounts a ON p."userId" = a.id - WHERE p."roomId" = $1 - ORDER BY a.name - `; - - const result = await this.query(sql, [params.roomId]); - - elizaLogger.debug("Retrieved actor details:", { - roomId: params.roomId, - actorCount: result.rows.length, - }); - - return result.rows.map((row) => { - try { - return { - ...row, - details: - typeof row.details === "string" - ? JSON.parse(row.details) - : row.details, - }; - } catch (parseError) { - elizaLogger.warn("Failed to parse actor details:", { - actorId: row.id, - error: - parseError instanceof Error - ? parseError.message - : String(parseError), - }); - return { - ...row, - details: {}, // Fallback to empty object if parsing fails - }; - } - }); - } catch (error) { - elizaLogger.error("Failed to fetch actor details:", { - roomId: params.roomId, - error: - error instanceof Error ? error.message : String(error), - }); - throw new Error( - `Failed to fetch actor details: ${error instanceof Error ? error.message : String(error)}` - ); - } - }, "getActorDetails"); - } - - async getCache(params: { - key: string; - agentId: UUID; - }): Promise { - return this.withDatabase(async () => { - try { - const sql = `SELECT "value"::TEXT FROM cache WHERE "key" = $1 AND "agentId" = $2`; - const { rows } = await this.query<{ value: string }>(sql, [ - params.key, - params.agentId, - ]); - return rows[0]?.value ?? undefined; - } catch (error) { - elizaLogger.error("Error fetching cache", { - error: - error instanceof Error ? error.message : String(error), - key: params.key, - agentId: params.agentId, - }); - return undefined; - } - }, "getCache"); - } - - async setCache(params: { - key: string; - agentId: UUID; - value: string; - }): Promise { - return ( - (await this.withTransaction(async (tx) => { - try { - await tx.query( - `INSERT INTO cache ("key", "agentId", "value", "createdAt") - VALUES ($1, $2, $3, CURRENT_TIMESTAMP) - ON CONFLICT ("key", "agentId") - DO UPDATE SET "value" = EXCLUDED.value, "createdAt" = CURRENT_TIMESTAMP`, - [params.key, params.agentId, params.value] - ); - return true; - } catch (error) { - await tx.rollback(); - elizaLogger.error("Error setting cache", { - error: - error instanceof Error - ? error.message - : String(error), - key: params.key, - agentId: params.agentId, - }); - return false; - } - }, "setCache")) ?? false - ); - } - - async deleteCache(params: { - key: string; - agentId: UUID; - }): Promise { - return ( - (await this.withTransaction(async (tx) => { - try { - await tx.query( - `DELETE FROM cache WHERE "key" = $1 AND "agentId" = $2`, - [params.key, params.agentId] - ); - return true; - } catch (error) { - tx.rollback(); - elizaLogger.error("Error deleting cache", { - error: - error instanceof Error - ? error.message - : String(error), - key: params.key, - agentId: params.agentId, - }); - return false; - } - }, "deleteCache")) ?? false - ); - } - - async getKnowledge(params: { - id?: UUID; - agentId: UUID; - limit?: number; - query?: string; - }): Promise { - return this.withDatabase(async () => { - try { - let sql = `SELECT * FROM knowledge WHERE ("agentId" = $1 OR "isShared" = true)`; - const queryParams: any[] = [params.agentId]; - let paramCount = 1; - - if (params.id) { - paramCount++; - sql += ` AND id = $${paramCount}`; - queryParams.push(params.id); - } - - if (params.limit) { - paramCount++; - sql += ` LIMIT $${paramCount}`; - queryParams.push(params.limit); - } - - const { rows } = await this.query( - sql, - queryParams - ); - - return rows.map((row) => ({ - id: row.id, - agentId: row.agentId, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - embedding: row.embedding - ? new Float32Array(row.embedding) - : undefined, - createdAt: row.createdAt - ? new Date(row.createdAt).getTime() - : undefined, - })); - } catch (error) { - elizaLogger.error("Error getting knowledge", { - error: - error instanceof Error ? error.message : String(error), - id: params.id, - agentId: params.agentId, - }); - throw new Error( - `Failed to getting knowledge: ${error instanceof Error ? error.message : String(error)}` - ); - } - }, "getKnowledge"); - } - - async searchKnowledge(params: { - agentId: UUID; - embedding: Float32Array; - match_threshold: number; - match_count: number; - searchText?: string; - }): Promise { - return this.withDatabase(async () => { - interface KnowledgeSearchRow { - id: UUID; - agentId: UUID; - content: string; - embedding: Buffer | null; - createdAt: string | number; - vector_score: number; - keyword_score: number; - combined_score: number; - } - try { - const cacheKey = `embedding_${params.agentId}_${params.searchText}`; - const cachedResult = await this.getCache({ - key: cacheKey, - agentId: params.agentId, - }); - - if (cachedResult) { - return JSON.parse(cachedResult); - } - - const vectorStr = `[${Array.from(params.embedding).join(",")}]`; - - const sql = ` - WITH vector_scores AS ( - SELECT id, - 1 - (embedding <-> $1::vector) as vector_score - FROM knowledge - WHERE ("agentId" IS NULL AND "isShared" = true) OR "agentId" = $2 - AND embedding IS NOT NULL - ), - keyword_matches AS ( - SELECT id, - CASE - WHEN content->>'text' ILIKE $3 THEN 3.0 - ELSE 1.0 - END * - CASE - WHEN (content->'metadata'->>'isChunk')::boolean = true THEN 1.5 - WHEN (content->'metadata'->>'isMain')::boolean = true THEN 1.2 - ELSE 1.0 - END as keyword_score - FROM knowledge - WHERE ("agentId" IS NULL AND "isShared" = true) OR "agentId" = $2 - ) - SELECT k.*, - v.vector_score, - kw.keyword_score, - (v.vector_score * kw.keyword_score) as combined_score - FROM knowledge k - JOIN vector_scores v ON k.id = v.id - LEFT JOIN keyword_matches kw ON k.id = kw.id - WHERE ("agentId" IS NULL AND "isShared" = true) OR k."agentId" = $2 - AND ( - v.vector_score >= $4 - OR (kw.keyword_score > 1.0 AND v.vector_score >= 0.3) - ) - ORDER BY combined_score DESC - LIMIT $5 - `; - - const { rows } = await this.query(sql, [ - vectorStr, - params.agentId, - `%${params.searchText || ""}%`, - params.match_threshold, - params.match_count, - ]); - - const results = rows.map((row) => ({ - id: row.id, - agentId: row.agentId, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - embedding: row.embedding - ? new Float32Array(row.embedding) - : undefined, - createdAt: row.createdAt - ? new Date(row.createdAt).getTime() - : undefined, - similarity: row.combined_score, - })); - - await this.setCache({ - key: cacheKey, - agentId: params.agentId, - value: JSON.stringify(results), - }); - - return results; - } catch (error) { - elizaLogger.error("Error searching knowledge", { - error: - error instanceof Error ? error.message : String(error), - searchText: params.searchText, - agentId: params.agentId, - }); - throw new Error( - `Failed to search knowledge: ${error instanceof Error ? error.message : String(error)}` - ); - } - }, "searchKnowledge"); - } - - async createKnowledge(knowledge: RAGKnowledgeItem): Promise { - return this.withTransaction(async (tx) => { - try { - const sql = ` - INSERT INTO knowledge ( - id, "agentId", content, embedding, "createdAt", - "isMain", "originalId", "chunkIndex", "isShared" - ) VALUES ($1, $2, $3, $4, to_timestamp($5/1000.0), $6, $7, $8, $9) - ON CONFLICT (id) DO NOTHING - `; - - const metadata = knowledge.content.metadata || {}; - const vectorStr = knowledge.embedding - ? `[${Array.from(knowledge.embedding).join(",")}]` - : null; - - await tx.query(sql, [ - knowledge.id, - metadata.isShared ? null : knowledge.agentId, - knowledge.content, - vectorStr, - knowledge.createdAt || Date.now(), - metadata.isMain || false, - metadata.originalId || null, - metadata.chunkIndex || null, - metadata.isShared || false, - ]); - } catch (error) { - elizaLogger.error("Failed to create knowledge:", { - error: - error instanceof Error ? error.message : String(error), - }); - throw error; - } - }, "createKnowledge"); - } - - async removeKnowledge(id: UUID): Promise { - return await this.withTransaction(async (tx) => { - try { - await tx.query("DELETE FROM knowledge WHERE id = $1", [id]); - } catch (error) { - tx.rollback(); - elizaLogger.error("Error removing knowledge", { - error: - error instanceof Error ? error.message : String(error), - id, - }); - } - }, "removeKnowledge"); - } - - async clearKnowledge(agentId: UUID, shared?: boolean): Promise { - return await this.withTransaction(async (tx) => { - try { - const sql = shared - ? 'DELETE FROM knowledge WHERE ("agentId" = $1 OR "isShared" = true)' - : 'DELETE FROM knowledge WHERE "agentId" = $1'; - await tx.query(sql, [agentId]); - } catch (error) { - tx.rollback(); - elizaLogger.error("Error clearing knowledge", { - error: - error instanceof Error ? error.message : String(error), - agentId, - }); - } - }, "clearKnowledge"); - } -} - -export default PGLiteDatabaseAdapter; diff --git a/packages/adapter-pglite/tsconfig.json b/packages/adapter-pglite/tsconfig.json deleted file mode 100644 index 673cf100f4734..0000000000000 --- a/packages/adapter-pglite/tsconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "strict": true - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/adapter-pglite/tsup.config.ts b/packages/adapter-pglite/tsup.config.ts deleted file mode 100644 index 964bdc8685428..0000000000000 --- a/packages/adapter-pglite/tsup.config.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "@anush008/tokenizers", - "uuid", - // Add other modules you want to externalize - ], -}); diff --git a/packages/adapter-postgres/.npmignore b/packages/adapter-postgres/.npmignore deleted file mode 100644 index eb4b3947ffa87..0000000000000 --- a/packages/adapter-postgres/.npmignore +++ /dev/null @@ -1,9 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts -!schema.sql -!seed.sql -!config.toml \ No newline at end of file diff --git a/packages/adapter-postgres/config.toml b/packages/adapter-postgres/config.toml deleted file mode 100644 index c1f016d4a40f8..0000000000000 --- a/packages/adapter-postgres/config.toml +++ /dev/null @@ -1,159 +0,0 @@ -# A string used to distinguish different Supabase projects on the same host. Defaults to the -# working directory name when running `supabase init`. -project_id = "eliza" - -[api] -enabled = true -# Port to use for the API URL. -port = 54321 -# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API -# endpoints. public and storage are always included. -schemas = ["public", "storage", "graphql_public"] -# Extra schemas to add to the search_path of every request. public is always included. -extra_search_path = ["public", "extensions"] -# The maximum number of rows returns from a view, table, or stored procedure. Limits payload size -# for accidental or malicious requests. -max_rows = 1000 - -[db] -# Port to use for the local database URL. -port = 54322 -# Port used by db diff command to initialize the shadow database. -shadow_port = 54320 -# The database major version to use. This has to be the same as your remote database's. Run `SHOW -# server_version;` on the remote database to check. -major_version = 15 - -[db.pooler] -enabled = false -# Port to use for the local connection pooler. -port = 54329 -# Specifies when a server connection can be reused by other clients. -# Configure one of the supported pooler modes: `transaction`, `session`. -pool_mode = "transaction" -# How many server connections to allow per user/database pair. -default_pool_size = 20 -# Maximum number of client connections allowed. -max_client_conn = 100 - -[realtime] -enabled = true -# Bind realtime via either IPv4 or IPv6. (default: IPv6) -# ip_version = "IPv6" -# The maximum length in bytes of HTTP request headers. (default: 4096) -# max_header_length = 4096 - -[studio] -enabled = true -# Port to use for Supabase Studio. -port = 54323 -# External URL of the API server that frontend connects to. -api_url = "http://127.0.0.1" - -# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they -# are monitored, and you can view the emails that would have been sent from the web interface. -[inbucket] -enabled = true -# Port to use for the email testing server web interface. -port = 54324 -# Uncomment to expose additional ports for testing user applications that send emails. -# smtp_port = 54325 -# pop3_port = 54326 - -[storage] -enabled = true -# The maximum file size allowed (e.g. "5MB", "500KB"). -file_size_limit = "50MiB" - -[auth] -enabled = true -# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used -# in emails. -site_url = "http://127.0.0.1:3000" -# A list of *exact* URLs that auth providers are permitted to redirect to post authentication. -additional_redirect_urls = ["https://127.0.0.1:3000"] -# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week). -jwt_expiry = 3600 -# If disabled, the refresh token will never expire. -enable_refresh_token_rotation = true -# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds. -# Requires enable_refresh_token_rotation = true. -refresh_token_reuse_interval = 10 -# Allow/disallow new user signups to your project. -enable_signup = true -# Allow/disallow testing manual linking of accounts -enable_manual_linking = false - -[auth.email] -# Allow/disallow new user signups via email to your project. -enable_signup = true -# If enabled, a user will be required to confirm any email change on both the old, and new email -# addresses. If disabled, only the new email is required to confirm. -double_confirm_changes = true -# If enabled, users need to confirm their email address before signing in. -enable_confirmations = false - -# Uncomment to customize email template -# [auth.email.template.invite] -# subject = "You have been invited" -# content_path = "./supabase/templates/invite.html" - -[auth.sms] -# Allow/disallow new user signups via SMS to your project. -enable_signup = true -# If enabled, users need to confirm their phone number before signing in. -enable_confirmations = false -# Template for sending OTP to users -template = "Your code is {{ .Code }} ." - -# Use pre-defined map of phone number to OTP for testing. -[auth.sms.test_otp] -# 4152127777 = "123456" - -# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used. -[auth.hook.custom_access_token] -# enabled = true -# uri = "pg-functions:////" - - -# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`. -[auth.sms.twilio] -enabled = false -account_sid = "" -message_service_sid = "" -# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead: -auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)" - -# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`, -# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`, -# `twitter`, `slack`, `spotify`, `workos`, `zoom`. -[auth.external.apple] -enabled = false -client_id = "" -# DO NOT commit your OAuth provider secret to git. Use environment variable substitution instead: -secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)" -# Overrides the default auth redirectUrl. -redirect_uri = "" -# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure, -# or any other third-party OIDC providers. -url = "" - -[analytics] -enabled = false -port = 54327 -vector_port = 54328 -# Configure one of the supported backends: `postgres`, `bigquery`. -backend = "postgres" - -# Experimental features may be deprecated any time -[experimental] -# Configures Postgres storage engine to use OrioleDB (S3) -orioledb_version = "" -# Configures S3 bucket URL, eg. .s3-.amazonaws.com -s3_host = "env(S3_HOST)" -# Configures S3 bucket region, eg. us-east-1 -s3_region = "env(S3_REGION)" -# Configures AWS_ACCESS_KEY_ID for S3 bucket -s3_access_key = "env(S3_ACCESS_KEY)" -# Configures AWS_SECRET_ACCESS_KEY for S3 bucket -s3_secret_key = "env(S3_SECRET_KEY)" diff --git a/packages/adapter-postgres/migrations/20240318103238_remote_schema.sql b/packages/adapter-postgres/migrations/20240318103238_remote_schema.sql deleted file mode 100644 index 30b0854ce3d2e..0000000000000 --- a/packages/adapter-postgres/migrations/20240318103238_remote_schema.sql +++ /dev/null @@ -1,819 +0,0 @@ - -SET statement_timeout = 0; -SET lock_timeout = 0; -SET idle_in_transaction_session_timeout = 0; -SET client_encoding = 'UTF8'; -SET standard_conforming_strings = on; -SELECT pg_catalog.set_config('search_path', '', false); -SET check_function_bodies = false; -SET xmloption = content; -SET client_min_messages = warning; -SET row_security = off; - -CREATE SCHEMA IF NOT EXISTS "public"; - -ALTER SCHEMA "public" OWNER TO "pg_database_owner"; - -DO $$ -BEGIN - IF NOT EXISTS ( - SELECT 1 - FROM pg_extension - WHERE extname = 'vector' - ) THEN - CREATE EXTENSION vector IF NOT EXISTS - SCHEMA extensions; - END IF; -END $$; - -DO $$ -BEGIN - IF NOT EXISTS ( - SELECT 1 - FROM pg_extension - WHERE extname = 'fuzzystrmatch' - ) THEN - CREATE EXTENSION fuzzystrmatch IF NOT EXISTS - SCHEMA extensions; - END IF; -END $$; - -CREATE TABLE IF NOT EXISTS "public"."secrets" ( - "key" "text" PRIMARY KEY, - "value" "text" NOT NULL -); - -ALTER TABLE "public"."secrets" OWNER TO "postgres"; - -CREATE TABLE "public"."user_data" ( - owner_id INT, - target_id INT, - data JSONB, - PRIMARY KEY (owner_id, target_id), - FOREIGN KEY (owner_id) REFERENCES accounts(id), - FOREIGN KEY (target_id) REFERENCES accounts(id) -); - -ALTER TABLE "public"."user_data" OWNER TO "postgres"; - -CREATE OR REPLACE FUNCTION "public"."after_account_created"() RETURNS "trigger" - LANGUAGE "plpgsql" SECURITY DEFINER - SET "search_path" TO 'extensions', 'public', 'pg_temp' - AS $$ -DECLARE - response RECORD; -- Define response with the expected return type - newuser_url TEXT; - token TEXT; -BEGIN - -- Retrieve the newuser URL and token from the secrets table - SELECT value INTO newuser_url FROM secrets WHERE key = 'newuser_url'; - SELECT value INTO token FROM secrets WHERE key = 'token'; - - -- Ensure newuser_url and token are both defined and not empty - IF newuser_url IS NOT NULL AND newuser_url <> '' AND token IS NOT NULL AND token <> '' THEN - -- Make the HTTP POST request to the endpoint - SELECT * INTO response FROM http_post( - newuser_url, - jsonb_build_object( - 'token', token, - 'userId', NEW.id::text - ) - ); - END IF; - - RETURN NEW; -END; -$$; - -ALTER FUNCTION "public"."after_account_created"() OWNER TO "postgres"; - -CREATE OR REPLACE FUNCTION "public"."check_similarity_and_insert"("query_table_name" "text", "query_userId" "uuid", "query_content" "jsonb", "query_roomId" "uuid", "query_embedding" "extensions"."vector", "similarity_threshold" double precision, "query_createdAt" "timestamp with time zone") -RETURNS "void" -LANGUAGE "plpgsql" -AS $$ -DECLARE - similar_found BOOLEAN := FALSE; - select_query TEXT; - insert_query TEXT; -BEGIN - -- Only perform the similarity check if query_embedding is not NULL - IF query_embedding IS NOT NULL THEN - -- Build a dynamic query to check for existing similar embeddings using cosine distance - select_query := format( - 'SELECT EXISTS (' || - 'SELECT 1 ' || - 'FROM memories ' || - 'WHERE userId = %L ' || - 'AND roomId = %L ' || - 'AND type = %L ' || -- Filter by the 'type' field using query_table_name - 'AND embedding <=> %L < %L ' || - 'LIMIT 1' || - ')', - query_userId, - query_roomId, - query_table_name, -- Use query_table_name to filter by 'type' - query_embedding, - similarity_threshold - ); - - -- Execute the query to check for similarity - EXECUTE select_query INTO similar_found; - END IF; - - -- Prepare the insert query with 'unique' field set based on the presence of similar records or NULL query_embedding - insert_query := format( - 'INSERT INTO memories (userId, content, roomId, type, embedding, "unique", createdAt) ' || -- Insert into the 'memories' table - 'VALUES (%L, %L, %L, %L, %L, %L, %L)', - query_userId, - query_content, - query_roomId, - query_table_name, -- Use query_table_name as the 'type' value - query_embedding, - NOT similar_found OR query_embedding IS NULL -- Set 'unique' to true if no similar record is found or query_embedding is NULL - ); - - -- Execute the insert query - EXECUTE insert_query; -END; -$$; - -ALTER FUNCTION "public"."check_similarity_and_insert"("query_table_name" "text", "query_userId" "uuid", "query_content" "jsonb", "query_roomId" "uuid", "query_embedding" "extensions"."vector", "similarity_threshold" double precision) OWNER TO "postgres"; - -CREATE OR REPLACE FUNCTION "public"."count_memories"("query_table_name" "text", "query_roomId" "uuid", "query_unique" boolean DEFAULT false) RETURNS bigint - LANGUAGE "plpgsql" - AS $$ -DECLARE - query TEXT; - total BIGINT; -BEGIN - -- Initialize the base query - query := format('SELECT COUNT(*) FROM memories WHERE type = %L', query_table_name); - - -- Add condition for roomId if not null, ensuring proper spacing - IF query_roomId IS NOT NULL THEN - query := query || format(' AND roomId = %L', query_roomId); - END IF; - - -- Add condition for unique if TRUE, ensuring proper spacing - IF query_unique THEN - query := query || ' AND "unique" = TRUE'; -- Use double quotes if "unique" is a reserved keyword or potentially problematic - END IF; - - -- Debug: Output the constructed query - RAISE NOTICE 'Executing query: %', query; - - -- Execute the constructed query - EXECUTE query INTO total; - RETURN total; -END; -$$; - - -ALTER FUNCTION "public"."count_memories"("query_table_name" "text", "query_roomId" "uuid", "query_unique" boolean) OWNER TO "postgres"; - -CREATE OR REPLACE FUNCTION "public"."create_room"(roomId uuid) - RETURNS TABLE(id uuid) - LANGUAGE plpgsql -AS $function$ -BEGIN - -- Check if the room already exists - IF EXISTS (SELECT 1 FROM rooms WHERE rooms.id = roomId) THEN - RETURN QUERY SELECT rooms.id FROM rooms WHERE rooms.id = roomId; - ELSE - -- Create a new room with the provided roomId - RETURN QUERY INSERT INTO rooms (id) VALUES (roomId) RETURNING rooms.id; - END IF; -END; -$function$ - -ALTER FUNCTION "public"."create_room"() OWNER TO "postgres"; - -CREATE OR REPLACE FUNCTION "public"."create_friendship_with_host_agent"() RETURNS "trigger" - LANGUAGE "plpgsql" - AS $$ -DECLARE - host_agent_id UUID := '00000000-0000-0000-0000-000000000000'; - new_roomId UUID; -BEGIN - -- Create a new room for the direct message between the new user and the host agent - INSERT INTO rooms DEFAULT VALUES - RETURNING id INTO new_roomId; - - -- Create a new friendship between the new user and the host agent - INSERT INTO relationships (userA, userB, userId, status) - VALUES (NEW.id, host_agent_id, host_agent_id, 'FRIENDS'); - - -- Add both users as participants of the new room - INSERT INTO participants (userId, roomId) - VALUES (NEW.id, new_roomId), (host_agent_id, new_roomId); - - RETURN NEW; -END; -$$; - -ALTER FUNCTION "public"."create_friendship_with_host_agent"() OWNER TO "postgres"; - -CREATE OR REPLACE FUNCTION "public"."fn_notify_agents"() RETURNS "trigger" - LANGUAGE "plpgsql" - AS $$ -DECLARE - participant RECORD; - agent_flag BOOLEAN; - response RECORD; - payload TEXT; - message_url TEXT; - token TEXT; -BEGIN - -- Retrieve the message URL and token from the secrets table - SELECT value INTO message_url FROM secrets WHERE key = 'message_url'; - SELECT value INTO token FROM secrets WHERE key = 'token'; - - -- Iterate over the participants of the room - FOR participant IN ( - SELECT p.userId - FROM participants p - WHERE p.roomId = NEW.roomId - ) - LOOP - -- Check if the participant is an agent - SELECT is_agent INTO agent_flag FROM accounts WHERE id = participant.userId; - - -- Add a condition to ensure the sender is not the agent - IF agent_flag AND NEW.userId <> participant.userId THEN - -- Construct the payload JSON object and explicitly cast to TEXT - payload := jsonb_build_object( - 'token', token, - 'senderId', NEW.userId::text, - 'content', NEW.content, - 'roomId', NEW.roomId::text - )::text; - - -- Make the HTTP POST request to the Cloudflare worker endpoint - SELECT * INTO response FROM http_post( - message_url, - payload, - 'application/json' - ); - END IF; - END LOOP; - - RETURN NEW; -END; -$$; - - - -ALTER FUNCTION "public"."fn_notify_agents"() OWNER TO "postgres"; - -CREATE OR REPLACE FUNCTION "public"."get_embedding_list"("query_table_name" "text", "query_threshold" integer, "query_input" "text", "query_field_name" "text", "query_field_sub_name" "text", "query_match_count" integer) -RETURNS TABLE("embedding" "extensions"."vector", "levenshtein_score" integer) -LANGUAGE "plpgsql" -AS $$ -DECLARE - QUERY TEXT; -BEGIN - -- Check the length of query_input - IF LENGTH(query_input) > 255 THEN - -- For inputs longer than 255 characters, use exact match only - QUERY := format(' - SELECT - embedding - FROM - memories - WHERE - type = $1 AND - (content->>''%s'')::TEXT = $2 - LIMIT - $3 - ', query_field_name); - -- Execute the query with adjusted parameters for exact match - RETURN QUERY EXECUTE QUERY USING query_table_name, query_input, query_match_count; - ELSE - -- For inputs of 255 characters or less, use Levenshtein distance - QUERY := format(' - SELECT - embedding, - levenshtein($2, (content->>''%s'')::TEXT) AS levenshtein_score - FROM - memories - WHERE - type = $1 AND - levenshtein($2, (content->>''%s'')::TEXT) <= $3 - ORDER BY - levenshtein_score - LIMIT - $4 - ', query_field_name, query_field_name); - -- Execute the query with original parameters for Levenshtein distance - RETURN QUERY EXECUTE QUERY USING query_table_name, query_input, query_threshold, query_match_count; - END IF; -END; -$$; - -ALTER FUNCTION "public"."get_embedding_list"("query_table_name" "text", "query_threshold" integer, "query_input" "text", "query_field_name" "text", "query_field_sub_name" "text", "query_match_count" integer) OWNER TO "postgres"; - -SET default_tablespace = ''; - -SET default_table_access_method = "heap"; - -CREATE TABLE IF NOT EXISTS "public"."goals" ( - "id" "uuid" DEFAULT "gen_random_uuid"() NOT NULL, - "createdAt" timestamp with time zone DEFAULT "now"() NOT NULL, - "userId" "uuid", - "roomId" "uuid", - "status" "text", - "name" "text", - "objectives" "jsonb"[] DEFAULT '{}'::"jsonb"[] NOT NULL -); - -ALTER TABLE "public"."goals" OWNER TO "postgres"; - -CREATE OR REPLACE FUNCTION "public"."get_goals"("query_roomId" "uuid", "query_userId" "uuid" DEFAULT NULL::"uuid", "only_in_progress" boolean DEFAULT true, "row_count" integer DEFAULT 5) RETURNS SETOF "public"."goals" - LANGUAGE "plpgsql" - AS $$ -BEGIN - RETURN QUERY - SELECT * FROM goals - WHERE - (query_userId IS NULL OR userId = query_userId) - AND (roomId = query_roomId) - AND (NOT only_in_progress OR status = 'IN_PROGRESS') - LIMIT row_count; -END; -$$; - -ALTER FUNCTION "public"."get_goals"("query_roomId" "uuid", "query_userId" "uuid", "only_in_progress" boolean, "row_count" integer) OWNER TO "postgres"; - -CREATE TABLE IF NOT EXISTS "public"."relationships" ( - "createdAt" timestamp with time zone DEFAULT ("now"() AT TIME ZONE 'utc'::"text") NOT NULL, - "userA" "uuid", - "userB" "uuid", - "status" "text", - "id" "uuid" DEFAULT "gen_random_uuid"() NOT NULL, - "userId" "uuid" NOT NULL -); - -ALTER TABLE "public"."relationships" OWNER TO "postgres"; - -CREATE OR REPLACE FUNCTION "public"."get_relationship"("usera" "uuid", "userb" "uuid") RETURNS SETOF "public"."relationships" - LANGUAGE "plpgsql" STABLE - AS $$ -BEGIN - RETURN QUERY - SELECT * - FROM relationships - WHERE (userA = usera AND userB = userb) - OR (userA = userb AND userB = usera); -END; -$$; - -ALTER FUNCTION "public"."get_relationship"("usera" "uuid", "userb" "uuid") OWNER TO "postgres"; - -CREATE OR REPLACE FUNCTION "public"."remove_memories"("query_table_name" "text", "query_roomId" "uuid") RETURNS "void" - LANGUAGE "plpgsql" - AS $_$DECLARE - dynamic_query TEXT; -BEGIN - dynamic_query := format('DELETE FROM memories WHERE roomId = $1 AND type = $2'); - EXECUTE dynamic_query USING query_roomId, query_table_name; -END; -$_$; - - -ALTER FUNCTION "public"."remove_memories"("query_table_name" "text", "query_roomId" "uuid") OWNER TO "postgres"; - -CREATE OR REPLACE FUNCTION "public"."search_memories"("query_table_name" "text", "query_roomId" "uuid", "query_embedding" "extensions"."vector", "query_match_threshold" double precision, "query_match_count" integer, "query_unique" boolean) -RETURNS TABLE("id" "uuid", "userId" "uuid", "content" "jsonb", "createdAt" timestamp with time zone, "similarity" double precision, "roomId" "uuid", "embedding" "extensions"."vector") -LANGUAGE "plpgsql" -AS $$ -DECLARE - query TEXT; -BEGIN - query := format($fmt$ - SELECT - id, - userId, - content, - createdAt, - 1 - (embedding <=> %L) AS similarity, -- Use '<=>' for cosine distance - roomId, - embedding - FROM memories - WHERE (1 - (embedding <=> %L) > %L) - AND type = %L - %s -- Additional condition for 'unique' column - %s -- Additional condition for 'roomId' - ORDER BY similarity DESC - LIMIT %L - $fmt$, - query_embedding, - query_embedding, - query_match_threshold, - query_table_name, - CASE WHEN query_unique THEN ' AND "unique" IS TRUE' ELSE '' END, - CASE WHEN query_roomId IS NOT NULL THEN format(' AND roomId = %L', query_roomId) ELSE '' END, - query_match_count - ); - - RETURN QUERY EXECUTE query; -END; -$$; - - - -ALTER FUNCTION "public"."search_memories"("query_table_name" "text", "query_roomId" "uuid", "query_embedding" "extensions"."vector", "query_match_threshold" double precision, "query_match_count" integer, "query_unique" boolean) OWNER TO "postgres"; - -CREATE TABLE IF NOT EXISTS "public"."accounts" ( - "id" "uuid" DEFAULT "auth"."uid"() NOT NULL, - "createdAt" timestamp with time zone DEFAULT ("now"() AT TIME ZONE 'utc'::"text") NOT NULL, - "name" "text", - "username" "text", - "email" "text" NOT NULL, - "avatarUrl" "text", - "details" "jsonb" DEFAULT '{}'::"jsonb", - "is_agent" boolean DEFAULT false NOT NULL, - "location" "text", - "profile_line" "text", - "signed_tos" boolean DEFAULT false NOT NULL -); - -ALTER TABLE "public"."accounts" OWNER TO "postgres"; - -CREATE TABLE IF NOT EXISTS "public"."logs" ( - "id" "uuid" DEFAULT "gen_random_uuid"() NOT NULL, - "createdAt" timestamp with time zone DEFAULT "now"() NOT NULL, - "userId" "uuid" NOT NULL, - "body" "jsonb" NOT NULL, - "type" "text" NOT NULL, - "roomId" "uuid" -); - -ALTER TABLE "public"."logs" OWNER TO "postgres"; - -CREATE TABLE IF NOT EXISTS "public"."memories" ( - "id" "uuid" DEFAULT "gen_random_uuid"() NOT NULL, - "createdAt" timestamp with time zone DEFAULT "now"() NOT NULL, - "content" "jsonb" NOT NULL, - "embedding" "extensions"."vector" NOT NULL, - "userId" "uuid", - "roomId" "uuid", - "unique" boolean DEFAULT true NOT NULL, - "type" "text" NOT NULL -); - -ALTER TABLE "public"."memories" OWNER TO "postgres"; - -CREATE TABLE IF NOT EXISTS "public"."participants" ( - "createdAt" timestamp with time zone DEFAULT ("now"() AT TIME ZONE 'utc'::"text") NOT NULL, - "userId" "uuid", - "roomId" "uuid", - "userState" "text" DEFAULT NULL, -- Add userState field to track MUTED, NULL, or FOLLOWED - "id" "uuid" DEFAULT "gen_random_uuid"() NOT NULL, - "last_message_read" "uuid", - FOREIGN KEY ("userId") REFERENCES "accounts"("id"), - FOREIGN KEY ("roomId") REFERENCES "rooms"("id") -); - - -ALTER TABLE "public"."participants" OWNER TO "postgres"; - - -CREATE OR REPLACE FUNCTION "public"."get_participant_userState"("roomId" "uuid", "userId" "uuid") -RETURNS "text" -LANGUAGE "plpgsql" -AS $$ -BEGIN - RETURN ( - SELECT userState - FROM participants - WHERE roomId = $1 AND userId = $2 - ); -END; -$$; - -CREATE OR REPLACE FUNCTION "public"."set_participant_userState"("roomId" "uuid", "userId" "uuid", "state" "text") -RETURNS "void" -LANGUAGE "plpgsql" -AS $$ -BEGIN - UPDATE participants - SET userState = $3 - WHERE roomId = $1 AND userId = $2; -END; -$$; - -CREATE TABLE IF NOT EXISTS "public"."rooms" ( - "id" "uuid" DEFAULT "gen_random_uuid"() NOT NULL, - "createdAt" timestamp with time zone DEFAULT ("now"() AT TIME ZONE 'utc'::"text") NOT NULL -); - -CREATE OR REPLACE FUNCTION "public"."search_knowledge"( - "query_embedding" "extensions"."vector", - "query_agent_id" "uuid", - "match_threshold" double precision, - "match_count" integer, - "search_text" text -) RETURNS TABLE ( - "id" "uuid", - "agentId" "uuid", - "content" "jsonb", - "embedding" "extensions"."vector", - "createdAt" timestamp with time zone, - "similarity" double precision -) LANGUAGE "plpgsql" AS $$ -BEGIN - RETURN QUERY - WITH vector_matches AS ( - SELECT id, - 1 - (embedding <=> query_embedding) as vector_score - FROM knowledge - WHERE (agentId IS NULL AND isShared = true) OR agentId = query_agent_id - AND embedding IS NOT NULL - ), - keyword_matches AS ( - SELECT id, - CASE - WHEN content->>'text' ILIKE '%' || search_text || '%' THEN 3.0 - ELSE 1.0 - END * - CASE - WHEN content->'metadata'->>'isChunk' = 'true' THEN 1.5 - WHEN content->'metadata'->>'isMain' = 'true' THEN 1.2 - ELSE 1.0 - END as keyword_score - FROM knowledge - WHERE (agentId IS NULL AND isShared = true) OR agentId = query_agent_id - ) - SELECT - k.id, - k."agentId", - k.content, - k.embedding, - k."createdAt", - (v.vector_score * kw.keyword_score) as similarity - FROM knowledge k - JOIN vector_matches v ON k.id = v.id - LEFT JOIN keyword_matches kw ON k.id = kw.id - WHERE (k.agentId IS NULL AND k.isShared = true) OR k.agentId = query_agent_id - AND ( - v.vector_score >= match_threshold - OR (kw.keyword_score > 1.0 AND v.vector_score >= 0.3) - ) - ORDER BY similarity DESC - LIMIT match_count; -END; -$$; - -ALTER TABLE "public"."rooms" OWNER TO "postgres"; - -ALTER TABLE ONLY "public"."relationships" - ADD CONSTRAINT "friendships_id_key" UNIQUE ("id"); - -ALTER TABLE ONLY "public"."relationships" - ADD CONSTRAINT "friendships_pkey" PRIMARY KEY ("id"); - -ALTER TABLE ONLY "public"."goals" - ADD CONSTRAINT "goals_pkey" PRIMARY KEY ("id"); - -ALTER TABLE ONLY "public"."logs" - ADD CONSTRAINT "logs_pkey" PRIMARY KEY ("id"); - -ALTER TABLE ONLY "public"."participants" - ADD CONSTRAINT "participants_id_key" UNIQUE ("id"); - -ALTER TABLE ONLY "public"."participants" - ADD CONSTRAINT "participants_pkey" PRIMARY KEY ("id"); - -ALTER TABLE ONLY "public"."memories" - ADD CONSTRAINT "memories_pkey" PRIMARY KEY ("id"); - -ALTER TABLE ONLY "public"."rooms" - ADD CONSTRAINT "rooms_pkey" PRIMARY KEY ("id"); - -ALTER TABLE ONLY "public"."accounts" - ADD CONSTRAINT "users_email_key" UNIQUE ("email"); - -ALTER TABLE ONLY "public"."accounts" - ADD CONSTRAINT "users_pkey" PRIMARY KEY ("id"); - -CREATE OR REPLACE TRIGGER "trigger_after_account_created" AFTER INSERT ON "public"."accounts" FOR EACH ROW EXECUTE FUNCTION "public"."after_account_created"(); - -CREATE OR REPLACE TRIGGER "trigger_create_friendship_with_host_agent" AFTER INSERT ON "public"."accounts" FOR EACH ROW EXECUTE FUNCTION "public"."create_friendship_with_host_agent"(); - -ALTER TABLE ONLY "public"."participants" - ADD CONSTRAINT "participants_roomId_fkey" FOREIGN KEY ("roomId") REFERENCES "public"."rooms"("id"); - -ALTER TABLE ONLY "public"."participants" - ADD CONSTRAINT "participants_userId_fkey" FOREIGN KEY ("userId") REFERENCES "public"."accounts"("id"); - -ALTER TABLE ONLY "public"."memories" - ADD CONSTRAINT "memories_roomId_fkey" FOREIGN KEY ("roomId") REFERENCES "public"."rooms"("id"); - -ALTER TABLE ONLY "public"."memories" - ADD CONSTRAINT "memories_userId_fkey" FOREIGN KEY ("userId") REFERENCES "public"."accounts"("id"); - -ALTER TABLE ONLY "public"."relationships" - ADD CONSTRAINT "relationships_userA_fkey" FOREIGN KEY ("userA") REFERENCES "public"."accounts"("id"); - -ALTER TABLE ONLY "public"."relationships" - ADD CONSTRAINT "relationships_userB_fkey" FOREIGN KEY ("userB") REFERENCES "public"."accounts"("id"); - -ALTER TABLE ONLY "public"."relationships" - ADD CONSTRAINT "relationships_userId_fkey" FOREIGN KEY ("userId") REFERENCES "public"."accounts"("id"); - -ALTER TABLE ONLY "public"."knowledge" - ADD CONSTRAINT "knowledge_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "public"."accounts"("id") ON DELETE CASCADE; - -CREATE POLICY "Can select and update all data" ON "public"."accounts" USING (("auth"."uid"() = "id")) WITH CHECK (("auth"."uid"() = "id")); - -CREATE POLICY "Enable delete for users based on userId" ON "public"."goals" FOR DELETE TO "authenticated" USING (("auth"."uid"() = "userId")); - -CREATE POLICY "Enable insert for authenticated users only" ON "public"."accounts" FOR INSERT TO "authenticated", "anon", "service_role", "supabase_replication_admin", "supabase_read_only_user" WITH CHECK (true); - -CREATE POLICY "Enable insert for authenticated users only" ON "public"."goals" FOR INSERT TO "authenticated" WITH CHECK (true); - -CREATE POLICY "Enable insert for authenticated users only" ON "public"."logs" FOR INSERT TO "authenticated", "anon" WITH CHECK (true); - -CREATE POLICY "Enable insert for authenticated users only" ON "public"."participants" FOR INSERT TO "authenticated" WITH CHECK (true); - -CREATE POLICY "Enable insert for authenticated users only" ON "public"."relationships" FOR INSERT TO "authenticated" WITH CHECK ((("auth"."uid"() = "userA") OR ("auth"."uid"() = "userB"))); - -CREATE POLICY "Enable insert for authenticated users only" ON "public"."rooms" FOR INSERT WITH CHECK (true); - -CREATE POLICY "Enable insert for self id" ON "public"."participants" USING (("auth"."uid"() = "userId")) WITH CHECK (("auth"."uid"() = "userId")); - -CREATE POLICY "Enable read access for all users" ON "public"."accounts" FOR SELECT USING (true); - -CREATE POLICY "Enable read access for all users" ON "public"."goals" FOR SELECT USING (true); - -CREATE POLICY "Enable read access for all users" ON "public"."relationships" FOR SELECT TO "authenticated" USING (true); - -CREATE POLICY "Enable read access for all users" ON "public"."rooms" FOR SELECT TO "authenticated" USING (true); - -CREATE POLICY "Enable read access for own rooms" ON "public"."participants" FOR SELECT TO "authenticated" USING (("auth"."uid"() = "userId")); - -CREATE POLICY "Enable read access for user to their own relationships" ON "public"."relationships" FOR SELECT TO "authenticated" USING ((("auth"."uid"() = "userA") OR ("auth"."uid"() = "userB"))); - -CREATE POLICY "Enable update for users based on email" ON "public"."goals" FOR UPDATE TO "authenticated" USING (true) WITH CHECK (true); - -CREATE POLICY "Enable update for users of own id" ON "public"."rooms" FOR UPDATE USING (true) WITH CHECK (true); - -CREATE POLICY "Enable users to delete their own relationships/friendships" ON "public"."relationships" FOR DELETE TO "authenticated" USING ((("auth"."uid"() = "userA") OR ("auth"."uid"() = "userB"))); - -CREATE POLICY "Enable read access for all users" ON "public"."knowledge" - FOR SELECT USING (true); - -CREATE POLICY "Enable insert for authenticated users only" ON "public"."knowledge" - FOR INSERT TO "authenticated" WITH CHECK (true); - -CREATE POLICY "Enable update for authenticated users" ON "public"."knowledge" - FOR UPDATE TO "authenticated" USING (true) WITH CHECK (true); - -CREATE POLICY "Enable delete for users based on agentId" ON "public"."knowledge" - FOR DELETE TO "authenticated" USING (("auth"."uid"() = "agentId")); - -ALTER TABLE "public"."accounts" ENABLE ROW LEVEL SECURITY; - -ALTER TABLE "public"."goals" ENABLE ROW LEVEL SECURITY; - -ALTER TABLE "public"."logs" ENABLE ROW LEVEL SECURITY; - -ALTER TABLE "public"."memories" ENABLE ROW LEVEL SECURITY; - -ALTER TABLE "public"."participants" ENABLE ROW LEVEL SECURITY; - -ALTER TABLE "public"."relationships" ENABLE ROW LEVEL SECURITY; - -ALTER TABLE "public"."rooms" ENABLE ROW LEVEL SECURITY; - -ALTER TABLE "public"."knowledge" ENABLE ROW LEVEL SECURITY; - -CREATE POLICY "select_own_account" ON "public"."accounts" FOR SELECT USING (("auth"."uid"() = "id")); - -GRANT USAGE ON SCHEMA "public" TO "postgres"; -GRANT USAGE ON SCHEMA "public" TO "authenticated"; -GRANT USAGE ON SCHEMA "public" TO "service_role"; -GRANT USAGE ON SCHEMA "public" TO "supabase_admin"; -GRANT USAGE ON SCHEMA "public" TO "supabase_auth_admin"; - -GRANT ALL ON FUNCTION "public"."after_account_created"() TO "authenticated"; -GRANT ALL ON FUNCTION "public"."after_account_created"() TO "service_role"; -GRANT ALL ON FUNCTION "public"."after_account_created"() TO "supabase_admin"; -GRANT ALL ON FUNCTION "public"."after_account_created"() TO "supabase_auth_admin"; - -GRANT ALL ON FUNCTION "public"."count_memories"("query_table_name" "text", "query_roomId" "uuid", "query_unique" boolean) TO "authenticated"; -GRANT ALL ON FUNCTION "public"."count_memories"("query_table_name" "text", "query_roomId" "uuid", "query_unique" boolean) TO "service_role"; -GRANT ALL ON FUNCTION "public"."count_memories"("query_table_name" "text", "query_roomId" "uuid", "query_unique" boolean) TO "supabase_admin"; -GRANT ALL ON FUNCTION "public"."count_memories"("query_table_name" "text", "query_roomId" "uuid", "query_unique" boolean) TO "supabase_auth_admin"; - -GRANT ALL ON FUNCTION "public"."create_friendship_with_host_agent"() TO "authenticated"; -GRANT ALL ON FUNCTION "public"."create_friendship_with_host_agent"() TO "service_role"; -GRANT ALL ON FUNCTION "public"."create_friendship_with_host_agent"() TO "supabase_admin"; -GRANT ALL ON FUNCTION "public"."create_friendship_with_host_agent"() TO "supabase_auth_admin"; - -GRANT ALL ON FUNCTION "public"."fn_notify_agents"() TO "authenticated"; -GRANT ALL ON FUNCTION "public"."fn_notify_agents"() TO "service_role"; -GRANT ALL ON FUNCTION "public"."fn_notify_agents"() TO "supabase_admin"; -GRANT ALL ON FUNCTION "public"."fn_notify_agents"() TO "supabase_auth_admin"; - -GRANT ALL ON FUNCTION "public"."get_embedding_list"("query_table_name" "text", "query_threshold" integer, "query_input" "text", "query_field_name" "text", "query_field_sub_name" "text", "query_match_count" integer) TO "authenticated"; -GRANT ALL ON FUNCTION "public"."get_embedding_list"("query_table_name" "text", "query_threshold" integer, "query_input" "text", "query_field_name" "text", "query_field_sub_name" "text", "query_match_count" integer) TO "service_role"; -GRANT ALL ON FUNCTION "public"."get_embedding_list"("query_table_name" "text", "query_threshold" integer, "query_input" "text", "query_field_name" "text", "query_field_sub_name" "text", "query_match_count" integer) TO "supabase_admin"; -GRANT ALL ON FUNCTION "public"."get_embedding_list"("query_table_name" "text", "query_threshold" integer, "query_input" "text", "query_field_name" "text", "query_field_sub_name" "text", "query_match_count" integer) TO "supabase_auth_admin"; - -GRANT ALL ON TABLE "public"."goals" TO "authenticated"; -GRANT ALL ON TABLE "public"."goals" TO "service_role"; -GRANT ALL ON TABLE "public"."goals" TO "supabase_admin"; -GRANT ALL ON TABLE "public"."goals" TO "supabase_auth_admin"; - -GRANT ALL ON FUNCTION "public"."get_goals"("query_roomId" "uuid", "query_userId" "uuid", "only_in_progress" boolean, "row_count" integer) TO "authenticated"; -GRANT ALL ON FUNCTION "public"."get_goals"("query_roomId" "uuid", "query_userId" "uuid", "only_in_progress" boolean, "row_count" integer) TO "service_role"; -GRANT ALL ON FUNCTION "public"."get_goals"("query_roomId" "uuid", "query_userId" "uuid", "only_in_progress" boolean, "row_count" integer) TO "supabase_admin"; -GRANT ALL ON FUNCTION "public"."get_goals"("query_roomId" "uuid", "query_userId" "uuid", "only_in_progress" boolean, "row_count" integer) TO "supabase_auth_admin"; - -GRANT ALL ON TABLE "public"."relationships" TO "authenticated"; -GRANT ALL ON TABLE "public"."relationships" TO "service_role"; -GRANT ALL ON TABLE "public"."relationships" TO "supabase_admin"; -GRANT ALL ON TABLE "public"."relationships" TO "supabase_auth_admin"; - -GRANT ALL ON FUNCTION "public"."get_relationship"("usera" "uuid", "userb" "uuid") TO "authenticated"; -GRANT ALL ON FUNCTION "public"."get_relationship"("usera" "uuid", "userb" "uuid") TO "service_role"; -GRANT ALL ON FUNCTION "public"."get_relationship"("usera" "uuid", "userb" "uuid") TO "supabase_admin"; -GRANT ALL ON FUNCTION "public"."get_relationship"("usera" "uuid", "userb" "uuid") TO "supabase_auth_admin"; - -GRANT ALL ON FUNCTION "public"."remove_memories"("query_table_name" "text", "query_roomId" "uuid") TO "authenticated"; -GRANT ALL ON FUNCTION "public"."remove_memories"("query_table_name" "text", "query_roomId" "uuid") TO "service_role"; -GRANT ALL ON FUNCTION "public"."remove_memories"("query_table_name" "text", "query_roomId" "uuid") TO "supabase_admin"; -GRANT ALL ON FUNCTION "public"."remove_memories"("query_table_name" "text", "query_roomId" "uuid") TO "supabase_auth_admin"; - -GRANT ALL ON TABLE "public"."accounts" TO "authenticated"; -GRANT ALL ON TABLE "public"."accounts" TO "service_role"; -GRANT SELECT,INSERT ON TABLE "public"."accounts" TO "authenticator"; -GRANT ALL ON TABLE "public"."accounts" TO "supabase_admin"; -GRANT ALL ON TABLE "public"."accounts" TO "supabase_auth_admin"; - -GRANT ALL ON TABLE "public"."logs" TO "authenticated"; -GRANT ALL ON TABLE "public"."logs" TO "service_role"; -GRANT ALL ON TABLE "public"."logs" TO "supabase_admin"; -GRANT ALL ON TABLE "public"."logs" TO "supabase_auth_admin"; - -GRANT ALL ON TABLE "public"."memories" TO "authenticated"; -GRANT ALL ON TABLE "public"."memories" TO "service_role"; -GRANT ALL ON TABLE "public"."memories" TO "supabase_admin"; -GRANT ALL ON TABLE "public"."memories" TO "supabase_auth_admin"; - -GRANT ALL ON TABLE "public"."participants" TO "authenticated"; -GRANT ALL ON TABLE "public"."participants" TO "service_role"; -GRANT ALL ON TABLE "public"."participants" TO "supabase_admin"; -GRANT ALL ON TABLE "public"."participants" TO "supabase_auth_admin"; - -GRANT ALL ON TABLE "public"."rooms" TO "authenticated"; -GRANT ALL ON TABLE "public"."rooms" TO "service_role"; -GRANT ALL ON TABLE "public"."rooms" TO "supabase_admin"; -GRANT ALL ON TABLE "public"."rooms" TO "supabase_auth_admin"; - -GRANT ALL ON TABLE "public"."secrets" TO "authenticated"; -GRANT ALL ON TABLE "public"."secrets" TO "service_role"; -GRANT ALL ON TABLE "public"."secrets" TO "supabase_admin"; -GRANT ALL ON TABLE "public"."secrets" TO "supabase_auth_admin"; - -GRANT ALL ON TABLE "public"."knowledge" TO "authenticated"; -GRANT ALL ON TABLE "public"."knowledge" TO "service_role"; -GRANT ALL ON TABLE "public"."knowledge" TO "supabase_admin"; -GRANT ALL ON TABLE "public"."knowledge" TO "supabase_auth_admin"; - -GRANT ALL ON FUNCTION "public"."get_participant_userState"("roomId" "uuid", "userId" "uuid") TO "authenticated"; -GRANT ALL ON FUNCTION "public"."get_participant_userState"("roomId" "uuid", "userId" "uuid") TO "service_role"; -GRANT ALL ON FUNCTION "public"."get_participant_userState"("roomId" "uuid", "userId" "uuid") TO "supabase_admin"; -GRANT ALL ON FUNCTION "public"."get_participant_userState"("roomId" "uuid", "userId" "uuid") TO "supabase_auth_admin"; - -GRANT ALL ON FUNCTION "public"."set_participant_userState"("roomId" "uuid", "userId" "uuid", "state" "text") TO "authenticated"; -GRANT ALL ON FUNCTION "public"."set_participant_userState"("roomId" "uuid", "userId" "uuid", "state" "text") TO "service_role"; -GRANT ALL ON FUNCTION "public"."set_participant_userState"("roomId" "uuid", "userId" "uuid", "state" "text") TO "supabase_admin"; -GRANT ALL ON FUNCTION "public"."set_participant_userState"("roomId" "uuid", "userId" "uuid", "state" "text") TO "supabase_auth_admin"; - - -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON SEQUENCES TO "postgres"; -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON SEQUENCES TO "authenticated"; -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON SEQUENCES TO "service_role"; -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON SEQUENCES TO "supabase_admin"; -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON SEQUENCES TO "supabase_auth_admin"; - -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON FUNCTIONS TO "postgres"; -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON FUNCTIONS TO "authenticated"; -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON FUNCTIONS TO "service_role"; -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON FUNCTIONS TO "supabase_admin"; -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON FUNCTIONS TO "supabase_auth_admin"; - -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON TABLES TO "postgres"; -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON TABLES TO "authenticated"; -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON TABLES TO "service_role"; -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON TABLES TO "supabase_admin"; -ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON TABLES TO "supabase_auth_admin"; - -GRANT ALL ON FUNCTION "public"."search_knowledge"("query_embedding" "extensions"."vector", "query_agent_id" "uuid", "match_threshold" double precision, "match_count" integer, "search_text" text) TO "authenticated"; -GRANT ALL ON FUNCTION "public"."search_knowledge"("query_embedding" "extensions"."vector", "query_agent_id" "uuid", "match_threshold" double precision, "match_count" integer, "search_text" text) TO "service_role"; -GRANT ALL ON FUNCTION "public"."search_knowledge"("query_embedding" "extensions"."vector", "query_agent_id" "uuid", "match_threshold" double precision, "match_count" integer, "search_text" text) TO "supabase_admin"; -GRANT ALL ON FUNCTION "public"."search_knowledge"("query_embedding" "extensions"."vector", "query_agent_id" "uuid", "match_threshold" double precision, "match_count" integer, "search_text" text) TO "supabase_auth_admin"; - -RESET ALL; \ No newline at end of file diff --git a/packages/adapter-postgres/package.json b/packages/adapter-postgres/package.json deleted file mode 100644 index 02fbaa7de5cca..0000000000000 --- a/packages/adapter-postgres/package.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "name": "@elizaos/adapter-postgres", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist", - "schema.sql", - "seed.sql" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "@types/pg": "8.11.10", - "pg": "8.13.1" - }, - "devDependencies": { - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch" - } -} diff --git a/packages/adapter-postgres/schema.sql b/packages/adapter-postgres/schema.sql deleted file mode 100644 index 7a367318e1f51..0000000000000 --- a/packages/adapter-postgres/schema.sql +++ /dev/null @@ -1,168 +0,0 @@ --- Enable pgvector extension - --- -- Drop existing tables and extensions --- DROP EXTENSION IF EXISTS vector CASCADE; --- DROP TABLE IF EXISTS relationships CASCADE; --- DROP TABLE IF EXISTS participants CASCADE; --- DROP TABLE IF EXISTS logs CASCADE; --- DROP TABLE IF EXISTS goals CASCADE; --- DROP TABLE IF EXISTS memories CASCADE; --- DROP TABLE IF EXISTS rooms CASCADE; --- DROP TABLE IF EXISTS accounts CASCADE; --- DROP TABLE IF EXISTS knowledge CASCADE; - - -CREATE EXTENSION IF NOT EXISTS vector; -CREATE EXTENSION IF NOT EXISTS fuzzystrmatch; - --- Create a function to determine vector dimension -CREATE OR REPLACE FUNCTION get_embedding_dimension() -RETURNS INTEGER AS $$ -BEGIN - -- Check for OpenAI first - IF current_setting('app.use_openai_embedding', TRUE) = 'true' THEN - RETURN 1536; -- OpenAI dimension - -- Then check for Ollama - ELSIF current_setting('app.use_ollama_embedding', TRUE) = 'true' THEN - RETURN 1024; -- Ollama mxbai-embed-large dimension - -- Then check for GAIANET - ELSIF current_setting('app.use_gaianet_embedding', TRUE) = 'true' THEN - RETURN 768; -- Gaianet nomic-embed dimension - ELSE - RETURN 384; -- BGE/Other embedding dimension - END IF; -END; -$$ LANGUAGE plpgsql; - -BEGIN; - -CREATE TABLE IF NOT EXISTS accounts ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "name" TEXT, - "username" TEXT, - "email" TEXT NOT NULL, - "avatarUrl" TEXT, - "details" JSONB DEFAULT '{}'::jsonb -); - -CREATE TABLE IF NOT EXISTS rooms ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP -); - -DO $$ -DECLARE - vector_dim INTEGER; -BEGIN - vector_dim := get_embedding_dimension(); - - EXECUTE format(' - CREATE TABLE IF NOT EXISTS memories ( - "id" UUID PRIMARY KEY, - "type" TEXT NOT NULL, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "content" JSONB NOT NULL, - "embedding" vector(%s), - "userId" UUID REFERENCES accounts("id"), - "agentId" UUID REFERENCES accounts("id"), - "roomId" UUID REFERENCES rooms("id"), - "unique" BOOLEAN DEFAULT true NOT NULL, - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE, - CONSTRAINT fk_agent FOREIGN KEY ("agentId") REFERENCES accounts("id") ON DELETE CASCADE - )', vector_dim); -END $$; - -CREATE TABLE IF NOT EXISTS goals ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "userId" UUID REFERENCES accounts("id"), - "name" TEXT, - "status" TEXT, - "description" TEXT, - "roomId" UUID REFERENCES rooms("id"), - "objectives" JSONB DEFAULT '[]'::jsonb NOT NULL, - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE IF NOT EXISTS logs ( - "id" UUID PRIMARY KEY DEFAULT gen_random_uuid(), - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "userId" UUID NOT NULL REFERENCES accounts("id"), - "body" JSONB NOT NULL, - "type" TEXT NOT NULL, - "roomId" UUID NOT NULL REFERENCES rooms("id"), - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE IF NOT EXISTS participants ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "userId" UUID REFERENCES accounts("id"), - "roomId" UUID REFERENCES rooms("id"), - "userState" TEXT, - "last_message_read" TEXT, - UNIQUE("userId", "roomId"), - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE IF NOT EXISTS relationships ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "userA" UUID NOT NULL REFERENCES accounts("id"), - "userB" UUID NOT NULL REFERENCES accounts("id"), - "status" TEXT, - "userId" UUID NOT NULL REFERENCES accounts("id"), - CONSTRAINT fk_user_a FOREIGN KEY ("userA") REFERENCES accounts("id") ON DELETE CASCADE, - CONSTRAINT fk_user_b FOREIGN KEY ("userB") REFERENCES accounts("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE IF NOT EXISTS cache ( - "key" TEXT NOT NULL, - "agentId" TEXT NOT NULL, - "value" JSONB DEFAULT '{}'::jsonb, - "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - "expiresAt" TIMESTAMP, - PRIMARY KEY ("key", "agentId") -); - -DO $$ -DECLARE - vector_dim INTEGER; -BEGIN - vector_dim := get_embedding_dimension(); - - EXECUTE format(' - CREATE TABLE IF NOT EXISTS knowledge ( - "id" UUID PRIMARY KEY, - "agentId" UUID REFERENCES accounts("id"), - "content" JSONB NOT NULL, - "embedding" vector(%s), - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "isMain" BOOLEAN DEFAULT FALSE, - "originalId" UUID REFERENCES knowledge("id"), - "chunkIndex" INTEGER, - "isShared" BOOLEAN DEFAULT FALSE, - CHECK(("isShared" = true AND "agentId" IS NULL) OR ("isShared" = false AND "agentId" IS NOT NULL)) - )', vector_dim); -END $$; - --- Indexes -CREATE INDEX IF NOT EXISTS idx_memories_embedding ON memories USING hnsw ("embedding" vector_cosine_ops); -CREATE INDEX IF NOT EXISTS idx_memories_type_room ON memories("type", "roomId"); -CREATE INDEX IF NOT EXISTS idx_participants_user ON participants("userId"); -CREATE INDEX IF NOT EXISTS idx_participants_room ON participants("roomId"); -CREATE INDEX IF NOT EXISTS idx_relationships_users ON relationships("userA", "userB"); -CREATE INDEX IF NOT EXISTS idx_knowledge_agent ON knowledge("agentId"); -CREATE INDEX IF NOT EXISTS idx_knowledge_agent_main ON knowledge("agentId", "isMain"); -CREATE INDEX IF NOT EXISTS idx_knowledge_original ON knowledge("originalId"); -CREATE INDEX IF NOT EXISTS idx_knowledge_created ON knowledge("agentId", "createdAt"); -CREATE INDEX IF NOT EXISTS idx_knowledge_shared ON knowledge("isShared"); -CREATE INDEX IF NOT EXISTS idx_knowledge_embedding ON knowledge USING ivfflat (embedding vector_cosine_ops); - -COMMIT; diff --git a/packages/adapter-postgres/seed.sql b/packages/adapter-postgres/seed.sql deleted file mode 100644 index 1cf1c088fc360..0000000000000 --- a/packages/adapter-postgres/seed.sql +++ /dev/null @@ -1,9 +0,0 @@ - -INSERT INTO public.accounts (id, name, email, "avatarUrl", details) -VALUES ('00000000-0000-0000-0000-000000000000', 'Default Agent', 'default@agent.com', '', '{}'::jsonb); - -INSERT INTO public.rooms (id) -VALUES ('00000000-0000-0000-0000-000000000000'); - -INSERT INTO public.participants (id, "userId", "roomId") -VALUES ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000'); diff --git a/packages/adapter-postgres/src/__tests__/README.md b/packages/adapter-postgres/src/__tests__/README.md deleted file mode 100644 index 98896ff4f2b84..0000000000000 --- a/packages/adapter-postgres/src/__tests__/README.md +++ /dev/null @@ -1,71 +0,0 @@ -# PostgreSQL Adapter Tests - -This directory contains tests for the PostgreSQL adapter with vector extension support. - -## Prerequisites - -- Docker installed and running -- Node.js and pnpm installed -- Bash shell (for Unix/Mac) or Git Bash (for Windows) - -## Test Environment - -The tests run against a PostgreSQL instance with the `pgvector` extension enabled. We use Docker to ensure a consistent test environment: - -- PostgreSQL 16 with pgvector extension -- Test database: `eliza_test` -- Port: 5433 (to avoid conflicts with local PostgreSQL) -- Vector dimensions: 1536 (OpenAI compatible) - -## Running Tests - -The easiest way to run tests is using the provided script: - -```bash -./run_tests.sh -``` - -This script will: -1. Start the PostgreSQL container with vector extension -2. Wait for the database to be ready -3. Run the test suite - -## Manual Setup - -If you prefer to run tests manually: - -1. Start the test database: - ```bash - docker compose -f docker-compose.test.yml up -d - ``` - -2. Wait for the database to be ready (about 30 seconds) - -3. Run tests: - ```bash - pnpm vitest vector-extension.test.ts - ``` - -## Test Structure - -- `vector-extension.test.ts`: Main test suite for vector operations -- `docker-compose.test.yml`: Docker configuration for test database -- `run_tests.sh`: Helper script to run tests - -## Troubleshooting - -1. If tests fail with connection errors: - - Check if Docker is running - - Verify port 5433 is available - - Wait a bit longer for database initialization - -2. If vector operations fail: - - Check if pgvector extension is properly loaded - - Verify schema initialization - - Check vector dimensions match (1536 for OpenAI) - -## Notes - -- Tests automatically clean up after themselves -- Each test run starts with a fresh database -- Vector extension is initialized as part of the schema setup \ No newline at end of file diff --git a/packages/adapter-postgres/src/__tests__/docker-compose.test.yml b/packages/adapter-postgres/src/__tests__/docker-compose.test.yml deleted file mode 100644 index 7a589ec1926b1..0000000000000 --- a/packages/adapter-postgres/src/__tests__/docker-compose.test.yml +++ /dev/null @@ -1,16 +0,0 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/compose-spec/compose-spec/master/schema/compose-spec.json -version: '3.8' -services: - postgres-test: - image: pgvector/pgvector:pg16 - environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: eliza_test - ports: - - "5433:5432" - healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] - interval: 5s - timeout: 5s - retries: 5 \ No newline at end of file diff --git a/packages/adapter-postgres/src/__tests__/run_tests.sh b/packages/adapter-postgres/src/__tests__/run_tests.sh deleted file mode 100755 index f9acc13c13a9b..0000000000000 --- a/packages/adapter-postgres/src/__tests__/run_tests.sh +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash - -# Color output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -NC='\033[0m' # No Color - -# Get script directory -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -SCHEMA_PATH="$SCRIPT_DIR/../../schema.sql" - -echo -e "${YELLOW}Starting PostgreSQL test environment...${NC}" - -# Determine Docker Compose command -if [[ "$OSTYPE" == "darwin"* ]]; then - DOCKER_COMPOSE_CMD="docker compose" -else - DOCKER_COMPOSE_CMD="docker-compose" -fi - -# Stop any existing containers -echo -e "${YELLOW}Cleaning up existing containers...${NC}" -$DOCKER_COMPOSE_CMD -f docker-compose.test.yml down - -# Start fresh container -echo -e "${YELLOW}Starting PostgreSQL container...${NC}" -$DOCKER_COMPOSE_CMD -f docker-compose.test.yml up -d - -# Function to check if PostgreSQL is ready -check_postgres() { - $DOCKER_COMPOSE_CMD -f docker-compose.test.yml exec -T postgres-test pg_isready -U postgres -} - -# Wait for PostgreSQL to be ready -echo -e "${YELLOW}Waiting for PostgreSQL to be ready...${NC}" -RETRIES=30 -until check_postgres || [ $RETRIES -eq 0 ]; do - echo -e "${YELLOW}Waiting for PostgreSQL to be ready... ($RETRIES attempts left)${NC}" - RETRIES=$((RETRIES-1)) - sleep 1 -done - -if [ $RETRIES -eq 0 ]; then - echo -e "${RED}Failed to connect to PostgreSQL${NC}" - $DOCKER_COMPOSE_CMD -f docker-compose.test.yml logs - exit 1 -fi - -echo -e "${GREEN}PostgreSQL is ready!${NC}" - -# Load schema -echo -e "${YELLOW}Loading database schema...${NC}" -if [ ! -f "$SCHEMA_PATH" ]; then - echo -e "${RED}Schema file not found at: $SCHEMA_PATH${NC}" - exit 1 -fi - -# Fix: Check exit code directly instead of using $? -if ! $DOCKER_COMPOSE_CMD -f docker-compose.test.yml exec -T postgres-test psql -U postgres -d eliza_test -f - < "$SCHEMA_PATH"; then - echo -e "${RED}Failed to load schema${NC}" - exit 1 -fi -echo -e "${GREEN}Schema loaded successfully!${NC}" - -# Run the tests -echo -e "${YELLOW}Running tests...${NC}" -if ! pnpm vitest vector-extension.test.ts; then - echo -e "${RED}Tests failed!${NC}" - $DOCKER_COMPOSE_CMD -f docker-compose.test.yml down - exit 1 -fi - -echo -e "${GREEN}Tests completed successfully!${NC}" - -# Clean up -echo -e "${YELLOW}Cleaning up test environment...${NC}" -$DOCKER_COMPOSE_CMD -f docker-compose.test.yml down \ No newline at end of file diff --git a/packages/adapter-postgres/src/__tests__/vector-extension.test.ts b/packages/adapter-postgres/src/__tests__/vector-extension.test.ts deleted file mode 100644 index a22c51c79f6fe..0000000000000 --- a/packages/adapter-postgres/src/__tests__/vector-extension.test.ts +++ /dev/null @@ -1,433 +0,0 @@ -import { PostgresDatabaseAdapter } from '../index'; -import pg from 'pg'; -import fs from 'fs'; -import path from 'path'; -import { describe, test, expect, beforeEach, afterEach, vi, beforeAll } from 'vitest'; -import { elizaLogger, type Memory, type Content } from '@elizaos/core'; - -// Increase test timeout -vi.setConfig({ testTimeout: 15000 }); - -// Mock the @elizaos/core module -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - error: vi.fn().mockImplementation(console.error), - info: vi.fn().mockImplementation(console.log), - success: vi.fn().mockImplementation(console.log), - debug: vi.fn().mockImplementation(console.log), - warn: vi.fn().mockImplementation(console.warn), - }, - getEmbeddingConfig: () => ({ - provider: 'OpenAI', - dimensions: 1536, - model: 'text-embedding-3-small' - }), - DatabaseAdapter: class { - protected circuitBreaker = { - execute: async (operation: () => Promise) => operation() - }; - protected async withCircuitBreaker(operation: () => Promise) { - return this.circuitBreaker.execute(operation); - } - }, - EmbeddingProvider: { - OpenAI: 'OpenAI', - Ollama: 'Ollama', - BGE: 'BGE' - } -})); - -// Helper function to parse vector string from PostgreSQL -const parseVectorString = (vectorStr: string): number[] => { - if (!vectorStr) return []; - // Remove brackets and split by comma - return vectorStr.replace(/[[\]]/g, '').split(',').map(Number); -}; - -describe('PostgresDatabaseAdapter - Vector Extension Validation', () => { - let adapter: PostgresDatabaseAdapter; - let testClient: pg.PoolClient; - let testPool: pg.Pool; - - const initializeDatabase = async (client: pg.PoolClient) => { - elizaLogger.info('Initializing database with schema...'); - try { - // Set app settings for vector dimension - await client.query(` - ALTER DATABASE eliza_test SET app.use_openai_embedding = 'true'; - ALTER DATABASE eliza_test SET app.use_ollama_embedding = 'false'; - `); - - // Read and execute schema file - const schemaPath = path.resolve(__dirname, '../../schema.sql'); - const schema = fs.readFileSync(schemaPath, 'utf8'); - await client.query(schema); - - // Verify schema setup - const { rows: vectorExt } = await client.query(` - SELECT * FROM pg_extension WHERE extname = 'vector' - `); - elizaLogger.info('Vector extension status:', { isInstalled: vectorExt.length > 0 }); - - const { rows: dimension } = await client.query('SELECT get_embedding_dimension()'); - elizaLogger.info('Vector dimension:', { dimension: dimension[0].get_embedding_dimension }); - - // Verify search path - const { rows: searchPath } = await client.query('SHOW search_path'); - elizaLogger.info('Search path:', { searchPath: searchPath[0].search_path }); - - } catch (error) { - elizaLogger.error(`Database initialization failed: ${error instanceof Error ? error.message : 'Unknown error'}`); - throw error; - } - }; - - const cleanDatabase = async (client: pg.PoolClient) => { - elizaLogger.info('Starting database cleanup...'); - try { - await client.query('DROP TABLE IF EXISTS relationships CASCADE'); - await client.query('DROP TABLE IF EXISTS participants CASCADE'); - await client.query('DROP TABLE IF EXISTS logs CASCADE'); - await client.query('DROP TABLE IF EXISTS goals CASCADE'); - await client.query('DROP TABLE IF EXISTS memories CASCADE'); - await client.query('DROP TABLE IF EXISTS rooms CASCADE'); - await client.query('DROP TABLE IF EXISTS accounts CASCADE'); - await client.query('DROP TABLE IF EXISTS cache CASCADE'); - await client.query('DROP EXTENSION IF EXISTS vector CASCADE'); - await client.query('DROP SCHEMA IF EXISTS extensions CASCADE'); - elizaLogger.success('Database cleanup completed successfully'); - } catch (error) { - elizaLogger.error(`Database cleanup failed: ${error instanceof Error ? error.message : 'Unknown error'}`); - throw error; - } - }; - - beforeAll(async () => { - elizaLogger.info('Setting up test database...'); - const setupPool = new pg.Pool({ - host: 'localhost', - port: 5433, - database: 'eliza_test', - user: 'postgres', - password: 'postgres' - }); - - const setupClient = await setupPool.connect(); - try { - await cleanDatabase(setupClient); - await initializeDatabase(setupClient); - } finally { - await setupClient.release(); - await setupPool.end(); - } - }); - - beforeEach(async () => { - elizaLogger.info('Setting up test environment...'); - try { - // Setup test database connection - testPool = new pg.Pool({ - host: 'localhost', - port: 5433, - database: 'eliza_test', - user: 'postgres', - password: 'postgres' - }); - - testClient = await testPool.connect(); - elizaLogger.debug('Database connection established'); - - await cleanDatabase(testClient); - elizaLogger.debug('Database cleaned'); - - adapter = new PostgresDatabaseAdapter({ - host: 'localhost', - port: 5433, - database: 'eliza_test', - user: 'postgres', - password: 'postgres' - }); - elizaLogger.success('Test environment setup completed'); - } catch (error) { - elizaLogger.error(`Test environment setup failed: ${error instanceof Error ? error.message : 'Unknown error'}`); - throw error; - } - }); - - afterEach(async () => { - elizaLogger.info('Cleaning up test environment...'); - try { - await cleanDatabase(testClient); - await testClient?.release(); - await testPool?.end(); - await adapter?.close(); - elizaLogger.success('Test environment cleanup completed'); - } catch (error) { - elizaLogger.error(`Test environment cleanup failed: ${error instanceof Error ? error.message : 'Unknown error'}`); - throw error; - } - }); - - describe('Schema and Extension Management', () => { - test('should initialize with vector extension', async () => { - elizaLogger.info('Testing vector extension initialization...'); - try { - // Act - elizaLogger.debug('Initializing adapter...'); - await adapter.init(); - elizaLogger.success('Adapter initialized successfully'); - - // Assert - elizaLogger.debug('Verifying vector extension existence...'); - const { rows } = await testClient.query(` - SELECT 1 FROM pg_extension WHERE extname = 'vector' - `); - expect(rows.length).toBe(1); - elizaLogger.success('Vector extension verified successfully'); - } catch (error) { - elizaLogger.error(`Vector extension test failed: ${error instanceof Error ? error.message : 'Unknown error'}`); - throw error; - } - }); - - test('should handle missing rooms table', async () => { - elizaLogger.info('Testing rooms table creation...'); - try { - // Act - elizaLogger.debug('Initializing adapter...'); - await adapter.init(); - elizaLogger.success('Adapter initialized successfully'); - - // Assert - elizaLogger.debug('Verifying rooms table existence...'); - const { rows } = await testClient.query(` - SELECT EXISTS ( - SELECT FROM information_schema.tables - WHERE table_name = 'rooms' - ); - `); - expect(rows[0].exists).toBe(true); - elizaLogger.success('Rooms table verified successfully'); - } catch (error) { - elizaLogger.error(`Rooms table test failed: ${error instanceof Error ? error.message : 'Unknown error'}`); - throw error; - } - }); - - test('should not reapply schema when everything exists', async () => { - elizaLogger.info('Testing schema reapplication prevention...'); - try { - // Arrange - elizaLogger.debug('Setting up initial schema...'); - await adapter.init(); - elizaLogger.success('Initial schema setup completed'); - - const spy = vi.spyOn(fs, 'readFileSync'); - elizaLogger.debug('File read spy installed'); - - // Act - elizaLogger.debug('Attempting schema reapplication...'); - await adapter.init(); - elizaLogger.success('Second initialization completed'); - - // Assert - expect(spy).not.toHaveBeenCalled(); - elizaLogger.success('Verified schema was not reapplied'); - spy.mockRestore(); - } catch (error) { - elizaLogger.error(`Schema reapplication test failed: ${error instanceof Error ? error.message : 'Unknown error'}`); - throw error; - } - }); - - test('should handle transaction rollback on error', async () => { - elizaLogger.info('Testing transaction rollback...'); - try { - // Arrange - elizaLogger.debug('Setting up file read error simulation...'); - const spy = vi.spyOn(fs, 'readFileSync').mockImplementation(() => { - elizaLogger.warn('Simulating schema read error'); - throw new Error('Schema read error'); - }); - - // Act & Assert - elizaLogger.debug('Attempting initialization with error...'); - await expect(adapter.init()).rejects.toThrow('Schema read error'); - elizaLogger.success('Error thrown as expected'); - - // Verify no tables were created - elizaLogger.debug('Verifying rollback...'); - const { rows } = await testClient.query(` - SELECT EXISTS ( - SELECT FROM information_schema.tables - WHERE table_name = 'rooms' - ); - `); - expect(rows[0].exists).toBe(false); - elizaLogger.success('Rollback verified successfully'); - spy.mockRestore(); - } catch (error) { - elizaLogger.error(`Transaction rollback test failed: ${error instanceof Error ? error.message : 'Unknown error'}`); - throw error; - } - }); - }); - - // Memory Operations tests will be updated in the next iteration - describe('Memory Operations with Vector', () => { - const TEST_UUID = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'; - const TEST_TABLE = 'test_memories'; - - beforeEach(async () => { - elizaLogger.info('Setting up memory operations test...'); - try { - // Ensure clean state and proper initialization - await adapter.init(); - - // Verify vector extension and search path - await testClient.query(` - SET search_path TO public, extensions; - SELECT set_config('app.use_openai_embedding', 'true', false); - `); - - // Create necessary account and room first - await testClient.query('BEGIN'); - try { - await testClient.query(` - INSERT INTO accounts (id, email) - VALUES ($1, 'test@test.com') - ON CONFLICT (id) DO NOTHING - `, [TEST_UUID]); - - await testClient.query(` - INSERT INTO rooms (id) - VALUES ($1) - ON CONFLICT (id) DO NOTHING - `, [TEST_UUID]); - - await testClient.query('COMMIT'); - } catch (error) { - await testClient.query('ROLLBACK'); - throw error; - } - - } catch (error) { - elizaLogger.error('Memory operations setup failed:', { - error: error instanceof Error ? error.message : String(error) - }); - throw error; - } - }); - - test('should create and retrieve memory with vector embedding', async () => { - // Arrange - const content: Content = { - text: 'test content' - }; - - const memory: Memory = { - id: TEST_UUID, - content, - embedding: new Array(1536).fill(0.1), - unique: true, - userId: TEST_UUID, - agentId: TEST_UUID, - roomId: TEST_UUID, - createdAt: Date.now() - }; - - // Act - await testClient.query('BEGIN'); - try { - await adapter.createMemory(memory, TEST_TABLE); - await testClient.query('COMMIT'); - } catch (error) { - await testClient.query('ROLLBACK'); - throw error; - } - - // Verify the embedding dimension - const { rows: [{ get_embedding_dimension }] } = await testClient.query('SELECT get_embedding_dimension()'); - expect(get_embedding_dimension).toBe(1536); - - // Retrieve and verify - const retrieved = await adapter.getMemoryById(TEST_UUID); - expect(retrieved).toBeDefined(); - const parsedEmbedding = typeof retrieved?.embedding === 'string' ? parseVectorString(retrieved.embedding) : retrieved?.embedding; - expect(Array.isArray(parsedEmbedding)).toBe(true); - expect(parsedEmbedding).toHaveLength(1536); - expect(retrieved?.content).toEqual(content); - }); - - test('should search memories by embedding', async () => { - // Arrange - const content: Content = { text: 'test content' }; - const embedding = new Array(1536).fill(0.1); - const memory: Memory = { - id: TEST_UUID, - content, - embedding, - unique: true, - userId: TEST_UUID, - agentId: TEST_UUID, - roomId: TEST_UUID, - createdAt: Date.now() - }; - - // Create memory within transaction - await testClient.query('BEGIN'); - try { - await adapter.createMemory(memory, TEST_TABLE); - await testClient.query('COMMIT'); - } catch (error) { - await testClient.query('ROLLBACK'); - throw error; - } - - // Act - const results = await adapter.searchMemoriesByEmbedding(embedding, { - tableName: TEST_TABLE, - roomId: TEST_UUID, - match_threshold: 0.8, - count: 1 - }); - - // Assert - expect(results).toBeDefined(); - expect(Array.isArray(results)).toBe(true); - expect(results.length).toBeGreaterThan(0); - const parsedEmbedding = typeof results[0].embedding === 'string' ? parseVectorString(results[0].embedding) : results[0].embedding; - expect(parsedEmbedding).toHaveLength(1536); - }); - - test('should handle invalid embedding dimensions', async () => { - // Arrange - const content: Content = { - text: 'test content' - }; - - const memory: Memory = { - id: TEST_UUID, - content, - embedding: new Array(100).fill(0.1), // Wrong dimension - unique: true, - userId: TEST_UUID, - agentId: TEST_UUID, - roomId: TEST_UUID, - createdAt: Date.now() - }; - - // Act & Assert - await testClient.query('BEGIN'); - try { - await expect(adapter.createMemory(memory, TEST_TABLE)) - .rejects - .toThrow('Invalid embedding dimension: expected 1536, got 100'); - await testClient.query('ROLLBACK'); - } catch (error) { - await testClient.query('ROLLBACK'); - throw error; - } - }, { timeout: 30000 }); // Increased timeout for retry attempts - }); -}); \ No newline at end of file diff --git a/packages/adapter-postgres/src/index.ts b/packages/adapter-postgres/src/index.ts deleted file mode 100644 index 55511e6d0253d..0000000000000 --- a/packages/adapter-postgres/src/index.ts +++ /dev/null @@ -1,1814 +0,0 @@ -import { v4 } from "uuid"; - -// Import the entire module as default -import pg from "pg"; -type Pool = pg.Pool; - -import { - type Account, - type Actor, - DatabaseAdapter, - EmbeddingProvider, - type GoalStatus, - type Participant, - type RAGKnowledgeItem, - elizaLogger, - getEmbeddingConfig, - type Goal, - type IDatabaseCacheAdapter, - type Memory, - type Relationship, - type UUID, -} from "@elizaos/core"; -import fs from "fs"; -import path from "path"; -import type { - QueryConfig, - QueryConfigValues, - QueryResult, - QueryResultRow, -} from "pg"; -import { fileURLToPath } from "url"; - -const __filename = fileURLToPath(import.meta.url); // get the resolved path to the file -const __dirname = path.dirname(__filename); // get the name of the directory - -export class PostgresDatabaseAdapter - extends DatabaseAdapter - implements IDatabaseCacheAdapter -{ - private pool: Pool; - private readonly maxRetries: number = 3; - private readonly baseDelay: number = 1000; // 1 second - private readonly maxDelay: number = 10000; // 10 seconds - private readonly jitterMax: number = 1000; // 1 second - private readonly connectionTimeout: number = 5000; // 5 seconds - - constructor(connectionConfig: any) { - super({ - //circuitbreaker stuff - failureThreshold: 5, - resetTimeout: 60000, - halfOpenMaxAttempts: 3, - }); - - const defaultConfig = { - max: 20, - idleTimeoutMillis: 30000, - connectionTimeoutMillis: this.connectionTimeout, - }; - - this.pool = new pg.Pool({ - ...defaultConfig, - ...connectionConfig, // Allow overriding defaults - }); - - this.pool.on("error", (err) => { - elizaLogger.error("Unexpected pool error", err); - this.handlePoolError(err); - }); - - this.setupPoolErrorHandling(); - this.testConnection(); - } - - private setupPoolErrorHandling() { - process.on("SIGINT", async () => { - await this.cleanup(); - process.exit(0); - }); - - process.on("SIGTERM", async () => { - await this.cleanup(); - process.exit(0); - }); - - process.on("beforeExit", async () => { - await this.cleanup(); - }); - } - - private async withDatabase( - operation: () => Promise, - context: string - ): Promise { - return this.withCircuitBreaker(async () => { - return this.withRetry(operation); - }, context); - } - - private async withRetry(operation: () => Promise): Promise { - let lastError: Error = new Error("Unknown error"); // Initialize with default - - for (let attempt = 1; attempt <= this.maxRetries; attempt++) { - try { - return await operation(); - } catch (error) { - lastError = error as Error; - - if (attempt < this.maxRetries) { - // Calculate delay with exponential backoff - const backoffDelay = Math.min( - this.baseDelay * Math.pow(2, attempt - 1), - this.maxDelay - ); - - // Add jitter to prevent thundering herd - const jitter = Math.random() * this.jitterMax; - const delay = backoffDelay + jitter; - - elizaLogger.warn( - `Database operation failed (attempt ${attempt}/${this.maxRetries}):`, - { - error: - error instanceof Error - ? error.message - : String(error), - nextRetryIn: `${(delay / 1000).toFixed(1)}s`, - } - ); - - await new Promise((resolve) => setTimeout(resolve, delay)); - } else { - elizaLogger.error("Max retry attempts reached:", { - error: - error instanceof Error - ? error.message - : String(error), - totalAttempts: attempt, - }); - throw error instanceof Error - ? error - : new Error(String(error)); - } - } - } - - throw lastError; - } - - private async handlePoolError(error: Error) { - elizaLogger.error("Pool error occurred, attempting to reconnect", { - error: error.message, - }); - - try { - // Close existing pool - await this.pool.end(); - - // Create new pool - this.pool = new pg.Pool({ - ...this.pool.options, - connectionTimeoutMillis: this.connectionTimeout, - }); - - await this.testConnection(); - elizaLogger.success("Pool reconnection successful"); - } catch (reconnectError) { - elizaLogger.error("Failed to reconnect pool", { - error: - reconnectError instanceof Error - ? reconnectError.message - : String(reconnectError), - }); - throw reconnectError; - } - } - - async query( - queryTextOrConfig: string | QueryConfig, - values?: QueryConfigValues - ): Promise> { - return this.withDatabase(async () => { - return await this.pool.query(queryTextOrConfig, values); - }, "query"); - } - - private async validateVectorSetup(): Promise { - try { - const vectorExt = await this.query(` - SELECT 1 FROM pg_extension WHERE extname = 'vector' - `); - const hasVector = vectorExt.rows.length > 0; - - if (!hasVector) { - elizaLogger.error("Vector extension not found in database"); - return false; - } - - return true; - } catch (error) { - elizaLogger.error("Failed to validate vector extension:", { - error: error instanceof Error ? error.message : String(error), - }); - return false; - } - } - - async init() { - await this.testConnection(); - - const client = await this.pool.connect(); - try { - await client.query("BEGIN"); - - // Set application settings for embedding dimension - const embeddingConfig = getEmbeddingConfig(); - if (embeddingConfig.provider === EmbeddingProvider.OpenAI) { - await client.query("SET app.use_openai_embedding = 'true'"); - await client.query("SET app.use_ollama_embedding = 'false'"); - await client.query("SET app.use_gaianet_embedding = 'false'"); - } else if (embeddingConfig.provider === EmbeddingProvider.Ollama) { - await client.query("SET app.use_openai_embedding = 'false'"); - await client.query("SET app.use_ollama_embedding = 'true'"); - await client.query("SET app.use_gaianet_embedding = 'false'"); - } else if (embeddingConfig.provider === EmbeddingProvider.GaiaNet) { - await client.query("SET app.use_openai_embedding = 'false'"); - await client.query("SET app.use_ollama_embedding = 'false'"); - await client.query("SET app.use_gaianet_embedding = 'true'"); - } else { - await client.query("SET app.use_openai_embedding = 'false'"); - await client.query("SET app.use_ollama_embedding = 'false'"); - } - - // Check if schema already exists (check for a core table) - const { rows } = await client.query(` - SELECT EXISTS ( - SELECT FROM information_schema.tables - WHERE table_name = 'rooms' - ); - `); - - if (!rows[0].exists || !(await this.validateVectorSetup())) { - elizaLogger.info( - "Applying database schema - tables or vector extension missing" - ); - const schema = fs.readFileSync( - path.resolve(__dirname, "../schema.sql"), - "utf8" - ); - await client.query(schema); - } - - await client.query("COMMIT"); - } catch (error) { - await client.query("ROLLBACK"); - throw error; - } finally { - client.release(); - } - } - - async close() { - await this.pool.end(); - } - - async testConnection(): Promise { - let client; - try { - client = await this.pool.connect(); - const result = await client.query("SELECT NOW()"); - elizaLogger.success( - "Database connection test successful:", - result.rows[0] - ); - return true; - } catch (error) { - elizaLogger.error("Database connection test failed:", error); - throw new Error( - `Failed to connect to database: ${(error as Error).message}` - ); - } finally { - if (client) client.release(); - } - } - - async cleanup(): Promise { - try { - await this.pool.end(); - elizaLogger.info("Database pool closed"); - } catch (error) { - elizaLogger.error("Error closing database pool:", error); - } - } - - async getRoom(roomId: UUID): Promise { - return this.withDatabase(async () => { - const { rows } = await this.pool.query( - "SELECT id FROM rooms WHERE id = $1", - [roomId] - ); - return rows.length > 0 ? (rows[0].id as UUID) : null; - }, "getRoom"); - } - - async getParticipantsForAccount(userId: UUID): Promise { - return this.withDatabase(async () => { - const { rows } = await this.pool.query( - `SELECT id, "userId", "roomId", "last_message_read" - FROM participants - WHERE "userId" = $1`, - [userId] - ); - return rows as Participant[]; - }, "getParticipantsForAccount"); - } - - async getParticipantUserState( - roomId: UUID, - userId: UUID - ): Promise<"FOLLOWED" | "MUTED" | null> { - return this.withDatabase(async () => { - const { rows } = await this.pool.query( - `SELECT "userState" FROM participants WHERE "roomId" = $1 AND "userId" = $2`, - [roomId, userId] - ); - return rows.length > 0 ? rows[0].userState : null; - }, "getParticipantUserState"); - } - - async getMemoriesByRoomIds(params: { - roomIds: UUID[]; - agentId?: UUID; - tableName: string; - limit?: number; - }): Promise { - return this.withDatabase(async () => { - if (params.roomIds.length === 0) return []; - const placeholders = params.roomIds - .map((_, i) => `$${i + 2}`) - .join(", "); - - let query = `SELECT * FROM memories WHERE type = $1 AND "roomId" IN (${placeholders})`; - let queryParams = [params.tableName, ...params.roomIds]; - - if (params.agentId) { - query += ` AND "agentId" = $${params.roomIds.length + 2}`; - queryParams = [...queryParams, params.agentId]; - } - - // Add sorting, and conditionally add LIMIT if provided - query += ` ORDER BY "createdAt" DESC`; - if (params.limit) { - query += ` LIMIT $${queryParams.length + 1}`; - queryParams.push(params.limit.toString()); - } - - const { rows } = await this.pool.query(query, queryParams); - return rows.map((row) => ({ - ...row, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - })); - }, "getMemoriesByRoomIds"); - } - - async setParticipantUserState( - roomId: UUID, - userId: UUID, - state: "FOLLOWED" | "MUTED" | null - ): Promise { - return this.withDatabase(async () => { - await this.pool.query( - `UPDATE participants SET "userState" = $1 WHERE "roomId" = $2 AND "userId" = $3`, - [state, roomId, userId] - ); - }, "setParticipantUserState"); - } - - async getParticipantsForRoom(roomId: UUID): Promise { - return this.withDatabase(async () => { - const { rows } = await this.pool.query( - 'SELECT "userId" FROM participants WHERE "roomId" = $1', - [roomId] - ); - return rows.map((row) => row.userId); - }, "getParticipantsForRoom"); - } - - async getAccountById(userId: UUID): Promise { - return this.withDatabase(async () => { - const { rows } = await this.pool.query( - "SELECT * FROM accounts WHERE id = $1", - [userId] - ); - if (rows.length === 0) { - elizaLogger.debug("Account not found:", { userId }); - return null; - } - - const account = rows[0]; - // elizaLogger.debug("Account retrieved:", { - // userId, - // hasDetails: !!account.details, - // }); - - return { - ...account, - details: - typeof account.details === "string" - ? JSON.parse(account.details) - : account.details, - }; - }, "getAccountById"); - } - - async createAccount(account: Account): Promise { - return this.withDatabase(async () => { - try { - const accountId = account.id ?? v4(); - await this.pool.query( - `INSERT INTO accounts (id, name, username, email, "avatarUrl", details) - VALUES ($1, $2, $3, $4, $5, $6)`, - [ - accountId, - account.name, - account.username || "", - account.email || "", - account.avatarUrl || "", - JSON.stringify(account.details), - ] - ); - elizaLogger.debug("Account created successfully:", { - accountId, - }); - return true; - } catch (error) { - elizaLogger.error("Error creating account:", { - error: - error instanceof Error ? error.message : String(error), - accountId: account.id, - name: account.name, // Only log non-sensitive fields - }); - return false; // Return false instead of throwing to maintain existing behavior - } - }, "createAccount"); - } - - async getActorById(params: { roomId: UUID }): Promise { - return this.withDatabase(async () => { - const { rows } = await this.pool.query( - `SELECT a.id, a.name, a.username, a.details - FROM participants p - LEFT JOIN accounts a ON p."userId" = a.id - WHERE p."roomId" = $1`, - [params.roomId] - ); - - elizaLogger.debug("Retrieved actors:", { - roomId: params.roomId, - actorCount: rows.length, - }); - - return rows.map((row) => { - try { - return { - ...row, - details: - typeof row.details === "string" - ? JSON.parse(row.details) - : row.details, - }; - } catch (error) { - elizaLogger.warn("Failed to parse actor details:", { - actorId: row.id, - error: - error instanceof Error - ? error.message - : String(error), - }); - return { - ...row, - details: {}, // Provide default empty details on parse error - }; - } - }); - }, "getActorById").catch((error) => { - elizaLogger.error("Failed to get actors:", { - roomId: params.roomId, - error: error.message, - }); - throw error; // Re-throw to let caller handle database errors - }); - } - - async getMemoryById(id: UUID): Promise { - return this.withDatabase(async () => { - const { rows } = await this.pool.query( - "SELECT * FROM memories WHERE id = $1", - [id] - ); - if (rows.length === 0) return null; - - return { - ...rows[0], - content: - typeof rows[0].content === "string" - ? JSON.parse(rows[0].content) - : rows[0].content, - }; - }, "getMemoryById"); - } - - async getMemoriesByIds( - memoryIds: UUID[], - tableName?: string - ): Promise { - return this.withDatabase(async () => { - if (memoryIds.length === 0) return []; - const placeholders = memoryIds.map((_, i) => `$${i + 1}`).join(","); - let sql = `SELECT * FROM memories WHERE id IN (${placeholders})`; - const queryParams: any[] = [...memoryIds]; - - if (tableName) { - sql += ` AND type = $${memoryIds.length + 1}`; - queryParams.push(tableName); - } - - const { rows } = await this.pool.query(sql, queryParams); - - return rows.map((row) => ({ - ...row, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - })); - }, "getMemoriesByIds"); - } - - async createMemory(memory: Memory, tableName: string): Promise { - return this.withDatabase(async () => { - elizaLogger.debug("PostgresAdapter createMemory:", { - memoryId: memory.id, - embeddingLength: memory.embedding?.length, - contentLength: memory.content?.text?.length, - }); - - let isUnique = true; - if (memory.embedding) { - const similarMemories = await this.searchMemoriesByEmbedding( - memory.embedding, - { - tableName, - roomId: memory.roomId, - match_threshold: 0.95, - count: 1, - } - ); - isUnique = similarMemories.length === 0; - } - - await this.pool.query( - `INSERT INTO memories ( - id, type, content, embedding, "userId", "roomId", "agentId", "unique", "createdAt" - ) VALUES ($1, $2, $3, $4, $5::uuid, $6::uuid, $7::uuid, $8, to_timestamp($9/1000.0))`, - [ - memory.id ?? v4(), - tableName, - JSON.stringify(memory.content), - memory.embedding ? `[${memory.embedding.join(",")}]` : null, - memory.userId, - memory.roomId, - memory.agentId, - memory.unique ?? isUnique, - Date.now(), - ] - ); - }, "createMemory"); - } - - async searchMemories(params: { - tableName: string; - agentId: UUID; - roomId: UUID; - embedding: number[]; - match_threshold: number; - match_count: number; - unique: boolean; - }): Promise { - return await this.searchMemoriesByEmbedding(params.embedding, { - match_threshold: params.match_threshold, - count: params.match_count, - agentId: params.agentId, - roomId: params.roomId, - unique: params.unique, - tableName: params.tableName, - }); - } - - async getMemories(params: { - roomId: UUID; - count?: number; - unique?: boolean; - tableName: string; - agentId?: UUID; - start?: number; - end?: number; - }): Promise { - // Parameter validation - if (!params.tableName) throw new Error("tableName is required"); - if (!params.roomId) throw new Error("roomId is required"); - - return this.withDatabase(async () => { - // Build query - let sql = `SELECT * FROM memories WHERE type = $1 AND "roomId" = $2`; - const values: any[] = [params.tableName, params.roomId]; - let paramCount = 2; - - // Add time range filters - if (params.start) { - paramCount++; - sql += ` AND "createdAt" >= to_timestamp($${paramCount})`; - values.push(params.start / 1000); - } - - if (params.end) { - paramCount++; - sql += ` AND "createdAt" <= to_timestamp($${paramCount})`; - values.push(params.end / 1000); - } - - // Add other filters - if (params.unique) { - sql += ` AND "unique" = true`; - } - - if (params.agentId) { - paramCount++; - sql += ` AND "agentId" = $${paramCount}`; - values.push(params.agentId); - } - - // Add ordering and limit - sql += ' ORDER BY "createdAt" DESC'; - - if (params.count) { - paramCount++; - sql += ` LIMIT $${paramCount}`; - values.push(params.count); - } - - elizaLogger.debug("Fetching memories:", { - roomId: params.roomId, - tableName: params.tableName, - unique: params.unique, - agentId: params.agentId, - timeRange: - params.start || params.end - ? { - start: params.start - ? new Date(params.start).toISOString() - : undefined, - end: params.end - ? new Date(params.end).toISOString() - : undefined, - } - : undefined, - limit: params.count, - }); - - const { rows } = await this.pool.query(sql, values); - return rows.map((row) => ({ - ...row, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - })); - }, "getMemories"); - } - - async getGoals(params: { - roomId: UUID; - userId?: UUID | null; - onlyInProgress?: boolean; - count?: number; - }): Promise { - return this.withDatabase(async () => { - let sql = `SELECT * FROM goals WHERE "roomId" = $1`; - const values: any[] = [params.roomId]; - let paramCount = 1; - - if (params.userId) { - paramCount++; - sql += ` AND "userId" = $${paramCount}`; - values.push(params.userId); - } - - if (params.onlyInProgress) { - sql += " AND status = 'IN_PROGRESS'"; - } - - if (params.count) { - paramCount++; - sql += ` LIMIT $${paramCount}`; - values.push(params.count); - } - - const { rows } = await this.pool.query(sql, values); - return rows.map((row) => ({ - ...row, - objectives: - typeof row.objectives === "string" - ? JSON.parse(row.objectives) - : row.objectives, - })); - }, "getGoals"); - } - - async updateGoal(goal: Goal): Promise { - return this.withDatabase(async () => { - try { - await this.pool.query( - `UPDATE goals SET name = $1, status = $2, objectives = $3 WHERE id = $4`, - [ - goal.name, - goal.status, - JSON.stringify(goal.objectives), - goal.id, - ] - ); - } catch (error) { - elizaLogger.error("Failed to update goal:", { - goalId: goal.id, - error: - error instanceof Error ? error.message : String(error), - status: goal.status, - }); - throw error; - } - }, "updateGoal"); - } - - async createGoal(goal: Goal): Promise { - return this.withDatabase(async () => { - await this.pool.query( - `INSERT INTO goals (id, "roomId", "userId", name, status, objectives) - VALUES ($1, $2, $3, $4, $5, $6)`, - [ - goal.id ?? v4(), - goal.roomId, - goal.userId, - goal.name, - goal.status, - JSON.stringify(goal.objectives), - ] - ); - }, "createGoal"); - } - - async removeGoal(goalId: UUID): Promise { - if (!goalId) throw new Error("Goal ID is required"); - - return this.withDatabase(async () => { - try { - const result = await this.pool.query( - "DELETE FROM goals WHERE id = $1 RETURNING id", - [goalId] - ); - - elizaLogger.debug("Goal removal attempt:", { - goalId, - removed: result?.rowCount ?? 0 > 0, - }); - } catch (error) { - elizaLogger.error("Failed to remove goal:", { - goalId, - error: - error instanceof Error ? error.message : String(error), - }); - throw error; - } - }, "removeGoal"); - } - - async createRoom(roomId?: UUID): Promise { - return this.withDatabase(async () => { - const newRoomId = roomId || v4(); - await this.pool.query("INSERT INTO rooms (id) VALUES ($1)", [ - newRoomId, - ]); - return newRoomId as UUID; - }, "createRoom"); - } - - async removeRoom(roomId: UUID): Promise { - if (!roomId) throw new Error("Room ID is required"); - - return this.withDatabase(async () => { - const client = await this.pool.connect(); - try { - await client.query("BEGIN"); - - // First check if room exists - const checkResult = await client.query( - "SELECT id FROM rooms WHERE id = $1", - [roomId] - ); - - if (checkResult.rowCount === 0) { - elizaLogger.warn("No room found to remove:", { roomId }); - throw new Error(`Room not found: ${roomId}`); - } - - // Remove related data first (if not using CASCADE) - await client.query('DELETE FROM memories WHERE "roomId" = $1', [ - roomId, - ]); - await client.query( - 'DELETE FROM participants WHERE "roomId" = $1', - [roomId] - ); - await client.query('DELETE FROM goals WHERE "roomId" = $1', [ - roomId, - ]); - - // Finally remove the room - const result = await client.query( - "DELETE FROM rooms WHERE id = $1 RETURNING id", - [roomId] - ); - - await client.query("COMMIT"); - - elizaLogger.debug( - "Room and related data removed successfully:", - { - roomId, - removed: result?.rowCount ?? 0 > 0, - } - ); - } catch (error) { - await client.query("ROLLBACK"); - elizaLogger.error("Failed to remove room:", { - roomId, - error: - error instanceof Error ? error.message : String(error), - }); - throw error; - } finally { - if (client) client.release(); - } - }, "removeRoom"); - } - - async createRelationship(params: { - userA: UUID; - userB: UUID; - }): Promise { - // Input validation - if (!params.userA || !params.userB) { - throw new Error("userA and userB are required"); - } - - return this.withDatabase(async () => { - try { - const relationshipId = v4(); - await this.pool.query( - `INSERT INTO relationships (id, "userA", "userB", "userId") - VALUES ($1, $2, $3, $4) - RETURNING id`, - [relationshipId, params.userA, params.userB, params.userA] - ); - - elizaLogger.debug("Relationship created successfully:", { - relationshipId, - userA: params.userA, - userB: params.userB, - }); - - return true; - } catch (error) { - // Check for unique constraint violation or other specific errors - if ((error as { code?: string }).code === "23505") { - // Unique violation - elizaLogger.warn("Relationship already exists:", { - userA: params.userA, - userB: params.userB, - error: - error instanceof Error - ? error.message - : String(error), - }); - } else { - elizaLogger.error("Failed to create relationship:", { - userA: params.userA, - userB: params.userB, - error: - error instanceof Error - ? error.message - : String(error), - }); - } - return false; - } - }, "createRelationship"); - } - - async getRelationship(params: { - userA: UUID; - userB: UUID; - }): Promise { - if (!params.userA || !params.userB) { - throw new Error("userA and userB are required"); - } - - return this.withDatabase(async () => { - try { - const { rows } = await this.pool.query( - `SELECT * FROM relationships - WHERE ("userA" = $1 AND "userB" = $2) - OR ("userA" = $2 AND "userB" = $1)`, - [params.userA, params.userB] - ); - - if (rows.length > 0) { - elizaLogger.debug("Relationship found:", { - relationshipId: rows[0].id, - userA: params.userA, - userB: params.userB, - }); - return rows[0]; - } - - elizaLogger.debug("No relationship found between users:", { - userA: params.userA, - userB: params.userB, - }); - return null; - } catch (error) { - elizaLogger.error("Error fetching relationship:", { - userA: params.userA, - userB: params.userB, - error: - error instanceof Error ? error.message : String(error), - }); - throw error; - } - }, "getRelationship"); - } - - async getRelationships(params: { userId: UUID }): Promise { - if (!params.userId) { - throw new Error("userId is required"); - } - - return this.withDatabase(async () => { - try { - const { rows } = await this.pool.query( - `SELECT * FROM relationships - WHERE "userA" = $1 OR "userB" = $1 - ORDER BY "createdAt" DESC`, // Add ordering if you have this field - [params.userId] - ); - - elizaLogger.debug("Retrieved relationships:", { - userId: params.userId, - count: rows.length, - }); - - return rows; - } catch (error) { - elizaLogger.error("Failed to fetch relationships:", { - userId: params.userId, - error: - error instanceof Error ? error.message : String(error), - }); - throw error; - } - }, "getRelationships"); - } - - async getCachedEmbeddings(opts: { - query_table_name: string; - query_threshold: number; - query_input: string; - query_field_name: string; - query_field_sub_name: string; - query_match_count: number; - }): Promise<{ embedding: number[]; levenshtein_score: number }[]> { - // Input validation - if (!opts.query_table_name) - throw new Error("query_table_name is required"); - if (!opts.query_input) throw new Error("query_input is required"); - if (!opts.query_field_name) - throw new Error("query_field_name is required"); - if (!opts.query_field_sub_name) - throw new Error("query_field_sub_name is required"); - if (opts.query_match_count <= 0) - throw new Error("query_match_count must be positive"); - - return this.withDatabase(async () => { - try { - elizaLogger.debug("Fetching cached embeddings:", { - tableName: opts.query_table_name, - fieldName: opts.query_field_name, - subFieldName: opts.query_field_sub_name, - matchCount: opts.query_match_count, - inputLength: opts.query_input.length, - }); - - const sql = ` - WITH content_text AS ( - SELECT - embedding, - COALESCE( - content->>$2, - '' - ) as content_text - FROM memories - WHERE type = $3 - AND content->>$2 IS NOT NULL - ) - SELECT - embedding, - levenshtein( - $1, - content_text - ) as levenshtein_score - FROM content_text - WHERE levenshtein( - $1, - content_text - ) <= $5 -- Add threshold check - ORDER BY levenshtein_score - LIMIT $4 - `; - - const { rows } = await this.pool.query(sql, [ - opts.query_input, - opts.query_field_sub_name, - opts.query_table_name, - opts.query_match_count, - opts.query_threshold, - ]); - - elizaLogger.debug("Retrieved cached embeddings:", { - count: rows.length, - tableName: opts.query_table_name, - matchCount: opts.query_match_count, - }); - - return rows - .map( - ( - row - ): { - embedding: number[]; - levenshtein_score: number; - } | null => { - if (!Array.isArray(row.embedding)) return null; - return { - embedding: row.embedding, - levenshtein_score: Number( - row.levenshtein_score - ), - }; - } - ) - .filter( - ( - row - ): row is { - embedding: number[]; - levenshtein_score: number; - } => row !== null - ); - } catch (error) { - elizaLogger.error("Error in getCachedEmbeddings:", { - error: - error instanceof Error ? error.message : String(error), - tableName: opts.query_table_name, - fieldName: opts.query_field_name, - }); - throw error; - } - }, "getCachedEmbeddings"); - } - - async log(params: { - body: { [key: string]: unknown }; - userId: UUID; - roomId: UUID; - type: string; - }): Promise { - // Input validation - if (!params.userId) throw new Error("userId is required"); - if (!params.roomId) throw new Error("roomId is required"); - if (!params.type) throw new Error("type is required"); - if (!params.body || typeof params.body !== "object") { - throw new Error("body must be a valid object"); - } - - return this.withDatabase(async () => { - try { - const logId = v4(); // Generate ID for tracking - await this.pool.query( - `INSERT INTO logs ( - id, - body, - "userId", - "roomId", - type, - "createdAt" - ) VALUES ($1, $2, $3, $4, $5, NOW()) - RETURNING id`, - [ - logId, - JSON.stringify(params.body), // Ensure body is stringified - params.userId, - params.roomId, - params.type, - ] - ); - - elizaLogger.debug("Log entry created:", { - logId, - type: params.type, - roomId: params.roomId, - userId: params.userId, - bodyKeys: Object.keys(params.body), - }); - } catch (error) { - elizaLogger.error("Failed to create log entry:", { - error: - error instanceof Error ? error.message : String(error), - type: params.type, - roomId: params.roomId, - userId: params.userId, - }); - throw error; - } - }, "log"); - } - - async searchMemoriesByEmbedding( - embedding: number[], - params: { - match_threshold?: number; - count?: number; - agentId?: UUID; - roomId?: UUID; - unique?: boolean; - tableName: string; - } - ): Promise { - return this.withDatabase(async () => { - elizaLogger.debug("Incoming vector:", { - length: embedding.length, - sample: embedding.slice(0, 5), - isArray: Array.isArray(embedding), - allNumbers: embedding.every((n) => typeof n === "number"), - }); - - // Validate embedding dimension - if (embedding.length !== getEmbeddingConfig().dimensions) { - throw new Error( - `Invalid embedding dimension: expected ${getEmbeddingConfig().dimensions}, got ${embedding.length}` - ); - } - - // Ensure vector is properly formatted - const cleanVector = embedding.map((n) => { - if (!Number.isFinite(n)) return 0; - // Limit precision to avoid floating point issues - return Number(n.toFixed(6)); - }); - - // Format for Postgres pgvector - const vectorStr = `[${cleanVector.join(",")}]`; - - elizaLogger.debug("Vector debug:", { - originalLength: embedding.length, - cleanLength: cleanVector.length, - sampleStr: vectorStr.slice(0, 100), - }); - - let sql = ` - SELECT *, - 1 - (embedding <-> $1::vector(${getEmbeddingConfig().dimensions})) as similarity - FROM memories - WHERE type = $2 - `; - - const values: any[] = [vectorStr, params.tableName]; - - // Log the query for debugging - elizaLogger.debug("Query debug:", { - sql: sql.slice(0, 200), - paramTypes: values.map((v) => typeof v), - vectorStrLength: vectorStr.length, - }); - - let paramCount = 2; - - if (params.unique) { - sql += ` AND "unique" = true`; - } - - if (params.agentId) { - paramCount++; - sql += ` AND "agentId" = $${paramCount}`; - values.push(params.agentId); - } - - if (params.roomId) { - paramCount++; - sql += ` AND "roomId" = $${paramCount}::uuid`; - values.push(params.roomId); - } - - if (params.match_threshold) { - paramCount++; - sql += ` AND 1 - (embedding <-> $1::vector) >= $${paramCount}`; - values.push(params.match_threshold); - } - - sql += ` ORDER BY embedding <-> $1::vector`; - - if (params.count) { - paramCount++; - sql += ` LIMIT $${paramCount}`; - values.push(params.count); - } - - const { rows } = await this.pool.query(sql, values); - return rows.map((row) => ({ - ...row, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - similarity: row.similarity, - })); - }, "searchMemoriesByEmbedding"); - } - - async addParticipant(userId: UUID, roomId: UUID): Promise { - return this.withDatabase(async () => { - try { - await this.pool.query( - `INSERT INTO participants (id, "userId", "roomId") - VALUES ($1, $2, $3)`, - [v4(), userId, roomId] - ); - return true; - } catch (error) { - console.log("Error adding participant", error); - return false; - } - }, "addParticpant"); - } - - async removeParticipant(userId: UUID, roomId: UUID): Promise { - return this.withDatabase(async () => { - try { - await this.pool.query( - `DELETE FROM participants WHERE "userId" = $1 AND "roomId" = $2`, - [userId, roomId] - ); - return true; - } catch (error) { - console.log("Error removing participant", error); - return false; - } - }, "removeParticipant"); - } - - async updateGoalStatus(params: { - goalId: UUID; - status: GoalStatus; - }): Promise { - return this.withDatabase(async () => { - await this.pool.query( - "UPDATE goals SET status = $1 WHERE id = $2", - [params.status, params.goalId] - ); - }, "updateGoalStatus"); - } - - async removeMemory(memoryId: UUID, tableName: string): Promise { - return this.withDatabase(async () => { - await this.pool.query( - "DELETE FROM memories WHERE type = $1 AND id = $2", - [tableName, memoryId] - ); - }, "removeMemory"); - } - - async removeAllMemories(roomId: UUID, tableName: string): Promise { - return this.withDatabase(async () => { - await this.pool.query( - `DELETE FROM memories WHERE type = $1 AND "roomId" = $2`, - [tableName, roomId] - ); - }, "removeAllMemories"); - } - - async countMemories( - roomId: UUID, - unique = true, - tableName = "" - ): Promise { - if (!tableName) throw new Error("tableName is required"); - - return this.withDatabase(async () => { - let sql = `SELECT COUNT(*) as count FROM memories WHERE type = $1 AND "roomId" = $2`; - if (unique) { - sql += ` AND "unique" = true`; - } - - const { rows } = await this.pool.query(sql, [tableName, roomId]); - return Number.parseInt(rows[0].count); - }, "countMemories"); - } - - async removeAllGoals(roomId: UUID): Promise { - return this.withDatabase(async () => { - await this.pool.query(`DELETE FROM goals WHERE "roomId" = $1`, [ - roomId, - ]); - }, "removeAllGoals"); - } - - async getRoomsForParticipant(userId: UUID): Promise { - return this.withDatabase(async () => { - const { rows } = await this.pool.query( - `SELECT "roomId" FROM participants WHERE "userId" = $1`, - [userId] - ); - return rows.map((row) => row.roomId); - }, "getRoomsForParticipant"); - } - - async getRoomsForParticipants(userIds: UUID[]): Promise { - return this.withDatabase(async () => { - const placeholders = userIds.map((_, i) => `$${i + 1}`).join(", "); - const { rows } = await this.pool.query( - `SELECT DISTINCT "roomId" FROM participants WHERE "userId" IN (${placeholders})`, - userIds - ); - return rows.map((row) => row.roomId); - }, "getRoomsForParticipants"); - } - - async getActorDetails(params: { roomId: string }): Promise { - if (!params.roomId) { - throw new Error("roomId is required"); - } - - return this.withDatabase(async () => { - try { - const sql = ` - SELECT - a.id, - a.name, - a.username, - a."avatarUrl", - COALESCE(a.details::jsonb, '{}'::jsonb) as details - FROM participants p - LEFT JOIN accounts a ON p."userId" = a.id - WHERE p."roomId" = $1 - ORDER BY a.name - `; - - const result = await this.pool.query(sql, [ - params.roomId, - ]); - - elizaLogger.debug("Retrieved actor details:", { - roomId: params.roomId, - actorCount: result.rows.length, - }); - - return result.rows.map((row) => { - try { - return { - ...row, - details: - typeof row.details === "string" - ? JSON.parse(row.details) - : row.details, - }; - } catch (parseError) { - elizaLogger.warn("Failed to parse actor details:", { - actorId: row.id, - error: - parseError instanceof Error - ? parseError.message - : String(parseError), - }); - return { - ...row, - details: {}, // Fallback to empty object if parsing fails - }; - } - }); - } catch (error) { - elizaLogger.error("Failed to fetch actor details:", { - roomId: params.roomId, - error: - error instanceof Error ? error.message : String(error), - }); - throw new Error( - `Failed to fetch actor details: ${error instanceof Error ? error.message : String(error)}` - ); - } - }, "getActorDetails"); - } - - async getCache(params: { - key: string; - agentId: UUID; - }): Promise { - return this.withDatabase(async () => { - try { - const sql = `SELECT "value"::TEXT FROM cache WHERE "key" = $1 AND "agentId" = $2`; - const { rows } = await this.query<{ value: string }>(sql, [ - params.key, - params.agentId, - ]); - return rows[0]?.value ?? undefined; - } catch (error) { - elizaLogger.error("Error fetching cache", { - error: - error instanceof Error ? error.message : String(error), - key: params.key, - agentId: params.agentId, - }); - return undefined; - } - }, "getCache"); - } - - async setCache(params: { - key: string; - agentId: UUID; - value: string; - }): Promise { - return this.withDatabase(async () => { - try { - const client = await this.pool.connect(); - try { - await client.query("BEGIN"); - await client.query( - `INSERT INTO cache ("key", "agentId", "value", "createdAt") - VALUES ($1, $2, $3, CURRENT_TIMESTAMP) - ON CONFLICT ("key", "agentId") - DO UPDATE SET "value" = EXCLUDED.value, "createdAt" = CURRENT_TIMESTAMP`, - [params.key, params.agentId, params.value] - ); - await client.query("COMMIT"); - return true; - } catch (error) { - await client.query("ROLLBACK"); - elizaLogger.error("Error setting cache", { - error: - error instanceof Error - ? error.message - : String(error), - key: params.key, - agentId: params.agentId, - }); - return false; - } finally { - if (client) client.release(); - } - } catch (error) { - elizaLogger.error( - "Database connection error in setCache", - error - ); - return false; - } - }, "setCache"); - } - - async deleteCache(params: { - key: string; - agentId: UUID; - }): Promise { - return this.withDatabase(async () => { - try { - const client = await this.pool.connect(); - try { - await client.query("BEGIN"); - await client.query( - `DELETE FROM cache WHERE "key" = $1 AND "agentId" = $2`, - [params.key, params.agentId] - ); - await client.query("COMMIT"); - return true; - } catch (error) { - await client.query("ROLLBACK"); - elizaLogger.error("Error deleting cache", { - error: - error instanceof Error - ? error.message - : String(error), - key: params.key, - agentId: params.agentId, - }); - return false; - } finally { - client.release(); - } - } catch (error) { - elizaLogger.error( - "Database connection error in deleteCache", - error - ); - return false; - } - }, "deleteCache"); - } - - async getKnowledge(params: { - id?: UUID; - agentId: UUID; - limit?: number; - query?: string; - }): Promise { - return this.withDatabase(async () => { - let sql = `SELECT * FROM knowledge WHERE ("agentId" = $1 OR "isShared" = true)`; - const queryParams: any[] = [params.agentId]; - let paramCount = 1; - - if (params.id) { - paramCount++; - sql += ` AND id = $${paramCount}`; - queryParams.push(params.id); - } - - if (params.limit) { - paramCount++; - sql += ` LIMIT $${paramCount}`; - queryParams.push(params.limit); - } - - const { rows } = await this.pool.query(sql, queryParams); - - return rows.map((row) => ({ - id: row.id, - agentId: row.agentId, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - embedding: row.embedding - ? new Float32Array(row.embedding) - : undefined, - createdAt: row.createdAt.getTime(), - })); - }, "getKnowledge"); - } - - async searchKnowledge(params: { - agentId: UUID; - embedding: Float32Array; - match_threshold: number; - match_count: number; - searchText?: string; - }): Promise { - return this.withDatabase(async () => { - const cacheKey = `embedding_${params.agentId}_${params.searchText}`; - const cachedResult = await this.getCache({ - key: cacheKey, - agentId: params.agentId, - }); - - if (cachedResult) { - return JSON.parse(cachedResult); - } - - const vectorStr = `[${Array.from(params.embedding).join(",")}]`; - - const sql = ` - WITH vector_scores AS ( - SELECT id, - 1 - (embedding <-> $1::vector) as vector_score - FROM knowledge - WHERE ("agentId" IS NULL AND "isShared" = true) OR "agentId" = $2 - AND embedding IS NOT NULL - ), - keyword_matches AS ( - SELECT id, - CASE - WHEN content->>'text' ILIKE $3 THEN 3.0 - ELSE 1.0 - END * - CASE - WHEN (content->'metadata'->>'isChunk')::boolean = true THEN 1.5 - WHEN (content->'metadata'->>'isMain')::boolean = true THEN 1.2 - ELSE 1.0 - END as keyword_score - FROM knowledge - WHERE ("agentId" IS NULL AND "isShared" = true) OR "agentId" = $2 - ) - SELECT k.*, - v.vector_score, - kw.keyword_score, - (v.vector_score * kw.keyword_score) as combined_score - FROM knowledge k - JOIN vector_scores v ON k.id = v.id - LEFT JOIN keyword_matches kw ON k.id = kw.id - WHERE ("agentId" IS NULL AND "isShared" = true) OR k."agentId" = $2 - AND ( - v.vector_score >= $4 - OR (kw.keyword_score > 1.0 AND v.vector_score >= 0.3) - ) - ORDER BY combined_score DESC - LIMIT $5 - `; - - const { rows } = await this.pool.query(sql, [ - vectorStr, - params.agentId, - `%${params.searchText || ""}%`, - params.match_threshold, - params.match_count, - ]); - - const results = rows.map((row) => ({ - id: row.id, - agentId: row.agentId, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - embedding: row.embedding - ? new Float32Array(row.embedding) - : undefined, - createdAt: row.createdAt.getTime(), - similarity: row.combined_score, - })); - - await this.setCache({ - key: cacheKey, - agentId: params.agentId, - value: JSON.stringify(results), - }); - - return results; - }, "searchKnowledge"); - } - - async createKnowledge(knowledge: RAGKnowledgeItem): Promise { - return this.withDatabase(async () => { - const client = await this.pool.connect(); - try { - await client.query("BEGIN"); - - const metadata = knowledge.content.metadata || {}; - const vectorStr = knowledge.embedding - ? `[${Array.from(knowledge.embedding).join(",")}]` - : null; - - // If this is a chunk, use createKnowledgeChunk - if (metadata.isChunk && metadata.originalId) { - await this.createKnowledgeChunk({ - id: knowledge.id, - originalId: metadata.originalId, - agentId: metadata.isShared ? null : knowledge.agentId, - content: knowledge.content, - embedding: knowledge.embedding, - chunkIndex: metadata.chunkIndex || 0, - isShared: metadata.isShared || false, - createdAt: knowledge.createdAt || Date.now(), - }); - } else { - // This is a main knowledge item - await client.query( - ` - INSERT INTO knowledge ( - id, "agentId", content, embedding, "createdAt", - "isMain", "originalId", "chunkIndex", "isShared" - ) VALUES ($1, $2, $3, $4, to_timestamp($5/1000.0), $6, $7, $8, $9) - ON CONFLICT (id) DO NOTHING - `, - [ - knowledge.id, - metadata.isShared ? null : knowledge.agentId, - knowledge.content, - vectorStr, - knowledge.createdAt || Date.now(), - true, - null, - null, - metadata.isShared || false, - ] - ); - } - - await client.query("COMMIT"); - } catch (error) { - await client.query("ROLLBACK"); - throw error; - } finally { - client.release(); - } - }, "createKnowledge"); - } - - async removeKnowledge(id: UUID): Promise { - return this.withDatabase(async () => { - const client = await this.pool.connect(); - try { - await client.query("BEGIN"); - - // Check if this is a pattern-based chunk deletion (e.g., "id-chunk-*") - if (typeof id === "string" && id.includes("-chunk-*")) { - const mainId = id.split("-chunk-")[0]; - // Delete chunks for this main ID - await client.query( - 'DELETE FROM knowledge WHERE "originalId" = $1', - [mainId] - ); - } else { - // First delete all chunks associated with this knowledge item - await client.query( - 'DELETE FROM knowledge WHERE "originalId" = $1', - [id] - ); - // Then delete the main knowledge item - await client.query("DELETE FROM knowledge WHERE id = $1", [ - id, - ]); - } - - await client.query("COMMIT"); - } catch (error) { - await client.query("ROLLBACK"); - elizaLogger.error("Error removing knowledge", { - error: - error instanceof Error ? error.message : String(error), - id, - }); - throw error; - } finally { - client.release(); - } - }, "removeKnowledge"); - } - - async clearKnowledge(agentId: UUID, shared?: boolean): Promise { - return this.withDatabase(async () => { - const sql = shared - ? 'DELETE FROM knowledge WHERE ("agentId" = $1 OR "isShared" = true)' - : 'DELETE FROM knowledge WHERE "agentId" = $1'; - - await this.pool.query(sql, [agentId]); - }, "clearKnowledge"); - } - - private async createKnowledgeChunk(params: { - id: UUID; - originalId: UUID; - agentId: UUID | null; - content: any; - embedding: Float32Array | undefined | null; - chunkIndex: number; - isShared: boolean; - createdAt: number; - }): Promise { - const vectorStr = params.embedding - ? `[${Array.from(params.embedding).join(",")}]` - : null; - - // Store the pattern-based ID in the content metadata for compatibility - const patternId = `${params.originalId}-chunk-${params.chunkIndex}`; - const contentWithPatternId = { - ...params.content, - metadata: { - ...params.content.metadata, - patternId, - }, - }; - - await this.pool.query( - ` - INSERT INTO knowledge ( - id, "agentId", content, embedding, "createdAt", - "isMain", "originalId", "chunkIndex", "isShared" - ) VALUES ($1, $2, $3, $4, to_timestamp($5/1000.0), $6, $7, $8, $9) - ON CONFLICT (id) DO NOTHING - `, - [ - v4(), // Generate a proper UUID for PostgreSQL - params.agentId, - contentWithPatternId, // Store the pattern ID in metadata - vectorStr, - params.createdAt, - false, - params.originalId, - params.chunkIndex, - params.isShared, - ] - ); - } -} - -export default PostgresDatabaseAdapter; diff --git a/packages/adapter-postgres/tsconfig.json b/packages/adapter-postgres/tsconfig.json deleted file mode 100644 index ea4e73360bf40..0000000000000 --- a/packages/adapter-postgres/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "strict": true - }, - "include": ["src/**/*.ts"], - "exclude": ["node_modules", "dist"] -} diff --git a/packages/adapter-postgres/tsup.config.ts b/packages/adapter-postgres/tsup.config.ts deleted file mode 100644 index 9acebc5ba9ab4..0000000000000 --- a/packages/adapter-postgres/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "uuid", - // Add other modules you want to externalize - ], -}); diff --git a/packages/adapter-qdrant/.npmignore b/packages/adapter-qdrant/.npmignore deleted file mode 100644 index eb4b3947ffa87..0000000000000 --- a/packages/adapter-qdrant/.npmignore +++ /dev/null @@ -1,9 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts -!schema.sql -!seed.sql -!config.toml \ No newline at end of file diff --git a/packages/adapter-qdrant/eslint.config.mjs b/packages/adapter-qdrant/eslint.config.mjs deleted file mode 100644 index 92fe5bbebefad..0000000000000 --- a/packages/adapter-qdrant/eslint.config.mjs +++ /dev/null @@ -1,3 +0,0 @@ -import eslintGlobalConfig from "../../eslint.config.mjs"; - -export default [...eslintGlobalConfig]; diff --git a/packages/adapter-qdrant/package.json b/packages/adapter-qdrant/package.json deleted file mode 100644 index e34ae0ca18040..0000000000000 --- a/packages/adapter-qdrant/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "@elizaos/adapter-qdrant", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "dependencies": { - "@elizaos/core": "workspace:*", - "@qdrant/js-client-rest": "^1.12.0" - }, - "devDependencies": { - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "lint": "eslint --fix --cache ." - } -} diff --git a/packages/adapter-qdrant/src/index.ts b/packages/adapter-qdrant/src/index.ts deleted file mode 100644 index 0233a3fb8a033..0000000000000 --- a/packages/adapter-qdrant/src/index.ts +++ /dev/null @@ -1,406 +0,0 @@ -import { v4,v5 } from "uuid"; -import { QdrantClient } from "@qdrant/js-client-rest"; -import { - Account, - Actor, - GoalStatus, - IDatabaseCacheAdapter, - UUID, - elizaLogger, - RAGKnowledgeItem, - DatabaseAdapter, - Participant, - type Memory, - type Goal, - type Relationship, -} from "@elizaos/core"; - - -export class QdrantDatabaseAdapter extends DatabaseAdapter implements IDatabaseCacheAdapter { - db: QdrantClient; - collectionName: string = 'collection'; - qdrantV5UUIDNamespace: string = "00000000-0000-0000-0000-000000000000"; - cacheM: Map = new Map(); - vectorSize: number; - constructor(url: string, apiKey: string, port: number, vectorSize: number) { - super(); - elizaLogger.info("new Qdrant client..."); - this.db = new QdrantClient({ - url: url, - apiKey:apiKey, - port: port, - }); - this.vectorSize = vectorSize; - } - - private preprocess(content: string): string { - if (!content || typeof content !== "string") { - elizaLogger.warn("Invalid input for preprocessing"); - return ""; - } - const processedContent = content - .replace(/```[\s\S]*?```/g, "") - .replace(/`.*?`/g, "") - .replace(/#{1,6}\s*(.*)/g, "$1") - .replace(/!\[(.*?)\]\(.*?\)/g, "$1") - .replace(/\[(.*?)\]\(.*?\)/g, "$1") - .replace(/(https?:\/\/)?(www\.)?([^\s]+\.[^\s]+)/g, "$3") - .replace(/<@[!&]?\d+>/g, "") - .replace(/<[^>]*>/g, "") - .replace(/^\s*[-*_]{3,}\s*$/gm, "") - .replace(/\/\*[\s\S]*?\*\//g, "") - .replace(/\/\/.*/g, "") - .replace(/\s+/g, " ") - .replace(/\n{3,}/g, "\n\n") - .replace(/[^a-zA-Z0-9\s\-_./:?=&]/g, "") - .trim() - return processedContent - } - - async init () { - const response = await this.db.getCollections(); - const collectionNames = response.collections.map((collection) => collection.name); - if (collectionNames.includes(this.collectionName)) { - elizaLogger.info("Collection already exists."); - } else { - elizaLogger.info("create collection..."); - await this.db.createCollection(this.collectionName, { - vectors: { - size: this.vectorSize, - distance: 'Cosine', - }, - }); - } - } - - async createKnowledge(knowledge: RAGKnowledgeItem): Promise { - const metadata = knowledge.content.metadata || {} - elizaLogger.info("Qdrant adapter createKnowledge id:", knowledge.id); - await this.db.upsert(this.collectionName, { - wait: true, - points: [ - { - id: this.buildQdrantID(knowledge.id), // the qdrant id must be a standard uuid - vector: knowledge.embedding ? Array.from(knowledge.embedding) : [], - payload:{ - agentId: metadata.isShared ? null : knowledge.agentId, - content: { - text: knowledge.content.text, - metadata: metadata - }, - createdAt: knowledge.createdAt || Date.now(), - isMain: metadata.isMain || false, - originalId: metadata.originalId || null, - chunkIndex: metadata.chunkIndex || null, - isShared : metadata.isShared || false - } - } - ], - }) - } - - async getKnowledge(params: { - query?: string; - id?: UUID; - conversationContext?: string; - limit?: number; - agentId?: UUID; - }): Promise { - elizaLogger.info("Qdrant adapter getKnowledge...", params.id); - const rows = await this.db.retrieve(this.collectionName, { - ids: params.id ? [params.id.toString()] : [], - }); - const results: RAGKnowledgeItem[] = rows.map((row) => { - const contentObj = typeof row.payload?.content === "string" - ? JSON.parse(row.payload.content) - : row.payload?.content; - return { - id: row.id.toString() as UUID, - agentId: (row.payload?.agentId || "") as UUID, - content: { - text: String(contentObj.text || ""), - metadata: contentObj.metadata as { [key: string]: unknown } - }, - embedding: row.vector ? Float32Array.from(row.vector as number[]) : undefined, - createdAt: row.payload?.createdAt as number - }; - }); - return results; - } - - async processFile(file: { path: string; content: string; type: "pdf" | "md" | "txt"; isShared: boolean }): Promise { - return Promise.resolve(undefined); - } - - async removeKnowledge(id: UUID): Promise { - return Promise.resolve(undefined); - } - - async searchKnowledge(params: { - agentId: UUID; - embedding: Float32Array | number[]; - match_threshold?: number; - match_count?: number; - searchText?: string - }): Promise { - const cacheKey = `${params.agentId}:${params.embedding.toString()}`; - const cachedResult = await this.getCache({ - key: cacheKey, - agentId: params.agentId - }); - - if (cachedResult) { - return JSON.parse(cachedResult); - } - const rows = await this.db.search(this.collectionName, { - vector: Array.from(params.embedding), - with_vector: true - }); - - const results: RAGKnowledgeItem[] = rows.map((row) => { - const contentObj = typeof row.payload?.content === "string" - ? JSON.parse(row.payload.content) - : row.payload?.content; - elizaLogger.info("Qdrant adapter searchKnowledge id:", row.id.toString() as UUID); - return { - id: row.id.toString() as UUID, - agentId: (row.payload?.agentId || "") as UUID, - content: { - text: String(contentObj.text || ""), - metadata: contentObj.metadata as { [key: string]: unknown } - }, - embedding: row.vector ? Float32Array.from(row.vector as number[]) : undefined, - createdAt: row.payload?.createdAt as number, - similarity: row.score || 0 - }; - }); - elizaLogger.debug("Qdrant adapter searchKnowledge results:", results); - await this.setCache({ - key: cacheKey, - agentId: params.agentId, - value: JSON.stringify(results) - }); - return results; - } - - async addParticipant(userId: UUID, roomId: UUID): Promise { - return Promise.resolve(false); - } - - async clearKnowledge(agentId: UUID, shared?: boolean): Promise { - return Promise.resolve(undefined); - } - - async close(): Promise { - return Promise.resolve(undefined); - } - - async countMemories(roomId: UUID, unique?: boolean, tableName?: string): Promise { - return Promise.resolve(0); - } - - async createAccount(account: Account): Promise { - return Promise.resolve(false); - } - - async createGoal(goal: Goal): Promise { - return Promise.resolve(undefined); - } - - async createMemory(memory: Memory, tableName: string, unique?: boolean): Promise { - return Promise.resolve(undefined); - } - - async createRelationship(params: { userA: UUID; userB: UUID }): Promise { - return Promise.resolve(false); - } - - async createRoom(roomId?: UUID): Promise { - const newRoomId = roomId || v4(); - return newRoomId as UUID; - } - - async getAccountById(userId: UUID): Promise { - return null; - } - - async getActorDetails(params: { roomId: UUID }): Promise { - return Promise.resolve([]); - } - - async getCachedEmbeddings(params: { - query_table_name: string; - query_threshold: number; - query_input: string; - query_field_name: string; - query_field_sub_name: string; - query_match_count: number - }): Promise<{ embedding: number[]; levenshtein_score: number }[]> { - return Promise.resolve([]); - } - - async getGoals(params: { - agentId: UUID; - roomId: UUID; - userId?: UUID | null; - onlyInProgress?: boolean; - count?: number - }): Promise { - return Promise.resolve([]); - } - - async getMemories(params: { - roomId: UUID; - count?: number; - unique?: boolean; - tableName: string; - agentId: UUID; - start?: number; - end?: number - }): Promise { - return Promise.resolve([]); - } - - async getMemoriesByRoomIds(params: { tableName: string; agentId: UUID; roomIds: UUID[] }): Promise { - return Promise.resolve([]); - } - - async getMemoryById(id: UUID): Promise { - return null; - } - - async getParticipantUserState(roomId: UUID, userId: UUID): Promise<"FOLLOWED" | "MUTED" | null> { - return null; - } - - async getParticipantsForAccount(userId: UUID): Promise { - return Promise.resolve([]); - } - - async getParticipantsForRoom(roomId: UUID): Promise { - return Promise.resolve([]); - } - - async getRelationship(params: { userA: UUID; userB: UUID }): Promise { - return null; - } - - async getRelationships(params: { userId: UUID }): Promise { - return Promise.resolve([]); - } - - async getRoom(roomId: UUID): Promise { - return null; - } - - async getRoomsForParticipant(userId: UUID): Promise { - return Promise.resolve([]); - } - - async getRoomsForParticipants(userIds: UUID[]): Promise { - return Promise.resolve([]); - } - - async log(params: { body: { [p: string]: unknown }; userId: UUID; roomId: UUID; type: string }): Promise { - return Promise.resolve(undefined); - } - - async removeAllGoals(roomId: UUID): Promise { - return Promise.resolve(undefined); - } - - async removeAllMemories(roomId: UUID, tableName: string): Promise { - return Promise.resolve(undefined); - } - - async removeGoal(goalId: UUID): Promise { - return Promise.resolve(undefined); - } - - async removeMemory(memoryId: UUID, tableName: string): Promise { - return Promise.resolve(undefined); - } - - async removeParticipant(userId: UUID, roomId: UUID): Promise { - return Promise.resolve(false); - } - - async removeRoom(roomId: UUID): Promise { - return Promise.resolve(undefined); - } - - async searchMemories(params: { - tableName: string; - agentId: UUID; - roomId: UUID; - embedding: number[]; - match_threshold: number; - match_count: number; - unique: boolean - }): Promise { - return Promise.resolve([]); - } - - async searchMemoriesByEmbedding(embedding: number[], params: { - match_threshold?: number; - count?: number; - roomId?: UUID; - agentId?: UUID; - unique?: boolean; - tableName: string - }): Promise { - return Promise.resolve([]); - } - - async setParticipantUserState(roomId: UUID, userId: UUID, state: "FOLLOWED" | "MUTED" | null): Promise { - return Promise.resolve(undefined); - } - - async updateGoal(goal: Goal): Promise { - return Promise.resolve(undefined); - } - - async updateGoalStatus(params: { goalId: UUID; status: GoalStatus }): Promise { - return Promise.resolve(undefined); - } - - getMemoriesByIds(memoryIds: UUID[], tableName?: string): Promise { - throw new Error("Method not implemented."); - } - - async getCache(params: { - key: string; - agentId: UUID; - }): Promise { - let key = this.buildKey(params.agentId, params.key); - let result = this.cacheM.get(key); - return result; - } - - async setCache(params: { - key: string; - agentId: UUID; - value: string; - }): Promise { - this.cacheM.set(this.buildKey(params.agentId, params.key),params.value) - return true; - } - - async deleteCache(params: { - key: string; - agentId: UUID; - }): Promise { - const key = this.buildKey(params.agentId, params.key); - return this.cacheM.delete(key); - } - - private buildKey(agentId: UUID, key: string): string { - return `${agentId}:${key}`; - } - - private buildQdrantID(id: string): string{ - return v5(id,this.qdrantV5UUIDNamespace); - } -} - -export default QdrantDatabaseAdapter; diff --git a/packages/adapter-qdrant/tsconfig.json b/packages/adapter-qdrant/tsconfig.json deleted file mode 100644 index ea4e73360bf40..0000000000000 --- a/packages/adapter-qdrant/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "strict": true - }, - "include": ["src/**/*.ts"], - "exclude": ["node_modules", "dist"] -} diff --git a/packages/adapter-qdrant/tsup.config.ts b/packages/adapter-qdrant/tsup.config.ts deleted file mode 100644 index 9acebc5ba9ab4..0000000000000 --- a/packages/adapter-qdrant/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "uuid", - // Add other modules you want to externalize - ], -}); diff --git a/packages/adapter-redis/.npmignore b/packages/adapter-redis/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/adapter-redis/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/adapter-redis/__tests__/redis-adapter.test.ts b/packages/adapter-redis/__tests__/redis-adapter.test.ts deleted file mode 100644 index dff80d0837eba..0000000000000 --- a/packages/adapter-redis/__tests__/redis-adapter.test.ts +++ /dev/null @@ -1,183 +0,0 @@ -import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; -import { RedisClient } from '../src'; -import { type UUID, elizaLogger } from '@elizaos/core'; -import Redis from 'ioredis'; - -// Mock ioredis -vi.mock('ioredis', () => { - const MockRedis = vi.fn(() => ({ - on: vi.fn(), - get: vi.fn(), - set: vi.fn(), - del: vi.fn(), - quit: vi.fn() - })); - return { default: MockRedis }; -}); - -// Mock elizaLogger -vi.mock('@elizaos/core', async () => { - const actual = await vi.importActual('@elizaos/core'); - return { - ...actual as any, - elizaLogger: { - success: vi.fn(), - error: vi.fn() - } - }; -}); - -describe('RedisClient', () => { - let client: RedisClient; - let mockRedis: any; - - beforeEach(() => { - vi.clearAllMocks(); - client = new RedisClient('redis://localhost:6379'); - // Get the instance created by the constructor - mockRedis = (Redis as unknown as ReturnType).mock.results[0].value; - }); - - afterEach(() => { - vi.clearAllMocks(); - }); - - describe('constructor', () => { - it('should set up event handlers', () => { - expect(mockRedis.on).toHaveBeenCalledWith('connect', expect.any(Function)); - expect(mockRedis.on).toHaveBeenCalledWith('error', expect.any(Function)); - }); - - it('should log success on connect', () => { - const connectHandler = mockRedis.on.mock.calls.find(call => call[0] === 'connect')[1]; - connectHandler(); - expect(elizaLogger.success).toHaveBeenCalledWith('Connected to Redis'); - }); - - it('should log error on error event', () => { - const error = new Error('Redis connection error'); - const errorHandler = mockRedis.on.mock.calls.find(call => call[0] === 'error')[1]; - errorHandler(error); - expect(elizaLogger.error).toHaveBeenCalledWith('Redis error:', error); - }); - }); - - describe('getCache', () => { - const agentId = 'test-agent' as UUID; - const key = 'test-key'; - const expectedRedisKey = `${agentId}:${key}`; - - it('should return cached value when it exists', async () => { - const cachedValue = 'cached-data'; - mockRedis.get.mockResolvedValueOnce(cachedValue); - - const result = await client.getCache({ agentId, key }); - - expect(mockRedis.get).toHaveBeenCalledWith(expectedRedisKey); - expect(result).toBe(cachedValue); - }); - - it('should return undefined when key does not exist', async () => { - mockRedis.get.mockResolvedValueOnce(null); - - const result = await client.getCache({ agentId, key }); - - expect(mockRedis.get).toHaveBeenCalledWith(expectedRedisKey); - expect(result).toBeUndefined(); - }); - - it('should handle errors and return undefined', async () => { - const error = new Error('Redis error'); - mockRedis.get.mockRejectedValueOnce(error); - - const result = await client.getCache({ agentId, key }); - - expect(mockRedis.get).toHaveBeenCalledWith(expectedRedisKey); - expect(elizaLogger.error).toHaveBeenCalledWith('Error getting cache:', error); - expect(result).toBeUndefined(); - }); - }); - - describe('setCache', () => { - const agentId = 'test-agent' as UUID; - const key = 'test-key'; - const value = 'test-value'; - const expectedRedisKey = `${agentId}:${key}`; - - it('should successfully set cache value', async () => { - mockRedis.set.mockResolvedValueOnce('OK'); - - const result = await client.setCache({ agentId, key, value }); - - expect(mockRedis.set).toHaveBeenCalledWith(expectedRedisKey, value); - expect(result).toBe(true); - }); - - it('should handle errors and return false', async () => { - const error = new Error('Redis error'); - mockRedis.set.mockRejectedValueOnce(error); - - const result = await client.setCache({ agentId, key, value }); - - expect(mockRedis.set).toHaveBeenCalledWith(expectedRedisKey, value); - expect(elizaLogger.error).toHaveBeenCalledWith('Error setting cache:', error); - expect(result).toBe(false); - }); - }); - - describe('deleteCache', () => { - const agentId = 'test-agent' as UUID; - const key = 'test-key'; - const expectedRedisKey = `${agentId}:${key}`; - - it('should successfully delete cache when key exists', async () => { - mockRedis.del.mockResolvedValueOnce(1); - - const result = await client.deleteCache({ agentId, key }); - - expect(mockRedis.del).toHaveBeenCalledWith(expectedRedisKey); - expect(result).toBe(true); - }); - - it('should return false when key does not exist', async () => { - mockRedis.del.mockResolvedValueOnce(0); - - const result = await client.deleteCache({ agentId, key }); - - expect(mockRedis.del).toHaveBeenCalledWith(expectedRedisKey); - expect(result).toBe(false); - }); - - it('should handle errors and return false', async () => { - const error = new Error('Redis error'); - mockRedis.del.mockRejectedValueOnce(error); - - const result = await client.deleteCache({ agentId, key }); - - expect(mockRedis.del).toHaveBeenCalledWith(expectedRedisKey); - expect(elizaLogger.error).toHaveBeenCalledWith('Error deleting cache:', error); - expect(result).toBe(false); - }); - }); - - describe('disconnect', () => { - it('should successfully disconnect from Redis', async () => { - mockRedis.quit.mockResolvedValueOnce('OK'); - - await client.disconnect(); - - expect(mockRedis.quit).toHaveBeenCalled(); - expect(elizaLogger.success).toHaveBeenCalledWith('Disconnected from Redis'); - }); - - it('should handle disconnect errors', async () => { - const error = new Error('Redis disconnect error'); - mockRedis.quit.mockRejectedValueOnce(error); - - await client.disconnect(); - - expect(mockRedis.quit).toHaveBeenCalled(); - expect(elizaLogger.error).toHaveBeenCalledWith('Error disconnecting from Redis:', error); - }); - }); -}); diff --git a/packages/adapter-redis/package.json b/packages/adapter-redis/package.json deleted file mode 100644 index 09e003054d75b..0000000000000 --- a/packages/adapter-redis/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "@elizaos/adapter-redis", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "ioredis": "5.4.2" - }, - "devDependencies": { - "@types/ioredis": "^5.0.0", - "tsup": "8.3.5", - "vitest": "^3.0.2" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "test:watch": "vitest" - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/adapter-redis/src/index.ts b/packages/adapter-redis/src/index.ts deleted file mode 100644 index c993d8cec1459..0000000000000 --- a/packages/adapter-redis/src/index.ts +++ /dev/null @@ -1,76 +0,0 @@ -import Redis from "ioredis"; -import { type IDatabaseCacheAdapter, type UUID, elizaLogger } from "@elizaos/core"; - -export class RedisClient implements IDatabaseCacheAdapter { - private client: Redis; - - constructor(redisUrl: string) { - this.client = new Redis(redisUrl); - - this.client.on("connect", () => { - elizaLogger.success("Connected to Redis"); - }); - - this.client.on("error", (err) => { - elizaLogger.error("Redis error:", err); - }); - } - - async getCache(params: { - agentId: UUID; - key: string; - }): Promise { - try { - const redisKey = this.buildKey(params.agentId, params.key); - const value = await this.client.get(redisKey); - return value || undefined; - } catch (err) { - elizaLogger.error("Error getting cache:", err); - return undefined; - } - } - - async setCache(params: { - agentId: UUID; - key: string; - value: string; - }): Promise { - try { - const redisKey = this.buildKey(params.agentId, params.key); - await this.client.set(redisKey, params.value); - return true; - } catch (err) { - elizaLogger.error("Error setting cache:", err); - return false; - } - } - - async deleteCache(params: { - agentId: UUID; - key: string; - }): Promise { - try { - const redisKey = this.buildKey(params.agentId, params.key); - const result = await this.client.del(redisKey); - return result > 0; - } catch (err) { - elizaLogger.error("Error deleting cache:", err); - return false; - } - } - - async disconnect(): Promise { - try { - await this.client.quit(); - elizaLogger.success("Disconnected from Redis"); - } catch (err) { - elizaLogger.error("Error disconnecting from Redis:", err); - } - } - - private buildKey(agentId: UUID, key: string): string { - return `${agentId}:${key}`; // Constructs a unique key based on agentId and key - } -} - -export default RedisClient; diff --git a/packages/adapter-redis/tsconfig.json b/packages/adapter-redis/tsconfig.json deleted file mode 100644 index 005fbac9d3634..0000000000000 --- a/packages/adapter-redis/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/adapter-redis/tsup.config.ts b/packages/adapter-redis/tsup.config.ts deleted file mode 100644 index 9acebc5ba9ab4..0000000000000 --- a/packages/adapter-redis/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "uuid", - // Add other modules you want to externalize - ], -}); diff --git a/packages/adapter-redis/vitest.config.ts b/packages/adapter-redis/vitest.config.ts deleted file mode 100644 index adbf725538008..0000000000000 --- a/packages/adapter-redis/vitest.config.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - }, -}); diff --git a/packages/adapter-sqlite/README.md b/packages/adapter-sqlite/README.md new file mode 100644 index 0000000000000..84cbdfa29d9e2 --- /dev/null +++ b/packages/adapter-sqlite/README.md @@ -0,0 +1,177 @@ +# SQLite Adapter for ElizaOS + +A lightweight SQLite adapter for ElizaOS that provides persistent storage with vector embedding support, relationship management, and caching capabilities. Designed for embedded applications and local development. + +## Features + +- Vector embedding storage with BLOB support +- JSON storage with validation +- Comprehensive schema for agents, rooms, and participants +- Relationship management system +- Memory and goal tracking +- Built-in caching system +- Foreign key constraints +- Automatic timestamp management + +## Installation + +```bash +pnpm add @elizaos/adapter-sqlite +``` + +## Database Schema + +### Core Tables + +#### Accounts +```sql +- id (TEXT PRIMARY KEY) +- name (TEXT) +- username (TEXT) +- email (TEXT) +- avatarUrl (TEXT) +- details (JSON) +``` + +#### Memories +```sql +- id (TEXT PRIMARY KEY) +- type (TEXT) +- content (TEXT) +- embedding (BLOB) +- userId (TEXT FK) +- roomId (TEXT FK) +- agentId (TEXT FK) +``` + +#### Goals +```sql +- id (TEXT PRIMARY KEY) +- name (TEXT) +- status (TEXT) +- description (TEXT) +- objectives (JSON) +``` + +### Relationship Management +```sql +- participants (user-room relationships) +- relationships (user-user connections) +- rooms (conversation spaces) +``` + +## Usage + +### Basic Setup + +```typescript +import { SqliteDatabaseAdapter } from '@elizaos/adapter-sqlite'; + +const adapter = new SqliteDatabaseAdapter('path/to/database.db'); +await adapter.init(); +``` + +### Room Management + +```typescript +// Get room by ID +const room = await adapter.getRoom(roomId); + +// Get participants in a room +const participants = await adapter.getParticipantsForRoom(roomId); +``` + +### Participant Management + +```typescript +// Get participant state +const state = await adapter.getParticipantUserState(roomId, userId); + +// Set participant state +await adapter.setParticipantUserState(roomId, userId, 'FOLLOWED'); + +// Get all participants for an account +const participants = await adapter.getParticipantsForAccount(userId); +``` + +### Memory Operations + +```typescript +// Store a memory with embedding +await adapter.createMemory({ + type: 'conversation', + content: 'Memory content', + embedding: new Float32Array([...]), + userId, + roomId +}); +``` + +### Goal Tracking + +```typescript +// Create a new goal +await adapter.createGoal({ + name: 'Complete task', + status: 'IN_PROGRESS', + objectives: ['Research', 'Implementation'], + userId, + roomId +}); +``` + +## Special Features + +### JSON Validation +- Automatic JSON validation for fields like `details`, `objectives`, and `value` +- Enforced through SQLite CHECK constraints + +### Vector Embeddings +- Optimized BLOB storage for embeddings +- Compatible with various embedding models +- Supports similarity search operations + +### Timestamp Management +- Automatic `createdAt` timestamps +- Consistent datetime handling + +## Performance Considerations + +1. Uses prepared statements for efficient queries +2. Implements proper indexing on frequently accessed columns +3. Enforces data integrity through foreign key constraints +4. Optimized blob storage for vector embeddings +5. JSON validation at the database level + +## Development and Testing + +```bash +# Run tests +pnpm test + +# Run tests in watch mode +pnpm test:watch +``` + +### Test Coverage +- Room management operations +- Participant state handling +- Account relationships +- Database initialization +- Connection management + +## Best Practices + +1. Always initialize the adapter before use +2. Properly close the connection when done +3. Use transactions for multiple related operations +4. Handle potential JSON validation errors +5. Consider embedding size limitations +6. Implement proper error handling + +## Requirements + +- Node.js 23.3.0+ +- SQLite 3.35.0+ (for JSON support) +- Sufficient disk space for vector storage +- ElizaOS core package diff --git a/packages/adapter-sqlite/__tests__/sqlite-adapter.test.ts b/packages/adapter-sqlite/__tests__/sqlite-adapter.test.ts index b95f8f944888b..ada4c77a86b00 100644 --- a/packages/adapter-sqlite/__tests__/sqlite-adapter.test.ts +++ b/packages/adapter-sqlite/__tests__/sqlite-adapter.test.ts @@ -1,8 +1,8 @@ -import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import { type UUID } from '@elizaos/core'; +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; import { SqliteDatabaseAdapter } from '../src'; -import { type UUID, elizaLogger } from '@elizaos/core'; -import type Database from 'better-sqlite3'; import { load } from '../src/sqlite_vec'; +import { Database } from 'better-sqlite3'; // Mock the elizaLogger vi.mock('@elizaos/core', async () => { diff --git a/packages/adapter-sqlite/package.json b/packages/adapter-sqlite/package.json index 3051dd9d18d45..f15548fbf9e9b 100644 --- a/packages/adapter-sqlite/package.json +++ b/packages/adapter-sqlite/package.json @@ -1,6 +1,6 @@ { - "name": "@elizaos/adapter-sqlite", - "version": "0.25.6-alpha.1", + "name": "@elizaos-plugins/adapter-sqlite", + "version": "0.25.8", "type": "module", "main": "dist/index.js", "module": "dist/index.js", @@ -22,9 +22,11 @@ "@elizaos/core": "workspace:*", "@types/better-sqlite3": "7.6.12", "better-sqlite3": "11.8.1", - "sqlite-vec": "0.1.6" + "sqlite-vec": "0.1.6", + "uuid": "11.0.5" }, "devDependencies": { + "@types/uuid": "10.0.0", "tsup": "8.3.5", "vitest": "^3.0.2", "@vitest/coverage-v8": "^3.0.2" @@ -37,5 +39,8 @@ }, "peerDependencies": { "whatwg-url": "7.1.0" + }, + "publishConfig": { + "access": "public" } } diff --git a/packages/adapter-sqlite/src/index.ts b/packages/adapter-sqlite/src/index.ts index fb56d29d6c69e..2dadf4558a2c5 100644 --- a/packages/adapter-sqlite/src/index.ts +++ b/packages/adapter-sqlite/src/index.ts @@ -1,3 +1,6 @@ +import path from "path"; +import fs from "fs"; + export * from "./sqliteTables.ts"; export * from "./sqlite_vec.ts"; @@ -17,14 +20,19 @@ import type { UUID, RAGKnowledgeItem, ChunkRow, + Adapter, + IAgentRuntime, + Plugin, } from "@elizaos/core"; -import type { Database } from "better-sqlite3"; +import type { Database as BetterSqlite3Database } from "better-sqlite3"; import { v4 } from "uuid"; import { load } from "./sqlite_vec.ts"; import { sqliteTables } from "./sqliteTables.ts"; +import Database from "better-sqlite3"; + export class SqliteDatabaseAdapter - extends DatabaseAdapter + extends DatabaseAdapter implements IDatabaseCacheAdapter { async getRoom(roomId: UUID): Promise { @@ -75,7 +83,7 @@ export class SqliteDatabaseAdapter stmt.run(state, roomId, userId); } - constructor(db: Database) { + constructor(db: BetterSqlite3Database) { super(); this.db = db; load(db); @@ -1084,3 +1092,37 @@ export class SqliteDatabaseAdapter } } } + +const sqliteDatabaseAdapter: Adapter = { + init: (runtime: IAgentRuntime) => { + const dataDir = path.join(process.cwd(), "data"); + + if (!fs.existsSync(dataDir)) { + fs.mkdirSync(dataDir, { recursive: true }); + } + + const filePath = runtime.getSetting("SQLITE_FILE") ?? path.resolve(dataDir, "db.sqlite"); + elizaLogger.info(`Initializing SQLite database at ${filePath}...`); + const db = new SqliteDatabaseAdapter(new Database(filePath)); + + // Test the connection + db.init() + .then(() => { + elizaLogger.success( + "Successfully connected to SQLite database" + ); + }) + .catch((error) => { + elizaLogger.error("Failed to connect to SQLite:", error); + }); + + return db; + }, +}; + +const sqlitePlugin: Plugin = { + name: "sqlite", + description: "SQLite database adapter plugin", + adapters: [sqliteDatabaseAdapter], +}; +export default sqlitePlugin; \ No newline at end of file diff --git a/packages/adapter-sqljs/.npmignore b/packages/adapter-sqljs/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/adapter-sqljs/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/adapter-sqljs/package.json b/packages/adapter-sqljs/package.json deleted file mode 100644 index 11e0c27de0f51..0000000000000 --- a/packages/adapter-sqljs/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "@elizaos/adapter-sqljs", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "@types/sql.js": "1.4.9", - "sql.js": "1.12.0", - "uuid": "11.0.3" - }, - "devDependencies": { - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch" - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/adapter-sqljs/src/index.ts b/packages/adapter-sqljs/src/index.ts deleted file mode 100644 index 6aa6990fbbf1f..0000000000000 --- a/packages/adapter-sqljs/src/index.ts +++ /dev/null @@ -1,1048 +0,0 @@ -export * from "./sqliteTables.ts"; -export * from "./types.ts"; - -import { - type Account, - type Actor, - DatabaseAdapter, - type GoalStatus, - type IDatabaseCacheAdapter, - type Participant, - type Goal, - type Memory, - type Relationship, - type UUID, - type RAGKnowledgeItem, - elizaLogger, -} from "@elizaos/core"; -import { v4 } from "uuid"; -import { sqliteTables } from "./sqliteTables.ts"; -import type { Database } from "./types.ts"; - -export class SqlJsDatabaseAdapter - extends DatabaseAdapter - implements IDatabaseCacheAdapter -{ - constructor(db: Database) { - super(); - this.db = db; - } - - async init() { - this.db.exec(sqliteTables); - } - - async close() { - this.db.close(); - } - - async getRoom(roomId: UUID): Promise { - const sql = "SELECT id FROM rooms WHERE id = ?"; - const stmt = this.db.prepare(sql); - stmt.bind([roomId]); - const room = stmt.getAsObject() as { id: string } | undefined; - stmt.free(); - return room ? (room.id as UUID) : null; - } - - async getParticipantsForAccount(userId: UUID): Promise { - const sql = ` - SELECT p.id, p.userId, p.roomId, p.last_message_read - FROM participants p - WHERE p.userId = ? - `; - const stmt = this.db.prepare(sql); - stmt.bind([userId]); - const participants: Participant[] = []; - while (stmt.step()) { - const participant = stmt.getAsObject() as unknown as Participant; - participants.push(participant); - } - stmt.free(); - return participants; - } - - async getParticipantUserState( - roomId: UUID, - userId: UUID - ): Promise<"FOLLOWED" | "MUTED" | null> { - const sql = - "SELECT userState FROM participants WHERE roomId = ? AND userId = ?"; - const stmt = this.db.prepare(sql); - stmt.bind([roomId, userId]); - const result = stmt.getAsObject() as { - userState: "FOLLOWED" | "MUTED" | null; - }; - stmt.free(); - return result.userState ?? null; - } - - async getMemoriesByRoomIds(params: { - agentId: UUID; - roomIds: UUID[]; - tableName: string; - limit?: number; - }): Promise { - const placeholders = params.roomIds.map(() => "?").join(", "); - let sql = `SELECT * FROM memories WHERE 'type' = ? AND agentId = ? AND roomId IN (${placeholders})`; - - const queryParams = [ - params.tableName, - params.agentId, - ...params.roomIds, - ]; - - // Add ordering and limit - sql += ` ORDER BY createdAt DESC`; - if (params.limit) { - sql += ` LIMIT ?`; - queryParams.push(params.limit.toString()); - } - - const stmt = this.db.prepare(sql); - - elizaLogger.log({ queryParams }); - stmt.bind(queryParams); - elizaLogger.log({ queryParams }); - - const memories: Memory[] = []; - while (stmt.step()) { - const memory = stmt.getAsObject() as unknown as Memory; - memories.push({ - ...memory, - content: JSON.parse(memory.content as unknown as string), - }); - } - stmt.free(); - return memories; - } - - async setParticipantUserState( - roomId: UUID, - userId: UUID, - state: "FOLLOWED" | "MUTED" | null - ): Promise { - const sql = - "UPDATE participants SET userState = ? WHERE roomId = ? AND userId = ?"; - const stmt = this.db.prepare(sql); - stmt.bind([state, roomId, userId]); - stmt.step(); - stmt.free(); - } - - async getParticipantsForRoom(roomId: UUID): Promise { - const sql = "SELECT userId FROM participants WHERE roomId = ?"; - const stmt = this.db.prepare(sql); - stmt.bind([roomId]); - const userIds: UUID[] = []; - while (stmt.step()) { - const row = stmt.getAsObject() as { userId: string }; - userIds.push(row.userId as UUID); - } - stmt.free(); - return userIds; - } - - async getAccountById(userId: UUID): Promise { - const sql = "SELECT * FROM accounts WHERE id = ?"; - const stmt = this.db.prepare(sql); - stmt.bind([userId]); - const account = stmt.getAsObject() as unknown as Account | undefined; - - if (account && typeof account.details === "string") { - account.details = JSON.parse(account.details); - } - - stmt.free(); - return account || null; - } - - async createAccount(account: Account): Promise { - try { - const sql = ` - INSERT INTO accounts (id, name, username, email, avatarUrl, details) - VALUES (?, ?, ?, ?, ?, ?) - `; - const stmt = this.db.prepare(sql); - stmt.run([ - account.id ?? v4(), - account.name, - account.username || "", - account.email || "", - account.avatarUrl || "", - JSON.stringify(account.details), - ]); - stmt.free(); - return true; - } catch (error) { - elizaLogger.error("Error creating account", error); - return false; - } - } - - async getActorById(params: { roomId: UUID }): Promise { - const sql = ` - SELECT a.id, a.name, a.username, a.details - FROM participants p - LEFT JOIN accounts a ON p.userId = a.id - WHERE p.roomId = ? - `; - const stmt = this.db.prepare(sql); - stmt.bind([params.roomId]); - const rows: Actor[] = []; - while (stmt.step()) { - const row = stmt.getAsObject() as unknown as Actor; - rows.push({ - ...row, - details: - typeof row.details === "string" - ? JSON.parse(row.details) - : row.details, - }); - } - stmt.free(); - return rows; - } - - async getActorDetails(params: { roomId: UUID }): Promise { - const sql = ` - SELECT a.id, a.name, a.username, a.details - FROM participants p - LEFT JOIN accounts a ON p.userId = a.id - WHERE p.roomId = ? - `; - const stmt = this.db.prepare(sql); - stmt.bind([params.roomId]); - const rows: Actor[] = []; - while (stmt.step()) { - const row = stmt.getAsObject() as unknown as Actor; - rows.push({ - ...row, - details: - typeof row.details === "string" - ? JSON.parse(row.details) - : row.details, - }); - } - stmt.free(); - return rows; - } - - async getMemoryById(id: UUID): Promise { - const sql = "SELECT * FROM memories WHERE id = ?"; - const stmt = this.db.prepare(sql); - stmt.bind([id]); - const memory = stmt.getAsObject() as unknown as Memory | undefined; - stmt.free(); - return memory || null; - } - - async getMemoriesByIds( - memoryIds: UUID[], - tableName?: string - ): Promise { - if (memoryIds.length === 0) return []; - const placeholders = memoryIds.map(() => "?").join(","); - let sql = `SELECT * FROM memories WHERE id IN (${placeholders})`; - const queryParams: any[] = [...memoryIds]; - - if (tableName) { - sql += ` AND type = ?`; - queryParams.push(tableName); - } - - const stmt = this.db.prepare(sql); - stmt.bind(queryParams); - - const memories: Memory[] = []; - while (stmt.step()) { - const memory = stmt.getAsObject() as unknown as Memory; - memories.push({ - ...memory, - content: JSON.parse(memory.content as unknown as string), - }); - } - stmt.free(); - return memories; - } - - async createMemory(memory: Memory, tableName: string): Promise { - let isUnique = true; - if (memory.embedding) { - // Check if a similar memory already exists - const similarMemories = await this.searchMemoriesByEmbedding( - memory.embedding, - { - agentId: memory.agentId, - tableName, - roomId: memory.roomId, - match_threshold: 0.95, // 5% similarity threshold - count: 1, - } - ); - - isUnique = similarMemories.length === 0; - } - - // Insert the memory with the appropriate 'unique' value - const sql = `INSERT INTO memories (id, type, content, embedding, userId, roomId, agentId, \`unique\`, createdAt) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`; - const stmt = this.db.prepare(sql); - - const createdAt = memory.createdAt ?? Date.now(); - - stmt.run([ - memory.id ?? v4(), - tableName, - JSON.stringify(memory.content), - JSON.stringify(memory.embedding), - memory.userId, - memory.roomId, - memory.agentId, - isUnique ? 1 : 0, - createdAt, - ]); - stmt.free(); - } - - async searchMemories(params: { - tableName: string; - agentId: UUID; - roomId: UUID; - embedding: number[]; - match_threshold: number; - match_count: number; - unique: boolean; - }): Promise { - let sql = - ` - SELECT *` + - // TODO: Uncomment when we compile sql.js with vss - // `, (1 - vss_distance_l2(embedding, ?)) AS similarity` + - ` FROM memories - WHERE type = ? AND agentId = ? - AND roomId = ?`; - - if (params.unique) { - sql += " AND `unique` = 1"; - } - // TODO: Uncomment when we compile sql.js with vss - // sql += ` ORDER BY similarity DESC LIMIT ?`; - const stmt = this.db.prepare(sql); - stmt.bind([ - // JSON.stringify(params.embedding), - params.tableName, - params.agentId, - params.roomId, - // params.match_count, - ]); - const memories: (Memory & { similarity: number })[] = []; - while (stmt.step()) { - const memory = stmt.getAsObject() as unknown as Memory & { - similarity: number; - }; - memories.push({ - ...memory, - content: JSON.parse(memory.content as unknown as string), - }); - } - stmt.free(); - return memories; - } - - async searchMemoriesByEmbedding( - _embedding: number[], - params: { - agentId: UUID; - match_threshold?: number; - count?: number; - roomId?: UUID; - unique?: boolean; - tableName: string; - } - ): Promise { - let sql = - `SELECT *` + - // TODO: Uncomment when we compile sql.js with vss - // `, (1 - vss_distance_l2(embedding, ?)) AS similarity`+ - ` FROM memories - WHERE type = ? AND agentId = ?`; - - if (params.unique) { - sql += " AND `unique` = 1"; - } - if (params.roomId) { - sql += " AND roomId = ?"; - } - // TODO: Test this - if (params.agentId) { - sql += " AND userId = ?"; - } - // TODO: Uncomment when we compile sql.js with vss - // sql += ` ORDER BY similarity DESC`; - - if (params.count) { - sql += " LIMIT ?"; - } - - const stmt = this.db.prepare(sql); - const bindings = [ - // JSON.stringify(embedding), - params.tableName, - params.agentId, - ]; - if (params.roomId) { - bindings.push(params.roomId); - } - if (params.count) { - bindings.push(params.count.toString()); - } - - stmt.bind(bindings); - const memories: (Memory & { similarity: number })[] = []; - while (stmt.step()) { - const memory = stmt.getAsObject() as unknown as Memory & { - similarity: number; - }; - memories.push({ - ...memory, - content: JSON.parse(memory.content as unknown as string), - }); - } - stmt.free(); - return memories; - } - - async getCachedEmbeddings(opts: { - query_table_name: string; - query_threshold: number; - query_input: string; - query_field_name: string; - query_field_sub_name: string; - query_match_count: number; - }): Promise< - { - embedding: number[]; - levenshtein_score: number; - }[] - > { - const sql = - ` - SELECT * - FROM memories - WHERE type = ?` + - // `AND vss_search(${opts.query_field_name}, ?) - // ORDER BY vss_search(${opts.query_field_name}, ?) DESC` + - ` LIMIT ? - `; - const stmt = this.db.prepare(sql); - stmt.bind([ - opts.query_table_name, - // opts.query_input, - // opts.query_input, - opts.query_match_count, - ]); - const memories: Memory[] = []; - while (stmt.step()) { - const memory = stmt.getAsObject() as unknown as Memory; - memories.push(memory); - } - stmt.free(); - - return memories.map((memory) => ({ - ...memory, - createdAt: memory.createdAt ?? Date.now(), - embedding: JSON.parse(memory.embedding as unknown as string), - levenshtein_score: 0, - })); - } - - async updateGoalStatus(params: { - goalId: UUID; - status: GoalStatus; - }): Promise { - const sql = "UPDATE goals SET status = ? WHERE id = ?"; - const stmt = this.db.prepare(sql); - stmt.run([params.status, params.goalId]); - stmt.free(); - } - - async log(params: { - body: { [key: string]: unknown }; - userId: UUID; - roomId: UUID; - type: string; - }): Promise { - const sql = - "INSERT INTO logs (body, userId, roomId, type) VALUES (?, ?, ?, ?)"; - const stmt = this.db.prepare(sql); - stmt.run([ - JSON.stringify(params.body), - params.userId, - params.roomId, - params.type, - ]); - stmt.free(); - } - - async getMemories(params: { - roomId: UUID; - count?: number; - unique?: boolean; - tableName: string; - agentId?: UUID; - start?: number; - end?: number; - }): Promise { - if (!params.tableName) { - throw new Error("tableName is required"); - } - if (!params.roomId) { - throw new Error("roomId is required"); - } - let sql = `SELECT * FROM memories WHERE type = ? AND roomId = ?`; - - if (params.start) { - sql += ` AND createdAt >= ?`; - } - - if (params.end) { - sql += ` AND createdAt <= ?`; - } - - if (params.unique) { - sql += " AND `unique` = 1"; - } - - if (params.agentId) { - sql += " AND agentId = ?"; - } - - sql += " ORDER BY createdAt DESC"; - - if (params.count) { - sql += " LIMIT ?"; - } - - const stmt = this.db.prepare(sql); - stmt.bind([ - params.tableName, - params.roomId, - ...(params.start ? [params.start] : []), - ...(params.end ? [params.end] : []), - ...(params.agentId ? [params.agentId] : []), - ...(params.count ? [params.count] : []), - ]); - const memories: Memory[] = []; - while (stmt.step()) { - const memory = stmt.getAsObject() as unknown as Memory; - memories.push({ - ...memory, - content: JSON.parse(memory.content as unknown as string), - }); - } - stmt.free(); - return memories; - } - - async removeMemory(memoryId: UUID, tableName: string): Promise { - const sql = `DELETE FROM memories WHERE type = ? AND id = ?`; - const stmt = this.db.prepare(sql); - stmt.run([tableName, memoryId]); - stmt.free(); - } - - async removeAllMemories(roomId: UUID, tableName: string): Promise { - const sql = `DELETE FROM memories WHERE type = ? AND roomId = ?`; - const stmt = this.db.prepare(sql); - stmt.run([tableName, roomId]); - stmt.free(); - } - - async countMemories( - roomId: UUID, - unique = true, - tableName = "" - ): Promise { - if (!tableName) { - throw new Error("tableName is required"); - } - - let sql = `SELECT COUNT(*) as count FROM memories WHERE type = ? AND roomId = ?`; - if (unique) { - sql += " AND `unique` = 1"; - } - - const stmt = this.db.prepare(sql); - stmt.bind([tableName, roomId]); - - let count = 0; - if (stmt.step()) { - const result = stmt.getAsObject() as { count: number }; - count = result.count; - } - - stmt.free(); - return count; - } - - async getGoals(params: { - roomId: UUID; - userId?: UUID | null; - onlyInProgress?: boolean; - count?: number; - }): Promise { - let sql = "SELECT * FROM goals WHERE roomId = ?"; - const bindings: (string | number)[] = [params.roomId]; - - if (params.userId) { - sql += " AND userId = ?"; - bindings.push(params.userId); - } - - if (params.onlyInProgress) { - sql += " AND status = 'IN_PROGRESS'"; - } - - if (params.count) { - sql += " LIMIT ?"; - bindings.push(params.count.toString()); - } - - const stmt = this.db.prepare(sql); - stmt.bind(bindings); - const goals: Goal[] = []; - while (stmt.step()) { - const goal = stmt.getAsObject() as unknown as Goal; - goals.push({ - ...goal, - objectives: - typeof goal.objectives === "string" - ? JSON.parse(goal.objectives) - : goal.objectives, - }); - } - stmt.free(); - return goals; - } - - async updateGoal(goal: Goal): Promise { - const sql = - "UPDATE goals SET name = ?, status = ?, objectives = ? WHERE id = ?"; - const stmt = this.db.prepare(sql); - stmt.run([ - goal.name, - goal.status, - JSON.stringify(goal.objectives), - goal.id as string, - ]); - stmt.free(); - } - - async createGoal(goal: Goal): Promise { - const sql = - "INSERT INTO goals (id, roomId, userId, name, status, objectives) VALUES (?, ?, ?, ?, ?, ?)"; - const stmt = this.db.prepare(sql); - stmt.run([ - goal.id ?? v4(), - goal.roomId, - goal.userId, - goal.name, - goal.status, - JSON.stringify(goal.objectives), - ]); - stmt.free(); - } - - async removeGoal(goalId: UUID): Promise { - const sql = "DELETE FROM goals WHERE id = ?"; - const stmt = this.db.prepare(sql); - stmt.run([goalId]); - stmt.free(); - } - - async removeAllGoals(roomId: UUID): Promise { - const sql = "DELETE FROM goals WHERE roomId = ?"; - const stmt = this.db.prepare(sql); - stmt.run([roomId]); - stmt.free(); - } - - async createRoom(roomId?: UUID): Promise { - roomId = roomId || (v4() as UUID); - try { - const sql = "INSERT INTO rooms (id) VALUES (?)"; - const stmt = this.db.prepare(sql); - stmt.run([roomId ?? (v4() as UUID)]); - stmt.free(); - } catch (error) { - elizaLogger.error("Error creating room", error); - } - return roomId as UUID; - } - - async removeRoom(roomId: UUID): Promise { - const sql = "DELETE FROM rooms WHERE id = ?"; - const stmt = this.db.prepare(sql); - stmt.run([roomId]); - stmt.free(); - } - - async getRoomsForParticipant(userId: UUID): Promise { - const sql = "SELECT roomId FROM participants WHERE userId = ?"; - const stmt = this.db.prepare(sql); - stmt.bind([userId]); - const rows: { roomId: string }[] = []; - while (stmt.step()) { - const row = stmt.getAsObject() as unknown as { roomId: string }; - rows.push(row); - } - stmt.free(); - return rows.map((row) => row.roomId as UUID); - } - - async getRoomsForParticipants(userIds: UUID[]): Promise { - // Assuming userIds is an array of UUID strings, prepare a list of placeholders - const placeholders = userIds.map(() => "?").join(", "); - // Construct the SQL query with the correct number of placeholders - const sql = `SELECT roomId FROM participants WHERE userId IN (${placeholders})`; - const stmt = this.db.prepare(sql); - // Execute the query with the userIds array spread into arguments - stmt.bind(userIds); - const rows: { roomId: string }[] = []; - while (stmt.step()) { - const row = stmt.getAsObject() as unknown as { roomId: string }; - rows.push(row); - } - stmt.free(); - // Map and return the roomId values as UUIDs - return rows.map((row) => row.roomId as UUID); - } - - async addParticipant(userId: UUID, roomId: UUID): Promise { - try { - const sql = - "INSERT INTO participants (id, userId, roomId) VALUES (?, ?, ?)"; - const stmt = this.db.prepare(sql); - stmt.run([v4(), userId, roomId]); - stmt.free(); - return true; - } catch (error) { - elizaLogger.error("Error adding participant", error); - return false; - } - } - - async removeParticipant(userId: UUID, roomId: UUID): Promise { - try { - const sql = - "DELETE FROM participants WHERE userId = ? AND roomId = ?"; - const stmt = this.db.prepare(sql); - stmt.run([userId, roomId]); - stmt.free(); - return true; - } catch (error) { - elizaLogger.error("Error removing participant", error); - return false; - } - } - - async createRelationship(params: { - userA: UUID; - userB: UUID; - }): Promise { - if (!params.userA || !params.userB) { - throw new Error("userA and userB are required"); - } - const sql = - "INSERT INTO relationships (id, userA, userB, userId) VALUES (?, ?, ?, ?)"; - const stmt = this.db.prepare(sql); - stmt.run([v4(), params.userA, params.userB, params.userA]); - stmt.free(); - return true; - } - - async getRelationship(params: { - userA: UUID; - userB: UUID; - }): Promise { - let relationship: Relationship | null = null; - try { - const sql = - "SELECT * FROM relationships WHERE (userA = ? AND userB = ?) OR (userA = ? AND userB = ?)"; - const stmt = this.db.prepare(sql); - stmt.bind([params.userA, params.userB, params.userB, params.userA]); - - if (stmt.step()) { - relationship = stmt.getAsObject() as unknown as Relationship; - } - stmt.free(); - } catch (error) { - elizaLogger.error("Error fetching relationship", error); - } - return relationship; - } - - async getRelationships(params: { userId: UUID }): Promise { - const sql = - "SELECT * FROM relationships WHERE (userA = ? OR userB = ?)"; - const stmt = this.db.prepare(sql); - stmt.bind([params.userId, params.userId]); - const relationships: Relationship[] = []; - while (stmt.step()) { - const relationship = stmt.getAsObject() as unknown as Relationship; - relationships.push(relationship); - } - stmt.free(); - return relationships; - } - - async getCache(params: { - key: string; - agentId: UUID; - }): Promise { - const sql = "SELECT value FROM cache WHERE (key = ? AND agentId = ?)"; - const stmt = this.db.prepare(sql); - - stmt.bind([params.key, params.agentId]); - - let cached: { value: string } | undefined = undefined; - if (stmt.step()) { - cached = stmt.getAsObject() as unknown as { value: string }; - } - stmt.free(); - - return cached?.value ?? undefined; - } - - async setCache(params: { - key: string; - agentId: UUID; - value: string; - }): Promise { - const sql = - "INSERT OR REPLACE INTO cache (key, agentId, value, createdAt) VALUES (?, ?, ?, CURRENT_TIMESTAMP)"; - const stmt = this.db.prepare(sql); - - stmt.run([params.key, params.agentId, params.value]); - stmt.free(); - - return true; - } - - async deleteCache(params: { - key: string; - agentId: UUID; - }): Promise { - try { - const sql = "DELETE FROM cache WHERE key = ? AND agentId = ?"; - const stmt = this.db.prepare(sql); - stmt.run([params.key, params.agentId]); - stmt.free(); - return true; - } catch (error) { - elizaLogger.error("Error removing cache", error); - return false; - } - } - - async getKnowledge(params: { - id?: UUID; - agentId: UUID; - limit?: number; - query?: string; - }): Promise { - let sql = `SELECT * FROM knowledge WHERE ("agentId" = ? OR "isShared" = 1)`; - const queryParams: any[] = [params.agentId]; - - if (params.id) { - sql += ` AND id = ?`; - queryParams.push(params.id); - } - - if (params.limit) { - sql += ` LIMIT ?`; - queryParams.push(params.limit); - } - - const stmt = this.db.prepare(sql); - stmt.bind(queryParams); - const results: RAGKnowledgeItem[] = []; - - while (stmt.step()) { - const row = stmt.getAsObject() as any; - results.push({ - id: row.id, - agentId: row.agentId, - content: JSON.parse(row.content), - embedding: row.embedding - ? new Float32Array(row.embedding) - : undefined, // Convert Uint8Array back to Float32Array - createdAt: row.createdAt, - }); - } - stmt.free(); - return results; - } - - async searchKnowledge(params: { - agentId: UUID; - embedding: Float32Array; - match_threshold: number; - match_count: number; - searchText?: string; - }): Promise { - const cacheKey = `embedding_${params.agentId}_${params.searchText}`; - const cachedResult = await this.getCache({ - key: cacheKey, - agentId: params.agentId, - }); - - if (cachedResult) { - return JSON.parse(cachedResult); - } - - const sql = ` - WITH vector_scores AS ( - SELECT id, - 1 / (1 + vec_distance_L2(embedding, ?)) as vector_score - FROM knowledge - WHERE ("agentId" IS NULL AND "isShared" = 1) OR "agentId" = ? - AND embedding IS NOT NULL - ), - keyword_matches AS ( - SELECT id, - CASE - WHEN json_extract(content, '$.text') LIKE ? THEN 3.0 - ELSE 1.0 - END * - CASE - WHEN json_extract(content, '$.metadata.isChunk') = 1 THEN 1.5 - WHEN json_extract(content, '$.metadata.isMain') = 1 THEN 1.2 - ELSE 1.0 - END as keyword_score - FROM knowledge - WHERE ("agentId" IS NULL AND "isShared" = 1) OR "agentId" = ? - ) - SELECT k.*, - v.vector_score, - kw.keyword_score, - (v.vector_score * kw.keyword_score) as combined_score - FROM knowledge k - JOIN vector_scores v ON k.id = v.id - LEFT JOIN keyword_matches kw ON k.id = kw.id - WHERE (k.agentId IS NULL AND k.isShared = 1) OR k.agentId = ? - AND ( - v.vector_score >= ? -- Using match_threshold parameter - OR (kw.keyword_score > 1.0 AND v.vector_score >= 0.3) - ) - ORDER BY combined_score DESC - LIMIT ? - `; - - const stmt = this.db.prepare(sql); - stmt.bind([ - new Uint8Array(params.embedding.buffer), - params.agentId, - `%${params.searchText || ""}%`, - params.agentId, - params.agentId, - params.match_threshold, - params.match_count, - ]); - - const results: RAGKnowledgeItem[] = []; - while (stmt.step()) { - const row = stmt.getAsObject() as any; - results.push({ - id: row.id, - agentId: row.agentId, - content: JSON.parse(row.content), - embedding: row.embedding - ? new Float32Array(row.embedding) - : undefined, - createdAt: row.createdAt, - similarity: row.keyword_score, - }); - } - stmt.free(); - - await this.setCache({ - key: cacheKey, - agentId: params.agentId, - value: JSON.stringify(results), - }); - - return results; - } - - async createKnowledge(knowledge: RAGKnowledgeItem): Promise { - try { - const sql = ` - INSERT INTO knowledge ( - id, "agentId", content, embedding, "createdAt", - "isMain", "originalId", "chunkIndex", "isShared" - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) - `; - - const stmt = this.db.prepare(sql); - const metadata = knowledge.content.metadata || {}; - - stmt.run([ - knowledge.id, - metadata.isShared ? null : knowledge.agentId, - JSON.stringify(knowledge.content), - knowledge.embedding - ? new Uint8Array(knowledge.embedding.buffer) - : null, - knowledge.createdAt || Date.now(), - metadata.isMain ? 1 : 0, - metadata.originalId || null, - metadata.chunkIndex || null, - metadata.isShared ? 1 : 0, - ]); - stmt.free(); - } catch (error: any) { - const isShared = knowledge.content.metadata?.isShared; - const isPrimaryKeyError = - error?.code === "SQLITE_CONSTRAINT_PRIMARYKEY"; - - if (isShared && isPrimaryKeyError) { - elizaLogger.info( - `Shared knowledge ${knowledge.id} already exists, skipping` - ); - return; - } else if ( - !isShared && - !error.message?.includes("SQLITE_CONSTRAINT_PRIMARYKEY") - ) { - elizaLogger.error(`Error creating knowledge ${knowledge.id}:`, { - error, - embeddingLength: knowledge.embedding?.length, - content: knowledge.content, - }); - throw error; - } - - elizaLogger.debug( - `Knowledge ${knowledge.id} already exists, skipping` - ); - } - } - - async removeKnowledge(id: UUID): Promise { - const sql = `DELETE FROM knowledge WHERE id = ?`; - const stmt = this.db.prepare(sql); - stmt.run([id]); - stmt.free(); - } - - async clearKnowledge(agentId: UUID, shared?: boolean): Promise { - const sql = shared - ? `DELETE FROM knowledge WHERE ("agentId" = ? OR "isShared" = 1)` - : `DELETE FROM knowledge WHERE "agentId" = ?`; - - const stmt = this.db.prepare(sql); - stmt.run([agentId]); - stmt.free(); - } -} diff --git a/packages/adapter-sqljs/src/sqliteTables.ts b/packages/adapter-sqljs/src/sqliteTables.ts deleted file mode 100644 index 87fc26743faad..0000000000000 --- a/packages/adapter-sqljs/src/sqliteTables.ts +++ /dev/null @@ -1,130 +0,0 @@ -export const sqliteTables = ` -PRAGMA foreign_keys=OFF; -BEGIN TRANSACTION; - --- Table: accounts -CREATE TABLE IF NOT EXISTS "accounts" ( - "id" TEXT PRIMARY KEY, - "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - "name" TEXT, - "username" TEXT, - "email" TEXT NOT NULL, - "avatarUrl" TEXT, - "details" TEXT DEFAULT '{}' CHECK(json_valid("details")) -- Ensuring details is a valid JSON field -); - --- Table: memories -CREATE TABLE IF NOT EXISTS "memories" ( - "id" TEXT PRIMARY KEY, - "type" TEXT NOT NULL, - "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - "content" TEXT NOT NULL, - "embedding" BLOB NOT NULL, -- TODO: EMBEDDING ARRAY, CONVERT TO BEST FORMAT FOR SQLITE-VSS (JSON?) - "userId" TEXT, - "roomId" TEXT, - "agentId" TEXT, - "unique" INTEGER DEFAULT 1 NOT NULL, - FOREIGN KEY ("userId") REFERENCES "accounts"("id"), - FOREIGN KEY ("roomId") REFERENCES "rooms"("id"), - FOREIGN KEY ("agentId") REFERENCES "accounts"("id") -); - --- Table: goals -CREATE TABLE IF NOT EXISTS "goals" ( - "id" TEXT PRIMARY KEY, - "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - "userId" TEXT, - "name" TEXT, - "status" TEXT, - "description" TEXT, - "roomId" TEXT, - "objectives" TEXT DEFAULT '[]' NOT NULL CHECK(json_valid("objectives")) -- Ensuring objectives is a valid JSON array -); - --- Table: logs -CREATE TABLE IF NOT EXISTS "logs" ( - "id" TEXT PRIMARY KEY, - "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - "userId" TEXT NOT NULL, - "body" TEXT NOT NULL, - "type" TEXT NOT NULL, - "roomId" TEXT NOT NULL -); - --- Table: participants -CREATE TABLE IF NOT EXISTS "participants" ( - "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - "userId" TEXT, - "roomId" TEXT, - "userState" TEXT, - "id" TEXT PRIMARY KEY, - "last_message_read" TEXT, - FOREIGN KEY ("userId") REFERENCES "accounts"("id"), - FOREIGN KEY ("roomId") REFERENCES "rooms"("id") -); - --- Table: relationships -CREATE TABLE IF NOT EXISTS "relationships" ( - "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - "userA" TEXT NOT NULL, - "userB" TEXT NOT NULL, - "status" "text", - "id" TEXT PRIMARY KEY, - "userId" TEXT NOT NULL, - FOREIGN KEY ("userA") REFERENCES "accounts"("id"), - FOREIGN KEY ("userB") REFERENCES "accounts"("id"), - FOREIGN KEY ("userId") REFERENCES "accounts"("id") -); - --- Table: rooms -CREATE TABLE IF NOT EXISTS "rooms" ( - "id" TEXT PRIMARY KEY, - "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP -); - --- Table: cache -CREATE TABLE IF NOT EXISTS "cache" ( - "key" TEXT NOT NULL, - "agentId" TEXT NOT NULL, - "value" TEXT DEFAULT '{}' CHECK(json_valid("value")), - "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - "expiresAt" TIMESTAMP, - PRIMARY KEY ("key", "agentId") -); - --- Table: knowledge -CREATE TABLE IF NOT EXISTS "knowledge" ( - "id" TEXT PRIMARY KEY, - "agentId" TEXT, - "content" TEXT NOT NULL CHECK(json_valid("content")), - "embedding" BLOB, - "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - "isMain" INTEGER DEFAULT 0, - "originalId" TEXT, - "chunkIndex" INTEGER, - "isShared" INTEGER DEFAULT 0, - FOREIGN KEY ("agentId") REFERENCES "accounts"("id"), - FOREIGN KEY ("originalId") REFERENCES "knowledge"("id"), - CHECK((isShared = 1 AND agentId IS NULL) OR (isShared = 0 AND agentId IS NOT NULL)) -); - --- Index: relationships_id_key -CREATE UNIQUE INDEX IF NOT EXISTS "relationships_id_key" ON "relationships" ("id"); - --- Index: memories_id_key -CREATE UNIQUE INDEX IF NOT EXISTS "memories_id_key" ON "memories" ("id"); - --- Index: participants_id_key -CREATE UNIQUE INDEX IF NOT EXISTS "participants_id_key" ON "participants" ("id"); - --- Index: knowledge -CREATE INDEX IF NOT EXISTS "knowledge_agent_key" ON "knowledge" ("agentId"); -CREATE INDEX IF NOT EXISTS "knowledge_agent_main_key" ON "knowledge" ("agentId", "isMain"); -CREATE INDEX IF NOT EXISTS "knowledge_original_key" ON "knowledge" ("originalId"); -CREATE INDEX IF NOT EXISTS "knowledge_content_key" ON "knowledge" - ((json_extract(content, '$.text'))) - WHERE json_extract(content, '$.text') IS NOT NULL; -CREATE INDEX IF NOT EXISTS "knowledge_created_key" ON "knowledge" ("agentId", "createdAt"); -CREATE INDEX IF NOT EXISTS "knowledge_shared_key" ON "knowledge" ("isShared"); - -COMMIT;`; \ No newline at end of file diff --git a/packages/adapter-sqljs/src/types.ts b/packages/adapter-sqljs/src/types.ts deleted file mode 100644 index f457ecaed2a87..0000000000000 --- a/packages/adapter-sqljs/src/types.ts +++ /dev/null @@ -1,209 +0,0 @@ -type SqlValue = number | string | Uint8Array | null; -type ParamsObject = Record; -type ParamsCallback = (obj: ParamsObject) => void; -type BindParams = SqlValue[] | ParamsObject | null; -interface QueryExecResult { - columns: string[]; - values: SqlValue[][]; -} - -declare class StatementIterator - implements Iterator, Iterable -{ - [Symbol.iterator](): Iterator; - getRemainingSQL(): string; - next(): StatementIteratorResult; -} -interface StatementIteratorResult { - /** `true` if there are no more available statements */ - done: boolean; - /** the next available Statement (as returned by `Database.prepare`) */ - value: Statement; -} -declare class Statement { - /** - * Bind values to the parameters, after having reseted the statement. If - * values is null, do nothing and return true. - * - * SQL statements can have parameters, named '?', '?NNN', ':VVV', - * '@VVV', '$VVV', where NNN is a number and VVV a string. This function - * binds these parameters to the given values. - * - * Warning: ':', '@', and '$' are included in the parameters names - * - * ### Value types - * - * |Javascript type|SQLite type| - * |-|-| - * |number|REAL, INTEGER| - * |boolean|INTEGER| - * |string|TEXT| - * |Array, Uint8Array|BLOB| - * |null|NULL| - * @see [https://sql.js.org/documentation/Statement.html#["bind"]](https://sql.js.org/documentation/Statement.html#%5B%22bind%22%5D) - * - * @param values The values to bind - */ - bind(values?: BindParams): boolean; - - /** - * Free the memory used by the statement - * @see [https://sql.js.org/documentation/Statement.html#["free"]](https://sql.js.org/documentation/Statement.html#%5B%22free%22%5D) - */ - free(): boolean; - - /** - * Free the memory allocated during parameter binding - * @see [https://sql.js.org/documentation/Statement.html#["freemem"]](https://sql.js.org/documentation/Statement.html#%5B%22freemem%22%5D) - */ - freemem(): void; - - /** - * Get one row of results of a statement. If the first parameter is not - * provided, step must have been called before. - * @see [https://sql.js.org/documentation/Statement.html#["get"]](https://sql.js.org/documentation/Statement.html#%5B%22get%22%5D) - * - * @param params If set, the values will be bound to the statement - * before it is executed - */ - get(params?: BindParams): SqlValue[]; - - /** - * Get one row of result as a javascript object, associating column - * names with their value in the current row - * @see [https://sql.js.org/documentation/Statement.html#["getAsObject"]](https://sql.js.org/documentation/Statement.html#%5B%22getAsObject%22%5D) - * - * @param params If set, the values will be bound to the statement, and - * it will be executed - */ - getAsObject(params?: BindParams): ParamsObject; - - /** - * Get the list of column names of a row of result of a statement. - * @see [https://sql.js.org/documentation/Statement.html#["getColumnNames"]](https://sql.js.org/documentation/Statement.html#%5B%22getColumnNames%22%5D) - */ - getColumnNames(): string[]; - - /** - * Get the SQLite's normalized version of the SQL string used in - * preparing this statement. The meaning of "normalized" is not - * well-defined: see - * [the SQLite documentation](https://sqlite.org/c3ref/expanded_sql.html). - * @see [https://sql.js.org/documentation/Statement.html#["getNormalizedSQL"]](https://sql.js.org/documentation/Statement.html#%5B%22getNormalizedSQL%22%5D) - */ - getNormalizedSQL(): string; - - /** - * Get the SQL string used in preparing this statement. - * @see [https://sql.js.org/documentation/Statement.html#["getSQL"]](https://sql.js.org/documentation/Statement.html#%5B%22getSQL%22%5D) - */ - getSQL(): string; - - /** - * Reset a statement, so that its parameters can be bound to new - * values. It also clears all previous bindings, freeing the memory used - * by bound parameters. - * @see [https://sql.js.org/documentation/Statement.html#["reset"]](https://sql.js.org/documentation/Statement.html#%5B%22reset%22%5D) - */ - reset(): void; - - /** - * Shorthand for bind + step + reset Bind the values, execute the - * statement, ignoring the rows it returns, and resets it - * @param values Value to bind to the statement - */ - run(values?: BindParams): void; - - /** - * Execute the statement, fetching the next line of result, that can - * be retrieved with `Statement.get`. - * @see [https://sql.js.org/documentation/Statement.html#["step"]](https://sql.js.org/documentation/Statement.html#%5B%22step%22%5D) - */ - step(): boolean; -} -export declare class Database { - constructor(data?: ArrayLike | Buffer | null); - - close(): void; - - create_function(name: string, func: (...args: any[]) => any): Database; - - each( - sql: string, - params: BindParams, - callback: ParamsCallback, - done: () => void - ): Database; - each(sql: string, callback: ParamsCallback, done: () => void): Database; // eslint-disable-line - - /** - * Execute an SQL query, and returns the result. - * - * This is a wrapper against `Database.prepare`, `Statement.bind`, `Statement.step`, `Statement.get`, and `Statement.free`. - * - * The result is an array of result elements. There are as many result elements as the number of statements in your sql string (statements are separated by a semicolon) - * @see [https://sql.js.org/documentation/Database.html#["exec"]](https://sql.js.org/documentation/Database.html#%5B%22exec%22%5D) - * - * @param sql a string containing some SQL text to execute - * @param params When the SQL statement contains placeholders, you can - * pass them in here. They will be bound to the statement before it is - * executed. If you use the params argument as an array, you **cannot** - * provide an sql string that contains several statements (separated by - * `;`). This limitation does not apply to params as an object. - */ - exec(sql: string, params?: BindParams): QueryExecResult[]; - - /** - * Exports the contents of the database to a binary array - * @see [https://sql.js.org/documentation/Database.html#["export"]](https://sql.js.org/documentation/Database.html#%5B%22export%22%5D) - */ - export(): Uint8Array; - - /** - * Returns the number of changed rows (modified, inserted or deleted) by - * the latest completed `INSERT`, `UPDATE` or `DELETE` statement on the - * database. Executing any other type of SQL statement does not modify - * the value returned by this function. - * @see [https://sql.js.org/documentation/Database.html#["getRowsModified"]](https://sql.js.org/documentation/Database.html#%5B%22getRowsModified%22%5D) - */ - getRowsModified(): number; - - /** - * Analyze a result code, return null if no error occurred, and throw an - * error with a descriptive message otherwise - * @see [https://sql.js.org/documentation/Database.html#["handleError"]](https://sql.js.org/documentation/Database.html#%5B%22handleError%22%5D) - */ - handleError(): null | never; - - /** - * Iterate over multiple SQL statements in a SQL string. This function - * returns an iterator over Statement objects. You can use a `for..of` - * loop to execute the returned statements one by one. - * @see [https://sql.js.org/documentation/Database.html#["iterateStatements"]](https://sql.js.org/documentation/Database.html#%5B%22iterateStatements%22%5D) - * - * @param sql a string of SQL that can contain multiple statements - */ - iterateStatements(sql: string): StatementIterator; - - /** - * Prepare an SQL statement - * @see [https://sql.js.org/documentation/Database.html#["prepare"]](https://sql.js.org/documentation/Database.html#%5B%22prepare%22%5D) - * - * @param sql a string of SQL, that can contain placeholders (`?`, `:VVV`, `:AAA`, `@AAA`) - * @param params values to bind to placeholders - */ - prepare(sql: string, params?: BindParams): Statement; - - /** - * Execute an SQL query, ignoring the rows it returns. - * @see [https://sql.js.org/documentation/Database.html#["run"]](https://sql.js.org/documentation/Database.html#%5B%22run%22%5D) - * - * @param sql a string containing some SQL text to execute - * @param params When the SQL statement contains placeholders, you can - * pass them in here. They will be bound to the statement before it is - * executed. If you use the params argument as an array, you **cannot** - * provide an sql string that contains several statements (separated by - * `;`). This limitation does not apply to params as an object. - */ - run(sql: string, params?: BindParams): Database; -} diff --git a/packages/adapter-sqljs/tsconfig.json b/packages/adapter-sqljs/tsconfig.json deleted file mode 100644 index 8af2c29c40021..0000000000000 --- a/packages/adapter-sqljs/tsconfig.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "strict": true - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/adapter-sqljs/tsup.config.ts b/packages/adapter-sqljs/tsup.config.ts deleted file mode 100644 index e42bf4efeae22..0000000000000 --- a/packages/adapter-sqljs/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/adapter-supabase/.npmignore b/packages/adapter-supabase/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/adapter-supabase/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/adapter-supabase/__tests__/supabase-adapter.test.ts b/packages/adapter-supabase/__tests__/supabase-adapter.test.ts deleted file mode 100644 index b12962df8b012..0000000000000 --- a/packages/adapter-supabase/__tests__/supabase-adapter.test.ts +++ /dev/null @@ -1,239 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { SupabaseDatabaseAdapter } from '../src'; -import { type UUID, elizaLogger } from '@elizaos/core'; -import { createClient } from '@supabase/supabase-js'; - -// Mock the elizaLogger -vi.mock('@elizaos/core', async () => { - const actual = await vi.importActual('@elizaos/core'); - return { - ...actual as any, - elizaLogger: { - error: vi.fn() - } - }; -}); - -// Mock Supabase client -vi.mock('@supabase/supabase-js', () => ({ - createClient: vi.fn(() => ({ - from: vi.fn(() => ({ - select: vi.fn(() => ({ - eq: vi.fn(() => ({ - maybeSingle: vi.fn(() => Promise.resolve({ data: { id: 'test-room-id' }, error: null })), - })), - })), - })), - })), -})); - -describe('SupabaseDatabaseAdapter', () => { - let adapter: SupabaseDatabaseAdapter; - const mockSupabaseUrl = 'https://test.supabase.co'; - const mockSupabaseKey = 'test-key'; - const mockSupabase = { - from: vi.fn(() => mockSupabase), - select: vi.fn(() => mockSupabase), - update: vi.fn(() => mockSupabase), - eq: vi.fn(() => mockSupabase), - maybeSingle: vi.fn(), - single: vi.fn(), - }; - - beforeEach(() => { - vi.clearAllMocks(); - adapter = new SupabaseDatabaseAdapter(mockSupabaseUrl, mockSupabaseKey); - // @ts-ignore - we're mocking the implementation - adapter.supabase = mockSupabase; - - // Reset all mock implementations to return mockSupabase for chaining - mockSupabase.from.mockReturnValue(mockSupabase); - mockSupabase.select.mockReturnValue(mockSupabase); - mockSupabase.update.mockReturnValue(mockSupabase); - mockSupabase.eq.mockReturnValue(mockSupabase); - }); - - describe('getRoom', () => { - beforeEach(() => { - mockSupabase.eq.mockReturnValue(mockSupabase); - }); - - it('should return room ID when room exists', async () => { - const roomId = 'test-room-id' as UUID; - mockSupabase.maybeSingle.mockResolvedValueOnce({ - data: { id: roomId }, - error: null - }); - - const result = await adapter.getRoom(roomId); - - expect(mockSupabase.from).toHaveBeenCalledWith('rooms'); - expect(mockSupabase.select).toHaveBeenCalledWith('id'); - expect(mockSupabase.eq).toHaveBeenCalledWith('id', roomId); - expect(result).toBe(roomId); - }); - - it('should return null when room does not exist', async () => { - mockSupabase.maybeSingle.mockResolvedValueOnce({ - data: null, - error: null - }); - - const roomId = 'non-existent-room' as UUID; - const result = await adapter.getRoom(roomId); - - expect(result).toBeNull(); - }); - - it('should return null and log error when there is a database error', async () => { - const error = { message: 'Database error' }; - mockSupabase.maybeSingle.mockResolvedValueOnce({ - data: null, - error - }); - - const roomId = 'error-room' as UUID; - const result = await adapter.getRoom(roomId); - - expect(result).toBeNull(); - expect(elizaLogger.error).toHaveBeenCalledWith(`Error getting room: ${error.message}`); - }); - }); - - describe('getParticipantsForAccount', () => { - const mockParticipants = [ - { id: 'participant-1', userId: 'user-1' }, - { id: 'participant-2', userId: 'user-1' } - ]; - - beforeEach(() => { - mockSupabase.eq.mockReturnValue(mockSupabase); - }); - - it('should return participants when they exist', async () => { - mockSupabase.eq.mockResolvedValueOnce({ - data: mockParticipants, - error: null - }); - - const userId = 'user-1' as UUID; - const result = await adapter.getParticipantsForAccount(userId); - - expect(mockSupabase.from).toHaveBeenCalledWith('participants'); - expect(mockSupabase.select).toHaveBeenCalledWith('*'); - expect(mockSupabase.eq).toHaveBeenCalledWith('userId', userId); - expect(result).toEqual(mockParticipants); - }); - - it('should throw error when database error occurs', async () => { - const error = { message: 'Database error' }; - mockSupabase.eq.mockResolvedValueOnce({ - data: null, - error - }); - - const userId = 'error-user' as UUID; - - await expect(adapter.getParticipantsForAccount(userId)) - .rejects - .toThrow(`Error getting participants for account: ${error.message}`); - }); - }); - - describe('getParticipantUserState', () => { - const roomId = 'test-room' as UUID; - const userId = 'test-user' as UUID; - - beforeEach(() => { - mockSupabase.eq - .mockReturnValueOnce(mockSupabase) // First eq call - .mockReturnValue(mockSupabase); // Second eq call - }); - - it('should return user state when it exists', async () => { - mockSupabase.single.mockResolvedValueOnce({ - data: { userState: 'FOLLOWED' }, - error: null - }); - - const result = await adapter.getParticipantUserState(roomId, userId); - - expect(mockSupabase.from).toHaveBeenCalledWith('participants'); - expect(mockSupabase.select).toHaveBeenCalledWith('userState'); - expect(mockSupabase.eq).toHaveBeenNthCalledWith(1, 'roomId', roomId); - expect(mockSupabase.eq).toHaveBeenNthCalledWith(2, 'userId', userId); - expect(result).toBe('FOLLOWED'); - }); - - it('should return null when user state does not exist', async () => { - mockSupabase.single.mockResolvedValueOnce({ - data: { userState: null }, - error: null - }); - - const result = await adapter.getParticipantUserState(roomId, userId); - - expect(result).toBeNull(); - }); - - it('should return null and log error when database error occurs', async () => { - const error = { message: 'Database error' }; - mockSupabase.single.mockResolvedValueOnce({ - data: null, - error - }); - - const result = await adapter.getParticipantUserState(roomId, userId); - - expect(result).toBeNull(); - expect(elizaLogger.error).toHaveBeenCalledWith('Error getting participant user state:', error); - }); - }); - - describe('setParticipantUserState', () => { - const roomId = 'test-room' as UUID; - const userId = 'test-user' as UUID; - let updateResult: { error: null | { message: string } }; - - beforeEach(() => { - updateResult = { error: null }; - // Set up the chain of mock returns - mockSupabase.from.mockReturnValue(mockSupabase); - mockSupabase.update.mockReturnValue(mockSupabase); - // Make eq return mockSupabase for the first call (roomId) - // and the final result for the second call (userId) - mockSupabase.eq - .mockReturnValueOnce(mockSupabase) - .mockImplementationOnce(() => Promise.resolve(updateResult)); - }); - - it('should successfully update user state', async () => { - await adapter.setParticipantUserState(roomId, userId, 'MUTED'); - - expect(mockSupabase.from).toHaveBeenCalledWith('participants'); - expect(mockSupabase.update).toHaveBeenCalledWith({ userState: 'MUTED' }); - expect(mockSupabase.eq).toHaveBeenNthCalledWith(1, 'roomId', roomId); - expect(mockSupabase.eq).toHaveBeenNthCalledWith(2, 'userId', userId); - }); - - it('should throw error and log when database error occurs', async () => { - const error = { message: 'Database error' }; - updateResult.error = error; - - await expect(adapter.setParticipantUserState(roomId, userId, 'FOLLOWED')) - .rejects - .toThrow('Failed to set participant user state'); - - expect(elizaLogger.error).toHaveBeenCalledWith('Error setting participant user state:', error); - }); - - it('should handle null state', async () => { - await adapter.setParticipantUserState(roomId, userId, null); - - expect(mockSupabase.from).toHaveBeenCalledWith('participants'); - expect(mockSupabase.update).toHaveBeenCalledWith({ userState: null }); - expect(mockSupabase.eq).toHaveBeenNthCalledWith(1, 'roomId', roomId); - expect(mockSupabase.eq).toHaveBeenNthCalledWith(2, 'userId', userId); - }); - }); -}); diff --git a/packages/adapter-supabase/config.toml b/packages/adapter-supabase/config.toml deleted file mode 100644 index c1f016d4a40f8..0000000000000 --- a/packages/adapter-supabase/config.toml +++ /dev/null @@ -1,159 +0,0 @@ -# A string used to distinguish different Supabase projects on the same host. Defaults to the -# working directory name when running `supabase init`. -project_id = "eliza" - -[api] -enabled = true -# Port to use for the API URL. -port = 54321 -# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API -# endpoints. public and storage are always included. -schemas = ["public", "storage", "graphql_public"] -# Extra schemas to add to the search_path of every request. public is always included. -extra_search_path = ["public", "extensions"] -# The maximum number of rows returns from a view, table, or stored procedure. Limits payload size -# for accidental or malicious requests. -max_rows = 1000 - -[db] -# Port to use for the local database URL. -port = 54322 -# Port used by db diff command to initialize the shadow database. -shadow_port = 54320 -# The database major version to use. This has to be the same as your remote database's. Run `SHOW -# server_version;` on the remote database to check. -major_version = 15 - -[db.pooler] -enabled = false -# Port to use for the local connection pooler. -port = 54329 -# Specifies when a server connection can be reused by other clients. -# Configure one of the supported pooler modes: `transaction`, `session`. -pool_mode = "transaction" -# How many server connections to allow per user/database pair. -default_pool_size = 20 -# Maximum number of client connections allowed. -max_client_conn = 100 - -[realtime] -enabled = true -# Bind realtime via either IPv4 or IPv6. (default: IPv6) -# ip_version = "IPv6" -# The maximum length in bytes of HTTP request headers. (default: 4096) -# max_header_length = 4096 - -[studio] -enabled = true -# Port to use for Supabase Studio. -port = 54323 -# External URL of the API server that frontend connects to. -api_url = "http://127.0.0.1" - -# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they -# are monitored, and you can view the emails that would have been sent from the web interface. -[inbucket] -enabled = true -# Port to use for the email testing server web interface. -port = 54324 -# Uncomment to expose additional ports for testing user applications that send emails. -# smtp_port = 54325 -# pop3_port = 54326 - -[storage] -enabled = true -# The maximum file size allowed (e.g. "5MB", "500KB"). -file_size_limit = "50MiB" - -[auth] -enabled = true -# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used -# in emails. -site_url = "http://127.0.0.1:3000" -# A list of *exact* URLs that auth providers are permitted to redirect to post authentication. -additional_redirect_urls = ["https://127.0.0.1:3000"] -# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week). -jwt_expiry = 3600 -# If disabled, the refresh token will never expire. -enable_refresh_token_rotation = true -# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds. -# Requires enable_refresh_token_rotation = true. -refresh_token_reuse_interval = 10 -# Allow/disallow new user signups to your project. -enable_signup = true -# Allow/disallow testing manual linking of accounts -enable_manual_linking = false - -[auth.email] -# Allow/disallow new user signups via email to your project. -enable_signup = true -# If enabled, a user will be required to confirm any email change on both the old, and new email -# addresses. If disabled, only the new email is required to confirm. -double_confirm_changes = true -# If enabled, users need to confirm their email address before signing in. -enable_confirmations = false - -# Uncomment to customize email template -# [auth.email.template.invite] -# subject = "You have been invited" -# content_path = "./supabase/templates/invite.html" - -[auth.sms] -# Allow/disallow new user signups via SMS to your project. -enable_signup = true -# If enabled, users need to confirm their phone number before signing in. -enable_confirmations = false -# Template for sending OTP to users -template = "Your code is {{ .Code }} ." - -# Use pre-defined map of phone number to OTP for testing. -[auth.sms.test_otp] -# 4152127777 = "123456" - -# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used. -[auth.hook.custom_access_token] -# enabled = true -# uri = "pg-functions:////" - - -# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`. -[auth.sms.twilio] -enabled = false -account_sid = "" -message_service_sid = "" -# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead: -auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)" - -# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`, -# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`, -# `twitter`, `slack`, `spotify`, `workos`, `zoom`. -[auth.external.apple] -enabled = false -client_id = "" -# DO NOT commit your OAuth provider secret to git. Use environment variable substitution instead: -secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)" -# Overrides the default auth redirectUrl. -redirect_uri = "" -# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure, -# or any other third-party OIDC providers. -url = "" - -[analytics] -enabled = false -port = 54327 -vector_port = 54328 -# Configure one of the supported backends: `postgres`, `bigquery`. -backend = "postgres" - -# Experimental features may be deprecated any time -[experimental] -# Configures Postgres storage engine to use OrioleDB (S3) -orioledb_version = "" -# Configures S3 bucket URL, eg. .s3-.amazonaws.com -s3_host = "env(S3_HOST)" -# Configures S3 bucket region, eg. us-east-1 -s3_region = "env(S3_REGION)" -# Configures AWS_ACCESS_KEY_ID for S3 bucket -s3_access_key = "env(S3_ACCESS_KEY)" -# Configures AWS_SECRET_ACCESS_KEY for S3 bucket -s3_secret_key = "env(S3_SECRET_KEY)" diff --git a/packages/adapter-supabase/package.json b/packages/adapter-supabase/package.json deleted file mode 100644 index 44c551d2802c8..0000000000000 --- a/packages/adapter-supabase/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "@elizaos/adapter-supabase", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "@supabase/supabase-js": "2.46.2" - }, - "devDependencies": { - "@vitest/coverage-v8": "^3.0.2", - "tsup": "8.3.5", - "vitest": "^3.0.2" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "test:coverage": "vitest run --coverage" - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/adapter-supabase/schema.sql b/packages/adapter-supabase/schema.sql deleted file mode 100644 index 5cb5dc3866352..0000000000000 --- a/packages/adapter-supabase/schema.sql +++ /dev/null @@ -1,195 +0,0 @@ --- Enable pgvector extension - --- -- Drop existing tables and extensions --- DROP EXTENSION IF EXISTS vector CASCADE; --- DROP TABLE IF EXISTS relationships CASCADE; --- DROP TABLE IF EXISTS participants CASCADE; --- DROP TABLE IF EXISTS logs CASCADE; --- DROP TABLE IF EXISTS goals CASCADE; --- DROP TABLE IF EXISTS memories CASCADE; --- DROP TABLE IF EXISTS rooms CASCADE; --- DROP TABLE IF EXISTS accounts CASCADE; --- DROP TABLE IF EXISTS knowledge CASCADE; - - -CREATE EXTENSION IF NOT EXISTS vector; - -BEGIN; - -CREATE TABLE accounts ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "name" TEXT, - "username" TEXT, - "email" TEXT NOT NULL, - "avatarUrl" TEXT, - "details" JSONB DEFAULT '{}'::jsonb -); - -CREATE TABLE rooms ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP -); - --- Create tables for both vector sizes -CREATE TABLE memories_1536 ( - "id" UUID PRIMARY KEY, - "type" TEXT NOT NULL, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "content" JSONB NOT NULL, - "embedding" vector(1536), - "userId" UUID REFERENCES accounts("id"), - "agentId" UUID REFERENCES accounts("id"), - "roomId" UUID REFERENCES rooms("id"), - "unique" BOOLEAN DEFAULT true NOT NULL, - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE, - CONSTRAINT fk_agent FOREIGN KEY ("agentId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE memories_1024 ( - "id" UUID PRIMARY KEY, - "type" TEXT NOT NULL, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "content" JSONB NOT NULL, - "embedding" vector(1024), -- Ollama mxbai-embed-large - "userId" UUID REFERENCES accounts("id"), - "agentId" UUID REFERENCES accounts("id"), - "roomId" UUID REFERENCES rooms("id"), - "unique" BOOLEAN DEFAULT true NOT NULL, - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE, - CONSTRAINT fk_agent FOREIGN KEY ("agentId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE memories_768 ( - "id" UUID PRIMARY KEY, - "type" TEXT NOT NULL, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "content" JSONB NOT NULL, - "embedding" vector(768), -- Gaianet nomic-embed - "userId" UUID REFERENCES accounts("id"), - "agentId" UUID REFERENCES accounts("id"), - "roomId" UUID REFERENCES rooms("id"), - "unique" BOOLEAN DEFAULT true NOT NULL, - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE, - CONSTRAINT fk_agent FOREIGN KEY ("agentId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE memories_384 ( - "id" UUID PRIMARY KEY, - "type" TEXT NOT NULL, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "content" JSONB NOT NULL, - "embedding" vector(384), - "userId" UUID REFERENCES accounts("id"), - "agentId" UUID REFERENCES accounts("id"), - "roomId" UUID REFERENCES rooms("id"), - "unique" BOOLEAN DEFAULT true NOT NULL, - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE, - CONSTRAINT fk_agent FOREIGN KEY ("agentId") REFERENCES accounts("id") ON DELETE CASCADE -); - --- Update view to include Ollama table -CREATE VIEW memories AS - SELECT * FROM memories_1536 - UNION ALL - SELECT * FROM memories_1024 - UNION ALL - SELECT * FROM memories_768 - UNION ALL - SELECT * FROM memories_384; - - -CREATE TABLE goals ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "userId" UUID REFERENCES accounts("id"), - "name" TEXT, - "status" TEXT, - "description" TEXT, - "roomId" UUID REFERENCES rooms("id"), - "objectives" JSONB DEFAULT '[]'::jsonb NOT NULL, - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE logs ( - "id" UUID PRIMARY KEY DEFAULT gen_random_uuid(), - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "userId" UUID NOT NULL REFERENCES accounts("id"), - "body" JSONB NOT NULL, - "type" TEXT NOT NULL, - "roomId" UUID NOT NULL REFERENCES rooms("id"), - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE participants ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "userId" UUID REFERENCES accounts("id"), - "roomId" UUID REFERENCES rooms("id"), - "userState" TEXT, - "last_message_read" TEXT, - UNIQUE("userId", "roomId"), - CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE relationships ( - "id" UUID PRIMARY KEY, - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "userA" UUID NOT NULL REFERENCES accounts("id"), - "userB" UUID NOT NULL REFERENCES accounts("id"), - "status" TEXT, - "userId" UUID NOT NULL REFERENCES accounts("id"), - CONSTRAINT fk_user_a FOREIGN KEY ("userA") REFERENCES accounts("id") ON DELETE CASCADE, - CONSTRAINT fk_user_b FOREIGN KEY ("userB") REFERENCES accounts("id") ON DELETE CASCADE, - CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE -); - -CREATE TABLE cache ( - "key" TEXT NOT NULL, - "agentId" TEXT NOT NULL, - "value" JSONB DEFAULT '{}'::jsonb, - "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - "expiresAt" TIMESTAMP, - PRIMARY KEY ("key", "agentId") -); - -CREATE TABLE knowledge ( - "id" UUID PRIMARY KEY, - "agentId" UUID REFERENCES accounts("id"), - "content" JSONB NOT NULL, - "embedding" vector(1536), - "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, - "isMain" BOOLEAN DEFAULT FALSE, - "originalId" UUID REFERENCES knowledge("id"), - "chunkIndex" INTEGER, - "isShared" BOOLEAN DEFAULT FALSE, - CHECK(("isShared" = true AND "agentId" IS NULL) OR ("isShared" = false AND "agentId" IS NOT NULL)) -); - --- Add index for Ollama table -CREATE INDEX idx_memories_1024_embedding ON memories_1024 USING hnsw ("embedding" vector_cosine_ops); -CREATE INDEX idx_memories_1024_type_room ON memories_1024("type", "roomId"); -CREATE INDEX idx_memories_768_embedding ON memories_768 USING hnsw ("embedding" vector_cosine_ops); -CREATE INDEX idx_memories_768_type_room ON memories_768("type", "roomId"); -CREATE INDEX idx_memories_1536_embedding ON memories_1536 USING hnsw ("embedding" vector_cosine_ops); -CREATE INDEX idx_memories_384_embedding ON memories_384 USING hnsw ("embedding" vector_cosine_ops); -CREATE INDEX idx_memories_1536_type_room ON memories_1536("type", "roomId"); -CREATE INDEX idx_memories_384_type_room ON memories_384("type", "roomId"); -CREATE INDEX idx_participants_user ON participants("userId"); -CREATE INDEX idx_participants_room ON participants("roomId"); -CREATE INDEX idx_relationships_users ON relationships("userA", "userB"); -CREATE INDEX idx_knowledge_agent ON knowledge("agentId"); -CREATE INDEX idx_knowledge_agent_main ON knowledge("agentId", "isMain"); -CREATE INDEX idx_knowledge_original ON knowledge("originalId"); -CREATE INDEX idx_knowledge_created ON knowledge("agentId", "createdAt"); -CREATE INDEX idx_knowledge_shared ON knowledge("isShared"); -CREATE INDEX idx_knowledge_embedding ON knowledge USING ivfflat (embedding vector_cosine_ops); - -COMMIT; diff --git a/packages/adapter-supabase/seed.sql b/packages/adapter-supabase/seed.sql deleted file mode 100644 index 749d66fbd83f0..0000000000000 --- a/packages/adapter-supabase/seed.sql +++ /dev/null @@ -1,3 +0,0 @@ -INSERT INTO public.accounts (id, name, email, "avatarUrl", details) VALUES ('00000000-0000-0000-0000-000000000000', 'Default Agent', 'default@agent.com', '', '{}'); -INSERT INTO public.rooms (id, "createdAt") VALUES ('00000000-0000-0000-0000-000000000000', NOW()); -INSERT INTO public.participants (id, "createdAt", "userId", "roomId", "userState", last_message_read) VALUES ('00000000-0000-0000-0000-000000000000', NOW(), 'Default Agent', '00000000-0000-0000-0000-000000000000', NULL, NULL); diff --git a/packages/adapter-supabase/src/index.ts b/packages/adapter-supabase/src/index.ts deleted file mode 100644 index 5f65935490f76..0000000000000 --- a/packages/adapter-supabase/src/index.ts +++ /dev/null @@ -1,957 +0,0 @@ -import { createClient, type SupabaseClient } from "@supabase/supabase-js"; -import { - type Memory, - type Goal, - type Relationship, - type Actor, - type GoalStatus, - type Account, - type UUID, - type Participant, - type Room, - type RAGKnowledgeItem, - elizaLogger, -} from "@elizaos/core"; -import { DatabaseAdapter } from "@elizaos/core"; -import { v4 as uuid } from "uuid"; -export class SupabaseDatabaseAdapter extends DatabaseAdapter { - async getRoom(roomId: UUID): Promise { - const { data, error } = await this.supabase - .from("rooms") - .select("id") - .eq("id", roomId) - .maybeSingle(); - - if (error) { - elizaLogger.error(`Error getting room: ${error.message}`); - return null; - } - return data ? (data.id as UUID) : null; - } - - async getParticipantsForAccount(userId: UUID): Promise { - const { data, error } = await this.supabase - .from("participants") - .select("*") - .eq("userId", userId); - - if (error) { - throw new Error( - `Error getting participants for account: ${error.message}` - ); - } - - return data as Participant[]; - } - - async getParticipantUserState( - roomId: UUID, - userId: UUID - ): Promise<"FOLLOWED" | "MUTED" | null> { - const { data, error } = await this.supabase - .from("participants") - .select("userState") - .eq("roomId", roomId) - .eq("userId", userId) - .single(); - - if (error) { - elizaLogger.error("Error getting participant user state:", error); - return null; - } - - return data?.userState as "FOLLOWED" | "MUTED" | null; - } - - async setParticipantUserState( - roomId: UUID, - userId: UUID, - state: "FOLLOWED" | "MUTED" | null - ): Promise { - const { error } = await this.supabase - .from("participants") - .update({ userState: state }) - .eq("roomId", roomId) - .eq("userId", userId); - - if (error) { - elizaLogger.error("Error setting participant user state:", error); - throw new Error("Failed to set participant user state"); - } - } - - async getParticipantsForRoom(roomId: UUID): Promise { - const { data, error } = await this.supabase - .from("participants") - .select("userId") - .eq("roomId", roomId); - - if (error) { - throw new Error( - `Error getting participants for room: ${error.message}` - ); - } - - return data.map((row) => row.userId as UUID); - } - - supabase: SupabaseClient; - - constructor(supabaseUrl: string, supabaseKey: string) { - super(); - this.supabase = createClient(supabaseUrl, supabaseKey); - } - - async init() { - // noop - } - - async close() { - // noop - } - - async getMemoriesByRoomIds(params: { - roomIds: UUID[]; - agentId?: UUID; - tableName: string; - limit?: number; - }): Promise { - let query = this.supabase - .from(params.tableName) - .select("*") - .in("roomId", params.roomIds) - .order("createdAt", { ascending: false }); - - if (params.agentId) { - query = query.eq("agentId", params.agentId); - } - - if (params.limit) { - query = query.limit(params.limit); - } - - const { data, error } = await query; - - if (error) { - elizaLogger.error("Error retrieving memories by room IDs:", error); - return []; - } - - // map createdAt to Date - const memories = data.map((memory) => ({ - ...memory, - })); - - return memories as Memory[]; - } - - async getAccountById(userId: UUID): Promise { - const { data, error } = await this.supabase - .from("accounts") - .select("*") - .eq("id", userId); - if (error) { - throw new Error(error.message); - } - return (data?.[0] as Account) || null; - } - - async createAccount(account: Account): Promise { - const { error } = await this.supabase - .from("accounts") - .upsert([account]); - if (error) { - elizaLogger.error(error.message); - return false; - } - return true; - } - - async getActorDetails(params: { roomId: UUID }): Promise { - try { - const response = await this.supabase - .from("rooms") - .select( - ` - participants:participants( - account:accounts(id, name, username, details) - ) - ` - ) - .eq("id", params.roomId); - - if (response.error) { - elizaLogger.error("Error!" + response.error); - return []; - } - const { data } = response; - - return data - .flatMap((room) => - room.participants.map((participant) => { - const user = participant.account as unknown as Actor; - return { - name: user?.name, - details: user?.details, - id: user?.id, - username: user?.username, - }; - }) - ); - } catch (error) { - elizaLogger.error("error", error); - throw error; - } - } - - async searchMemories(params: { - tableName: string; - roomId: UUID; - embedding: number[]; - match_threshold: number; - match_count: number; - unique: boolean; - }): Promise { - const result = await this.supabase.rpc("search_memories", { - query_table_name: params.tableName, - query_roomId: params.roomId, - query_embedding: params.embedding, - query_match_threshold: params.match_threshold, - query_match_count: params.match_count, - query_unique: params.unique, - }); - if (result.error) { - throw new Error(JSON.stringify(result.error)); - } - return result.data.map((memory) => ({ - ...memory, - })); - } - - async getCachedEmbeddings(opts: { - query_table_name: string; - query_threshold: number; - query_input: string; - query_field_name: string; - query_field_sub_name: string; - query_match_count: number; - }): Promise< - { - embedding: number[]; - levenshtein_score: number; - }[] - > { - const result = await this.supabase.rpc("get_embedding_list", opts); - if (result.error) { - throw new Error(JSON.stringify(result.error)); - } - return result.data; - } - - async updateGoalStatus(params: { - goalId: UUID; - status: GoalStatus; - }): Promise { - await this.supabase - .from("goals") - .update({ status: params.status }) - .match({ id: params.goalId }); - } - - async log(params: { - body: { [key: string]: unknown }; - userId: UUID; - roomId: UUID; - type: string; - }): Promise { - const { error } = await this.supabase.from("logs").insert({ - body: params.body, - userId: params.userId, - roomId: params.roomId, - type: params.type, - }); - - if (error) { - elizaLogger.error("Error inserting log:", error); - throw new Error(error.message); - } - } - - async getMemories(params: { - roomId: UUID; - count?: number; - unique?: boolean; - tableName: string; - agentId?: UUID; - start?: number; - end?: number; - }): Promise { - const query = this.supabase - .from(params.tableName) - .select("*") - .eq("roomId", params.roomId); - - if (params.start) { - query.gte("createdAt", params.start); - } - - if (params.end) { - query.lte("createdAt", params.end); - } - - if (params.unique) { - query.eq("unique", true); - } - - if (params.agentId) { - query.eq("agentId", params.agentId); - } - - query.order("createdAt", { ascending: false }); - - if (params.count) { - query.limit(params.count); - } - - const { data, error } = await query; - - if (error) { - throw new Error(`Error retrieving memories: ${error.message}`); - } - - return data as Memory[]; - } - - async searchMemoriesByEmbedding( - embedding: number[], - params: { - match_threshold?: number; - count?: number; - roomId?: UUID; - agentId?: UUID; - unique?: boolean; - tableName: string; - } - ): Promise { - const queryParams = { - query_table_name: params.tableName, - query_roomId: params.roomId, - query_embedding: embedding, - query_match_threshold: params.match_threshold, - query_match_count: params.count, - query_unique: !!params.unique, - }; - if (params.agentId) { - (queryParams as any).query_agentId = params.agentId; - } - - const result = await this.supabase.rpc("search_memories", queryParams); - if (result.error) { - throw new Error(JSON.stringify(result.error)); - } - return result.data.map((memory) => ({ - ...memory, - })); - } - - async getMemoryById(memoryId: UUID): Promise { - const { data, error } = await this.supabase - .from("memories") - .select("*") - .eq("id", memoryId) - .single(); - - if (error) { - elizaLogger.error("Error retrieving memory by ID:", error); - return null; - } - - return data as Memory; - } - - async getMemoriesByIds( - memoryIds: UUID[], - tableName?: string - ): Promise { - if (memoryIds.length === 0) return []; - - let query = this.supabase - .from("memories") - .select("*") - .in("id", memoryIds); - - if (tableName) { - query = query.eq("type", tableName); - } - - const { data, error } = await query; - - if (error) { - console.error("Error retrieving memories by IDs:", error); - return []; - } - - return data as Memory[]; - } - - async createMemory( - memory: Memory, - tableName: string, - unique = false - ): Promise { - const createdAt = memory.createdAt ?? Date.now(); - if (unique) { - const opts = { - // TODO: Add ID option, optionally - query_table_name: tableName, - query_userId: memory.userId, - query_content: memory.content.text, - query_roomId: memory.roomId, - query_embedding: memory.embedding, - query_createdAt: createdAt, - similarity_threshold: 0.95, - }; - - const result = await this.supabase.rpc( - "check_similarity_and_insert", - opts - ); - - if (result.error) { - throw new Error(JSON.stringify(result.error)); - } - } else { - const result = await this.supabase - .from("memories") - .insert({ ...memory, createdAt, type: tableName }); - const { error } = result; - if (error) { - throw new Error(JSON.stringify(error)); - } - } - } - - async removeMemory(memoryId: UUID): Promise { - const result = await this.supabase - .from("memories") - .delete() - .eq("id", memoryId); - const { error } = result; - if (error) { - throw new Error(JSON.stringify(error)); - } - } - - async removeAllMemories(roomId: UUID, tableName: string): Promise { - const result = await this.supabase.rpc("remove_memories", { - query_table_name: tableName, - query_roomId: roomId, - }); - - if (result.error) { - throw new Error(JSON.stringify(result.error)); - } - } - - async countMemories( - roomId: UUID, - unique = true, - tableName: string - ): Promise { - if (!tableName) { - throw new Error("tableName is required"); - } - const query = { - query_table_name: tableName, - query_roomId: roomId, - query_unique: !!unique, - }; - const result = await this.supabase.rpc("count_memories", query); - - if (result.error) { - throw new Error(JSON.stringify(result.error)); - } - - return result.data; - } - - async getGoals(params: { - roomId: UUID; - userId?: UUID | null; - onlyInProgress?: boolean; - count?: number; - }): Promise { - const opts = { - query_roomId: params.roomId, - query_userId: params.userId, - only_in_progress: params.onlyInProgress, - row_count: params.count, - }; - - const { data: goals, error } = await this.supabase.rpc( - "get_goals", - opts - ); - - if (error) { - throw new Error(error.message); - } - - return goals; - } - - async updateGoal(goal: Goal): Promise { - const { error } = await this.supabase - .from("goals") - .update(goal) - .match({ id: goal.id }); - if (error) { - throw new Error(`Error creating goal: ${error.message}`); - } - } - - async createGoal(goal: Goal): Promise { - const { error } = await this.supabase.from("goals").insert(goal); - if (error) { - throw new Error(`Error creating goal: ${error.message}`); - } - } - - async removeGoal(goalId: UUID): Promise { - const { error } = await this.supabase - .from("goals") - .delete() - .eq("id", goalId); - if (error) { - throw new Error(`Error removing goal: ${error.message}`); - } - } - - async removeAllGoals(roomId: UUID): Promise { - const { error } = await this.supabase - .from("goals") - .delete() - .eq("roomId", roomId); - if (error) { - throw new Error(`Error removing goals: ${error.message}`); - } - } - - async getRoomsForParticipant(userId: UUID): Promise { - const { data, error } = await this.supabase - .from("participants") - .select("roomId") - .eq("userId", userId); - - if (error) { - throw new Error( - `Error getting rooms by participant: ${error.message}` - ); - } - - return data.map((row) => row.roomId as UUID); - } - - async getRoomsForParticipants(userIds: UUID[]): Promise { - const { data, error } = await this.supabase - .from("participants") - .select("roomId") - .in("userId", userIds); - - if (error) { - throw new Error( - `Error getting rooms by participants: ${error.message}` - ); - } - - return [...new Set(data.map((row) => row.roomId as UUID))] as UUID[]; - } - - async createRoom(roomId?: UUID): Promise { - roomId = roomId ?? (uuid() as UUID); - const { data, error } = await this.supabase.rpc("create_room", { - roomId, - }); - - if (error) { - throw new Error(`Error creating room: ${error.message}`); - } - - if (!data || data.length === 0) { - throw new Error("No data returned from room creation"); - } - - return data[0].id as UUID; - } - - async removeRoom(roomId: UUID): Promise { - const { error } = await this.supabase - .from("rooms") - .delete() - .eq("id", roomId); - - if (error) { - throw new Error(`Error removing room: ${error.message}`); - } - } - - async addParticipant(userId: UUID, roomId: UUID): Promise { - const { error } = await this.supabase - .from("participants") - .insert({ userId: userId, roomId: roomId }); - - if (error) { - elizaLogger.error(`Error adding participant: ${error.message}`); - return false; - } - return true; - } - - async removeParticipant(userId: UUID, roomId: UUID): Promise { - const { error } = await this.supabase - .from("participants") - .delete() - .eq("userId", userId) - .eq("roomId", roomId); - - if (error) { - elizaLogger.error(`Error removing participant: ${error.message}`); - return false; - } - return true; - } - - async createRelationship(params: { - userA: UUID; - userB: UUID; - }): Promise { - const allRoomData = await this.getRoomsForParticipants([ - params.userA, - params.userB, - ]); - - let roomId: UUID; - - if (!allRoomData || allRoomData.length === 0) { - // If no existing room is found, create a new room - const { data: newRoomData, error: roomsError } = await this.supabase - .from("rooms") - .insert({}) - .single(); - - if (roomsError) { - throw new Error("Room creation error: " + roomsError.message); - } - - roomId = (newRoomData as Room)?.id as UUID; - } else { - // If an existing room is found, use the first room's ID - roomId = allRoomData[0]; - } - - const { error: participantsError } = await this.supabase - .from("participants") - .insert([ - { userId: params.userA, roomId }, - { userId: params.userB, roomId }, - ]); - - if (participantsError) { - throw new Error( - "Participants creation error: " + participantsError.message - ); - } - - // Create or update the relationship between the two users - const { error: relationshipError } = await this.supabase - .from("relationships") - .upsert({ - userA: params.userA, - userB: params.userB, - userId: params.userA, - status: "FRIENDS", - }) - .eq("userA", params.userA) - .eq("userB", params.userB); - - if (relationshipError) { - throw new Error( - "Relationship creation error: " + relationshipError.message - ); - } - - return true; - } - - async getRelationship(params: { - userA: UUID; - userB: UUID; - }): Promise { - const { data, error } = await this.supabase.rpc("get_relationship", { - usera: params.userA, - userb: params.userB, - }); - - if (error) { - throw new Error(error.message); - } - - return data[0]; - } - - async getRelationships(params: { userId: UUID }): Promise { - const { data, error } = await this.supabase - .from("relationships") - .select("*") - .or(`userA.eq.${params.userId},userB.eq.${params.userId}`) - .eq("status", "FRIENDS"); - - if (error) { - throw new Error(error.message); - } - - return data as Relationship[]; - } - - async getCache(params: { - key: string; - agentId: UUID; - }): Promise { - const { data, error } = await this.supabase - .from("cache") - .select("value") - .eq("key", params.key) - .eq("agentId", params.agentId) - .single(); - - if (error) { - elizaLogger.error("Error fetching cache:", error); - return undefined; - } - - return data?.value; - } - - async setCache(params: { - key: string; - agentId: UUID; - value: string; - }): Promise { - const { error } = await this.supabase.from("cache").upsert({ - key: params.key, - agentId: params.agentId, - value: params.value, - createdAt: new Date(), - }); - - if (error) { - elizaLogger.error("Error setting cache:", error); - return false; - } - - return true; - } - - async deleteCache(params: { - key: string; - agentId: UUID; - }): Promise { - try { - const { error } = await this.supabase - .from("cache") - .delete() - .eq("key", params.key) - .eq("agentId", params.agentId); - - if (error) { - elizaLogger.error("Error deleting cache", { - error: error.message, - key: params.key, - agentId: params.agentId, - }); - return false; - } - return true; - } catch (error) { - elizaLogger.error( - "Database connection error in deleteCache", - error instanceof Error ? error.message : String(error) - ); - return false; - } - } - - async getKnowledge(params: { - id?: UUID; - agentId: UUID; - limit?: number; - query?: string; - }): Promise { - let query = this.supabase - .from("knowledge") - .select("*") - .or(`agentId.eq.${params.agentId},isShared.eq.true`); - - if (params.id) { - query = query.eq("id", params.id); - } - - if (params.limit) { - query = query.limit(params.limit); - } - - const { data, error } = await query; - - if (error) { - throw new Error(`Error getting knowledge: ${error.message}`); - } - - return data.map((row) => ({ - id: row.id, - agentId: row.agentId, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - embedding: row.embedding - ? new Float32Array(row.embedding) - : undefined, - createdAt: new Date(row.createdAt).getTime(), - })); - } - - async searchKnowledge(params: { - agentId: UUID; - embedding: Float32Array; - match_threshold: number; - match_count: number; - searchText?: string; - }): Promise { - const cacheKey = `embedding_${params.agentId}_${params.searchText}`; - const cachedResult = await this.getCache({ - key: cacheKey, - agentId: params.agentId, - }); - - if (cachedResult) { - return JSON.parse(cachedResult); - } - - // Convert Float32Array to array for Postgres vector - const embedding = Array.from(params.embedding); - - const { data, error } = await this.supabase.rpc("search_knowledge", { - query_embedding: embedding, - query_agent_id: params.agentId, - match_threshold: params.match_threshold, - match_count: params.match_count, - search_text: params.searchText || "", - }); - - if (error) { - throw new Error(`Error searching knowledge: ${error.message}`); - } - - const results = data.map((row) => ({ - id: row.id, - agentId: row.agentId, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - embedding: row.embedding - ? new Float32Array(row.embedding) - : undefined, - createdAt: new Date(row.createdAt).getTime(), - similarity: row.similarity, - })); - - await this.setCache({ - key: cacheKey, - agentId: params.agentId, - value: JSON.stringify(results), - }); - - return results; - } - - async createKnowledge(knowledge: RAGKnowledgeItem): Promise { - try { - const metadata = knowledge.content.metadata || {}; - - const { error } = await this.supabase.from("knowledge").insert({ - id: knowledge.id, - agentId: metadata.isShared ? null : knowledge.agentId, - content: knowledge.content, - embedding: knowledge.embedding - ? Array.from(knowledge.embedding) - : null, - createdAt: knowledge.createdAt || new Date(), - isMain: metadata.isMain || false, - originalId: metadata.originalId || null, - chunkIndex: metadata.chunkIndex || null, - isShared: metadata.isShared || false, - }); - - if (error) { - if (metadata.isShared && error.code === "23505") { - // Unique violation - elizaLogger.info( - `Shared knowledge ${knowledge.id} already exists, skipping` - ); - return; - } - throw error; - } - } catch (error: any) { - elizaLogger.error(`Error creating knowledge ${knowledge.id}:`, { - error, - embeddingLength: knowledge.embedding?.length, - content: knowledge.content, - }); - throw error; - } - } - - async removeKnowledge(id: UUID): Promise { - const { error } = await this.supabase - .from("knowledge") - .delete() - .eq("id", id); - - if (error) { - throw new Error(`Error removing knowledge: ${error.message}`); - } - } - - async clearKnowledge(agentId: UUID, shared?: boolean): Promise { - if (shared) { - const { error } = await this.supabase - .from("knowledge") - .delete() - .filter("agentId", "eq", agentId) - .filter("isShared", "eq", true); - - if (error) { - elizaLogger.error( - `Error clearing shared knowledge for agent ${agentId}:`, - error - ); - throw error; - } - } else { - const { error } = await this.supabase - .from("knowledge") - .delete() - .eq("agentId", agentId); - - if (error) { - elizaLogger.error( - `Error clearing knowledge for agent ${agentId}:`, - error - ); - throw error; - } - } - } -} diff --git a/packages/adapter-supabase/tsconfig.json b/packages/adapter-supabase/tsconfig.json deleted file mode 100644 index 005fbac9d3634..0000000000000 --- a/packages/adapter-supabase/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/adapter-supabase/tsup.config.ts b/packages/adapter-supabase/tsup.config.ts deleted file mode 100644 index 9acebc5ba9ab4..0000000000000 --- a/packages/adapter-supabase/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "uuid", - // Add other modules you want to externalize - ], -}); diff --git a/packages/adapter-supabase/vitest.config.ts b/packages/adapter-supabase/vitest.config.ts deleted file mode 100644 index d87fc4a695427..0000000000000 --- a/packages/adapter-supabase/vitest.config.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { defineConfig } from 'vitest/config' - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - }, -}) diff --git a/packages/cli/index.js b/packages/cli/index.js new file mode 100755 index 0000000000000..cfaf2032f5c2b --- /dev/null +++ b/packages/cli/index.js @@ -0,0 +1,161 @@ +#!/usr/bin/env node +const { execSync } = require('child_process') +const pathUtil = require('path') +const fs = require('fs') +const { Command } = require('commander') + +const program = new Command() +const { version } = require('./package.json') + +program + .name('elizaos') + .description('elizaOS CLI - Manage your plugins') + .version(version); + +const plugins = new Command() + .name('plugins') + .description('manage elizaOS plugins') + +async function getPlugins() { + const resp = await fetch('https://raw.githubusercontent.com/elizaos-plugins/registry/refs/heads/main/index.json') + return await resp.json(); +} + +plugins + .command('list') + .alias('l') + .alias('ls') + .description('list available plugins') + .option("-t, --type ", "filter by type (adapter, client, plugin)") + .action(async (opts) => { + try { + const plugins = await getPlugins() + const pluginNames = Object.keys(plugins) + .filter(name => !opts.type || name.includes(opts.type)) + .sort() + + console.info("\nAvailable plugins:") + for (const plugin of pluginNames) { + console.info(` ${plugin}`) + } + console.info("") + } catch (error) { + console.error(error) + } + }) + +plugins + .command('add') + .alias('install') + .description('add a plugin') + .argument('', 'plugin name') + .action(async (plugin, opts) => { + // ensure git is installed + try { + const gitVersion = execSync('git --version', { stdio: 'pipe' }).toString().trim(); + console.log('using', gitVersion) + } catch(e) { + console.error('Please install git to use this utility') + return + } + + const plugins = await getPlugins() + const repoData = plugins[plugin]?.split(':') + if (!repoData) { + console.error('Plugin', plugin, 'not found') + return + } + // repo type + if (repoData[0] !== 'github') { + console.error('Plugin', plugin, 'uses', repoData[0], ' but this utility only currently support github') + return + } + const parts = repoData[1].split('/') + const elizaOSroot = pathUtil.resolve(__dirname, '../..') + const pkgPath = elizaOSroot + '/packages/' + parts[1] + + // add to packages + if (!fs.existsSync(pkgPath)) { + // clone it + console.log('cloning', parts[1], 'to', pkgPath) + const gitOutput = execSync('git clone https://github.com/' + repoData[1] + ' ' + pkgPath, { stdio: 'pipe' }).toString().trim(); + } + + // add core to plugin + // # pnpm add @elizaos/core@workspace:* --filter ./packages/client-twitter + console.log('Making sure plugin has access to @elizaos/core') + const pluginAddCoreOutput = execSync('pnpm add @elizaos/core@workspace:* --filter ./packages/' + parts[1], { cwd: elizaOSroot, stdio: 'pipe' }).toString().trim(); + + // Read the current package.json + const packageJsonPath = pkgPath + '/package.json' + const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8')); + + if (packageJson.name !== '@elizaos-plugins/' + parts[1]) { + // Update the name field + packageJson.name = '@elizaos-plugins/' + parts[1] + console.log('Updating plugins package.json name to', packageJson.name) + + // Write the updated package.json back to disk + fs.writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2)) + } + + // add to agent + console.log('Adding plugin', plugin, 'to agent/package.json') + try { + const pluginAddAgentOutput = execSync('pnpm add ' + plugin + '@workspace:* --filter ./agent', { cwd: elizaOSroot, stdio: 'pipe' }).toString().trim(); + //console.log('pluginAddAgentOutput', pluginAddAgentOutput) + } catch (e) { + console.error('error', e) + } + + console.log(plugin, 'attempted installation is complete') + // can't add to char file because we don't know which character + console.log('Remember to add it to your character file\'s plugin field: ["' + plugin + '"]') + }) + +plugins + .command('remove') + .alias('delete') + .alias('del') + .alias('rm') + .description('remove a plugin') + .argument("", "plugin name") + .action(async (plugin, opts) => { + // ensure prefix + const pluginName = '@elizaos-plugins/' + plugin.replace(/^@elizaos-plugins\//, '') + const plugins = await getPlugins() + //console.log('loaded', plugins.length, plugins) + const repoData = plugins[pluginName]?.split(':') + if (!repoData) { + console.error('Plugin', pluginName, 'not found') + return + } + const parts = repoData[1].split('/') + const elizaOSroot = pathUtil.resolve(__dirname, '../..') + const pkgPath = elizaOSroot + '/packages/' + parts[1] + + // remove from agent: pnpm remove some-plugin --filter ./agent + try { + console.log('Removing plugin from agent') + const pluginRemoveAgentOutput = execSync('pnpm remove ' + pluginName + ' --filter ./agent', { cwd: elizaOSroot, stdio: 'pipe' }).toString().trim(); + } catch (e) { + console.error('removal from agent, error', e) + } + + if (fs.existsSync(pkgPath)) { + // rm -fr packages/path + console.log('deleting', pkgPath) + //const gitOutput = execSync('git clone https://github.com/' + repoData[1] + ' ' + pkgPath, { stdio: 'pipe' }).toString().trim(); + try { + fs.rmSync(pkgPath, { recursive: true, force: true }); + } catch (err) { + console.error('Error removing package plugin directory:', err); + } + } + console.log(plugin, 'attempted plugin removal is complete') + }) + + +program.addCommand(plugins) + +program.parse(process.argv) diff --git a/packages/cli/package.json b/packages/cli/package.json new file mode 100644 index 0000000000000..18209dc4c9d3f --- /dev/null +++ b/packages/cli/package.json @@ -0,0 +1,17 @@ +{ + "name": "cli", + "version": "0.25.8", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "bin": { + "elizaos": "index.js" + }, + "author": "Odilitime", + "license": "ISC", + "description": "", + "dependencies": { + "commander": "^13.1.0" + } +} diff --git a/packages/client-alexa/.npmignore b/packages/client-alexa/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/client-alexa/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/client-alexa/biome.json b/packages/client-alexa/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/client-alexa/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/client-alexa/package.json b/packages/client-alexa/package.json deleted file mode 100644 index 0b2ab5ff5b155..0000000000000 --- a/packages/client-alexa/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "@elizaos/client-alexa", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "@elizaos/plugin-node": "workspace:*", - "ask-sdk-core": "^2.14.0", - "ask-sdk-model": "^1.86.0", - "axios": "1.7.9" - }, - "devDependencies": { - "@biomejs/biome": "1.5.3", - "tsup": "^8.3.5", - "vitest": "1.6.1" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "lint": "biome check src/", - "lint:fix": "biome check --apply src/", - "format": "biome format src/", - "format:fix": "biome format --write src/", - "test": "vitest run" - } -} diff --git a/packages/client-alexa/src/alexa-client.ts b/packages/client-alexa/src/alexa-client.ts deleted file mode 100644 index b05631309b422..0000000000000 --- a/packages/client-alexa/src/alexa-client.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { elizaLogger, type IAgentRuntime } from "@elizaos/core"; -import { DefaultApiClient } from "ask-sdk-core"; -import { services } from "ask-sdk-model"; -import axios from "axios"; -import { v4 } from "uuid"; - -export class AlexaClient { - // private bot: services.proactiveEvents.ProactiveEventsServiceClient; Use for conversations - private LwaServiceClient: services.LwaServiceClient; - private apiConfiguration: any; - private runtime: IAgentRuntime; - private skillId: string; - private clientId: string; - private clientSecret: string; - - constructor(runtime: IAgentRuntime) { - elizaLogger.log("📱 Constructing new AlexaClient..."); - this.runtime = runtime; - this.apiConfiguration = { - apiClient: new DefaultApiClient(), - apiEndpoint: "https://api.amazonalexa.com", - }; - this.skillId = runtime.getSetting("ALEXA_SKILL_ID"); - this.clientId = runtime.getSetting("ALEXA_CLIENT_ID"); - this.clientSecret = runtime.getSetting("ALEXA_CLIENT_SECRET"); - } - - public async start(): Promise { - elizaLogger.log("🚀 Starting Alexa bot..."); - try { - await this.initializeBot(); - } catch (error) { - elizaLogger.error("❌ Failed to launch Alexa bot:", error); - throw error; - } - } - - private async initializeBot(): Promise { - const authenticationConfiguration = { - clientId: this.clientId, - clientSecret: this.clientSecret, - }; - this.LwaServiceClient = new services.LwaServiceClient({ - apiConfiguration: this.apiConfiguration, - authenticationConfiguration, - }); - - elizaLogger.log("✨ Alexa bot successfully launched and is running!"); - const access_token = await this.LwaServiceClient.getAccessTokenForScope( - "alexa::proactive_events" - ); - - await this.sendProactiveEvent(access_token); - } - - async sendProactiveEvent(access_token: string): Promise { - const event = { - timestamp: new Date().toISOString(), - referenceId: v4(), - expiryTime: new Date(Date.now() + 10 * 60000).toISOString(), - event: { - name: "AMAZON.MessageAlert.Activated", - payload: { - state: { - status: "UNREAD", - freshness: "NEW", - }, - messageGroup: { - creator: { - name: "Eliza", - }, - count: 1, - }, - }, - }, - localizedAttributes: [ - { - locale: "en-US", - source: "localizedattribute:source", - }, - ], - relevantAudience: { - type: "Multicast", - payload: {}, - }, - }; - - try { - const response = await axios.post( - "https://api.amazonalexa.com/v1/proactiveEvents/stages/development", - event, - { - headers: { - "Content-Type": "application/json", - Authorization: `Bearer ${access_token}`, - }, - } - ); - switch (response.status) { - case 202: - elizaLogger.log("✅ Proactive event sent successfully."); - break; - case 400: - elizaLogger.error( - `${response.data.code} - ${response.data.message}}` - ); - break; - case 401: - elizaLogger.error("Unauthorized"); - break; - } - } catch (error) { - elizaLogger.error("Error", error); - } - } -} diff --git a/packages/client-alexa/src/index.ts b/packages/client-alexa/src/index.ts deleted file mode 100644 index a333e86b5af6b..0000000000000 --- a/packages/client-alexa/src/index.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { type Client, type IAgentRuntime, elizaLogger } from "@elizaos/core"; -import { AlexaClient } from "./alexa-client"; - -export const AlexaClientInterface: Client = { - start: async (runtime: IAgentRuntime) => { - const alexaClient = new AlexaClient(runtime); - - await alexaClient.start(); - - elizaLogger.success( - `✅ Alexa client successfully started for character ${runtime.character.name}` - ); - return alexaClient; - }, - stop: async (runtime: IAgentRuntime) => { - try { - // stop it - elizaLogger.log("Stopping alexa client", runtime.agentId); - await runtime.clients.alexa.stop(); - } catch (e) { - elizaLogger.error("client-alexa interface stop error", e); - } - }, -}; - -export default AlexaClientInterface; diff --git a/packages/client-alexa/tsconfig.json b/packages/client-alexa/tsconfig.json deleted file mode 100644 index 73993deaaf7cb..0000000000000 --- a/packages/client-alexa/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/client-alexa/tsup.config.ts b/packages/client-alexa/tsup.config.ts deleted file mode 100644 index 8eea21ba74f12..0000000000000 --- a/packages/client-alexa/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "fluent-ffmpeg", - // Add other modules you want to externalize - ], -}); diff --git a/packages/client-alexa/vitest.config.ts b/packages/client-alexa/vitest.config.ts deleted file mode 100644 index a11fbbd0d9ee8..0000000000000 --- a/packages/client-alexa/vitest.config.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { defineConfig } from 'vitest/config'; -import { resolve } from 'path'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - }, - resolve: { - alias: { - '@elizaos/core': resolve(__dirname, '../core/src'), - }, - }, -}); diff --git a/packages/client-auto/.npmignore b/packages/client-auto/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/client-auto/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/client-auto/package.json b/packages/client-auto/package.json deleted file mode 100644 index d6e03b58b3a02..0000000000000 --- a/packages/client-auto/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "@elizaos/client-auto", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "@types/body-parser": "1.19.5", - "@types/cors": "2.8.17", - "@types/express": "5.0.0", - "body-parser": "1.20.3", - "cors": "2.8.5", - "multer": "1.4.5-lts.1" - }, - "devDependencies": { - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch" - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/client-auto/src/index.ts b/packages/client-auto/src/index.ts deleted file mode 100644 index 1eae8637197f2..0000000000000 --- a/packages/client-auto/src/index.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { type Client, type IAgentRuntime, elizaLogger } from "@elizaos/core"; - -export class AutoClient { - interval: NodeJS.Timeout; - runtime: IAgentRuntime; - - constructor(runtime: IAgentRuntime) { - this.runtime = runtime; - - // start a loop that runs every x seconds - this.interval = setInterval( - async () => { - elizaLogger.log("running auto client..."); - }, - 60 * 60 * 1000 - ); // 1 hour in milliseconds - } -} - -export const AutoClientInterface: Client = { - start: async (runtime: IAgentRuntime) => { - const client = new AutoClient(runtime); - return client; - }, - stop: async (_runtime: IAgentRuntime) => { - console.warn("Direct client does not support stopping yet"); - }, -}; - -export default AutoClientInterface; diff --git a/packages/client-auto/tsconfig.json b/packages/client-auto/tsconfig.json deleted file mode 100644 index 005fbac9d3634..0000000000000 --- a/packages/client-auto/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/client-auto/tsup.config.ts b/packages/client-auto/tsup.config.ts deleted file mode 100644 index 49f33adc5f939..0000000000000 --- a/packages/client-auto/tsup.config.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "bs58", - "borsh", - "@solana/buffer-layout", - "stream", - "buffer", - "rpc-websockets", - "@solana/web3.js", - // Add other modules you want to externalize - ], -}); diff --git a/packages/client-deva/.npmignore b/packages/client-deva/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/client-deva/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/client-deva/eslint.config.mjs b/packages/client-deva/eslint.config.mjs deleted file mode 100644 index 92fe5bbebefad..0000000000000 --- a/packages/client-deva/eslint.config.mjs +++ /dev/null @@ -1,3 +0,0 @@ -import eslintGlobalConfig from "../../eslint.config.mjs"; - -export default [...eslintGlobalConfig]; diff --git a/packages/client-deva/package.json b/packages/client-deva/package.json deleted file mode 100644 index 446208c72b3cc..0000000000000 --- a/packages/client-deva/package.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "name": "@elizaos/client-deva", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "glob": "11.0.0" - }, - "devDependencies": { - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --watch", - "lint": "eslint . --fix" - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/client-deva/src/base.ts b/packages/client-deva/src/base.ts deleted file mode 100644 index 4fcd75e64a69a..0000000000000 --- a/packages/client-deva/src/base.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { elizaLogger, IAgentRuntime } from "@elizaos/core"; -import { DevaPersona, DevaPost } from "./types"; - -export class ClientBase { - private readonly runtime: IAgentRuntime; - private readonly accessToken: string; - private readonly apiBaseUrl: string; - private readonly defaultHeaders: Record; - - constructor(runtime: IAgentRuntime, accessToken: string, baseUrl: string) { - this.runtime = runtime; - this.accessToken = accessToken; - this.apiBaseUrl = baseUrl; - this.defaultHeaders = { - Authorization: `Bearer ${this.accessToken}`, - "Content-Type": "application/json", - }; - } - - public async getMe(): Promise { - try { - const response = await fetch(`${this.apiBaseUrl}/persona`, { - headers: { ...this.defaultHeaders }, - }); - if (!response.ok) { - throw new Error(`HTTP error! status: ${response.status}`); - } - return await response.json(); - } catch (error) { - elizaLogger.error("Failed to fetch persona:", error); - return null; - } - } - - public async getPersonaPosts(personaId: string): Promise { - const res = await fetch( - `${this.apiBaseUrl}/post?filter_persona_id=${personaId}`, - { - headers: { - Authorization: `Bearer ${this.accessToken}`, - "Content-Type": "application/json", - }, - }, - ).then((res) => res.json()); - return res.items; - } - - public async makePost({ - text, - in_reply_to_id, - }: { - text: string; - in_reply_to_id: string; - }): Promise { - const res = await fetch(`${this.apiBaseUrl}/post`, { - method: "POST", - headers: { - Authorization: `Bearer ${this.accessToken}`, - "Content-Type": "application/json", - }, - body: JSON.stringify({ text, in_reply_to_id, author_type: "BOT" }), - }).then((res) => res.json()); - - console.log(res); - return res; - } -} diff --git a/packages/client-deva/src/controller.ts b/packages/client-deva/src/controller.ts deleted file mode 100644 index 6169e52b31855..0000000000000 --- a/packages/client-deva/src/controller.ts +++ /dev/null @@ -1,244 +0,0 @@ -import { - composeContext, - Content, - elizaLogger, - generateText, - getEmbeddingZeroVector, - IAgentRuntime, - ModelClass, - parseBooleanFromText, - State, - stringToUuid, -} from "@elizaos/core"; -import { ClientBase } from "./base"; -import { DevaPersona, DevaPost } from "./types"; -import { DEVA_POST_TEMPLATE } from "./templates.ts"; - -export class DevaController { - private readonly runtime: IAgentRuntime; - private readonly client: ClientBase; - - private persona: DevaPersona; - private posts: DevaPost[]; - - constructor(runtime: IAgentRuntime, client: ClientBase) { - this.runtime = runtime; - this.client = client; - } - - public async init() { - await this.populatePersona(); - await this.populatePosts(); - await this.startPosting(); - } - - private async populatePersona() { - this.persona = await this.client.getMe(); - - if (!this.persona || !this.persona.id) { - elizaLogger.error("❌ Deva Client failed to fetch Persona"); - throw new Error("❌ Deva Client failed to fetch Persona"); - } - - elizaLogger.log( - `✨ Deva Client successfully fetched Persona: ${this.persona.username} ID: ${this.persona.id}` - ); - } - - private async populatePosts() { - this.posts = await this.client.getPersonaPosts(this.persona.id); - - // Get the existing memories from the database - const existingMemories = - await this.runtime.messageManager.getMemoriesByRoomIds({ - roomIds: this.posts.map((post) => - stringToUuid( - post.in_reply_to_id + "-" + this.runtime.agentId - ) - ), - }); - - // Create a Set to store the IDs of existing memories - const existingMemoryIds = new Set( - existingMemories.map((memory) => memory.id.toString()) - ); - - // Check if any of the posts don't exist in the existing memories - const notExistingPostsInMemory = this.posts.filter( - (post) => - !existingMemoryIds.has( - stringToUuid(post.id + "-" + this.runtime.agentId) - ) - ); - - for (const post of notExistingPostsInMemory) { - elizaLogger.log("Saving Post", post.id); - - const roomId = stringToUuid( - post.in_reply_to_id + "-" + this.runtime.agentId - ); - - const userId = - post.persona_id === this.persona.id - ? this.runtime.agentId - : stringToUuid(post.persona_id); - - if (post.persona_id === this.persona.id) { - await this.runtime.ensureConnection( - this.runtime.agentId, - roomId, - this.persona.username, - this.persona.display_name, - "deva" - ); - } else { - await this.runtime.ensureConnection( - userId, - roomId, - post.persona.username, - post.persona.display_name, - "deva" - ); - } - - const content = { - text: post.text, - inReplyTo: stringToUuid( - post.in_reply_to_id + "-" + this.runtime.agentId - ), - source: "deva", - } as Content; - - elizaLogger.log("Creating memory for post", post.id); - - // check if it already exists - const memory = await this.runtime.messageManager.getMemoryById( - stringToUuid(post.id + "-" + this.runtime.agentId) - ); - - if (memory) { - elizaLogger.log( - "Memory already exists, skipping timeline population" - ); - continue; - } - - await this.runtime.messageManager.createMemory({ - id: stringToUuid(post.id + "-" + this.runtime.agentId), - userId, - content: content, - agentId: this.runtime.agentId, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: new Date(post.created_at).getTime(), - }); - - elizaLogger.log("Created memory for post", post.id); - } - - elizaLogger.log( - `✨ Deva Client successfully fetched Persona Posts: ${this.posts.length}` - ); - } - - private async startPosting() { - const shouldPostImmediately = - this.runtime.getSetting("POST_IMMEDIATELY") != null && - this.runtime.getSetting("POST_IMMEDIATELY") != "" && - parseBooleanFromText(this.runtime.getSetting("POST_IMMEDIATELY")); - - if (shouldPostImmediately) { - this.generateNewPost(); - } - - return this.setupPostAwaiter(); - } - - private async setupPostAwaiter() { - // since new updates can happen meanwhile, we should check new posts - await this.populatePosts(); - const lastPost: DevaPost | null = - this.posts.length > 0 ? this.posts[this.posts.length - 1] : null; - - const lastPostTimestamp = lastPost - ? new Date(lastPost.updated_at).getTime() - : 0; - - const minMinutes = - parseInt(this.runtime.getSetting("POST_INTERVAL_MIN")) || 90; - const maxMinutes = - parseInt(this.runtime.getSetting("POST_INTERVAL_MAX")) || 180; - const randomMinutes = - Math.floor(Math.random() * (maxMinutes - minMinutes + 1)) + - minMinutes; - const delay = randomMinutes * 60 * 1000; - - if (Date.now() > lastPostTimestamp + delay) { - await this.generateNewPost(); - } - - setTimeout(() => { - this.setupPostAwaiter(); - }, delay); - - elizaLogger.log(`Next post scheduled in ${randomMinutes} minutes`); - } - - private async generateNewPost() { - elizaLogger.log("Generating new Deva Post"); - - const roomId = stringToUuid( - "deva_generate_room-" + this.persona.username - ); - - await this.runtime.ensureUserExists( - this.runtime.agentId, - this.persona.username, - this.persona.display_name, - "deva" - ); - - const topics = this.runtime.character.topics.join(", "); - const state = await this.runtime.composeState({ - userId: this.runtime.agentId, - roomId: roomId, - agentId: this.runtime.agentId, - content: { - text: topics, - action: "", - }, - }); - const customState: State = { - ...state, - agentName: this.persona.display_name, - twitterUserName: this.persona.username, - adjective: "Any adjective", - topic: "Any topic", - }; - - const context = composeContext({ - state: customState, - template: - this.runtime.character.templates?.devaPostTemplate || - DEVA_POST_TEMPLATE, - }); - - const newPostContent = await generateText({ - runtime: this.runtime, - context, - modelClass: ModelClass.SMALL, - }); - - // Replace \n with proper line breaks and trim excess spaces - // const formattedPost = newPostContent.replaceAll(/\\n/g, "\n").trim(); - - await this.client.makePost({ - text: newPostContent, - in_reply_to_id: null, - }); - - console.log(newPostContent); - - elizaLogger.log(`New Post published:\n ${newPostContent}`); - } -} diff --git a/packages/client-deva/src/devaClient.ts b/packages/client-deva/src/devaClient.ts deleted file mode 100644 index f3cbee469fd34..0000000000000 --- a/packages/client-deva/src/devaClient.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { IAgentRuntime, elizaLogger } from "@elizaos/core"; -import { ClientBase } from "./base.ts"; -import { DevaController } from "./controller"; - -export class DevaClient { - private readonly runtime: IAgentRuntime; - private readonly clientBase: ClientBase; - private readonly controller: DevaController; - - constructor(runtime: IAgentRuntime, accessToken: string, baseUrl: string) { - elizaLogger.log("📱 Constructing new DevaClient..."); - this.runtime = runtime; - this.clientBase = new ClientBase(runtime, accessToken, baseUrl); - this.controller = new DevaController(runtime, this.clientBase); - elizaLogger.log("✅ DevaClient constructor completed"); - } - - public async start(): Promise { - elizaLogger.log("🚀 Starting DevaClient..."); - try { - await this.controller.init(); - elizaLogger.log( - "✨ DevaClient successfully launched and is running!" - ); - } catch (error) { - elizaLogger.error("❌ Failed to launch DevaClient:", error); - throw error; - } - } -} diff --git a/packages/client-deva/src/enviroment.ts b/packages/client-deva/src/enviroment.ts deleted file mode 100644 index 2de524ea92b1f..0000000000000 --- a/packages/client-deva/src/enviroment.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -export const devaEnvSchema = z.object({ - DEVA_API_KEY: z.string().min(1, "Deva api key is required"), -}); - -export type DevaConfig = z.infer; - -export async function validateDevaConfig( - runtime: IAgentRuntime -): Promise { - try { - const config = { - DEVA_API_KEY: - runtime.getSetting("DEVA_API_KEY") || process.env.DEVA_API_KEY, - }; - - return devaEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `Deva configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/client-deva/src/index.ts b/packages/client-deva/src/index.ts deleted file mode 100644 index f40eca57076da..0000000000000 --- a/packages/client-deva/src/index.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { IAgentRuntime, Client, elizaLogger } from "@elizaos/core"; -import { DevaClient } from "./devaClient.ts"; -import { validateDevaConfig } from "./enviroment.ts"; - -export const DevaClientInterface: Client = { - async start(runtime: IAgentRuntime) { - await validateDevaConfig(runtime); - - const deva = new DevaClient( - runtime, - runtime.getSetting("DEVA_API_KEY"), - runtime.getSetting("DEVA_API_BASE_URL") - ); - - await deva.start(); - - elizaLogger.success( - `✅ Deva client successfully started for character ${runtime.character.name}` - ); - - return deva; - }, - async stop(_runtime: IAgentRuntime) { - try { - // Add cleanup logic here - elizaLogger.warn("Deva client does not support stopping yet"); - } catch (error) { - elizaLogger.error("Failed to stop Deva client:", error); - throw error; - } - }, -}; - -export default DevaClientInterface; diff --git a/packages/client-deva/src/templates.ts b/packages/client-deva/src/templates.ts deleted file mode 100644 index db5578b8d4bd5..0000000000000 --- a/packages/client-deva/src/templates.ts +++ /dev/null @@ -1,21 +0,0 @@ -export const DEVA_POST_TEMPLATE = ` -# Areas of Expertise -{{knowledge}} - -# About {{agentName}} (!{{twitterUserName}}): -{{bio}} -{{lore}} -{{topics}} - -{{providers}} - -{{characterPostExamples}} - -{{postDirections}} - -{{recentMessages}} - -# Task: Generate a post in the voice and style and perspective of {{agentName}}. -Write a 1-3 sentence post that is {{adjective}} about {{topic}} (without mentioning {{topic}} directly), from the perspective of {{agentName}}. Do not add commentary or acknowledge this request, just write the post. -Your response should not contain any questions. Brief, concise statements only. The total character count MUST be less than 280. No emojis. Use \\n\\n (double spaces) between statements. -`; diff --git a/packages/client-deva/src/types.ts b/packages/client-deva/src/types.ts deleted file mode 100644 index 545dd6d10a944..0000000000000 --- a/packages/client-deva/src/types.ts +++ /dev/null @@ -1,21 +0,0 @@ -export type DevaPersona = { - id: string; - user_id: string; - display_name: string; - username: string; - description: string; - avatar: number; - cover_image: number; -}; - -export type DevaPost = { - id: string; - author_type: string; - text: string; - persona_id: string; - in_reply_to_id: string; - mentioned_profile_persona_id: string; - persona: DevaPersona; - created_at: string; - updated_at: string; -}; diff --git a/packages/client-deva/tsconfig.json b/packages/client-deva/tsconfig.json deleted file mode 100644 index 005fbac9d3634..0000000000000 --- a/packages/client-deva/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/client-deva/tsup.config.ts b/packages/client-deva/tsup.config.ts deleted file mode 100644 index e42bf4efeae22..0000000000000 --- a/packages/client-deva/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/client-direct/package.json b/packages/client-direct/package.json index 870a509060fe2..a3c1ae6cc48eb 100644 --- a/packages/client-direct/package.json +++ b/packages/client-direct/package.json @@ -1,6 +1,6 @@ { "name": "@elizaos/client-direct", - "version": "0.25.6-alpha.1", + "version": "0.25.8", "main": "dist/index.js", "module": "dist/index.js", "type": "module", @@ -20,20 +20,18 @@ ], "dependencies": { "@elizaos/core": "workspace:*", - "@elizaos/plugin-image-generation": "workspace:*", - "@elizaos/plugin-tee-verifiable-log": "workspace:*", - "@elizaos/plugin-tee-log": "workspace:*", "@types/body-parser": "1.19.5", "@types/cors": "2.8.17", - "@types/express": "5.0.0", "body-parser": "1.20.3", "cors": "2.8.5", "discord.js": "14.16.3", "express": "4.21.1", "multer": "1.4.5-lts.1", - "openai": "4.73.0" + "openai": "4.73.0", + "path-to-regexp": "^1.7.0" }, "devDependencies": { + "@types/express": "4.17.21", "@types/multer": "^1.4.12", "tsup": "8.3.5" }, @@ -43,5 +41,8 @@ }, "peerDependencies": { "whatwg-url": "7.1.0" + }, + "publishConfig": { + "access": "public" } } diff --git a/packages/client-direct/src/api.ts b/packages/client-direct/src/api.ts index 3b4f00135cecb..2dff1b022ae52 100644 --- a/packages/client-direct/src/api.ts +++ b/packages/client-direct/src/api.ts @@ -1,4 +1,5 @@ import express from "express"; +import { Router } from 'express'; import bodyParser from "body-parser"; import cors from "cors"; import path from "path"; @@ -14,8 +15,8 @@ import { type Character, } from "@elizaos/core"; -import type { TeeLogQuery, TeeLogService } from "@elizaos/plugin-tee-log"; -import { REST, Routes } from "discord.js"; +// import type { TeeLogQuery, TeeLogService } from "@elizaos/plugin-tee-log"; +// import { REST, Routes } from "discord.js"; import type { DirectClient } from "."; import { validateUuid } from "@elizaos/core"; @@ -51,9 +52,9 @@ function validateUUIDParams( } export function createApiRouter( - agents: Map, + agents: Map, directClient: DirectClient -) { +):Router { const router = express.Router(); router.use(cors()); @@ -212,35 +213,35 @@ export function createApiRouter( }); }); - router.get("/agents/:agentId/channels", async (req, res) => { - const { agentId } = validateUUIDParams(req.params, res) ?? { - agentId: null, - }; - if (!agentId) return; + // router.get("/agents/:agentId/channels", async (req, res) => { + // const { agentId } = validateUUIDParams(req.params, res) ?? { + // agentId: null, + // }; + // if (!agentId) return; - const runtime = agents.get(agentId); + // const runtime = agents.get(agentId); - if (!runtime) { - res.status(404).json({ error: "Runtime not found" }); - return; - } + // if (!runtime) { + // res.status(404).json({ error: "Runtime not found" }); + // return; + // } - const API_TOKEN = runtime.getSetting("DISCORD_API_TOKEN") as string; - const rest = new REST({ version: "10" }).setToken(API_TOKEN); + // const API_TOKEN = runtime.getSetting("DISCORD_API_TOKEN") as string; + // const rest = new REST({ version: "10" }).setToken(API_TOKEN); - try { - const guilds = (await rest.get(Routes.userGuilds())) as Array; + // try { + // const guilds = (await rest.get(Routes.userGuilds())) as Array; - res.json({ - id: runtime.agentId, - guilds: guilds, - serverCount: guilds.length, - }); - } catch (error) { - console.error("Error fetching guilds:", error); - res.status(500).json({ error: "Failed to fetch guilds" }); - } - }); + // res.json({ + // id: runtime.agentId, + // guilds: guilds, + // serverCount: guilds.length, + // }); + // } catch (error) { + // console.error("Error fetching guilds:", error); + // res.status(500).json({ error: "Failed to fetch guilds" }); + // } + // }); router.get("/agents/:agentId/:roomId/memories", async (req, res) => { const { agentId, roomId } = validateUUIDParams(req.params, res) ?? { @@ -307,102 +308,102 @@ export function createApiRouter( } }); - router.get("/tee/agents", async (req, res) => { - try { - const allAgents = []; - - for (const agentRuntime of agents.values()) { - const teeLogService = agentRuntime - .getService(ServiceType.TEE_LOG) - .getInstance(); - - const agents = await teeLogService.getAllAgents(); - allAgents.push(...agents); - } - - const runtime: AgentRuntime = agents.values().next().value; - const teeLogService = runtime - .getService(ServiceType.TEE_LOG) - .getInstance(); - const attestation = await teeLogService.generateAttestation( - JSON.stringify(allAgents) - ); - res.json({ agents: allAgents, attestation: attestation }); - } catch (error) { - elizaLogger.error("Failed to get TEE agents:", error); - res.status(500).json({ - error: "Failed to get TEE agents", - }); - } - }); - - router.get("/tee/agents/:agentId", async (req, res) => { - try { - const agentId = req.params.agentId; - const agentRuntime = agents.get(agentId); - if (!agentRuntime) { - res.status(404).json({ error: "Agent not found" }); - return; - } - - const teeLogService = agentRuntime - .getService(ServiceType.TEE_LOG) - .getInstance(); - - const teeAgent = await teeLogService.getAgent(agentId); - const attestation = await teeLogService.generateAttestation( - JSON.stringify(teeAgent) - ); - res.json({ agent: teeAgent, attestation: attestation }); - } catch (error) { - elizaLogger.error("Failed to get TEE agent:", error); - res.status(500).json({ - error: "Failed to get TEE agent", - }); - } - }); - - router.post( - "/tee/logs", - async (req: express.Request, res: express.Response) => { - try { - const query = req.body.query || {}; - const page = Number.parseInt(req.body.page) || 1; - const pageSize = Number.parseInt(req.body.pageSize) || 10; - - const teeLogQuery: TeeLogQuery = { - agentId: query.agentId || "", - roomId: query.roomId || "", - userId: query.userId || "", - type: query.type || "", - containsContent: query.containsContent || "", - startTimestamp: query.startTimestamp || undefined, - endTimestamp: query.endTimestamp || undefined, - }; - const agentRuntime: AgentRuntime = agents.values().next().value; - const teeLogService = agentRuntime - .getService(ServiceType.TEE_LOG) - .getInstance(); - const pageQuery = await teeLogService.getLogs( - teeLogQuery, - page, - pageSize - ); - const attestation = await teeLogService.generateAttestation( - JSON.stringify(pageQuery) - ); - res.json({ - logs: pageQuery, - attestation: attestation, - }); - } catch (error) { - elizaLogger.error("Failed to get TEE logs:", error); - res.status(500).json({ - error: "Failed to get TEE logs", - }); - } - } - ); + // router.get("/tee/agents", async (req, res) => { + // try { + // const allAgents = []; + + // for (const agentRuntime of agents.values()) { + // const teeLogService = agentRuntime + // .getService(ServiceType.TEE_LOG) + // .getInstance(); + + // const agents = await teeLogService.getAllAgents(); + // allAgents.push(...agents); + // } + + // const runtime: AgentRuntime = agents.values().next().value; + // const teeLogService = runtime + // .getService(ServiceType.TEE_LOG) + // .getInstance(); + // const attestation = await teeLogService.generateAttestation( + // JSON.stringify(allAgents) + // ); + // res.json({ agents: allAgents, attestation: attestation }); + // } catch (error) { + // elizaLogger.error("Failed to get TEE agents:", error); + // res.status(500).json({ + // error: "Failed to get TEE agents", + // }); + // } + // }); + + // router.get("/tee/agents/:agentId", async (req, res) => { + // try { + // const agentId = req.params.agentId; + // const agentRuntime = agents.get(agentId); + // if (!agentRuntime) { + // res.status(404).json({ error: "Agent not found" }); + // return; + // } + + // const teeLogService = agentRuntime + // .getService(ServiceType.TEE_LOG) + // .getInstance(); + + // const teeAgent = await teeLogService.getAgent(agentId); + // const attestation = await teeLogService.generateAttestation( + // JSON.stringify(teeAgent) + // ); + // res.json({ agent: teeAgent, attestation: attestation }); + // } catch (error) { + // elizaLogger.error("Failed to get TEE agent:", error); + // res.status(500).json({ + // error: "Failed to get TEE agent", + // }); + // } + // }); + + // router.post( + // "/tee/logs", + // async (req: express.Request, res: express.Response) => { + // try { + // const query = req.body.query || {}; + // const page = Number.parseInt(req.body.page) || 1; + // const pageSize = Number.parseInt(req.body.pageSize) || 10; + + // const teeLogQuery: TeeLogQuery = { + // agentId: query.agentId || "", + // roomId: query.roomId || "", + // userId: query.userId || "", + // type: query.type || "", + // containsContent: query.containsContent || "", + // startTimestamp: query.startTimestamp || undefined, + // endTimestamp: query.endTimestamp || undefined, + // }; + // const agentRuntime: AgentRuntime = agents.values().next().value; + // const teeLogService = agentRuntime + // .getService(ServiceType.TEE_LOG) + // .getInstance(); + // const pageQuery = await teeLogService.getLogs( + // teeLogQuery, + // page, + // pageSize + // ); + // const attestation = await teeLogService.generateAttestation( + // JSON.stringify(pageQuery) + // ); + // res.json({ + // logs: pageQuery, + // attestation: attestation, + // }); + // } catch (error) { + // elizaLogger.error("Failed to get TEE logs:", error); + // res.status(500).json({ + // error: "Failed to get TEE logs", + // }); + // } + // } + // ); router.post("/agent/start", async (req, res) => { const { characterPath, characterJson } = req.body; diff --git a/packages/client-direct/src/index.ts b/packages/client-direct/src/index.ts index cddb2cb9da517..35a3b06f1d108 100644 --- a/packages/client-direct/src/index.ts +++ b/packages/client-direct/src/index.ts @@ -1,32 +1,33 @@ -import bodyParser from "body-parser"; -import cors from "cors"; -import express, { type Request as ExpressRequest } from "express"; -import multer from "multer"; -import { z } from "zod"; import { - type AgentRuntime, + composeContext, elizaLogger, - messageCompletionFooter, generateCaption, generateImage, - type Media, - getEmbeddingZeroVector, - composeContext, generateMessageResponse, generateObject, - type Content, - type Memory, + getEmbeddingZeroVector, + messageCompletionFooter, ModelClass, - type Client, - stringToUuid, settings, + stringToUuid, + type AgentRuntime, + type Client, + type Content, type IAgentRuntime, + type Media, + type Memory, + type Plugin, } from "@elizaos/core"; -import { createApiRouter } from "./api.ts"; +import bodyParser from "body-parser"; +import cors from "cors"; +import express, { type Request as ExpressRequest } from "express"; import * as fs from "fs"; +import multer from "multer"; +import OpenAI from "openai"; import * as path from "path"; +import { z } from "zod"; +import { createApiRouter } from "./api.ts"; import { createVerifiableLogApiRouter } from "./verifiable-log-api.ts"; -import OpenAI from "openai"; const storage = multer.diskStorage({ destination: (req, file, cb) => { @@ -110,7 +111,7 @@ Response format should be formatted in a JSON block like this: export class DirectClient { public app: express.Application; - private agents: Map; // container management + private agents: Map; // container management private server: any; // Store server instance public startAgent: Function; // Store startAgent functor public loadCharacterTryPath: Function; // Store loadCharacterTryPath functor @@ -655,12 +656,16 @@ export class DirectClient { "/fine-tune/:assetId", async (req: express.Request, res: express.Response) => { const assetId = req.params.assetId; - const downloadDir = path.join( - process.cwd(), - "downloads", - assetId - ); + const ROOT_DIR = path.join(process.cwd(), "downloads"); + const downloadDir = path.resolve(ROOT_DIR, assetId); + + if (!downloadDir.startsWith(ROOT_DIR)) { + res.status(403).json({ + error: "Invalid assetId. Access denied.", + }); + return; + } elizaLogger.log("Download directory:", downloadDir); try { @@ -699,7 +704,7 @@ export class DirectClient { const filePath = path.join(downloadDir, fileName); elizaLogger.log("Full file path:", filePath); - await fs.promises.writeFile(filePath, buffer); + await fs.promises.writeFile(filePath, new Uint8Array(buffer)); // Verify file was written const stats = await fs.promises.stat(filePath); @@ -978,13 +983,13 @@ export class DirectClient { } // agent/src/index.ts:startAgent calls this - public registerAgent(runtime: AgentRuntime) { + public registerAgent(runtime: IAgentRuntime) { // register any plugin endpoints? // but once and only once this.agents.set(runtime.agentId, runtime); } - public unregisterAgent(runtime: AgentRuntime) { + public unregisterAgent(runtime: IAgentRuntime) { this.agents.delete(runtime.agentId); } @@ -1017,7 +1022,7 @@ export class DirectClient { process.on("SIGINT", gracefulShutdown); } - public stop() { + public async stop() { if (this.server) { this.server.close(() => { elizaLogger.success("Server stopped"); @@ -1027,6 +1032,8 @@ export class DirectClient { } export const DirectClientInterface: Client = { + name: 'direct', + config: {}, start: async (_runtime: IAgentRuntime) => { elizaLogger.log("DirectClientInterface start"); const client = new DirectClient(); @@ -1034,11 +1041,16 @@ export const DirectClientInterface: Client = { client.start(serverPort); return client; }, - stop: async (_runtime: IAgentRuntime, client?: Client) => { - if (client instanceof DirectClient) { - client.stop(); - } - }, + // stop: async (_runtime: IAgentRuntime, client?: Client) => { + // if (client instanceof DirectClient) { + // client.stop(); + // } + // }, }; -export default DirectClientInterface; +const directPlugin: Plugin = { + name: "direct", + description: "Direct client", + clients: [DirectClientInterface], +}; +export default directPlugin; diff --git a/packages/client-direct/src/verifiable-log-api.ts b/packages/client-direct/src/verifiable-log-api.ts index e6dcdb502c5ea..f0a11049a4636 100644 --- a/packages/client-direct/src/verifiable-log-api.ts +++ b/packages/client-direct/src/verifiable-log-api.ts @@ -1,4 +1,5 @@ import express from "express"; +import { Router } from 'express'; import bodyParser from "body-parser"; import cors from "cors"; @@ -9,8 +10,8 @@ import type { } from "@elizaos/plugin-tee-verifiable-log"; export function createVerifiableLogApiRouter( - agents: Map -) { + agents: Map +):Router { const router = express.Router(); router.use(cors()); router.use(bodyParser.json()); diff --git a/packages/client-discord/.npmignore b/packages/client-discord/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/client-discord/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/client-discord/__tests__/discord-client.test.ts b/packages/client-discord/__tests__/discord-client.test.ts deleted file mode 100644 index 969e049187714..0000000000000 --- a/packages/client-discord/__tests__/discord-client.test.ts +++ /dev/null @@ -1,111 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { DiscordClient } from '../src'; -import { Client, Events } from 'discord.js'; - -// Mock @elizaos/core -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - info: vi.fn(), - error: vi.fn(), - debug: vi.fn(), - }, - getEmbeddingZeroVector: () => new Array(1536).fill(0), - stringToUuid: (str: string) => str, - messageCompletionFooter: '# INSTRUCTIONS: Choose the best response for the agent.', - shouldRespondFooter: '# INSTRUCTIONS: Choose if the agent should respond.', - generateMessageResponse: vi.fn(), - generateShouldRespond: vi.fn(), - composeContext: vi.fn(), - composeRandomUser: vi.fn(), -})); - -// Mock discord.js Client -vi.mock('discord.js', () => { - const mockClient = { - login: vi.fn().mockResolvedValue('token'), - on: vi.fn(), - once: vi.fn(), - destroy: vi.fn().mockResolvedValue(undefined), - }; - - return { - Client: vi.fn(() => mockClient), - Events: { - ClientReady: 'ready', - MessageCreate: 'messageCreate', - VoiceStateUpdate: 'voiceStateUpdate', - MessageReactionAdd: 'messageReactionAdd', - MessageReactionRemove: 'messageReactionRemove', - }, - GatewayIntentBits: { - Guilds: 1, - DirectMessages: 2, - GuildVoiceStates: 3, - MessageContent: 4, - GuildMessages: 5, - DirectMessageTyping: 6, - GuildMessageTyping: 7, - GuildMessageReactions: 8, - }, - Partials: { - Channel: 'channel', - Message: 'message', - User: 'user', - Reaction: 'reaction', - }, - Collection: class Collection extends Map {}, - }; -}); - -describe('DiscordClient', () => { - let mockRuntime: any; - let discordClient: DiscordClient; - - beforeEach(() => { - mockRuntime = { - getSetting: vi.fn((key: string) => { - if (key === 'DISCORD_API_TOKEN') return 'mock-token'; - return undefined; - }), - getState: vi.fn(), - setState: vi.fn(), - getMemory: vi.fn(), - setMemory: vi.fn(), - getService: vi.fn(), - registerAction: vi.fn(), - providers: [], - character: { - clientConfig: { - discord: { - shouldIgnoreBotMessages: true - } - } - } - }; - - discordClient = new DiscordClient(mockRuntime); - }); - - it('should initialize with correct configuration', () => { - expect(discordClient.apiToken).toBe('mock-token'); - expect(discordClient.client).toBeDefined(); - expect(mockRuntime.getSetting).toHaveBeenCalledWith('DISCORD_API_TOKEN'); - }); - - it('should login to Discord on initialization', () => { - expect(discordClient.client.login).toHaveBeenCalledWith('mock-token'); - }); - - it('should register event handlers on initialization', () => { - expect(discordClient.client.once).toHaveBeenCalledWith(Events.ClientReady, expect.any(Function)); - expect(discordClient.client.on).toHaveBeenCalledWith('guildCreate', expect.any(Function)); - expect(discordClient.client.on).toHaveBeenCalledWith(Events.MessageReactionAdd, expect.any(Function)); - expect(discordClient.client.on).toHaveBeenCalledWith(Events.MessageReactionRemove, expect.any(Function)); - expect(discordClient.client.on).toHaveBeenCalledWith('voiceStateUpdate', expect.any(Function)); - }); - - it('should clean up resources when stopped', async () => { - await discordClient.stop(); - expect(discordClient.client.destroy).toHaveBeenCalled(); - }); -}); diff --git a/packages/client-discord/package.json b/packages/client-discord/package.json deleted file mode 100644 index 00563bb9f8987..0000000000000 --- a/packages/client-discord/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "@elizaos/client-discord", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@discordjs/opus": "github:discordjs/opus", - "@discordjs/rest": "2.4.0", - "@discordjs/voice": "0.17.0", - "@elizaos/core": "workspace:*", - "@elizaos/plugin-node": "workspace:*", - "discord.js": "14.16.3", - "libsodium-wrappers": "0.7.15", - "prism-media": "1.3.5" - }, - "devDependencies": { - "tsup": "8.3.5", - "vitest": "1.6.1" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run" - }, - "trustedDependencies": { - "@discordjs/opus": "github:discordjs/opus", - "@discordjs/voice": "0.17.0" - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/client-discord/readme.md b/packages/client-discord/readme.md deleted file mode 100644 index 7ea3eb4d764ba..0000000000000 --- a/packages/client-discord/readme.md +++ /dev/null @@ -1,102 +0,0 @@ -# @elizaos/client-discord - -A Discord client implementation for ElizaOS, enabling rich integration with Discord servers for managing interactions, voice, and message handling. - -## Features - -- Handle server join events and manage initial configurations. -- Voice event management via the voice manager. -- Manage and process new messages with the message manager. -- Slash command registration and interaction handling. -- Disconnect websocket and unbind all listeners when required. -- Robust permissions management for bot functionality. - -## Installation - -As this is a workspace package, it's installed as part of the ElizaOS monorepo: - -```bash -pnpm install -``` - -## Configuration - -The client requires the following environment variables: - -```bash -# Discord API Credentials -DISCORD_APPLICATION_ID=your_application_id -DISCORD_API_TOKEN=your_api_token - -# Optional Settings (add any additional details here if necessary) -``` - -## Usage - -### Basic Initialization - -```typescript -import { DiscordClientInterface } from '@elizaos/client-discord'; - -// Initialize the client -const discordManager = await DiscordClientInterface.start(runtime); -``` - -### Slash Command Registration - -To register slash commands: - -```typescript -await discordManager.command.registerCommands([ - { - name: 'example', - description: 'An example slash command', - options: [] - } -]); -``` - -### Handling Messages - -```typescript -// Listen for new messages -await discordManager.message.handleNewMessage({ - channelId: 'channel-id', - content: 'Hello Discord!' -}); -``` - -### Managing Voice Events - -```typescript -// Join a voice channel -await discordManager.voice.joinChannel('channel-id'); - -// Handle voice interactions -await discordManager.voice.handleInteraction({ - userId: 'user-id', - action: 'speak' -}); -``` - -## Key Components - -1. **ClientBase** - - Handles authentication and session management. - - Manages websocket connections. - -2. **MessageManager** - - Processes incoming messages and responses. - - Supports message formatting and templating. - -3. **VoiceManager** - - Manages voice interactions and events. - - Handles joining and leaving voice channels. - -4. **CommandManager** - - Registers and processes slash commands. - - Ensures permissions are validated. - -## Notes - -Ensure that your `.env` file includes the required environment variables for proper functionality. Additional features or modules can be extended as part of the ElizaOS framework. diff --git a/packages/client-discord/src/actions/chat_with_attachments.ts b/packages/client-discord/src/actions/chat_with_attachments.ts deleted file mode 100644 index ba050706ae2be..0000000000000 --- a/packages/client-discord/src/actions/chat_with_attachments.ts +++ /dev/null @@ -1,338 +0,0 @@ -import { composeContext, getModelSettings } from "@elizaos/core"; -import { generateText, trimTokens } from "@elizaos/core"; -import { parseJSONObjectFromText } from "@elizaos/core"; -import { - type Action, - type ActionExample, - type Content, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, -} from "@elizaos/core"; -import * as fs from "fs"; - -export const summarizationTemplate = `# Summarized so far (we are adding to this) -{{currentSummary}} - -# Current attachments we are summarizing -{{attachmentsWithText}} - -Summarization objective: {{objective}} - -# Instructions: Summarize the attachments. Return the summary. Do not acknowledge this request, just summarize and continue the existing summary if there is one. Capture any important details based on the objective. Only respond with the new summary text.`; - -export const attachmentIdsTemplate = `# Messages we are summarizing -{{recentMessages}} - -# Instructions: {{senderName}} is requesting a summary of specific attachments. Your goal is to determine their objective, along with the list of attachment IDs to summarize. -The "objective" is a detailed description of what the user wants to summarize based on the conversation. -The "attachmentIds" is an array of attachment IDs that the user wants to summarize. If not specified, default to including all attachments from the conversation. - -Your response must be formatted as a JSON block with this structure: -\`\`\`json -{ - "objective": "", - "attachmentIds": ["", "", ...] -} -\`\`\` -`; - -const getAttachmentIds = async ( - runtime: IAgentRuntime, - message: Memory, - state: State -): Promise<{ objective: string; attachmentIds: string[] } | null> => { - state = (await runtime.composeState(message)) as State; - - const context = composeContext({ - state, - template: attachmentIdsTemplate, - }); - - for (let i = 0; i < 5; i++) { - const response = await generateText({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - console.log("response", response); - // try parsing to a json object - const parsedResponse = parseJSONObjectFromText(response) as { - objective: string; - attachmentIds: string[]; - } | null; - // see if it contains objective and attachmentIds - if (parsedResponse?.objective && parsedResponse?.attachmentIds) { - return parsedResponse; - } - } - return null; -}; - -const summarizeAction = { - name: "CHAT_WITH_ATTACHMENTS", - similes: [ - "CHAT_WITH_ATTACHMENT", - "SUMMARIZE_FILES", - "SUMMARIZE_FILE", - "SUMMARIZE_ATACHMENT", - "CHAT_WITH_PDF", - "ATTACHMENT_SUMMARY", - "RECAP_ATTACHMENTS", - "SUMMARIZE_FILE", - "SUMMARIZE_VIDEO", - "SUMMARIZE_AUDIO", - "SUMMARIZE_IMAGE", - "SUMMARIZE_DOCUMENT", - "SUMMARIZE_LINK", - "ATTACHMENT_SUMMARY", - "FILE_SUMMARY", - ], - description: - "Answer a user request informed by specific attachments based on their IDs. If a user asks to chat with a PDF, or wants more specific information about a link or video or anything else they've attached, this is the action to use.", - validate: async ( - _runtime: IAgentRuntime, - message: Memory, - _state: State - ) => { - if (message.content.source !== "discord") { - return false; - } - // only show if one of the keywords are in the message - const keywords: string[] = [ - "attachment", - "summary", - "summarize", - "research", - "pdf", - "video", - "audio", - "image", - "document", - "link", - "file", - "attachment", - "summarize", - "code", - "report", - "write", - "details", - "information", - "talk", - "chat", - "read", - "listen", - "watch", - ]; - return keywords.some((keyword) => - message.content.text.toLowerCase().includes(keyword.toLowerCase()) - ); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - options: any, - callback: HandlerCallback - ) => { - state = (await runtime.composeState(message)) as State; - - const callbackData: Content = { - text: "", // fill in later - action: "CHAT_WITH_ATTACHMENTS_RESPONSE", - source: message.content.source, - attachments: [], - }; - - // 1. extract attachment IDs from the message - const attachmentData = await getAttachmentIds(runtime, message, state); - if (!attachmentData) { - console.error("Couldn't get attachment IDs from message"); - return; - } - - const { objective, attachmentIds } = attachmentData; - - // This is pretty gross but it can catch cases where the returned generated UUID is stupidly wrong for some reason - const attachments = state.recentMessagesData - .filter( - (msg) => - msg.content.attachments && - msg.content.attachments.length > 0 - ) - .flatMap((msg) => msg.content.attachments) - // check by first 5 characters of uuid - .filter( - (attachment) => - attachmentIds - .map((attch) => attch.toLowerCase().slice(0, 5)) - .includes(attachment.id.toLowerCase().slice(0, 5)) || - // or check the other way - attachmentIds.some((id) => { - const attachmentId = id.toLowerCase().slice(0, 5); - return attachment.id - .toLowerCase() - .includes(attachmentId); - }) - ); - - const attachmentsWithText = attachments - .map((attachment) => `# ${attachment.title}\n${attachment.text}`) - .join("\n\n"); - - let currentSummary = ""; - - const modelSettings = getModelSettings( - runtime.character.modelProvider, - ModelClass.SMALL - ); - const chunkSize = modelSettings.maxOutputTokens; - - state.attachmentsWithText = attachmentsWithText; - state.objective = objective; - const template = await trimTokens( - summarizationTemplate, - chunkSize + 500, - runtime - ); - const context = composeContext({ - state, - // make sure it fits, we can pad the tokens a bit - // Get the model's tokenizer based on the current model being used - template, - }); - - const summary = await generateText({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - - currentSummary = currentSummary + "\n" + summary; - - if (!currentSummary) { - console.error("No summary found, that's not good!"); - return; - } - - callbackData.text = currentSummary.trim(); - if ( - callbackData.text && - (currentSummary.trim()?.split("\n").length < 4 || - currentSummary.trim()?.split(" ").length < 100) - ) { - callbackData.text = `Here is the summary: -\`\`\`md -${currentSummary.trim()} -\`\`\` -`; - await callback(callbackData); - } else if (currentSummary.trim()) { - const summaryFilename = `content/summary_${Date.now()}.md`; - - try { - // Debug: Log before file operations - console.log("Creating summary file:", { - filename: summaryFilename, - summaryLength: currentSummary.length, - }); - - // Write file directly first - await fs.promises.writeFile( - summaryFilename, - currentSummary, - "utf8" - ); - console.log("File written successfully"); - - // Then cache it - await runtime.cacheManager.set(summaryFilename, currentSummary); - console.log("Cache set operation completed"); - - await callback( - { - ...callbackData, - text: `I've attached the summary of the requested attachments as a text file.`, - }, - [summaryFilename] - ); - console.log("Callback completed with summary file"); - } catch (error) { - console.error("Error in file/cache process:", error); - throw error; - } - } else { - console.warn( - "Empty response from chat with attachments action, skipping" - ); - } - - return callbackData; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Can you summarize the attachments b3e23, c4f67, and d5a89?", - }, - }, - { - user: "{{user2}}", - content: { - text: "Sure thing! I'll pull up those specific attachments and provide a summary of their content.", - action: "CHAT_WITH_ATTACHMENTS", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "I need a technical summary of the PDFs I sent earlier - a1b2c3.pdf, d4e5f6.pdf, and g7h8i9.pdf", - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll take a look at those specific PDF attachments and put together a technical summary for you. Give me a few minutes to review them.", - action: "CHAT_WITH_ATTACHMENTS", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Can you watch this video for me and tell me which parts you think are most relevant to the report I'm writing? (the one I attached in my last message)", - }, - }, - { - user: "{{user2}}", - content: { - text: "sure, no problem.", - action: "CHAT_WITH_ATTACHMENTS", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "can you read my blog post and give me a detailed breakdown of the key points I made, and then suggest a handful of tweets to promote it?", - }, - }, - { - user: "{{user2}}", - content: { - text: "great idea, give me a minute", - action: "CHAT_WITH_ATTACHMENTS", - }, - }, - ], - ] as ActionExample[][], -} as Action; - -export default summarizeAction; diff --git a/packages/client-discord/src/actions/download_media.ts b/packages/client-discord/src/actions/download_media.ts deleted file mode 100644 index 273a152390571..0000000000000 --- a/packages/client-discord/src/actions/download_media.ts +++ /dev/null @@ -1,196 +0,0 @@ -import path from "path"; -import { composeContext } from "@elizaos/core"; -import { parseJSONObjectFromText } from "@elizaos/core"; -import { - type Action, - type ActionExample, - type Content, - type HandlerCallback, - type IAgentRuntime, - type IVideoService, - type Memory, - ModelClass, - ServiceType, - type State, -} from "@elizaos/core"; -import { generateText } from "@elizaos/core"; - -export const mediaUrlTemplate = `# Messages we are searching for a media URL -{{recentMessages}} - -# Instructions: {{senderName}} is requesting to download a specific media file (video or audio). Your goal is to determine the URL of the media they want to download. -The "mediaUrl" is the URL of the media file that the user wants downloaded. If not specified, return null. - -Your response must be formatted as a JSON block with this structure: -\`\`\`json -{ - "mediaUrl": "" -} -\`\`\` -`; - -const getMediaUrl = async ( - runtime: IAgentRuntime, - message: Memory, - state: State -): Promise => { - if (!state) { - state = (await runtime.composeState(message)) as State; - } - - const context = composeContext({ - state, - template: mediaUrlTemplate, - }); - - for (let i = 0; i < 5; i++) { - const response = await generateText({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - - const parsedResponse = parseJSONObjectFromText(response) as { - mediaUrl: string; - } | null; - - if (parsedResponse?.mediaUrl) { - return parsedResponse.mediaUrl; - } - } - return null; -}; - -export default { - name: "DOWNLOAD_MEDIA", - similes: [ - "DOWNLOAD_VIDEO", - "DOWNLOAD_AUDIO", - "GET_MEDIA", - "DOWNLOAD_PODCAST", - "DOWNLOAD_YOUTUBE", - ], - description: - "Downloads a video or audio file from a URL and attaches it to the response message.", - validate: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State - ) => { - if (message.content.source !== "discord") { - return false; - } - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - options: any, - callback: HandlerCallback - ) => { - const videoService = runtime - .getService(ServiceType.VIDEO) - .getInstance(); - if (!state) { - state = (await runtime.composeState(message)) as State; - } - - const mediaUrl = await getMediaUrl(runtime, message, state); - if (!mediaUrl) { - console.error("Couldn't get media URL from messages"); - return; - } - - const videoInfo = await videoService.fetchVideoInfo(mediaUrl); - const mediaPath = await videoService.downloadVideo(videoInfo); - - const response: Content = { - text: `I downloaded the video "${videoInfo.title}" and attached it below.`, - action: "DOWNLOAD_MEDIA_RESPONSE", - source: message.content.source, - attachments: [], - }; - - const filename = path.basename(mediaPath); - - const maxRetries = 3; - let retries = 0; - - while (retries < maxRetries) { - try { - await callback( - { - ...response, - }, - ["content_cache/" + filename] - ); - break; - } catch (error) { - retries++; - console.error( - `Error sending message (attempt ${retries}):`, - error - ); - - if (retries === maxRetries) { - console.error( - "Max retries reached. Failed to send message with attachment." - ); - break; - } - - // Wait for a short delay before retrying - await new Promise((resolve) => setTimeout(resolve, 2000)); - } - } - - return response; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "https://www.youtube.com/watch?v=dQw4w9WgXcQ", - }, - }, - { - user: "{{user2}}", - content: { - text: "Downloading the YouTube video now, one sec", - action: "DOWNLOAD_MEDIA", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Can you grab this video for me? https://vimeo.com/123456789", - }, - }, - { - user: "{{user2}}", - content: { - text: "Sure thing, I'll download that Vimeo video for you", - action: "DOWNLOAD_MEDIA", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "I need this video downloaded: https://www.youtube.com/watch?v=abcdefg", - }, - }, - { - user: "{{user2}}", - content: { - text: "No problem, I'm on it. I'll have that YouTube video downloaded in a jiffy", - action: "DOWNLOAD_MEDIA", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/client-discord/src/actions/joinvoice.ts b/packages/client-discord/src/actions/joinvoice.ts deleted file mode 100644 index 1371dcb29e3ce..0000000000000 --- a/packages/client-discord/src/actions/joinvoice.ts +++ /dev/null @@ -1,346 +0,0 @@ -// eslint-disable-next-line -// @ts-nocheck -// src/actions/joinVoice -import { - type Action, - type ActionExample, - composeContext, - type IAgentRuntime, - type Memory, - type State, - generateText, - ModelClass, -} from "@elizaos/core"; -import { - type Channel, - ChannelType, - type Client, - type Message as DiscordMessage, - type Guild, - type GuildMember, -} from "discord.js"; -import { joinVoiceChannel } from "@discordjs/voice"; - -export default { - name: "JOIN_VOICE", - similes: [ - "JOIN_VOICE", - "JOIN_VC", - "JOIN_VOICE_CHAT", - "JOIN_VOICE_CHANNEL", - "JOIN_MEETING", - "JOIN_CALL", - ], - validate: async ( - _runtime: IAgentRuntime, - message: Memory, - state: State - ) => { - if (message.content.source !== "discord") { - // not a discord message - return false; - } - - if (!state.discordClient) { - return; - } - - // did they say something about joining a voice channel? if not, don't validate - const keywords = [ - "join", - "come to", - "come on", - "enter", - "voice", - "chat", - "talk", - "call", - "hop on", - "get on", - "vc", - "meeting", - "discussion", - ]; - if ( - !keywords.some((keyword) => - message.content.text.toLowerCase().includes(keyword) - ) - ) { - return false; - } - - return true; - }, - description: "Join a voice channel to participate in voice chat.", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State - ): Promise => { - if (!state) { - console.error("State is not available."); - } - - // We normalize data in from voice channels - const discordMessage = (state.discordChannel || - state.discordMessage) as DiscordMessage; - - if (!discordMessage.content) { - discordMessage.content = message.content.text; - } - - const id = (discordMessage as DiscordMessage).guild?.id as string; - const client = state.discordClient as Client; - const voiceChannels = ( - client.guilds.cache.get(id) as Guild - ).channels.cache.filter( - (channel: Channel) => channel.type === ChannelType.GuildVoice - ); - - const messageContent = discordMessage.content; - - const targetChannel = voiceChannels.find((channel) => { - const name = (channel as { name: string }).name.toLowerCase(); - - // remove all non-alphanumeric characters (keep spaces between words) - const replacedName = name.replace(/[^a-z0-9 ]/g, ""); - - return ( - name.includes(messageContent) || - messageContent.includes(name) || - replacedName.includes(messageContent) || - messageContent.includes(replacedName) - ); - }); - - if (targetChannel) { - joinVoiceChannel({ - channelId: targetChannel.id, - guildId: (discordMessage as DiscordMessage).guild?.id as string, - adapterCreator: (client.guilds.cache.get(id) as Guild) - .voiceAdapterCreator, - selfDeaf: false, - selfMute: false, - group: client.user.id, - }); - return true; - } else { - const member = (discordMessage as DiscordMessage) - .member as GuildMember; - if (member?.voice?.channel) { - joinVoiceChannel({ - channelId: member.voice.channel.id, - guildId: (discordMessage as DiscordMessage).guild - ?.id as string, - adapterCreator: (client.guilds.cache.get(id) as Guild) - .voiceAdapterCreator, - selfDeaf: false, - selfMute: false, - group: client.user.id, - }); - return true; - } - - const messageTemplate = ` -The user has requested to join a voice channel. -Here is the list of channels available in the server: -{{voiceChannels}} - -Here is the user's request: -{{userMessage}} - -Please respond with the name of the voice channel which the bot should join. Try to infer what channel the user is talking about. If the user didn't specify a voice channel, respond with "none". -You should only respond with the name of the voice channel or none, no commentary or additional information should be included. -`; - - const guessState = { - userMessage: message.content.text, - voiceChannels: voiceChannels - .map((channel) => (channel as { name: string }).name) - .join("\n"), - }; - - const context = composeContext({ - template: messageTemplate, - state: guessState as unknown as State, - }); - - const _datestr = new Date().toUTCString().replace(/:/g, "-"); - - const responseContent = await generateText({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - - runtime.databaseAdapter.log({ - body: { message, context, response: responseContent }, - userId: message.userId, - roomId: message.roomId, - type: "joinvoice", - }); - - if (responseContent && responseContent.trim().length > 0) { - // join the voice channel - const channelName = responseContent.toLowerCase(); - - const targetChannel = voiceChannels.find((channel) => { - const name = ( - channel as { name: string } - ).name.toLowerCase(); - - // remove all non-alphanumeric characters (keep spaces between words) - const replacedName = name.replace(/[^a-z0-9 ]/g, ""); - - return ( - name.includes(channelName) || - channelName.includes(name) || - replacedName.includes(channelName) || - channelName.includes(replacedName) - ); - }); - - if (targetChannel) { - joinVoiceChannel({ - channelId: targetChannel.id, - guildId: (discordMessage as DiscordMessage).guild - ?.id as string, - adapterCreator: (client.guilds.cache.get(id) as Guild) - .voiceAdapterCreator, - selfDeaf: false, - selfMute: false, - group: client.user.id, - }); - return true; - } - } - - await (discordMessage as DiscordMessage).reply( - "I couldn't figure out which channel you wanted me to join." - ); - return false; - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Hey, let's jump into the 'General' voice and chat", - }, - }, - { - user: "{{user2}}", - content: { - text: "Sounds good", - action: "JOIN_VOICE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "{{user2}}, can you join the vc, I want to discuss our strat", - }, - }, - { - user: "{{user2}}", - content: { - text: "Sure I'll join right now", - action: "JOIN_VOICE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "hey {{user2}}, we're having a team meeting in the 'conference' voice channel, plz join us", - }, - }, - { - user: "{{user2}}", - content: { - text: "OK see you there", - action: "JOIN_VOICE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "{{user2}}, let's have a quick voice chat in the 'Lounge' channel.", - }, - }, - { - user: "{{user2}}", - content: { - text: "kk be there in a sec", - action: "JOIN_VOICE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Hey {{user2}}, can you join me in the 'Music' voice channel", - }, - }, - { - user: "{{user2}}", - content: { - text: "Sure", - action: "JOIN_VOICE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "join voice chat with us {{user2}}", - }, - }, - { - user: "{{user2}}", - content: { - text: "coming", - action: "JOIN_VOICE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "hop in vc {{user2}}", - }, - }, - { - user: "{{user2}}", - content: { - text: "joining now", - action: "JOIN_VOICE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "get in vc with us {{user2}}", - }, - }, - { - user: "{{user2}}", - content: { - text: "im in", - action: "JOIN_VOICE", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/client-discord/src/actions/leavevoice.ts b/packages/client-discord/src/actions/leavevoice.ts deleted file mode 100644 index 20419b754bd0b..0000000000000 --- a/packages/client-discord/src/actions/leavevoice.ts +++ /dev/null @@ -1,226 +0,0 @@ -// src/actions/leaveVoice -import { getVoiceConnection } from "@discordjs/voice"; -import { - type Channel, - ChannelType, - type Client, - type Message as DiscordMessage, -} from "discord.js"; -import type { - Action, - ActionExample, - IAgentRuntime, - Memory, - State, -} from "@elizaos/core"; - -export default { - name: "LEAVE_VOICE", - similes: [ - "LEAVE_VOICE", - "LEAVE_VC", - "LEAVE_VOICE_CHAT", - "LEAVE_VOICE_CHANNEL", - "LEAVE_MEETING", - "LEAVE_CALL", - ], - validate: async (runtime: IAgentRuntime, message: Memory, state: State) => { - if (message.content.source !== "discord") { - // not a discord message - return false; - } - - if (!state.discordClient) { - return false; - } - - const keywords = [ - "leave", - "exit", - "stop", - "quit", - "get off", - "get out", - "bye", - "cya", - "see you", - "hop off", - "get off", - "voice", - "vc", - "chat", - "call", - "meeting", - "discussion", - ]; - if ( - !keywords.some((keyword) => - message.content.text.toLowerCase().includes(keyword) - ) - ) { - return false; - } - - const client = state.discordClient as Client; - - // Check if the client is connected to any voice channel - const isConnectedToVoice = client.voice.adapters.size > 0; - - return isConnectedToVoice; - }, - description: "Leave the current voice channel.", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State - ): Promise => { - if (!state.discordClient) { - return; - } - - const discordMessage = (state.discordMessage || - state.discordChannel) as DiscordMessage; - - if (!discordMessage) { - throw new Error("Discord message is not available in the state."); - } - const voiceChannels = (state.discordClient as Client)?.guilds.cache - .get((discordMessage as DiscordMessage).guild?.id as string) - ?.channels.cache.filter( - (channel: Channel) => channel.type === ChannelType.GuildVoice - ); - - voiceChannels?.forEach((_channel: Channel) => { - const connection = getVoiceConnection( - (discordMessage as DiscordMessage).guild?.id as string - ); - if (connection) { - connection.destroy(); - } - }); - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Hey {{user2}} please leave the voice channel", - }, - }, - { - user: "{{user2}}", - content: { - text: "Sure", - action: "LEAVE_VOICE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "I have to go now but thanks for the chat", - }, - }, - { - user: "{{user2}}", - content: { - text: "You too, talk to you later", - action: "LEAVE_VOICE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Great call everyone, hopping off now", - action: "LEAVE_VOICE", - }, - }, - { - user: "{{user2}}", - content: { - text: "Agreed, I'll hop off too", - action: "LEAVE_VOICE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Hey {{user2}} I need you to step away from the voice chat for a bit", - }, - }, - { - user: "{{user2}}", - content: { - text: "No worries, I'll leave the voice channel", - action: "LEAVE_VOICE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "{{user2}}, I think we covered everything, you can leave the voice chat now", - }, - }, - { - user: "{{user2}}", - content: { - text: "Sounds good, see you both later", - action: "LEAVE_VOICE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "leave voice {{user2}}", - }, - }, - { - user: "{{user2}}", - content: { - text: "ok leaving", - action: "LEAVE_VOICE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "plz leave the voice chat {{user2}}", - }, - }, - { - user: "{{user2}}", - content: { - text: "aight im out", - action: "LEAVE_VOICE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "yo {{user2}} gtfo the vc", - }, - }, - { - user: "{{user2}}", - content: { - text: "sorry, talk to you later", - action: "LEAVE_VOICE", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/client-discord/src/actions/summarize_conversation.ts b/packages/client-discord/src/actions/summarize_conversation.ts deleted file mode 100644 index 34d1fa9744d7a..0000000000000 --- a/packages/client-discord/src/actions/summarize_conversation.ts +++ /dev/null @@ -1,392 +0,0 @@ -import { composeContext, getModelSettings } from "@elizaos/core"; -import { generateText, splitChunks, trimTokens } from "@elizaos/core"; -import { getActorDetails } from "@elizaos/core"; -import { parseJSONObjectFromText } from "@elizaos/core"; -import { - type Action, - type ActionExample, - type Content, - type HandlerCallback, - type IAgentRuntime, - type Media, - type Memory, - ModelClass, - type State, -} from "@elizaos/core"; -export const summarizationTemplate = `# Summarized so far (we are adding to this) -{{currentSummary}} - -# Current conversation chunk we are summarizing (includes attachments) -{{memoriesWithAttachments}} - -Summarization objective: {{objective}} - -# Instructions: Summarize the conversation so far. Return the summary. Do not acknowledge this request, just summarize and continue the existing summary if there is one. Capture any important details to the objective. Only respond with the new summary text. -Your response should be extremely detailed and include any and all relevant information.`; - -export const dateRangeTemplate = `# Messages we are summarizing (the conversation is continued after this) -{{recentMessages}} - -# Instructions: {{senderName}} is requesting a summary of the conversation. Your goal is to determine their objective, along with the range of dates that their request covers. -The "objective" is a detailed description of what the user wants to summarize based on the conversation. If they just ask for a general summary, you can either base it off the conversation if the summary range is very recent, or set the object to be general, like "a detailed summary of the conversation between all users". -The "start" and "end" are the range of dates that the user wants to summarize, relative to the current time. The start and end should be relative to the current time, and measured in seconds, minutes, hours and days. The format is "2 days ago" or "3 hours ago" or "4 minutes ago" or "5 seconds ago", i.e. " ago". -If you aren't sure, you can use a default range of "0 minutes ago" to "2 hours ago" or more. Better to err on the side of including too much than too little. - -Your response must be formatted as a JSON block with this structure: -\`\`\`json -{ - "objective": "", - "start": "0 minutes ago", - "end": "2 hours ago" -} -\`\`\` -`; - -const getDateRange = async ( - runtime: IAgentRuntime, - message: Memory, - state: State -) => { - state = (await runtime.composeState(message)) as State; - - const context = composeContext({ - state, - template: dateRangeTemplate, - }); - - for (let i = 0; i < 5; i++) { - const response = await generateText({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - console.log("response", response); - // try parsing to a json object - const parsedResponse = parseJSONObjectFromText(response) as { - objective: string; - start: string | number; - end: string | number; - } | null; - // see if it contains objective, start and end - if (parsedResponse) { - if ( - parsedResponse.objective && - parsedResponse.start && - parsedResponse.end - ) { - // TODO: parse start and end into timestamps - const startIntegerString = ( - parsedResponse.start as string - ).match(/\d+/)?.[0]; - const endIntegerString = (parsedResponse.end as string).match( - /\d+/ - )?.[0]; - - // parse multiplier - const multipliers = { - second: 1 * 1000, - minute: 60 * 1000, - hour: 3600 * 1000, - day: 86400 * 1000, - }; - - const startMultiplier = (parsedResponse.start as string).match( - /second|minute|hour|day/ - )?.[0]; - const endMultiplier = (parsedResponse.end as string).match( - /second|minute|hour|day/ - )?.[0]; - - const startInteger = startIntegerString - ? Number.parseInt(startIntegerString) - : 0; - const endInteger = endIntegerString - ? Number.parseInt(endIntegerString) - : 0; - - // multiply by multiplier - const startTime = - startInteger * - multipliers[startMultiplier as keyof typeof multipliers]; - - console.log("startTime", startTime); - - const endTime = - endInteger * - multipliers[endMultiplier as keyof typeof multipliers]; - - console.log("endTime", endTime); - - // get the current time and subtract the start and end times - parsedResponse.start = Date.now() - startTime; - parsedResponse.end = Date.now() - endTime; - - return parsedResponse; - } - } - } -}; - -const summarizeAction = { - name: "SUMMARIZE_CONVERSATION", - similes: [ - "RECAP", - "RECAP_CONVERSATION", - "SUMMARIZE_CHAT", - "SUMMARIZATION", - "CHAT_SUMMARY", - "CONVERSATION_SUMMARY", - ], - description: "Summarizes the conversation and attachments.", - validate: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State - ) => { - if (message.content.source !== "discord") { - return false; - } - // only show if one of the keywords are in the message - const keywords: string[] = [ - "summarize", - "summarization", - "summary", - "recap", - "report", - "overview", - "review", - "rundown", - "wrap-up", - "brief", - "debrief", - "abstract", - "synopsis", - "outline", - "digest", - "abridgment", - "condensation", - "encapsulation", - "essence", - "gist", - "main points", - "key points", - "key takeaways", - "bulletpoint", - "highlights", - "tldr", - "tl;dr", - "in a nutshell", - "bottom line", - "long story short", - "sum up", - "sum it up", - "short version", - "bring me up to speed", - "catch me up", - ]; - return keywords.some((keyword) => - message.content.text.toLowerCase().includes(keyword.toLowerCase()) - ); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - options: any, - callback: HandlerCallback - ) => { - state = (await runtime.composeState(message)) as State; - - const callbackData: Content = { - text: "", // fill in later - action: "SUMMARIZATION_RESPONSE", - source: message.content.source, - attachments: [], - }; - const { roomId } = message; - - // 1. extract date range from the message - const dateRange = await getDateRange(runtime, message, state); - if (!dateRange) { - console.error("Couldn't get date range from message"); - return; - } - - console.log("dateRange", dateRange); - - const { objective, start, end } = dateRange; - - // 2. get these memories from the database - const memories = await runtime.messageManager.getMemories({ - roomId, - // subtract start from current time - start: Number.parseInt(start as string), - end: Number.parseInt(end as string), - count: 10000, - unique: false, - }); - - const actors = await getActorDetails({ - runtime: runtime as IAgentRuntime, - roomId, - }); - - const actorMap = new Map(actors.map((actor) => [actor.id, actor])); - - const formattedMemories = memories - .map((memory) => { - const attachments = memory.content.attachments - ?.map((attachment: Media) => { - return `---\nAttachment: ${attachment.id}\n${attachment.description}\n${attachment.text}\n---`; - }) - .join("\n"); - return `${actorMap.get(memory.userId)?.name ?? "Unknown User"} (${actorMap.get(memory.userId)?.username ?? ""}): ${memory.content.text}\n${attachments}`; - }) - .join("\n"); - - let currentSummary = ""; - - const modelSettings = getModelSettings( - runtime.character.modelProvider, - ModelClass.SMALL - ); - const chunkSize = modelSettings.maxOutputTokens - 1000; - - const chunks = await splitChunks(formattedMemories, chunkSize, 0); - - const _datestr = new Date().toUTCString().replace(/:/g, "-"); - - state.memoriesWithAttachments = formattedMemories; - state.objective = objective; - - for (let i = 0; i < chunks.length; i++) { - const chunk = chunks[i]; - state.currentSummary = currentSummary; - state.currentChunk = chunk; - const template = await trimTokens( - summarizationTemplate, - chunkSize + 500, - runtime - ); - const context = composeContext({ - state, - // make sure it fits, we can pad the tokens a bit - template, - }); - - const summary = await generateText({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - - currentSummary = currentSummary + "\n" + summary; - } - - if (!currentSummary) { - console.error("No summary found, that's not good!"); - return; - } - - callbackData.text = currentSummary.trim(); - if ( - callbackData.text && - (currentSummary.trim()?.split("\n").length < 4 || - currentSummary.trim()?.split(" ").length < 100) - ) { - callbackData.text = `Here is the summary: -\`\`\`md -${currentSummary.trim()} -\`\`\` -`; - await callback(callbackData); - } else if (currentSummary.trim()) { - const summaryFilename = `content/conversation_summary_${Date.now()}`; - await runtime.cacheManager.set(summaryFilename, currentSummary); - // save the summary to a file - await callback( - { - ...callbackData, - text: `I've attached the summary of the conversation from \`${new Date(Number.parseInt(start as string)).toString()}\` to \`${new Date(Number.parseInt(end as string)).toString()}\` as a text file.`, - }, - [summaryFilename] - ); - } else { - console.warn( - "Empty response from summarize conversation action, skipping" - ); - } - - return callbackData; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "```js\nconst x = 10\n```", - }, - }, - { - user: "{{user1}}", - content: { - text: "can you give me a detailed report on what we're talking about?", - }, - }, - { - user: "{{user2}}", - content: { - text: "sure, no problem, give me a minute to get that together for you", - action: "SUMMARIZE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "please summarize the conversation we just had and include this blogpost i'm linking (Attachment: b3e12)", - }, - }, - { - user: "{{user2}}", - content: { - text: "sure, give me a sec", - action: "SUMMARIZE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Can you summarize what moon and avf are talking about?", - }, - }, - { - user: "{{user2}}", - content: { - text: "Yeah, just hold on a second while I get that together for you...", - action: "SUMMARIZE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "i need to write a blog post about farming, can you summarize the discussion from a few hours ago?", - }, - }, - { - user: "{{user2}}", - content: { - text: "no problem, give me a few minutes to read through everything", - action: "SUMMARIZE", - }, - }, - ], - ] as ActionExample[][], -} as Action; - -export default summarizeAction; diff --git a/packages/client-discord/src/actions/transcribe_media.ts b/packages/client-discord/src/actions/transcribe_media.ts deleted file mode 100644 index da9323bb1c5be..0000000000000 --- a/packages/client-discord/src/actions/transcribe_media.ts +++ /dev/null @@ -1,225 +0,0 @@ -import { composeContext } from "@elizaos/core"; -import { generateText } from "@elizaos/core"; -import { parseJSONObjectFromText } from "@elizaos/core"; -import { - type Action, - type ActionExample, - type Content, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, -} from "@elizaos/core"; - -export const transcriptionTemplate = `# Transcription of media file -{{mediaTranscript}} - -# Instructions: Return only the full transcript of the media file without any additional context or commentary.`; - -export const mediaAttachmentIdTemplate = `# Messages we are transcribing -{{recentMessages}} - -# Instructions: {{senderName}} is requesting a transcription of a specific media file (audio or video). Your goal is to determine the ID of the attachment they want transcribed. -The "attachmentId" is the ID of the media file attachment that the user wants transcribed. If not specified, return null. - -Your response must be formatted as a JSON block with this structure: -\`\`\`json -{ - "attachmentId": "" -} -\`\`\` -`; - -const getMediaAttachmentId = async ( - runtime: IAgentRuntime, - message: Memory, - state: State -): Promise => { - state = (await runtime.composeState(message)) as State; - - const context = composeContext({ - state, - template: mediaAttachmentIdTemplate, - }); - - for (let i = 0; i < 5; i++) { - const response = await generateText({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - console.log("response", response); - - const parsedResponse = parseJSONObjectFromText(response) as { - attachmentId: string; - } | null; - - if (parsedResponse?.attachmentId) { - return parsedResponse.attachmentId; - } - } - return null; -}; - -const transcribeMediaAction = { - name: "TRANSCRIBE_MEDIA", - similes: [ - "TRANSCRIBE_AUDIO", - "TRANSCRIBE_VIDEO", - "MEDIA_TRANSCRIPT", - "VIDEO_TRANSCRIPT", - "AUDIO_TRANSCRIPT", - ], - description: - "Transcribe the full text of an audio or video file that the user has attached.", - validate: async ( - _runtime: IAgentRuntime, - message: Memory, - _state: State - ) => { - if (message.content.source !== "discord") { - return false; - } - - const keywords: string[] = [ - "transcribe", - "transcript", - "audio", - "video", - "media", - "youtube", - "meeting", - "recording", - "podcast", - "call", - "conference", - "interview", - "speech", - "lecture", - "presentation", - ]; - return keywords.some((keyword) => - message.content.text.toLowerCase().includes(keyword.toLowerCase()) - ); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - options: any, - callback: HandlerCallback - ) => { - state = (await runtime.composeState(message)) as State; - - const callbackData: Content = { - text: "", // fill in later - action: "TRANSCRIBE_MEDIA_RESPONSE", - source: message.content.source, - attachments: [], - }; - - const attachmentId = await getMediaAttachmentId( - runtime, - message, - state - ); - if (!attachmentId) { - console.error("Couldn't get media attachment ID from message"); - return; - } - - const attachment = state.recentMessagesData - .filter( - (msg) => - msg.content.attachments && - msg.content.attachments.length > 0 - ) - .flatMap((msg) => msg.content.attachments) - .find( - (attachment) => - attachment.id.toLowerCase() === attachmentId.toLowerCase() - ); - - if (!attachment) { - console.error(`Couldn't find attachment with ID ${attachmentId}`); - return; - } - - const mediaTranscript = attachment.text; - - callbackData.text = mediaTranscript.trim(); - - // if callbackData.text is < 4 lines or < 100 words, then we we callback with normal message wrapped in markdown block - if ( - callbackData.text && - (callbackData.text?.split("\n").length < 4 || - callbackData.text?.split(" ").length < 100) - ) { - callbackData.text = `Here is the transcript: -\`\`\`md -${mediaTranscript.trim()} -\`\`\` -`; - await callback(callbackData); - } - // if text is big, let's send as an attachment - else if (callbackData.text) { - const transcriptFilename = `content/transcript_${Date.now()}`; - - // save the transcript to a file - await runtime.cacheManager.set( - transcriptFilename, - callbackData.text - ); - - await callback( - { - ...callbackData, - text: `I've attached the transcript as a text file.`, - }, - [transcriptFilename] - ); - } else { - console.warn( - "Empty response from transcribe media action, skipping" - ); - } - - return callbackData; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Please transcribe the audio file I just sent.", - }, - }, - { - user: "{{user2}}", - content: { - text: "Sure, I'll transcribe the full audio for you.", - action: "TRANSCRIBE_MEDIA", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Can I get a transcript of that video recording?", - }, - }, - { - user: "{{user2}}", - content: { - text: "Absolutely, give me a moment to generate the full transcript of the video.", - action: "TRANSCRIBE_MEDIA", - }, - }, - ], - ] as ActionExample[][], -} as Action; - -export default transcribeMediaAction; diff --git a/packages/client-discord/src/attachments.ts b/packages/client-discord/src/attachments.ts deleted file mode 100644 index 6b4a53661bc0f..0000000000000 --- a/packages/client-discord/src/attachments.ts +++ /dev/null @@ -1,374 +0,0 @@ -import { generateText, trimTokens } from "@elizaos/core"; -import { parseJSONObjectFromText } from "@elizaos/core"; -import { - type IAgentRuntime, - type IImageDescriptionService, - type IPdfService, - type ITranscriptionService, - type IVideoService, - type Media, - ModelClass, - ServiceType, -} from "@elizaos/core"; -import { type Attachment, Collection } from "discord.js"; -import ffmpeg from "fluent-ffmpeg"; -import fs from "fs"; - -async function generateSummary( - runtime: IAgentRuntime, - text: string -): Promise<{ title: string; description: string }> { - // make sure text is under 128k characters - text = await trimTokens(text, 100000, runtime); - - const prompt = `Please generate a concise summary for the following text: - - Text: """ - ${text} - """ - - Respond with a JSON object in the following format: - \`\`\`json - { - "title": "Generated Title", - "summary": "Generated summary and/or description of the text" - } - \`\`\``; - - const response = await generateText({ - runtime, - context: prompt, - modelClass: ModelClass.SMALL, - }); - - const parsedResponse = parseJSONObjectFromText(response); - - if (parsedResponse?.title && parsedResponse?.summary) { - return { - title: parsedResponse.title, - description: parsedResponse.summary, - }; - } - - return { - title: "", - description: "", - }; -} - -export class AttachmentManager { - private attachmentCache: Map = new Map(); - private runtime: IAgentRuntime; - - constructor(runtime: IAgentRuntime) { - this.runtime = runtime; - } - - async processAttachments( - attachments: Collection | Attachment[] - ): Promise { - const processedAttachments: Media[] = []; - const attachmentCollection = - attachments instanceof Collection - ? attachments - : new Collection(attachments.map((att) => [att.id, att])); - - for (const [, attachment] of attachmentCollection) { - const media = await this.processAttachment(attachment); - if (media) { - processedAttachments.push(media); - } - } - - return processedAttachments; - } - - async processAttachment(attachment: Attachment): Promise { - if (this.attachmentCache.has(attachment.url)) { - return this.attachmentCache.get(attachment.url)!; - } - - let media: Media | null = null; - if (attachment.contentType?.startsWith("application/pdf")) { - media = await this.processPdfAttachment(attachment); - } else if (attachment.contentType?.startsWith("text/plain")) { - media = await this.processPlaintextAttachment(attachment); - } else if ( - attachment.contentType?.startsWith("audio/") || - attachment.contentType?.startsWith("video/mp4") - ) { - media = await this.processAudioVideoAttachment(attachment); - } else if (attachment.contentType?.startsWith("image/")) { - media = await this.processImageAttachment(attachment); - } else if ( - attachment.contentType?.startsWith("video/") || - this.runtime - .getService(ServiceType.VIDEO) - .isVideoUrl(attachment.url) - ) { - media = await this.processVideoAttachment(attachment); - } else { - media = await this.processGenericAttachment(attachment); - } - - if (media) { - this.attachmentCache.set(attachment.url, media); - } - return media; - } - - private async processAudioVideoAttachment( - attachment: Attachment - ): Promise { - try { - const response = await fetch(attachment.url); - const audioVideoArrayBuffer = await response.arrayBuffer(); - - let audioBuffer: Buffer; - if (attachment.contentType?.startsWith("audio/")) { - audioBuffer = Buffer.from(audioVideoArrayBuffer); - } else if (attachment.contentType?.startsWith("video/mp4")) { - audioBuffer = await this.extractAudioFromMP4( - audioVideoArrayBuffer - ); - } else { - throw new Error("Unsupported audio/video format"); - } - - const transcriptionService = - this.runtime.getService( - ServiceType.TRANSCRIPTION - ); - if (!transcriptionService) { - throw new Error("Transcription service not found"); - } - - const transcription = - await transcriptionService.transcribeAttachment(audioBuffer); - const { title, description } = await generateSummary( - this.runtime, - transcription - ); - - return { - id: attachment.id, - url: attachment.url, - title: title || "Audio/Video Attachment", - source: attachment.contentType?.startsWith("audio/") - ? "Audio" - : "Video", - description: - description || - "User-uploaded audio/video attachment which has been transcribed", - text: transcription || "Audio/video content not available", - }; - } catch (error) { - console.error( - `Error processing audio/video attachment: ${error.message}` - ); - return { - id: attachment.id, - url: attachment.url, - title: "Audio/Video Attachment", - source: attachment.contentType?.startsWith("audio/") - ? "Audio" - : "Video", - description: "An audio/video attachment (transcription failed)", - text: `This is an audio/video attachment. File name: ${attachment.name}, Size: ${attachment.size} bytes, Content type: ${attachment.contentType}`, - }; - } - } - - private async extractAudioFromMP4(mp4Data: ArrayBuffer): Promise { - // Use a library like 'fluent-ffmpeg' or 'ffmpeg-static' to extract the audio stream from the MP4 data - // and convert it to MP3 or WAV format - // Example using fluent-ffmpeg: - const tempMP4File = `temp_${Date.now()}.mp4`; - const tempAudioFile = `temp_${Date.now()}.mp3`; - - try { - // Write the MP4 data to a temporary file - fs.writeFileSync(tempMP4File, Buffer.from(mp4Data)); - - // Extract the audio stream and convert it to MP3 - await new Promise((resolve, reject) => { - ffmpeg(tempMP4File) - .outputOptions("-vn") // Disable video output - .audioCodec("libmp3lame") // Set audio codec to MP3 - .save(tempAudioFile) // Save the output to the specified file - .on("end", () => { - resolve(); - }) - .on("error", (err) => { - reject(err); - }) - .run(); - }); - - // Read the converted audio file and return it as a Buffer - const audioData = fs.readFileSync(tempAudioFile); - return audioData; - } finally { - // Clean up the temporary files - if (fs.existsSync(tempMP4File)) { - fs.unlinkSync(tempMP4File); - } - if (fs.existsSync(tempAudioFile)) { - fs.unlinkSync(tempAudioFile); - } - } - } - - private async processPdfAttachment(attachment: Attachment): Promise { - try { - const response = await fetch(attachment.url); - const pdfBuffer = await response.arrayBuffer(); - const text = await this.runtime - .getService(ServiceType.PDF) - .convertPdfToText(Buffer.from(pdfBuffer)); - const { title, description } = await generateSummary( - this.runtime, - text - ); - - return { - id: attachment.id, - url: attachment.url, - title: title || "PDF Attachment", - source: "PDF", - description: description || "A PDF document", - text: text, - }; - } catch (error) { - console.error(`Error processing PDF attachment: ${error.message}`); - return { - id: attachment.id, - url: attachment.url, - title: "PDF Attachment (conversion failed)", - source: "PDF", - description: - "A PDF document that could not be converted to text", - text: `This is a PDF attachment. File name: ${attachment.name}, Size: ${attachment.size} bytes`, - }; - } - } - - private async processPlaintextAttachment( - attachment: Attachment - ): Promise { - try { - const response = await fetch(attachment.url); - const text = await response.text(); - const { title, description } = await generateSummary( - this.runtime, - text - ); - - return { - id: attachment.id, - url: attachment.url, - title: title || "Plaintext Attachment", - source: "Plaintext", - description: description || "A plaintext document", - text: text, - }; - } catch (error) { - console.error( - `Error processing plaintext attachment: ${error.message}` - ); - return { - id: attachment.id, - url: attachment.url, - title: "Plaintext Attachment (retrieval failed)", - source: "Plaintext", - description: "A plaintext document that could not be retrieved", - text: `This is a plaintext attachment. File name: ${attachment.name}, Size: ${attachment.size} bytes`, - }; - } - } - - private async processImageAttachment( - attachment: Attachment - ): Promise { - try { - const { description, title } = await this.runtime - .getService( - ServiceType.IMAGE_DESCRIPTION - ) - .describeImage(attachment.url); - return { - id: attachment.id, - url: attachment.url, - title: title || "Image Attachment", - source: "Image", - description: description || "An image attachment", - text: description || "Image content not available", - }; - } catch (error) { - console.error( - `Error processing image attachment: ${error.message}` - ); - return this.createFallbackImageMedia(attachment); - } - } - - private createFallbackImageMedia(attachment: Attachment): Media { - return { - id: attachment.id, - url: attachment.url, - title: "Image Attachment", - source: "Image", - description: "An image attachment (recognition failed)", - text: `This is an image attachment. File name: ${attachment.name}, Size: ${attachment.size} bytes, Content type: ${attachment.contentType}`, - }; - } - - private async processVideoAttachment( - attachment: Attachment - ): Promise { - const videoService = this.runtime.getService( - ServiceType.VIDEO - ); - - if (!videoService) { - throw new Error("Video service not found"); - } - - if (videoService.isVideoUrl(attachment.url)) { - const videoInfo = await videoService.processVideo( - attachment.url, - this.runtime - ); - return { - id: attachment.id, - url: attachment.url, - title: videoInfo.title, - source: "YouTube", - description: videoInfo.description, - text: videoInfo.text, - }; - } else { - return { - id: attachment.id, - url: attachment.url, - title: "Video Attachment", - source: "Video", - description: "A video attachment", - text: "Video content not available", - }; - } - } - - private async processGenericAttachment( - attachment: Attachment - ): Promise { - return { - id: attachment.id, - url: attachment.url, - title: "Generic Attachment", - source: "Generic", - description: "A generic attachment", - text: "Attachment content not available", - }; - } -} diff --git a/packages/client-discord/src/constants.ts b/packages/client-discord/src/constants.ts deleted file mode 100644 index f149b58b4020e..0000000000000 --- a/packages/client-discord/src/constants.ts +++ /dev/null @@ -1,84 +0,0 @@ -export const TEAM_COORDINATION = { - KEYWORDS: [ - "team", - "all agents", - "team update", - "gm team", - "hello team", - "hey team", - "hi team", - "morning team", - "evening team", - "night team", - "update team", - ], -} as const; - -export const MESSAGE_CONSTANTS = { - MAX_MESSAGES: 10, - RECENT_MESSAGE_COUNT: 3, - CHAT_HISTORY_COUNT: 5, - INTEREST_DECAY_TIME: 5 * 60 * 1000, // 5 minutes - PARTIAL_INTEREST_DECAY: 3 * 60 * 1000, // 3 minutes - DEFAULT_SIMILARITY_THRESHOLD: 0.3, - DEFAULT_SIMILARITY_THRESHOLD_FOLLOW_UPS: 0.2, -} as const; - -export const MESSAGE_LENGTH_THRESHOLDS = { - LOSE_INTEREST: 100, - SHORT_MESSAGE: 10, - VERY_SHORT_MESSAGE: 2, - IGNORE_RESPONSE: 4, -} as const; - -export const TIMING_CONSTANTS = { - LEADER_RESPONSE_TIMEOUT: 3000, - TEAM_MEMBER_DELAY: 1500, - LEADER_DELAY_MIN: 3000, - LEADER_DELAY_MAX: 4000, - TEAM_MEMBER_DELAY_MIN: 1000, - TEAM_MEMBER_DELAY_MAX: 3000, -} as const; - -export const RESPONSE_CHANCES = { - AFTER_LEADER: 0.5, // 50% chance - FREQUENT_CHATTER: 0.5, // Base chance for frequent responders -} as const; - -export const LOSE_INTEREST_WORDS = [ - "shut up", - "stop", - "please shut up", - "shut up please", - "dont talk", - "silence", - "stop talking", - "be quiet", - "hush", - "wtf", - "chill", - "stfu", - "stupid bot", - "dumb bot", - "stop responding", - "god damn it", - "god damn", - "goddamnit", - "can you not", - "can you stop", - "be quiet", - "hate you", - "hate this", - "fuck up", -] as const; - -export const IGNORE_RESPONSE_WORDS = [ - "lol", - "nm", - "uh", - "wtf", - "stfu", - "dumb", - "jfc", - "omg", -] as const; diff --git a/packages/client-discord/src/environment.ts b/packages/client-discord/src/environment.ts deleted file mode 100644 index 71f4b3cbeb3dd..0000000000000 --- a/packages/client-discord/src/environment.ts +++ /dev/null @@ -1,38 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -export const discordEnvSchema = z.object({ - DISCORD_APPLICATION_ID: z - .string() - .min(1, "Discord application ID is required"), - DISCORD_API_TOKEN: z.string().min(1, "Discord API token is required"), -}); - -export type DiscordConfig = z.infer; - -export async function validateDiscordConfig( - runtime: IAgentRuntime -): Promise { - try { - const config = { - DISCORD_APPLICATION_ID: - runtime.getSetting("DISCORD_APPLICATION_ID") || - process.env.DISCORD_APPLICATION_ID, - DISCORD_API_TOKEN: - runtime.getSetting("DISCORD_API_TOKEN") || - process.env.DISCORD_API_TOKEN, - }; - - return discordEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `Discord configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/client-discord/src/index.ts b/packages/client-discord/src/index.ts deleted file mode 100644 index 5aac95fed238a..0000000000000 --- a/packages/client-discord/src/index.ts +++ /dev/null @@ -1,414 +0,0 @@ -import { - getEmbeddingZeroVector, - stringToUuid, - elizaLogger, - type Character, - type Client as ElizaClient, - type IAgentRuntime, -} from "@elizaos/core"; -import { - Client, - Events, - GatewayIntentBits, - type Guild, - type MessageReaction, - Partials, - type User, -} from "discord.js"; -import { EventEmitter } from "events"; -import chat_with_attachments from "./actions/chat_with_attachments.ts"; -import download_media from "./actions/download_media.ts"; -import joinvoice from "./actions/joinvoice.ts"; -import leavevoice from "./actions/leavevoice.ts"; -import summarize from "./actions/summarize_conversation.ts"; -import transcribe_media from "./actions/transcribe_media.ts"; -import { MessageManager } from "./messages.ts"; -import channelStateProvider from "./providers/channelState.ts"; -import voiceStateProvider from "./providers/voiceState.ts"; -import { VoiceManager } from "./voice.ts"; -import { PermissionsBitField } from "discord.js"; - -export class DiscordClient extends EventEmitter { - apiToken: string; - client: Client; - runtime: IAgentRuntime; - character: Character; - private messageManager: MessageManager; - private voiceManager: VoiceManager; - - constructor(runtime: IAgentRuntime) { - super(); - - this.apiToken = runtime.getSetting("DISCORD_API_TOKEN") as string; - this.client = new Client({ - intents: [ - GatewayIntentBits.Guilds, - GatewayIntentBits.DirectMessages, - GatewayIntentBits.GuildVoiceStates, - GatewayIntentBits.MessageContent, - GatewayIntentBits.GuildMessages, - GatewayIntentBits.DirectMessageTyping, - GatewayIntentBits.GuildMessageTyping, - GatewayIntentBits.GuildMessageReactions, - ], - partials: [ - Partials.Channel, - Partials.Message, - Partials.User, - Partials.Reaction, - ], - }); - - this.runtime = runtime; - this.voiceManager = new VoiceManager(this); - this.messageManager = new MessageManager(this, this.voiceManager); - - this.client.once(Events.ClientReady, this.onClientReady.bind(this)); - this.client.login(this.apiToken); - - this.setupEventListeners(); - - this.runtime.registerAction(joinvoice); - this.runtime.registerAction(leavevoice); - this.runtime.registerAction(summarize); - this.runtime.registerAction(chat_with_attachments); - this.runtime.registerAction(transcribe_media); - this.runtime.registerAction(download_media); - - this.runtime.providers.push(channelStateProvider); - this.runtime.providers.push(voiceStateProvider); - } - - private setupEventListeners() { - // When joining to a new server - this.client.on("guildCreate", this.handleGuildCreate.bind(this)); - - this.client.on( - Events.MessageReactionAdd, - this.handleReactionAdd.bind(this) - ); - this.client.on( - Events.MessageReactionRemove, - this.handleReactionRemove.bind(this) - ); - - // Handle voice events with the voice manager - this.client.on( - "voiceStateUpdate", - this.voiceManager.handleVoiceStateUpdate.bind(this.voiceManager) - ); - this.client.on( - "userStream", - this.voiceManager.handleUserStream.bind(this.voiceManager) - ); - - // Handle a new message with the message manager - this.client.on( - Events.MessageCreate, - this.messageManager.handleMessage.bind(this.messageManager) - ); - - // Handle a new interaction - this.client.on( - Events.InteractionCreate, - this.handleInteractionCreate.bind(this) - ); - } - - async stop() { - try { - // disconnect websocket - // this unbinds all the listeners - await this.client.destroy(); - } catch (e) { - elizaLogger.error("client-discord instance stop err", e); - } - } - - private async onClientReady(readyClient: { user: { tag: any; id: any } }) { - elizaLogger.success(`Logged in as ${readyClient.user?.tag}`); - - // Register slash commands - const commands = [ - { - name: "joinchannel", - description: "Join a voice channel", - options: [ - { - name: "channel", - type: 7, // CHANNEL type - description: "The voice channel to join", - required: true, - channel_types: [2], // GuildVoice type - }, - ], - }, - { - name: "leavechannel", - description: "Leave the current voice channel", - }, - ]; - - try { - await this.client.application?.commands.set(commands); - elizaLogger.success("Slash commands registered"); - } catch (error) { - console.error("Error registering slash commands:", error); - } - - // Required permissions for the bot - const requiredPermissions = [ - // Text Permissions - PermissionsBitField.Flags.ViewChannel, - PermissionsBitField.Flags.SendMessages, - PermissionsBitField.Flags.SendMessagesInThreads, - PermissionsBitField.Flags.CreatePrivateThreads, - PermissionsBitField.Flags.CreatePublicThreads, - PermissionsBitField.Flags.EmbedLinks, - PermissionsBitField.Flags.AttachFiles, - PermissionsBitField.Flags.AddReactions, - PermissionsBitField.Flags.UseExternalEmojis, - PermissionsBitField.Flags.UseExternalStickers, - PermissionsBitField.Flags.MentionEveryone, - PermissionsBitField.Flags.ManageMessages, - PermissionsBitField.Flags.ReadMessageHistory, - // Voice Permissions - PermissionsBitField.Flags.Connect, - PermissionsBitField.Flags.Speak, - PermissionsBitField.Flags.UseVAD, - PermissionsBitField.Flags.PrioritySpeaker, - ].reduce((a, b) => a | b, 0n); - - elizaLogger.success("Use this URL to add the bot to your server:"); - elizaLogger.success( - `https://discord.com/api/oauth2/authorize?client_id=${readyClient.user?.id}&permissions=${requiredPermissions}&scope=bot%20applications.commands` - ); - await this.onReady(); - } - - async handleReactionAdd(reaction: MessageReaction, user: User) { - try { - elizaLogger.log("Reaction added"); - - // Early returns - if (!reaction || !user) { - elizaLogger.warn("Invalid reaction or user"); - return; - } - - // Get emoji info - let emoji = reaction.emoji.name; - if (!emoji && reaction.emoji.id) { - emoji = `<:${reaction.emoji.name}:${reaction.emoji.id}>`; - } - - // Fetch full message if partial - if (reaction.partial) { - try { - await reaction.fetch(); - } catch (error) { - elizaLogger.error( - "Failed to fetch partial reaction:", - error - ); - return; - } - } - - // Generate IDs with timestamp to ensure uniqueness - const timestamp = Date.now(); - const roomId = stringToUuid( - `${reaction.message.channel.id}-${this.runtime.agentId}` - ); - const userIdUUID = stringToUuid( - `${user.id}-${this.runtime.agentId}` - ); - const reactionUUID = stringToUuid( - `${reaction.message.id}-${user.id}-${emoji}-${timestamp}-${this.runtime.agentId}` - ); - - // Validate IDs - if (!userIdUUID || !roomId) { - elizaLogger.error("Invalid user ID or room ID", { - userIdUUID, - roomId, - }); - return; - } - - // Process message content - const messageContent = reaction.message.content || ""; - const truncatedContent = - messageContent.length > 100 - ? `${messageContent.substring(0, 100)}...` - : messageContent; - const reactionMessage = `*<${emoji}>: "${truncatedContent}"*`; - - // Get user info - const userName = reaction.message.author?.username || "unknown"; - const name = reaction.message.author?.displayName || userName; - - // Ensure connection - await this.runtime.ensureConnection( - userIdUUID, - roomId, - userName, - name, - "discord" - ); - - // Create memory with retry logic - const memory = { - id: reactionUUID, - userId: userIdUUID, - agentId: this.runtime.agentId, - content: { - text: reactionMessage, - source: "discord", - inReplyTo: stringToUuid( - `${reaction.message.id}-${this.runtime.agentId}` - ), - }, - roomId, - createdAt: timestamp, - embedding: getEmbeddingZeroVector(), - }; - - try { - await this.runtime.messageManager.createMemory(memory); - elizaLogger.debug("Reaction memory created", { - reactionId: reactionUUID, - emoji, - userId: user.id, - }); - } catch (error) { - if (error.code === "23505") { - // Duplicate key error - elizaLogger.warn("Duplicate reaction memory, skipping", { - reactionId: reactionUUID, - }); - return; - } - throw error; // Re-throw other errors - } - } catch (error) { - elizaLogger.error("Error handling reaction:", error); - } - } - - async handleReactionRemove(reaction: MessageReaction, user: User) { - elizaLogger.log("Reaction removed"); - // if (user.bot) return; - - let emoji = reaction.emoji.name; - if (!emoji && reaction.emoji.id) { - emoji = `<:${reaction.emoji.name}:${reaction.emoji.id}>`; - } - - // Fetch the full message if it's a partial - if (reaction.partial) { - try { - await reaction.fetch(); - } catch (error) { - console.error( - "Something went wrong when fetching the message:", - error - ); - return; - } - } - - const messageContent = reaction.message.content; - const truncatedContent = - messageContent.length > 50 - ? messageContent.substring(0, 50) + "..." - : messageContent; - - const reactionMessage = `*Removed <${emoji} emoji> from: "${truncatedContent}"*`; - - const roomId = stringToUuid( - reaction.message.channel.id + "-" + this.runtime.agentId - ); - const userIdUUID = stringToUuid(user.id); - - // Generate a unique UUID for the reaction removal - const reactionUUID = stringToUuid( - `${reaction.message.id}-${user.id}-${emoji}-removed-${this.runtime.agentId}` - ); - - const userName = reaction.message.author.username; - const name = reaction.message.author.displayName; - - await this.runtime.ensureConnection( - userIdUUID, - roomId, - userName, - name, - "discord" - ); - - try { - // Save the reaction removal as a message - await this.runtime.messageManager.createMemory({ - id: reactionUUID, // This is the ID of the reaction removal message - userId: userIdUUID, - agentId: this.runtime.agentId, - content: { - text: reactionMessage, - source: "discord", - inReplyTo: stringToUuid( - reaction.message.id + "-" + this.runtime.agentId - ), // This is the ID of the original message - }, - roomId, - createdAt: Date.now(), - embedding: getEmbeddingZeroVector(), - }); - } catch (error) { - console.error("Error creating reaction removal message:", error); - } - } - - private handleGuildCreate(guild: Guild) { - console.log(`Joined guild ${guild.name}`); - this.voiceManager.scanGuild(guild); - } - - private async handleInteractionCreate(interaction: any) { - if (!interaction.isCommand()) return; - - switch (interaction.commandName) { - case "joinchannel": - await this.voiceManager.handleJoinChannelCommand(interaction); - break; - case "leavechannel": - await this.voiceManager.handleLeaveChannelCommand(interaction); - break; - } - } - - private async onReady() { - const guilds = await this.client.guilds.fetch(); - for (const [, guild] of guilds) { - const fullGuild = await guild.fetch(); - this.voiceManager.scanGuild(fullGuild); - } - } -} - -export function startDiscord(runtime: IAgentRuntime) { - return new DiscordClient(runtime); -} - -export const DiscordClientInterface: ElizaClient = { - start: async (runtime: IAgentRuntime) => new DiscordClient(runtime), - stop: async (runtime: IAgentRuntime) => { - try { - // stop it - elizaLogger.log("Stopping discord client", runtime.agentId); - await runtime.clients.discord.stop(); - } catch (e) { - elizaLogger.error("client-discord interface stop error", e); - } - }, -}; diff --git a/packages/client-discord/src/messages.ts b/packages/client-discord/src/messages.ts deleted file mode 100644 index 313c890dbb711..0000000000000 --- a/packages/client-discord/src/messages.ts +++ /dev/null @@ -1,1609 +0,0 @@ -import { composeContext, composeRandomUser } from "@elizaos/core"; -import { generateMessageResponse, generateShouldRespond } from "@elizaos/core"; -import { - type Content, - type HandlerCallback, - type IAgentRuntime, - type IBrowserService, - type ISpeechService, - type IVideoService, - type Media, - type Memory, - ModelClass, - ServiceType, - type State, - type UUID, -} from "@elizaos/core"; -import { stringToUuid, getEmbeddingZeroVector } from "@elizaos/core"; -import { - ChannelType, - type Client, - type Message as DiscordMessage, - type TextChannel, -} from "discord.js"; -import { elizaLogger } from "@elizaos/core"; -import { AttachmentManager } from "./attachments.ts"; -import type { VoiceManager } from "./voice.ts"; -import { - discordShouldRespondTemplate, - discordMessageHandlerTemplate, - discordAutoPostTemplate, - discordAnnouncementHypeTemplate -} from "./templates.ts"; -import { - IGNORE_RESPONSE_WORDS, - LOSE_INTEREST_WORDS, - MESSAGE_CONSTANTS, - MESSAGE_LENGTH_THRESHOLDS, - RESPONSE_CHANCES, - TEAM_COORDINATION, - TIMING_CONSTANTS, -} from "./constants"; -import { - sendMessageInChunks, - canSendMessage, - cosineSimilarity, -} from "./utils.ts"; - -interface MessageContext { - content: string; - timestamp: number; -} - -interface AutoPostConfig { - enabled: boolean; - monitorTime: number; - inactivityThreshold: number; // milliseconds - mainChannelId: string; - announcementChannelIds: string[]; - lastAutoPost?: number; - minTimeBetweenPosts?: number; // minimum time between auto posts -} - -export type InterestChannels = { - [key: string]: { - currentHandler: string | undefined; - lastMessageSent: number; - messages: { userId: UUID; userName: string; content: Content }[]; - previousContext?: MessageContext; - contextSimilarityThreshold?: number; - }; -}; - -export class MessageManager { - private client: Client; - private runtime: IAgentRuntime; - private attachmentManager: AttachmentManager; - private interestChannels: InterestChannels = {}; - private discordClient: any; - private voiceManager: VoiceManager; - //Auto post - private autoPostConfig: AutoPostConfig; - private lastChannelActivity: { [channelId: string]: number } = {}; - private autoPostInterval: NodeJS.Timeout; - - constructor(discordClient: any, voiceManager: VoiceManager) { - this.client = discordClient.client; - this.voiceManager = voiceManager; - this.discordClient = discordClient; - this.runtime = discordClient.runtime; - this.attachmentManager = new AttachmentManager(this.runtime); - - this.autoPostConfig = { - enabled: this.runtime.character.clientConfig?.discord?.autoPost?.enabled || false, - monitorTime: this.runtime.character.clientConfig?.discord?.autoPost?.monitorTime || 300000, - inactivityThreshold: this.runtime.character.clientConfig?.discord?.autoPost?.inactivityThreshold || 3600000, // 1 hour default - mainChannelId: this.runtime.character.clientConfig?.discord?.autoPost?.mainChannelId, - announcementChannelIds: this.runtime.character.clientConfig?.discord?.autoPost?.announcementChannelIds || [], - minTimeBetweenPosts: this.runtime.character.clientConfig?.discord?.autoPost?.minTimeBetweenPosts || 7200000, // 2 hours default - }; - - if (this.autoPostConfig.enabled) { - this._startAutoPostMonitoring(); - } - } - - async handleMessage(message: DiscordMessage) { - - if (this.runtime.character.clientConfig?.discord?.allowedChannelIds && - !this.runtime.character.clientConfig.discord.allowedChannelIds.includes(message.channelId)) { - return; - } - - // Update last activity time for the channel - this.lastChannelActivity[message.channelId] = Date.now(); - - if ( - message.interaction || - message.author.id === - this.client.user?.id /* || message.author?.bot*/ - ) { - return; - } - - if ( - this.runtime.character.clientConfig?.discord - ?.shouldIgnoreBotMessages && - message.author?.bot - ) { - return; - } - - // Check for mentions-only mode setting - if ( - this.runtime.character.clientConfig?.discord - ?.shouldRespondOnlyToMentions - ) { - if (!this._isMessageForMe(message)) { - return; - } - } - - if ( - this.runtime.character.clientConfig?.discord - ?.shouldIgnoreDirectMessages && - message.channel.type === ChannelType.DM - ) { - return; - } - - const userId = message.author.id as UUID; - const userName = message.author.username; - const name = message.author.displayName; - const channelId = message.channel.id; - const isDirectlyMentioned = this._isMessageForMe(message); - const hasInterest = this._checkInterest(message.channelId); - - // Team handling - if ( - this.runtime.character.clientConfig?.discord?.isPartOfTeam && - !this.runtime.character.clientConfig?.discord - ?.shouldRespondOnlyToMentions - ) { - const authorId = this._getNormalizedUserId(message.author.id); - - if ( - !this._isTeamLeader() && - this._isRelevantToTeamMember(message.content, channelId) - ) { - this.interestChannels[message.channelId] = { - currentHandler: this.client.user?.id, - lastMessageSent: Date.now(), - messages: [], - }; - } - - const isTeamRequest = this._isTeamCoordinationRequest( - message.content - ); - const isLeader = this._isTeamLeader(); - - // After team-wide responses, check if we should maintain interest - if (hasInterest && !isDirectlyMentioned) { - const lastSelfMemories = - await this.runtime.messageManager.getMemories({ - roomId: stringToUuid( - channelId + "-" + this.runtime.agentId - ), - unique: false, - count: 5, - }); - - const lastSelfSortedMemories = lastSelfMemories - ?.filter((m) => m.userId === this.runtime.agentId) - .sort((a, b) => (b.createdAt || 0) - (a.createdAt || 0)); - - const isRelevant = this._isRelevantToTeamMember( - message.content, - channelId, - lastSelfSortedMemories?.[0] - ); - - if (!isRelevant) { - // Clearing interest - conversation not relevant to team member - delete this.interestChannels[message.channelId]; - return; - } - } - - if (isTeamRequest) { - if (isLeader) { - this.interestChannels[message.channelId] = { - currentHandler: this.client.user?.id, - lastMessageSent: Date.now(), - messages: [], - }; - } else { - // Set temporary interest for this response - this.interestChannels[message.channelId] = { - currentHandler: this.client.user?.id, - lastMessageSent: Date.now(), - messages: [], - }; - - // Clear interest after this cycle unless directly mentioned - if (!isDirectlyMentioned) { - // Use existing message cycle to clear interest - this.interestChannels[ - message.channelId - ].lastMessageSent = 0; - } - } - } - - // Check for other team member mentions - const otherTeamMembers = - this.runtime.character.clientConfig.discord.teamAgentIds.filter( - (id) => id !== this.client.user?.id - ); - const mentionedTeamMember = otherTeamMembers.find((id) => - message.content.includes(`<@${id}>`) - ); - - // If another team member is mentioned, clear our interest - if (mentionedTeamMember) { - if ( - hasInterest || - this.interestChannels[message.channelId]?.currentHandler === - this.client.user?.id - ) { - delete this.interestChannels[message.channelId]; - - // Only return if we're not the mentioned member - if (!isDirectlyMentioned) { - return; - } - } - } - - // Set/maintain interest only if we're mentioned or already have interest - if (isDirectlyMentioned) { - this.interestChannels[message.channelId] = { - currentHandler: this.client.user?.id, - lastMessageSent: Date.now(), - messages: [], - }; - } else if (!isTeamRequest && !hasInterest) { - return; - } - - // Bot-specific checks - if (message.author.bot) { - if (this._isTeamMember(authorId) && !isDirectlyMentioned) { - return; - } else if ( - this.runtime.character.clientConfig.discord - .shouldIgnoreBotMessages - ) { - return; - } - } - } - - try { - const { processedContent, attachments } = - await this.processMessageMedia(message); - - const audioAttachments = message.attachments.filter((attachment) => - attachment.contentType?.startsWith("audio/") - ); - if (audioAttachments.size > 0) { - const processedAudioAttachments = - await this.attachmentManager.processAttachments( - audioAttachments - ); - attachments.push(...processedAudioAttachments); - } - - const roomId = stringToUuid(channelId + "-" + this.runtime.agentId); - const userIdUUID = stringToUuid(userId); - - await this.runtime.ensureConnection( - userIdUUID, - roomId, - userName, - name, - "discord" - ); - - const messageId = stringToUuid( - message.id + "-" + this.runtime.agentId - ); - - let shouldIgnore = false; - let shouldRespond = true; - - const content: Content = { - text: processedContent, - attachments: attachments, - source: "discord", - url: message.url, - inReplyTo: message.reference?.messageId - ? stringToUuid( - message.reference.messageId + - "-" + - this.runtime.agentId - ) - : undefined, - }; - - const userMessage = { - content, - userId: userIdUUID, - agentId: this.runtime.agentId, - roomId, - }; - - const memory: Memory = { - id: stringToUuid(message.id + "-" + this.runtime.agentId), - ...userMessage, - userId: userIdUUID, - agentId: this.runtime.agentId, - roomId, - content, - createdAt: message.createdTimestamp, - }; - - if (content.text) { - await this.runtime.messageManager.addEmbeddingToMemory(memory); - await this.runtime.messageManager.createMemory(memory); - - if (this.interestChannels[message.channelId]) { - // Add new message - this.interestChannels[message.channelId].messages.push({ - userId: userIdUUID, - userName: userName, - content: content, - }); - - // Trim to keep only recent messages - if ( - this.interestChannels[message.channelId].messages - .length > MESSAGE_CONSTANTS.MAX_MESSAGES - ) { - this.interestChannels[message.channelId].messages = - this.interestChannels[ - message.channelId - ].messages.slice(-MESSAGE_CONSTANTS.MAX_MESSAGES); - } - } - } - - let state = await this.runtime.composeState(userMessage, { - discordClient: this.client, - discordMessage: message, - agentName: - this.runtime.character.name || - this.client.user?.displayName, - }); - - const canSendResult = canSendMessage(message.channel); - if (!canSendResult.canSend) { - return elizaLogger.warn( - `Cannot send message to channel ${message.channel}`, - canSendResult - ); - } - - if (!shouldIgnore) { - shouldIgnore = await this._shouldIgnore(message); - } - - if (shouldIgnore) { - return; - } - - const agentUserState = - await this.runtime.databaseAdapter.getParticipantUserState( - roomId, - this.runtime.agentId - ); - - if ( - agentUserState === "MUTED" && - !message.mentions.has(this.client.user.id) && - !hasInterest - ) { - console.log("Ignoring muted room"); - // Ignore muted rooms unless explicitly mentioned - return; - } - - if (agentUserState === "FOLLOWED") { - shouldRespond = true; // Always respond in followed rooms - } else if ( - (!shouldRespond && hasInterest) || - (shouldRespond && !hasInterest) - ) { - shouldRespond = await this._shouldRespond(message, state); - } - - if (shouldRespond) { - const context = composeContext({ - state, - template: - this.runtime.character.templates - ?.discordMessageHandlerTemplate || - discordMessageHandlerTemplate, - }); - - // simulate discord typing while generating a response - const stopTyping = this.simulateTyping(message); - - const responseContent = await this._generateResponse( - memory, - state, - context - ).finally(() => { - stopTyping(); - }); - - responseContent.text = responseContent.text?.trim(); - responseContent.inReplyTo = stringToUuid( - message.id + "-" + this.runtime.agentId - ); - - if (!responseContent.text) { - return; - } - - const callback: HandlerCallback = async ( - content: Content, - files: any[] - ) => { - try { - if (message.id && !content.inReplyTo) { - content.inReplyTo = stringToUuid( - message.id + "-" + this.runtime.agentId - ); - } - const messages = await sendMessageInChunks( - message.channel as TextChannel, - content.text, - message.id, - files - ); - - const memories: Memory[] = []; - for (const m of messages) { - let action = content.action; - // If there's only one message or it's the last message, keep the original action - // For multiple messages, set all but the last to 'CONTINUE' - if ( - messages.length > 1 && - m !== messages[messages.length - 1] - ) { - action = "CONTINUE"; - } - - const memory: Memory = { - id: stringToUuid( - m.id + "-" + this.runtime.agentId - ), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - content: { - ...content, - action, - inReplyTo: messageId, - url: m.url, - }, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: m.createdTimestamp, - }; - memories.push(memory); - } - for (const m of memories) { - await this.runtime.messageManager.createMemory(m); - } - return memories; - } catch (error) { - console.error("Error sending message:", error); - return []; - } - }; - - const action = this.runtime.actions.find((a) => a.name === responseContent.action); - const shouldSuppressInitialMessage = action?.suppressInitialMessage; - - let responseMessages = []; - - if (!shouldSuppressInitialMessage) { - responseMessages = await callback(responseContent); - } else { - responseMessages = [ - { - id: stringToUuid(messageId + "-" + this.runtime.agentId), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - content: responseContent, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: Date.now(), - } - ] - } - - state = await this.runtime.updateRecentMessageState(state); - - await this.runtime.processActions( - memory, - responseMessages, - state, - callback - ); - } - await this.runtime.evaluate(memory, state, shouldRespond); - } catch (error) { - console.error("Error handling message:", error); - if (message.channel.type === ChannelType.GuildVoice) { - // For voice channels, use text-to-speech for the error message - const errorMessage = "Sorry, I had a glitch. What was that?"; - - const speechService = this.runtime.getService( - ServiceType.SPEECH_GENERATION - ); - if (!speechService) { - throw new Error("Speech generation service not found"); - } - - const audioStream = await speechService.generate( - this.runtime, - errorMessage - ); - await this.voiceManager.playAudioStream(userId, audioStream); - } else { - // For text channels, send the error message - console.error("Error sending message:", error); - } - } - } - - async cacheMessages(channel: TextChannel, count = 20) { - const messages = await channel.messages.fetch({ limit: count }); - - // TODO: This is throwing an error but seems to work? - for (const [_, message] of messages) { - await this.handleMessage(message); - } - } - - private _startAutoPostMonitoring(): void { - // Wait for client to be ready - if (!this.client.isReady()) { - elizaLogger.info('[AutoPost Discord] Client not ready, waiting for ready event') - this.client.once('ready', () => { - elizaLogger.info('[AutoPost Discord] Client ready, starting monitoring') - this._initializeAutoPost(); - }); - } else { - elizaLogger.info('[AutoPost Discord] Client already ready, starting monitoring') - this._initializeAutoPost(); - } - } - - private _initializeAutoPost(): void { - // Give the client a moment to fully load its cache - setTimeout(() => { - // Monitor with random intervals between 2-6 hours - this.autoPostInterval = setInterval(() => { - this._checkChannelActivity(); - }, Math.floor(Math.random() * (4 * 60 * 60 * 1000) + 2 * 60 * 60 * 1000)); - - // Start monitoring announcement channels - this._monitorAnnouncementChannels(); - }, 5000); // 5 second delay to ensure everything is loaded - } - - private async _checkChannelActivity(): Promise { - if (!this.autoPostConfig.enabled || !this.autoPostConfig.mainChannelId) return; - - const channel = this.client.channels.cache.get(this.autoPostConfig.mainChannelId) as TextChannel; - if (!channel) return; - - try { - // Get last message time - const messages = await channel.messages.fetch({ limit: 1 }); - const lastMessage = messages.first(); - const lastMessageTime = lastMessage ? lastMessage.createdTimestamp : 0; - - const now = Date.now(); - const timeSinceLastMessage = now - lastMessageTime; - const timeSinceLastAutoPost = now - (this.autoPostConfig.lastAutoPost || 0); - - // Add some randomness to the inactivity threshold (±30 minutes) - const randomThreshold = this.autoPostConfig.inactivityThreshold + - (Math.random() * 1800000 - 900000); - - // Check if we should post - if ((timeSinceLastMessage > randomThreshold) && - timeSinceLastAutoPost > (this.autoPostConfig.minTimeBetweenPosts || 0)) { - - try { - // Create memory and generate response - const roomId = stringToUuid(channel.id + "-" + this.runtime.agentId); - - const memory = { - id: stringToUuid(`autopost-${Date.now()}`), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - roomId, - content: { text: "AUTO_POST_ENGAGEMENT", source: "discord" }, - embedding: getEmbeddingZeroVector(), - createdAt: Date.now() - }; - - let state = await this.runtime.composeState(memory, { - discordClient: this.client, - discordMessage: null, - agentName: this.runtime.character.name || this.client.user?.displayName - }); - - // Generate response using template - const context = composeContext({ - state, - template: this.runtime.character.templates?.discordAutoPostTemplate || discordAutoPostTemplate - }); - - const responseContent = await this._generateResponse(memory, state, context); - if (!responseContent?.text) return; - - // Send message and update memory - const messages = await sendMessageInChunks(channel, responseContent.text.trim(), null, []); - - // Create and store memories - const memories = messages.map(m => ({ - id: stringToUuid(m.id + "-" + this.runtime.agentId), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - content: { - ...responseContent, - url: m.url, - }, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: m.createdTimestamp, - })); - - for (const m of memories) { - await this.runtime.messageManager.createMemory(m); - } - - // Update state and last post time - this.autoPostConfig.lastAutoPost = Date.now(); - state = await this.runtime.updateRecentMessageState(state); - await this.runtime.evaluate(memory, state, true); - } catch (error) { - elizaLogger.warn("[AutoPost Discord] Error:", error); - } - } else { - elizaLogger.warn("[AutoPost Discord] Activity within threshold. Not posting."); - } - } catch (error) { - elizaLogger.warn("[AutoPost Discord] Error checking last message:", error); - } - } - - private async _monitorAnnouncementChannels(): Promise { - if (!this.autoPostConfig.enabled || !this.autoPostConfig.announcementChannelIds.length) { - elizaLogger.warn('[AutoPost Discord] Auto post config disabled or no announcement channels') - return; - } - - for (const announcementChannelId of this.autoPostConfig.announcementChannelIds) { - const channel = this.client.channels.cache.get(announcementChannelId); - - if (channel) { - // Check if it's either a text channel or announcement channel - // ChannelType.GuildAnnouncement is 5 - // ChannelType.GuildText is 0 - if (channel instanceof TextChannel || channel.type === ChannelType.GuildAnnouncement) { - const newsChannel = channel as TextChannel; - try { - newsChannel.createMessageCollector().on('collect', async (message: DiscordMessage) => { - if (message.author.bot || Date.now() - message.createdTimestamp > 300000) return; - - const mainChannel = this.client.channels.cache.get(this.autoPostConfig.mainChannelId) as TextChannel; - if (!mainChannel) return; - - try { - // Create memory and generate response - const roomId = stringToUuid(mainChannel.id + "-" + this.runtime.agentId); - const memory = { - id: stringToUuid(`announcement-${Date.now()}`), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - roomId, - content: { - text: message.content, - source: "discord", - metadata: { announcementUrl: message.url } - }, - embedding: getEmbeddingZeroVector(), - createdAt: Date.now() - }; - - let state = await this.runtime.composeState(memory, { - discordClient: this.client, - discordMessage: message, - announcementContent: message?.content, - announcementChannelId: channel.id, - agentName: this.runtime.character.name || this.client.user?.displayName - }); - - // Generate response using template - const context = composeContext({ - state, - template: this.runtime.character.templates?.discordAnnouncementHypeTemplate || discordAnnouncementHypeTemplate - - }); - - const responseContent = await this._generateResponse(memory, state, context); - if (!responseContent?.text) return; - - // Send message and update memory - const messages = await sendMessageInChunks(mainChannel, responseContent.text.trim(), null, []); - - // Create and store memories - const memories = messages.map(m => ({ - id: stringToUuid(m.id + "-" + this.runtime.agentId), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - content: { - ...responseContent, - url: m.url, - }, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: m.createdTimestamp, - })); - - for (const m of memories) { - await this.runtime.messageManager.createMemory(m); - } - - // Update state - state = await this.runtime.updateRecentMessageState(state); - await this.runtime.evaluate(memory, state, true); - } catch (error) { - elizaLogger.warn("[AutoPost Discord] Announcement Error:", error); - } - }); - elizaLogger.info(`[AutoPost Discord] Successfully set up collector for announcement channel: ${newsChannel.name}`); - } catch (error) { - elizaLogger.warn(`[AutoPost Discord] Error setting up announcement channel collector:`, error); - } - } else { - elizaLogger.warn(`[AutoPost Discord] Channel ${announcementChannelId} is not a valid announcement or text channel, type:`, channel.type); - } - } else { - elizaLogger.warn(`[AutoPost Discord] Could not find channel ${announcementChannelId} directly`); - } - } - } - - private _isMessageForMe(message: DiscordMessage): boolean { - const isMentioned = message.mentions.users?.has( - this.client.user?.id as string - ); - const guild = message.guild; - const member = guild?.members.cache.get(this.client.user?.id as string); - const nickname = member?.nickname; - - // Don't consider role mentions as direct mentions - const hasRoleMentionOnly = - message.mentions.roles.size > 0 && !isMentioned; - - // If it's only a role mention and we're in team mode, let team logic handle it - if ( - hasRoleMentionOnly && - this.runtime.character.clientConfig?.discord?.isPartOfTeam - ) { - return false; - } - - return ( - isMentioned || - (!this.runtime.character.clientConfig?.discord - ?.shouldRespondOnlyToMentions && - (message.content - .toLowerCase() - .includes( - this.client.user?.username.toLowerCase() as string - ) || - message.content - .toLowerCase() - .includes( - this.client.user?.tag.toLowerCase() as string - ) || - (nickname && - message.content - .toLowerCase() - .includes(nickname.toLowerCase())))) - ); - } - - async processMessageMedia( - message: DiscordMessage - ): Promise<{ processedContent: string; attachments: Media[] }> { - let processedContent = message.content; - - let attachments: Media[] = []; - - // Process code blocks in the message content - const codeBlockRegex = /```([\s\S]*?)```/g; - let match; - while ((match = codeBlockRegex.exec(processedContent))) { - const codeBlock = match[1]; - const lines = codeBlock.split("\n"); - const title = lines[0]; - const description = lines.slice(0, 3).join("\n"); - const attachmentId = - `code-${Date.now()}-${Math.floor(Math.random() * 1000)}`.slice( - -5 - ); - attachments.push({ - id: attachmentId, - url: "", - title: title || "Code Block", - source: "Code", - description: description, - text: codeBlock, - }); - processedContent = processedContent.replace( - match[0], - `Code Block (${attachmentId})` - ); - } - - // Process message attachments - if (message.attachments.size > 0) { - attachments = await this.attachmentManager.processAttachments( - message.attachments - ); - } - - // TODO: Move to attachments manager - const urlRegex = /(https?:\/\/[^\s]+)/g; - const urls = processedContent.match(urlRegex) || []; - - for (const url of urls) { - if ( - this.runtime - .getService(ServiceType.VIDEO) - ?.isVideoUrl(url) - ) { - const videoService = this.runtime.getService( - ServiceType.VIDEO - ); - if (!videoService) { - throw new Error("Video service not found"); - } - const videoInfo = await videoService.processVideo( - url, - this.runtime - ); - - attachments.push({ - id: `youtube-${Date.now()}`, - url: url, - title: videoInfo.title, - source: "YouTube", - description: videoInfo.description, - text: videoInfo.text, - }); - } else { - const browserService = this.runtime.getService( - ServiceType.BROWSER - ); - if (!browserService) { - throw new Error("Browser service not found"); - } - - const { title, description: summary } = - await browserService.getPageContent(url, this.runtime); - - attachments.push({ - id: `webpage-${Date.now()}`, - url: url, - title: title || "Web Page", - source: "Web", - description: summary, - text: summary, - }); - } - } - - return { processedContent, attachments }; - } - - private _getNormalizedUserId(id: string): string { - return id.toString().replace(/[^0-9]/g, ""); - } - - private _isTeamMember(userId: string): boolean { - const teamConfig = this.runtime.character.clientConfig?.discord; - if (!teamConfig?.isPartOfTeam || !teamConfig.teamAgentIds) return false; - - const normalizedUserId = this._getNormalizedUserId(userId); - - const isTeamMember = teamConfig.teamAgentIds.some( - (teamId) => this._getNormalizedUserId(teamId) === normalizedUserId - ); - - return isTeamMember; - } - - private _isTeamLeader(): boolean { - return ( - this.client.user?.id === - this.runtime.character.clientConfig?.discord?.teamLeaderId - ); - } - - private _isTeamCoordinationRequest(content: string): boolean { - const contentLower = content.toLowerCase(); - return TEAM_COORDINATION.KEYWORDS?.some((keyword) => - contentLower.includes(keyword.toLowerCase()) - ); - } - - private _isRelevantToTeamMember( - content: string, - channelId: string, - lastAgentMemory: Memory | null = null - ): boolean { - const teamConfig = this.runtime.character.clientConfig?.discord; - - if (this._isTeamLeader() && lastAgentMemory?.content.text) { - const timeSinceLastMessage = Date.now() - lastAgentMemory.createdAt; - if (timeSinceLastMessage > MESSAGE_CONSTANTS.INTEREST_DECAY_TIME) { - return false; // Memory too old, not relevant - } - - const similarity = cosineSimilarity( - content.toLowerCase(), - lastAgentMemory.content.text.toLowerCase() - ); - - return ( - similarity >= - MESSAGE_CONSTANTS.DEFAULT_SIMILARITY_THRESHOLD_FOLLOW_UPS - ); - } - - // If no keywords defined, only leader maintains conversation - if (!teamConfig?.teamMemberInterestKeywords) { - return false; - } - - return teamConfig.teamMemberInterestKeywords.some((keyword) => - content.toLowerCase().includes(keyword.toLowerCase()) - ); - } - - private async _analyzeContextSimilarity( - currentMessage: string, - previousContext?: MessageContext, - agentLastMessage?: string - ): Promise { - if (!previousContext) return 1; // No previous context to compare against - - // If more than 5 minutes have passed, reduce similarity weight - const timeDiff = Date.now() - previousContext.timestamp; - const timeWeight = Math.max(0, 1 - timeDiff / (5 * 60 * 1000)); // 5 minutes threshold - - // Calculate content similarity - const similarity = cosineSimilarity( - currentMessage.toLowerCase(), - previousContext.content.toLowerCase(), - agentLastMessage?.toLowerCase() - ); - - // Weight the similarity by time factor - const weightedSimilarity = similarity * timeWeight; - - return weightedSimilarity; - } - - private async _shouldRespondBasedOnContext( - message: DiscordMessage, - channelState: InterestChannels[string] - ): Promise { - // Always respond if directly mentioned - if (this._isMessageForMe(message)) return true; - - // If we're not the current handler, don't respond - if (channelState?.currentHandler !== this.client.user?.id) return false; - - // Check if we have messages to compare - if (!channelState.messages?.length) return false; - - // Get last user message (not from the bot) - const lastUserMessage = [...channelState.messages].reverse().find( - (m, index) => - index > 0 && // Skip first message (current) - m.userId !== this.runtime.agentId - ); - - if (!lastUserMessage) return false; - - const lastSelfMemories = await this.runtime.messageManager.getMemories({ - roomId: stringToUuid( - message.channel.id + "-" + this.runtime.agentId - ), - unique: false, - count: 5, - }); - - const lastSelfSortedMemories = lastSelfMemories - ?.filter((m) => m.userId === this.runtime.agentId) - .sort((a, b) => (b.createdAt || 0) - (a.createdAt || 0)); - - // Calculate context similarity - const contextSimilarity = await this._analyzeContextSimilarity( - message.content, - { - content: lastUserMessage.content.text || "", - timestamp: Date.now(), - }, - lastSelfSortedMemories?.[0]?.content?.text - ); - - const similarityThreshold = - this.runtime.character.clientConfig?.discord - ?.messageSimilarityThreshold || - channelState.contextSimilarityThreshold || - MESSAGE_CONSTANTS.DEFAULT_SIMILARITY_THRESHOLD; - - return contextSimilarity >= similarityThreshold; - } - - private _checkInterest(channelId: string): boolean { - const channelState = this.interestChannels[channelId]; - if (!channelState) return false; - - const lastMessage = - channelState.messages[channelState.messages.length - 1]; - // If it's been more than 5 minutes since last message, reduce interest - const timeSinceLastMessage = Date.now() - channelState.lastMessageSent; - - if (timeSinceLastMessage > MESSAGE_CONSTANTS.INTEREST_DECAY_TIME) { - delete this.interestChannels[channelId]; - return false; - } else if ( - timeSinceLastMessage > MESSAGE_CONSTANTS.PARTIAL_INTEREST_DECAY - ) { - // Require stronger relevance for continued interest - return this._isRelevantToTeamMember( - lastMessage.content.text || "", - channelId - ); - } - - // If team leader and messages exist, check for topic changes and team member responses - if (this._isTeamLeader() && channelState.messages.length > 0) { - // If leader's keywords don't match and another team member has responded, drop interest - if ( - !this._isRelevantToTeamMember( - lastMessage.content.text || "", - channelId - ) - ) { - const recentTeamResponses = channelState.messages - .slice(-3) - .some( - (m) => - m.userId !== this.client.user?.id && - this._isTeamMember(m.userId) - ); - - if (recentTeamResponses) { - delete this.interestChannels[channelId]; - return false; - } - } - } - - // Check if conversation has shifted to a new topic - if (channelState.messages.length > 0) { - const recentMessages = channelState.messages.slice( - -MESSAGE_CONSTANTS.RECENT_MESSAGE_COUNT - ); - const differentUsers = new Set(recentMessages.map((m) => m.userId)) - .size; - - // If multiple users are talking and we're not involved, reduce interest - if ( - differentUsers > 1 && - !recentMessages.some((m) => m.userId === this.client.user?.id) - ) { - delete this.interestChannels[channelId]; - return false; - } - } - - return true; - } - - private async _shouldIgnore(message: DiscordMessage): Promise { - // if the message is from us, ignore - if (message.author.id === this.client.user?.id) return true; - - // Honor mentions-only mode - if ( - this.runtime.character.clientConfig?.discord - ?.shouldRespondOnlyToMentions - ) { - return !this._isMessageForMe(message); - } - - // Team-based ignore logic - if (this.runtime.character.clientConfig?.discord?.isPartOfTeam) { - const authorId = this._getNormalizedUserId(message.author.id); - - if (this._isTeamLeader()) { - if (this._isTeamCoordinationRequest(message.content)) { - return false; - } - // Ignore if message is only about team member interests and not directed to leader - if (!this._isMessageForMe(message)) { - const otherMemberInterests = - this.runtime.character.clientConfig?.discord - ?.teamMemberInterestKeywords || []; - const hasOtherInterests = otherMemberInterests.some( - (keyword) => - message.content - .toLowerCase() - .includes(keyword.toLowerCase()) - ); - if (hasOtherInterests) { - return true; - } - } - } else if (this._isTeamCoordinationRequest(message.content)) { - const randomDelay = - Math.floor( - Math.random() * - (TIMING_CONSTANTS.TEAM_MEMBER_DELAY_MAX - - TIMING_CONSTANTS.TEAM_MEMBER_DELAY_MIN) - ) + TIMING_CONSTANTS.TEAM_MEMBER_DELAY_MIN; // 1-3 second random delay - await new Promise((resolve) => - setTimeout(resolve, randomDelay) - ); - return false; - } - - if (this._isTeamMember(authorId)) { - if (!this._isMessageForMe(message)) { - // If message contains our interests, don't ignore - if ( - this._isRelevantToTeamMember( - message.content, - message.channelId - ) - ) { - return false; - } - return true; - } - } - - // Check if we're in an active conversation based on context - const channelState = this.interestChannels[message.channelId]; - - if (channelState?.currentHandler) { - // If we're the current handler, check context - if (channelState.currentHandler === this.client.user?.id) { - //If it's our keywords, bypass context check - if ( - this._isRelevantToTeamMember( - message.content, - message.channelId - ) - ) { - return false; - } - - const shouldRespondContext = - await this._shouldRespondBasedOnContext( - message, - channelState - ); - - // If context is different, ignore. If similar, don't ignore - return !shouldRespondContext; - } - - // If another team member is handling and we're not mentioned or coordinating - else if ( - !this._isMessageForMe(message) && - !this._isTeamCoordinationRequest(message.content) - ) { - return true; - } - } - } - - let messageContent = message.content.toLowerCase(); - - // Replace the bot's @ping with the character name - const botMention = `<@!?${this.client.user?.id}>`; - messageContent = messageContent.replace( - new RegExp(botMention, "gi"), - this.runtime.character.name.toLowerCase() - ); - - // Replace the bot's username with the character name - const botUsername = this.client.user?.username.toLowerCase(); - messageContent = messageContent.replace( - new RegExp(`\\b${botUsername}\\b`, "g"), - this.runtime.character.name.toLowerCase() - ); - - // strip all special characters - messageContent = messageContent.replace(/[^a-zA-Z0-9\s]/g, ""); - - // short responses where eliza should stop talking and disengage unless mentioned again - if ( - messageContent.length < MESSAGE_LENGTH_THRESHOLDS.LOSE_INTEREST && - LOSE_INTEREST_WORDS.some((word) => messageContent.includes(word)) - ) { - delete this.interestChannels[message.channelId]; - return true; - } - - // If we're not interested in the channel and it's a short message, ignore it - if ( - messageContent.length < MESSAGE_LENGTH_THRESHOLDS.SHORT_MESSAGE && - !this.interestChannels[message.channelId] - ) { - return true; - } - - const targetedPhrases = [ - this.runtime.character.name + " stop responding", - this.runtime.character.name + " stop talking", - this.runtime.character.name + " shut up", - this.runtime.character.name + " stfu", - "stop talking" + this.runtime.character.name, - this.runtime.character.name + " stop talking", - "shut up " + this.runtime.character.name, - this.runtime.character.name + " shut up", - "stfu " + this.runtime.character.name, - this.runtime.character.name + " stfu", - "chill" + this.runtime.character.name, - this.runtime.character.name + " chill", - ]; - - // lose interest if pinged and told to stop responding - if (targetedPhrases.some((phrase) => messageContent.includes(phrase))) { - delete this.interestChannels[message.channelId]; - return true; - } - - // if the message is short, ignore but maintain interest - if ( - !this.interestChannels[message.channelId] && - messageContent.length < MESSAGE_LENGTH_THRESHOLDS.VERY_SHORT_MESSAGE - ) { - return true; - } - - if ( - message.content.length < - MESSAGE_LENGTH_THRESHOLDS.IGNORE_RESPONSE && - IGNORE_RESPONSE_WORDS.some((word) => - message.content.toLowerCase().includes(word) - ) - ) { - return true; - } - return false; - } - - private async _shouldRespond( - message: DiscordMessage, - state: State - ): Promise { - if (message.author.id === this.client.user?.id) return false; - // if (message.author.bot) return false; - - // Honor mentions-only mode - if ( - this.runtime.character.clientConfig?.discord - ?.shouldRespondOnlyToMentions - ) { - return this._isMessageForMe(message); - } - - const channelState = this.interestChannels[message.channelId]; - - // Check if team member has direct interest first - if ( - this.runtime.character.clientConfig?.discord?.isPartOfTeam && - !this._isTeamLeader() && - this._isRelevantToTeamMember(message.content, message.channelId) - ) { - return true; - } - - try { - // Team-based response logic - if (this.runtime.character.clientConfig?.discord?.isPartOfTeam) { - // Team leader coordination - if ( - this._isTeamLeader() && - this._isTeamCoordinationRequest(message.content) - ) { - return true; - } - - if ( - !this._isTeamLeader() && - this._isRelevantToTeamMember( - message.content, - message.channelId - ) - ) { - // Add small delay for non-leader responses - await new Promise((resolve) => - setTimeout(resolve, TIMING_CONSTANTS.TEAM_MEMBER_DELAY) - ); //1.5 second delay - - // If leader has responded in last few seconds, reduce chance of responding - - if (channelState?.messages?.length) { - const recentMessages = channelState.messages.slice( - -MESSAGE_CONSTANTS.RECENT_MESSAGE_COUNT - ); - const leaderResponded = recentMessages.some( - (m) => - m.userId === - this.runtime.character.clientConfig?.discord - ?.teamLeaderId && - Date.now() - channelState.lastMessageSent < 3000 - ); - - if (leaderResponded) { - // 50% chance to respond if leader just did - return ( - Math.random() > RESPONSE_CHANCES.AFTER_LEADER - ); - } - } - - return true; - } - - // If I'm the leader but message doesn't match my keywords, add delay and check for team responses - if ( - this._isTeamLeader() && - !this._isRelevantToTeamMember( - message.content, - message.channelId - ) - ) { - const randomDelay = - Math.floor( - Math.random() * - (TIMING_CONSTANTS.LEADER_DELAY_MAX - - TIMING_CONSTANTS.LEADER_DELAY_MIN) - ) + TIMING_CONSTANTS.LEADER_DELAY_MIN; // 2-4 second random delay - await new Promise((resolve) => - setTimeout(resolve, randomDelay) - ); - - // After delay, check if another team member has already responded - if (channelState?.messages?.length) { - const recentResponses = channelState.messages.slice( - -MESSAGE_CONSTANTS.RECENT_MESSAGE_COUNT - ); - const otherTeamMemberResponded = recentResponses.some( - (m) => - m.userId !== this.client.user?.id && - this._isTeamMember(m.userId) - ); - - if (otherTeamMemberResponded) { - return false; - } - } - } - - // Update current handler if we're mentioned - if (this._isMessageForMe(message)) { - const channelState = - this.interestChannels[message.channelId]; - if (channelState) { - channelState.currentHandler = this.client.user?.id; - channelState.lastMessageSent = Date.now(); - } - return true; - } - - // Don't respond if another teammate is handling the conversation - if (channelState?.currentHandler) { - if ( - channelState.currentHandler !== this.client.user?.id && - this._isTeamMember(channelState.currentHandler) - ) { - return false; - } - } - - // Natural conversation cadence - if (!this._isMessageForMe(message) && channelState) { - // Count our recent messages - const recentMessages = channelState.messages.slice( - -MESSAGE_CONSTANTS.CHAT_HISTORY_COUNT - ); - const ourMessageCount = recentMessages.filter( - (m) => m.userId === this.client.user?.id - ).length; - - // Reduce responses if we've been talking a lot - if (ourMessageCount > 2) { - // Exponentially decrease chance to respond - const responseChance = Math.pow( - 0.5, - ourMessageCount - 2 - ); - if (Math.random() > responseChance) { - return false; - } - } - } - } - } catch (error) { - elizaLogger.error("Error in _shouldRespond team processing:", { - error, - agentId: this.runtime.agentId, - channelId: message.channelId, - }); - } - - // Otherwise do context check - if (channelState?.previousContext) { - const shouldRespondContext = - await this._shouldRespondBasedOnContext(message, channelState); - if (!shouldRespondContext) { - delete this.interestChannels[message.channelId]; - return false; - } - } - - if (message.mentions.has(this.client.user?.id as string)) return true; - - const guild = message.guild; - const member = guild?.members.cache.get(this.client.user?.id as string); - const nickname = member?.nickname; - - if ( - message.content - .toLowerCase() - .includes(this.client.user?.username.toLowerCase() as string) || - message.content - .toLowerCase() - .includes(this.client.user?.tag.toLowerCase() as string) || - (nickname && - message.content.toLowerCase().includes(nickname.toLowerCase())) - ) { - return true; - } - - if (!message.guild) { - return true; - } - - // If none of the above conditions are met, use the generateText to decide - const shouldRespondContext = composeContext({ - state, - template: - this.runtime.character.templates - ?.discordShouldRespondTemplate || - this.runtime.character.templates?.shouldRespondTemplate || - composeRandomUser(discordShouldRespondTemplate, 2), - }); - - const response = await generateShouldRespond({ - runtime: this.runtime, - context: shouldRespondContext, - modelClass: ModelClass.SMALL, - }); - - if (response === "RESPOND") { - if (channelState) { - channelState.previousContext = { - content: message.content, - timestamp: Date.now(), - }; - } - - return true; - } else if (response === "IGNORE") { - return false; - } else if (response === "STOP") { - delete this.interestChannels[message.channelId]; - return false; - } else { - console.error( - "Invalid response from response generateText:", - response - ); - return false; - } - } - - private async _generateResponse( - message: Memory, - state: State, - context: string - ): Promise { - const { userId, roomId } = message; - - const response = await generateMessageResponse({ - runtime: this.runtime, - context, - modelClass: ModelClass.LARGE, - }); - - if (!response) { - console.error("No response from generateMessageResponse"); - return; - } - - await this.runtime.databaseAdapter.log({ - body: { message, context, response }, - userId: userId, - roomId, - type: "response", - }); - - return response; - } - - async fetchBotName(botToken: string) { - const url = "https://discord.com/api/v10/users/@me"; - - const response = await fetch(url, { - method: "GET", - headers: { - Authorization: `Bot ${botToken}`, - }, - }); - - if (!response.ok) { - throw new Error( - `Error fetching bot details: ${response.statusText}` - ); - } - - const data = await response.json(); - return data.username; - } - - /** - * Simulate discord typing while generating a response; - * returns a function to interrupt the typing loop - * - * @param message - */ - private simulateTyping(message: DiscordMessage) { - let typing = true; - - const typingLoop = async () => { - while (typing) { - await message.channel.sendTyping(); - await new Promise((resolve) => setTimeout(resolve, 3000)); - } - }; - - typingLoop(); - - return function stopTyping() { - typing = false; - }; - } -} diff --git a/packages/client-discord/src/providers/channelState.ts b/packages/client-discord/src/providers/channelState.ts deleted file mode 100644 index 08a24f6d59b6d..0000000000000 --- a/packages/client-discord/src/providers/channelState.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { - ChannelType, - type Message as DiscordMessage, - type TextChannel, -} from "discord.js"; -import type { IAgentRuntime, Memory, Provider, State } from "@elizaos/core"; - -const channelStateProvider: Provider = { - get: async (runtime: IAgentRuntime, message: Memory, state?: State) => { - const discordMessage = - (state?.discordMessage as DiscordMessage) || - (state?.discordChannel as DiscordMessage); - if (!discordMessage) { - return ""; - } - - const guild = discordMessage?.guild; - const agentName = state?.agentName || "The agent"; - const senderName = state?.senderName || "someone"; - - if (!guild) { - return ( - agentName + - " is currently in a direct message conversation with " + - senderName - ); - } - - const serverName = guild.name; // The name of the server - const guildId = guild.id; // The ID of the guild - const channel = discordMessage.channel; - - if (!channel) { - console.log("channel is null"); - return ""; - } - - let response = - agentName + - " is currently having a conversation in the channel `@" + - channel.id + - " in the server `" + - serverName + - "` (@" + - guildId + - ")"; - if ( - channel.type === ChannelType.GuildText && - (channel as TextChannel).topic - ) { - // Check if the channel is a text channel - response += - "\nThe topic of the channel is: " + - (channel as TextChannel).topic; - } - return response; - }, -}; - -export default channelStateProvider; diff --git a/packages/client-discord/src/providers/voiceState.ts b/packages/client-discord/src/providers/voiceState.ts deleted file mode 100644 index 283bcb14babed..0000000000000 --- a/packages/client-discord/src/providers/voiceState.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { getVoiceConnection } from "@discordjs/voice"; -import { ChannelType, type Message as DiscordMessage } from "discord.js"; -import type { IAgentRuntime, Memory, Provider, State } from "@elizaos/core"; - -const voiceStateProvider: Provider = { - get: async (runtime: IAgentRuntime, message: Memory, state?: State) => { - // Voice doesn't get a discord message, so we need to use the channel for guild data - const discordMessage = (state?.discordMessage || - state.discordChannel) as DiscordMessage; - const connection = getVoiceConnection( - (discordMessage as DiscordMessage)?.guild?.id as string - ); - const agentName = state?.agentName || "The agent"; - if (!connection) { - return agentName + " is not currently in a voice channel"; - } - - const channel = ( - (state?.discordMessage as DiscordMessage) || - (state.discordChannel as DiscordMessage) - )?.guild?.channels?.cache?.get( - connection.joinConfig.channelId as string - ); - - if (!channel || channel.type !== ChannelType.GuildVoice) { - return agentName + " is in an invalid voice channel"; - } - - return `${agentName} is currently in the voice channel: ${channel.name} (ID: ${channel.id})`; - }, -}; - -export default voiceStateProvider; diff --git a/packages/client-discord/src/templates.ts b/packages/client-discord/src/templates.ts deleted file mode 100644 index 898285470e170..0000000000000 --- a/packages/client-discord/src/templates.ts +++ /dev/null @@ -1,195 +0,0 @@ -import { messageCompletionFooter, shouldRespondFooter } from "@elizaos/core"; - -export const discordShouldRespondTemplate = - `# Task: Decide if {{agentName}} should respond. -About {{agentName}}: -{{bio}} - -# INSTRUCTIONS: Determine if {{agentName}} should respond to the message and participate in the conversation. Do not comment. Just respond with "RESPOND" or "IGNORE" or "STOP". - -# RESPONSE EXAMPLES -{{user1}}: I just saw a really great movie -{{user2}}: Oh? Which movie? -Result: [IGNORE] - -{{agentName}}: Oh, this is my favorite scene -{{user1}}: sick -{{user2}}: wait, why is it your favorite scene -Result: [RESPOND] - -{{user1}}: stfu bot -Result: [STOP] - -{{user1}}: Hey {{agent}}, can you help me with something -Result: [RESPOND] - -{{user1}}: {{agentName}} stfu plz -Result: [STOP] - -{{user1}}: i need help -{{agentName}}: how can I help you? -{{user1}}: no. i need help from someone else -Result: [IGNORE] - -{{user1}}: Hey {{agent}}, can I ask you a question -{{agentName}}: Sure, what is it -{{user1}}: can you ask claude to create a basic react module that demonstrates a counter -Result: [RESPOND] - -{{user1}}: {{agentName}} can you tell me a story -{{user1}}: about a girl named elara -{{agentName}}: Sure. -{{agentName}}: Once upon a time, in a quaint little village, there was a curious girl named Elara. -{{agentName}}: Elara was known for her adventurous spirit and her knack for finding beauty in the mundane. -{{user1}}: I'm loving it, keep going -Result: [RESPOND] - -{{user1}}: {{agentName}} stop responding plz -Result: [STOP] - -{{user1}}: okay, i want to test something. can you say marco? -{{agentName}}: marco -{{user1}}: great. okay, now do it again -Result: [RESPOND] - -Response options are [RESPOND], [IGNORE] and [STOP]. - -{{agentName}} is in a room with other users and is very worried about being annoying and saying too much. -Respond with [RESPOND] to messages that are directed at {{agentName}}, or participate in conversations that are interesting or relevant to their background. -If a message is not interesting or relevant, respond with [IGNORE] -Unless directly responding to a user, respond with [IGNORE] to messages that are very short or do not contain much information. -If a user asks {{agentName}} to be quiet, respond with [STOP] -If {{agentName}} concludes a conversation and isn't part of the conversation anymore, respond with [STOP] - -IMPORTANT: {{agentName}} is particularly sensitive about being annoying, so if there is any doubt, it is better to respond with [IGNORE]. -If {{agentName}} is conversing with a user and they have not asked to stop, it is better to respond with [RESPOND]. - -{{recentMessages}} - -# INSTRUCTIONS: Choose the option that best describes {{agentName}}'s response to the last message. Ignore messages if they are addressed to someone else. -` + shouldRespondFooter; - -export const discordVoiceHandlerTemplate = - `# Task: Generate conversational voice dialog for {{agentName}}. -About {{agentName}}: -{{bio}} - -# Attachments -{{attachments}} - -# Capabilities -Note that {{agentName}} is capable of reading/seeing/hearing various forms of media, including images, videos, audio, plaintext and PDFs. Recent attachments have been included above under the "Attachments" section. - -{{actions}} - -{{messageDirections}} - -{{recentMessages}} - -# Instructions: Write the next message for {{agentName}}. Include an optional action if appropriate. {{actionNames}} -` + messageCompletionFooter; - -export const discordMessageHandlerTemplate = - // {{goals}} - `# Action Examples -{{actionExamples}} -(Action examples are for reference only. Do not use the information from them in your response.) - -# Knowledge -{{knowledge}} - -# Task: Generate dialog and actions for the character {{agentName}}. -About {{agentName}}: -{{bio}} -{{lore}} - -Examples of {{agentName}}'s dialog and actions: -{{characterMessageExamples}} - -{{providers}} - -{{attachments}} - -{{actions}} - -# Capabilities -Note that {{agentName}} is capable of reading/seeing/hearing various forms of media, including images, videos, audio, plaintext and PDFs. Recent attachments have been included above under the "Attachments" section. - -{{messageDirections}} - -{{recentMessages}} - -# Instructions: Write the next message for {{agentName}}. Include an action, if appropriate. {{actionNames}} -` + messageCompletionFooter; - -export const discordAutoPostTemplate = - `# Action Examples -NONE: Respond but perform no additional action. This is the default if the agent is speaking and not doing anything additional. - -# Task: Generate an engaging community message as {{agentName}}. -About {{agentName}}: -{{bio}} -{{lore}} - -Examples of {{agentName}}'s dialog and actions: -{{characterMessageExamples}} - -{{messageDirections}} - -# Recent Chat History: -{{recentMessages}} - -# Instructions: Write a natural, engaging message to restart community conversation. Focus on: -- Community engagement -- Educational topics -- General discusions -- Support queries -- Keep message warm and inviting -- Maximum 3 lines -- Use 1-2 emojis maximum -- Avoid financial advice -- Stay within known facts -- No team member mentions -- Be hyped, not repetitive -- Be natural, act like a human, connect with the community -- Don't sound so robotic like -- Randomly grab the most recent 5 messages for some context. Validate the context randomly and use that as a reference point for your next message, but not always, only when relevant. -- If the recent messages are mostly from {{agentName}}, make sure to create conversation starters, given there is no messages from others to reference. -- DO NOT REPEAT THE SAME thing that you just said from your recent chat history, start the message different each time, and be organic, non reptitive. - -# Instructions: Write the next message for {{agentName}}. Include the "NONE" action only, as the only valid action for auto-posts is "NONE". -` + messageCompletionFooter; - -export const discordAnnouncementHypeTemplate = - `# Action Examples -NONE: Respond but perform no additional action. This is the default if the agent is speaking and not doing anything additional. - -# Task: Generate announcement hype message as {{agentName}}. -About {{agentName}}: -{{bio}} -{{lore}} - -Examples of {{agentName}}'s dialog and actions: -{{characterMessageExamples}} - -{{messageDirections}} - -# Announcement Content: -{{announcementContent}} - -# Instructions: Write an exciting message to bring attention to the announcement. Requirements: -- Reference the announcement channel using <#{{announcementChannelId}}> -- Reference the announcement content to get information about the announcement to use where appropriate to make the message dynamic vs a static post -- Create genuine excitement -- Encourage community participation -- If there are links like Twitter/X posts, encourage users to like/retweet/comment to spread awarenress, but directly say that, wrap that into the post so its natural. -- Stay within announced facts only -- No additional promises or assumptions -- No team member mentions -- Start the message differently each time. Don't start with the same word like "hey", "hey hey", etc. be dynamic -- Address everyone, not as a direct reply to whoever made the announcement or wrote it, but you can reference them -- Maximum 3-7 lines formatted nicely if needed, based on the context of the announcement -- Use 1-2 emojis maximum - -# Instructions: Write the next message for {{agentName}}. Include the "NONE" action only, as no other actions are appropriate for announcement hype. -` + messageCompletionFooter; \ No newline at end of file diff --git a/packages/client-discord/src/utils.ts b/packages/client-discord/src/utils.ts deleted file mode 100644 index 7a1603f8f0743..0000000000000 --- a/packages/client-discord/src/utils.ts +++ /dev/null @@ -1,314 +0,0 @@ -import { - type IAgentRuntime, - ModelClass, - elizaLogger, - generateText, - trimTokens, - parseJSONObjectFromText, -} from "@elizaos/core"; -import { - ChannelType, - type Message as DiscordMessage, - PermissionsBitField, - type TextChannel, - ThreadChannel, -} from "discord.js"; - -export function getWavHeader( - audioLength: number, - sampleRate: number, - channelCount = 1, - bitsPerSample = 16 -): Buffer { - const wavHeader = Buffer.alloc(44); - wavHeader.write("RIFF", 0); - wavHeader.writeUInt32LE(36 + audioLength, 4); // Length of entire file in bytes minus 8 - wavHeader.write("WAVE", 8); - wavHeader.write("fmt ", 12); - wavHeader.writeUInt32LE(16, 16); // Length of format data - wavHeader.writeUInt16LE(1, 20); // Type of format (1 is PCM) - wavHeader.writeUInt16LE(channelCount, 22); // Number of channels - wavHeader.writeUInt32LE(sampleRate, 24); // Sample rate - wavHeader.writeUInt32LE( - (sampleRate * bitsPerSample * channelCount) / 8, - 28 - ); // Byte rate - wavHeader.writeUInt16LE((bitsPerSample * channelCount) / 8, 32); // Block align ((BitsPerSample * Channels) / 8) - wavHeader.writeUInt16LE(bitsPerSample, 34); // Bits per sample - wavHeader.write("data", 36); // Data chunk header - wavHeader.writeUInt32LE(audioLength, 40); // Data chunk size - return wavHeader; -} - -const MAX_MESSAGE_LENGTH = 1900; - -export async function generateSummary( - runtime: IAgentRuntime, - text: string -): Promise<{ title: string; description: string }> { - // make sure text is under 128k characters - text = await trimTokens(text, 100000, runtime); - - const prompt = `Please generate a concise summary for the following text: - - Text: """ - ${text} - """ - - Respond with a JSON object in the following format: - \`\`\`json - { - "title": "Generated Title", - "summary": "Generated summary and/or description of the text" - } - \`\`\``; - - const response = await generateText({ - runtime, - context: prompt, - modelClass: ModelClass.SMALL, - }); - - const parsedResponse = parseJSONObjectFromText(response); - - if (parsedResponse?.title && parsedResponse?.summary) { - return { - title: parsedResponse.title, - description: parsedResponse.summary, - }; - } - - return { - title: "", - description: "", - }; -} - -export async function sendMessageInChunks( - channel: TextChannel, - content: string, - inReplyTo: string, - files: any[] -): Promise { - const sentMessages: DiscordMessage[] = []; - const messages = splitMessage(content); - try { - for (let i = 0; i < messages.length; i++) { - const message = messages[i]; - if ( - message.trim().length > 0 || - (i === messages.length - 1 && files && files.length > 0) - ) { - const options: any = { - content: message.trim(), - }; - - // if (i === 0 && inReplyTo) { - // // Reply to the specified message for the first chunk - // options.reply = { - // messageReference: inReplyTo, - // }; - // } - - if (i === messages.length - 1 && files && files.length > 0) { - // Attach files to the last message chunk - options.files = files; - } - - const m = await channel.send(options); - sentMessages.push(m); - } - } - } catch (error) { - elizaLogger.error("Error sending message:", error); - } - - return sentMessages; -} - -function splitMessage(content: string): string[] { - const messages: string[] = []; - let currentMessage = ""; - - const rawLines = content?.split("\n") || []; - // split all lines into MAX_MESSAGE_LENGTH chunks so any long lines are split - const lines = rawLines.flatMap((line) => { - const chunks = []; - while (line.length > MAX_MESSAGE_LENGTH) { - chunks.push(line.slice(0, MAX_MESSAGE_LENGTH)); - line = line.slice(MAX_MESSAGE_LENGTH); - } - chunks.push(line); - return chunks; - }); - - for (const line of lines) { - if (currentMessage.length + line.length + 1 > MAX_MESSAGE_LENGTH) { - messages.push(currentMessage.trim()); - currentMessage = ""; - } - currentMessage += line + "\n"; - } - - if (currentMessage.trim().length > 0) { - messages.push(currentMessage.trim()); - } - - return messages; -} - -export function canSendMessage(channel) { - // validate input - if (!channel) { - return { - canSend: false, - reason: "No channel given", - }; - } - // if it is a DM channel, we can always send messages - if (channel.type === ChannelType.DM) { - return { - canSend: true, - reason: null, - }; - } - const botMember = channel.guild?.members.cache.get(channel.client.user.id); - - if (!botMember) { - return { - canSend: false, - reason: "Not a guild channel or bot member not found", - }; - } - - // Required permissions for sending messages - const requiredPermissions = [ - PermissionsBitField.Flags.ViewChannel, - PermissionsBitField.Flags.SendMessages, - PermissionsBitField.Flags.ReadMessageHistory, - ]; - - // Add thread-specific permission if it's a thread - if (channel instanceof ThreadChannel) { - requiredPermissions.push( - PermissionsBitField.Flags.SendMessagesInThreads - ); - } - - // Check permissions - const permissions = channel.permissionsFor(botMember); - - if (!permissions) { - return { - canSend: false, - reason: "Could not retrieve permissions", - }; - } - - // Check each required permission - const missingPermissions = requiredPermissions.filter( - (perm) => !permissions.has(perm) - ); - - return { - canSend: missingPermissions.length === 0, - missingPermissions: missingPermissions, - reason: - missingPermissions.length > 0 - ? `Missing permissions: ${missingPermissions - .map((p) => String(p)) - .join(", ")}` - : null, - }; -} - -export function cosineSimilarity( - text1: string, - text2: string, - text3?: string -): number { - const preprocessText = (text: string) => - text - .toLowerCase() - .replace(/[^\w\s'_-]/g, " ") - .replace(/\s+/g, " ") - .trim(); - - const getWords = (text: string) => { - return text.split(" ").filter((word) => word.length > 1); - }; - - const words1 = getWords(preprocessText(text1)); - const words2 = getWords(preprocessText(text2)); - const words3 = text3 ? getWords(preprocessText(text3)) : []; - - const freq1: { [key: string]: number } = {}; - const freq2: { [key: string]: number } = {}; - const freq3: { [key: string]: number } = {}; - - words1.forEach((word) => (freq1[word] = (freq1[word] || 0) + 1)); - words2.forEach((word) => (freq2[word] = (freq2[word] || 0) + 1)); - if (words3.length) { - words3.forEach((word) => (freq3[word] = (freq3[word] || 0) + 1)); - } - - const uniqueWords = new Set([ - ...Object.keys(freq1), - ...Object.keys(freq2), - ...(words3.length ? Object.keys(freq3) : []), - ]); - - let dotProduct = 0; - let magnitude1 = 0; - let magnitude2 = 0; - let magnitude3 = 0; - - uniqueWords.forEach((word) => { - const val1 = freq1[word] || 0; - const val2 = freq2[word] || 0; - const val3 = freq3[word] || 0; - - if (words3.length) { - // For three-way, calculate pairwise similarities - const sim12 = val1 * val2; - const sim23 = val2 * val3; - const sim13 = val1 * val3; - - // Take maximum similarity between any pair - dotProduct += Math.max(sim12, sim23, sim13); - } else { - dotProduct += val1 * val2; - } - - magnitude1 += val1 * val1; - magnitude2 += val2 * val2; - if (words3.length) { - magnitude3 += val3 * val3; - } - }); - - magnitude1 = Math.sqrt(magnitude1); - magnitude2 = Math.sqrt(magnitude2); - magnitude3 = words3.length ? Math.sqrt(magnitude3) : 1; - - if ( - magnitude1 === 0 || - magnitude2 === 0 || - (words3.length && magnitude3 === 0) - ) - return 0; - - // For two texts, use original calculation - if (!words3.length) { - return dotProduct / (magnitude1 * magnitude2); - } - - // For three texts, use max magnitude pair to maintain scale - const maxMagnitude = Math.max( - magnitude1 * magnitude2, - magnitude2 * magnitude3, - magnitude1 * magnitude3 - ); - - return dotProduct / maxMagnitude; -} diff --git a/packages/client-discord/src/voice.ts b/packages/client-discord/src/voice.ts deleted file mode 100644 index 715826e128ba5..0000000000000 --- a/packages/client-discord/src/voice.ts +++ /dev/null @@ -1,1102 +0,0 @@ -import { - type Content, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - ServiceType, - type State, - type UUID, - composeContext, - composeRandomUser, - elizaLogger, - getEmbeddingZeroVector, - generateMessageResponse, - stringToUuid, - generateShouldRespond, - type ITranscriptionService, - type ISpeechService, -} from "@elizaos/core"; -import { - type AudioPlayer, - type AudioReceiveStream, - NoSubscriberBehavior, - StreamType, - type VoiceConnection, - VoiceConnectionStatus, - createAudioPlayer, - createAudioResource, - getVoiceConnections, - joinVoiceChannel, - entersState, -} from "@discordjs/voice"; -import { - type BaseGuildVoiceChannel, - ChannelType, - type Client, - type Guild, - type GuildMember, - type VoiceChannel, - type VoiceState, -} from "discord.js"; -import EventEmitter from "events"; -import prism from "prism-media"; -import { type Readable, pipeline } from "stream"; -import type { DiscordClient } from "./index.ts"; -import { - discordShouldRespondTemplate, - discordVoiceHandlerTemplate, -} from "./templates.ts"; -import { getWavHeader } from "./utils.ts"; - -// These values are chosen for compatibility with picovoice components -const DECODE_FRAME_SIZE = 1024; -const DECODE_SAMPLE_RATE = 16000; - -export class AudioMonitor { - private readable: Readable; - private buffers: Buffer[] = []; - private maxSize: number; - private lastFlagged = -1; - private ended = false; - - constructor( - readable: Readable, - maxSize: number, - onStart: () => void, - callback: (buffer: Buffer) => void - ) { - this.readable = readable; - this.maxSize = maxSize; - this.readable.on("data", (chunk: Buffer) => { - //console.log('AudioMonitor got data'); - if (this.lastFlagged < 0) { - this.lastFlagged = this.buffers.length; - } - this.buffers.push(chunk); - const currentSize = this.buffers.reduce( - (acc, cur) => acc + cur.length, - 0 - ); - while (currentSize > this.maxSize) { - this.buffers.shift(); - this.lastFlagged--; - } - }); - this.readable.on("end", () => { - elizaLogger.log("AudioMonitor ended"); - this.ended = true; - if (this.lastFlagged < 0) return; - callback(this.getBufferFromStart()); - this.lastFlagged = -1; - }); - this.readable.on("speakingStopped", () => { - if (this.ended) return; - elizaLogger.log("Speaking stopped"); - if (this.lastFlagged < 0) return; - callback(this.getBufferFromStart()); - }); - this.readable.on("speakingStarted", () => { - if (this.ended) return; - onStart(); - elizaLogger.log("Speaking started"); - this.reset(); - }); - } - - stop() { - this.readable.removeAllListeners("data"); - this.readable.removeAllListeners("end"); - this.readable.removeAllListeners("speakingStopped"); - this.readable.removeAllListeners("speakingStarted"); - } - - isFlagged() { - return this.lastFlagged >= 0; - } - - getBufferFromFlag() { - if (this.lastFlagged < 0) { - return null; - } - const buffer = Buffer.concat(this.buffers.slice(this.lastFlagged)); - return buffer; - } - - getBufferFromStart() { - const buffer = Buffer.concat(this.buffers); - return buffer; - } - - reset() { - this.buffers = []; - this.lastFlagged = -1; - } - - isEnded() { - return this.ended; - } -} - -export class VoiceManager extends EventEmitter { - private processingVoice = false; - private transcriptionTimeout: NodeJS.Timeout | null = null; - private userStates: Map< - string, - { - buffers: Buffer[]; - totalLength: number; - lastActive: number; - transcriptionText: string; - } - > = new Map(); - private activeAudioPlayer: AudioPlayer | null = null; - private client: Client; - private runtime: IAgentRuntime; - private streams: Map = new Map(); - private connections: Map = new Map(); - private activeMonitors: Map< - string, - { channel: BaseGuildVoiceChannel; monitor: AudioMonitor } - > = new Map(); - - constructor(client: DiscordClient) { - super(); - this.client = client.client; - this.runtime = client.runtime; - } - - async handleVoiceStateUpdate(oldState: VoiceState, newState: VoiceState) { - const oldChannelId = oldState.channelId; - const newChannelId = newState.channelId; - const member = newState.member; - if (!member) return; - if (member.id === this.client.user?.id) { - return; - } - - // Ignore mute/unmute events - if (oldChannelId === newChannelId) { - return; - } - - // User leaving a channel where the bot is present - if (oldChannelId && this.connections.has(oldChannelId)) { - this.stopMonitoringMember(member.id); - } - - // User joining a channel where the bot is present - if (newChannelId && this.connections.has(newChannelId)) { - await this.monitorMember( - member, - newState.channel as BaseGuildVoiceChannel - ); - } - } - - async joinChannel(channel: BaseGuildVoiceChannel) { - const oldConnection = this.getVoiceConnection( - channel.guildId as string - ); - if (oldConnection) { - try { - oldConnection.destroy(); - // Remove all associated streams and monitors - this.streams.clear(); - this.activeMonitors.clear(); - } catch (error) { - console.error("Error leaving voice channel:", error); - } - } - - const connection = joinVoiceChannel({ - channelId: channel.id, - guildId: channel.guild.id, - adapterCreator: channel.guild.voiceAdapterCreator as any, - selfDeaf: false, - selfMute: false, - group: this.client.user.id, - }); - - try { - // Wait for either Ready or Signalling state - await Promise.race([ - entersState(connection, VoiceConnectionStatus.Ready, 20_000), - entersState( - connection, - VoiceConnectionStatus.Signalling, - 20_000 - ), - ]); - - // Log connection success - elizaLogger.log( - `Voice connection established in state: ${connection.state.status}` - ); - - // Set up ongoing state change monitoring - connection.on("stateChange", async (oldState, newState) => { - elizaLogger.log( - `Voice connection state changed from ${oldState.status} to ${newState.status}` - ); - - if (newState.status === VoiceConnectionStatus.Disconnected) { - elizaLogger.log("Handling disconnection..."); - - try { - // Try to reconnect if disconnected - await Promise.race([ - entersState( - connection, - VoiceConnectionStatus.Signalling, - 5_000 - ), - entersState( - connection, - VoiceConnectionStatus.Connecting, - 5_000 - ), - ]); - // Seems to be reconnecting to a new channel - elizaLogger.log("Reconnecting to channel..."); - } catch (e) { - // Seems to be a real disconnect, destroy and cleanup - elizaLogger.log( - "Disconnection confirmed - cleaning up..." + e - ); - connection.destroy(); - this.connections.delete(channel.id); - } - } else if ( - newState.status === VoiceConnectionStatus.Destroyed - ) { - this.connections.delete(channel.id); - } else if ( - !this.connections.has(channel.id) && - (newState.status === VoiceConnectionStatus.Ready || - newState.status === VoiceConnectionStatus.Signalling) - ) { - this.connections.set(channel.id, connection); - } - }); - - connection.on("error", (error) => { - elizaLogger.log("Voice connection error:", error); - // Don't immediately destroy - let the state change handler deal with it - elizaLogger.log( - "Connection error - will attempt to recover..." - ); - }); - - // Store the connection - this.connections.set(channel.id, connection); - - // Continue with voice state modifications - const me = channel.guild.members.me; - if (me?.voice && me.permissions.has("DeafenMembers")) { - try { - await me.voice.setDeaf(false); - await me.voice.setMute(false); - } catch (error) { - elizaLogger.log("Failed to modify voice state:", error); - // Continue even if this fails - } - } - - connection.receiver.speaking.on("start", async (userId: string) => { - let user = channel.members.get(userId); - if (!user) { - try { - user = await channel.guild.members.fetch(userId); - } catch (error) { - console.error("Failed to fetch user:", error); - } - } - if (user && !user?.user.bot) { - this.monitorMember(user as GuildMember, channel); - this.streams.get(userId)?.emit("speakingStarted"); - } - }); - - connection.receiver.speaking.on("end", async (userId: string) => { - const user = channel.members.get(userId); - if (!user?.user.bot) { - this.streams.get(userId)?.emit("speakingStopped"); - } - }); - } catch (error) { - elizaLogger.log("Failed to establish voice connection:", error); - connection.destroy(); - this.connections.delete(channel.id); - throw error; - } - } - - private getVoiceConnection(guildId: string) { - const connections = getVoiceConnections(this.client.user.id); - if (!connections) { - return; - } - const connection = [...connections.values()].find( - (connection) => connection.joinConfig.guildId === guildId - ); - return connection; - } - - private async monitorMember( - member: GuildMember, - channel: BaseGuildVoiceChannel - ) { - const userId = member?.id; - const userName = member?.user?.username; - const name = member?.user?.displayName; - const connection = this.getVoiceConnection(member?.guild?.id); - const receiveStream = connection?.receiver.subscribe(userId, { - autoDestroy: true, - emitClose: true, - }); - if (!receiveStream || receiveStream.readableLength === 0) { - return; - } - const opusDecoder = new prism.opus.Decoder({ - channels: 1, - rate: DECODE_SAMPLE_RATE, - frameSize: DECODE_FRAME_SIZE, - }); - const volumeBuffer: number[] = []; - const VOLUME_WINDOW_SIZE = 30; - const SPEAKING_THRESHOLD = 0.05; - opusDecoder.on("data", (pcmData: Buffer) => { - // Monitor the audio volume while the agent is speaking. - // If the average volume of the user's audio exceeds the defined threshold, it indicates active speaking. - // When active speaking is detected, stop the agent's current audio playback to avoid overlap. - - if (this.activeAudioPlayer) { - const samples = new Int16Array( - pcmData.buffer, - pcmData.byteOffset, - pcmData.length / 2 - ); - const maxAmplitude = Math.max(...samples.map(Math.abs)) / 32768; - volumeBuffer.push(maxAmplitude); - - if (volumeBuffer.length > VOLUME_WINDOW_SIZE) { - volumeBuffer.shift(); - } - const avgVolume = - volumeBuffer.reduce((sum, v) => sum + v, 0) / - VOLUME_WINDOW_SIZE; - - if (avgVolume > SPEAKING_THRESHOLD) { - volumeBuffer.length = 0; - this.cleanupAudioPlayer(this.activeAudioPlayer); - this.processingVoice = false; - } - } - }); - pipeline( - receiveStream as AudioReceiveStream, - opusDecoder as any, - (err: Error | null) => { - if (err) { - console.log(`Opus decoding pipeline error: ${err}`); - } - } - ); - this.streams.set(userId, opusDecoder); - this.connections.set(userId, connection as VoiceConnection); - opusDecoder.on("error", (err: any) => { - console.log(`Opus decoding error: ${err}`); - }); - const errorHandler = (err: any) => { - console.log(`Opus decoding error: ${err}`); - }; - const streamCloseHandler = () => { - console.log(`voice stream from ${member?.displayName} closed`); - this.streams.delete(userId); - this.connections.delete(userId); - }; - const closeHandler = () => { - console.log(`Opus decoder for ${member?.displayName} closed`); - opusDecoder.removeListener("error", errorHandler); - opusDecoder.removeListener("close", closeHandler); - receiveStream?.removeListener("close", streamCloseHandler); - }; - opusDecoder.on("error", errorHandler); - opusDecoder.on("close", closeHandler); - receiveStream?.on("close", streamCloseHandler); - - this.client.emit( - "userStream", - userId, - name, - userName, - channel, - opusDecoder - ); - } - - leaveChannel(channel: BaseGuildVoiceChannel) { - const connection = this.connections.get(channel.id); - if (connection) { - connection.destroy(); - this.connections.delete(channel.id); - } - - // Stop monitoring all members in this channel - for (const [memberId, monitorInfo] of this.activeMonitors) { - if ( - monitorInfo.channel.id === channel.id && - memberId !== this.client.user?.id - ) { - this.stopMonitoringMember(memberId); - } - } - - console.log(`Left voice channel: ${channel.name} (${channel.id})`); - } - - stopMonitoringMember(memberId: string) { - const monitorInfo = this.activeMonitors.get(memberId); - if (monitorInfo) { - monitorInfo.monitor.stop(); - this.activeMonitors.delete(memberId); - this.streams.delete(memberId); - console.log(`Stopped monitoring user ${memberId}`); - } - } - - async handleGuildCreate(guild: Guild) { - console.log(`Joined guild ${guild.name}`); - // this.scanGuild(guild); - } - - async debouncedProcessTranscription( - userId: UUID, - name: string, - userName: string, - channel: BaseGuildVoiceChannel - ) { - const DEBOUNCE_TRANSCRIPTION_THRESHOLD = 1500; // wait for 1.5 seconds of silence - - if (this.activeAudioPlayer?.state?.status === "idle") { - elizaLogger.log("Cleaning up idle audio player."); - this.cleanupAudioPlayer(this.activeAudioPlayer); - } - - if (this.activeAudioPlayer || this.processingVoice) { - const state = this.userStates.get(userId); - state.buffers.length = 0; - state.totalLength = 0; - return; - } - - if (this.transcriptionTimeout) { - clearTimeout(this.transcriptionTimeout); - } - - this.transcriptionTimeout = setTimeout(async () => { - this.processingVoice = true; - try { - await this.processTranscription( - userId, - channel.id, - channel, - name, - userName - ); - - // Clean all users' previous buffers - this.userStates.forEach((state, _) => { - state.buffers.length = 0; - state.totalLength = 0; - }); - } finally { - this.processingVoice = false; - } - }, DEBOUNCE_TRANSCRIPTION_THRESHOLD); - } - - async handleUserStream( - userId: UUID, - name: string, - userName: string, - channel: BaseGuildVoiceChannel, - audioStream: Readable - ) { - console.log(`Starting audio monitor for user: ${userId}`); - if (!this.userStates.has(userId)) { - this.userStates.set(userId, { - buffers: [], - totalLength: 0, - lastActive: Date.now(), - transcriptionText: "", - }); - } - - const state = this.userStates.get(userId); - - const processBuffer = async (buffer: Buffer) => { - try { - state!.buffers.push(buffer); - state!.totalLength += buffer.length; - state!.lastActive = Date.now(); - this.debouncedProcessTranscription( - userId, - name, - userName, - channel - ); - } catch (error) { - console.error( - `Error processing buffer for user ${userId}:`, - error - ); - } - }; - - new AudioMonitor( - audioStream, - 10000000, - () => { - if (this.transcriptionTimeout) { - clearTimeout(this.transcriptionTimeout); - } - }, - async (buffer) => { - if (!buffer) { - console.error("Received empty buffer"); - return; - } - await processBuffer(buffer); - } - ); - } - - private async processTranscription( - userId: UUID, - channelId: string, - channel: BaseGuildVoiceChannel, - name: string, - userName: string - ) { - const state = this.userStates.get(userId); - if (!state || state.buffers.length === 0) return; - try { - const inputBuffer = Buffer.concat(state.buffers, state.totalLength); - - state.buffers.length = 0; // Clear the buffers - state.totalLength = 0; - // Convert Opus to WAV - const wavBuffer = await this.convertOpusToWav(inputBuffer); - console.log("Starting transcription..."); - - const transcriptionText = await this.runtime - .getService(ServiceType.TRANSCRIPTION) - .transcribe(wavBuffer); - - function isValidTranscription(text: string): boolean { - if (!text || text.includes("[BLANK_AUDIO]")) return false; - return true; - } - - if (transcriptionText && isValidTranscription(transcriptionText)) { - state.transcriptionText += transcriptionText; - } - - if (state.transcriptionText.length) { - this.cleanupAudioPlayer(this.activeAudioPlayer); - const finalText = state.transcriptionText; - state.transcriptionText = ""; - await this.handleUserMessage( - finalText, - userId, - channelId, - channel, - name, - userName - ); - } - } catch (error) { - console.error( - `Error transcribing audio for user ${userId}:`, - error - ); - } - } - - private async handleUserMessage( - message: string, - userId: UUID, - channelId: string, - channel: BaseGuildVoiceChannel, - name: string, - userName: string - ) { - try { - const roomId = stringToUuid(channelId + "-" + this.runtime.agentId); - const userIdUUID = stringToUuid(userId); - - await this.runtime.ensureConnection( - userIdUUID, - roomId, - userName, - name, - "discord" - ); - - let state = await this.runtime.composeState( - { - agentId: this.runtime.agentId, - content: { text: message, source: "Discord" }, - userId: userIdUUID, - roomId, - }, - { - discordChannel: channel, - discordClient: this.client, - agentName: this.runtime.character.name, - } - ); - - if (message && message.startsWith("/")) { - return null; - } - - const memory = { - id: stringToUuid(channelId + "-voice-message-" + Date.now()), - agentId: this.runtime.agentId, - content: { - text: message, - source: "discord", - url: channel.url, - }, - userId: userIdUUID, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: Date.now(), - }; - - if (!memory.content.text) { - return { text: "", action: "IGNORE" }; - } - - await this.runtime.messageManager.createMemory(memory); - - state = await this.runtime.updateRecentMessageState(state); - - const shouldIgnore = await this._shouldIgnore(memory); - - if (shouldIgnore) { - return { text: "", action: "IGNORE" }; - } - - const shouldRespond = await this._shouldRespond( - message, - userId, - channel, - state - ); - - if (!shouldRespond) { - return; - } - - const context = composeContext({ - state, - template: - this.runtime.character.templates - ?.discordVoiceHandlerTemplate || - this.runtime.character.templates?.messageHandlerTemplate || - discordVoiceHandlerTemplate, - }); - - const responseContent = await this._generateResponse( - memory, - state, - context - ); - - const callback: HandlerCallback = async (content: Content) => { - console.log("callback content: ", content); - const { roomId } = memory; - - const responseMemory: Memory = { - id: stringToUuid( - memory.id + "-voice-response-" + Date.now() - ), - agentId: this.runtime.agentId, - userId: this.runtime.agentId, - content: { - ...content, - user: this.runtime.character.name, - inReplyTo: memory.id, - }, - roomId, - embedding: getEmbeddingZeroVector(), - }; - - if (responseMemory.content.text?.trim()) { - await this.runtime.messageManager.createMemory( - responseMemory - ); - state = await this.runtime.updateRecentMessageState(state); - - const responseStream = await this.runtime - .getService( - ServiceType.SPEECH_GENERATION - ) - .generate(this.runtime, content.text); - - if (responseStream) { - await this.playAudioStream( - userId, - responseStream as Readable - ); - } - - await this.runtime.evaluate(memory, state); - } else { - console.warn("Empty response, skipping"); - } - return [responseMemory]; - }; - - const responseMemories = await callback(responseContent); - - const response = responseContent; - - const content = (response.responseMessage || - response.content || - response.message) as string; - - if (!content) { - return null; - } - - console.log("responseMemories: ", responseMemories); - - await this.runtime.processActions( - memory, - responseMemories, - state, - callback - ); - } catch (error) { - console.error("Error processing transcribed text:", error); - } - } - - private async convertOpusToWav(pcmBuffer: Buffer): Promise { - try { - // Generate the WAV header - const wavHeader = getWavHeader( - pcmBuffer.length, - DECODE_SAMPLE_RATE - ); - - // Concatenate the WAV header and PCM data - const wavBuffer = Buffer.concat([wavHeader, pcmBuffer]); - - return wavBuffer; - } catch (error) { - console.error("Error converting PCM to WAV:", error); - throw error; - } - } - - private async _shouldRespond( - message: string, - userId: UUID, - channel: BaseGuildVoiceChannel, - state: State - ): Promise { - if (userId === this.client.user?.id) return false; - const lowerMessage = message.toLowerCase(); - const botName = this.client.user.username.toLowerCase(); - const characterName = this.runtime.character.name.toLowerCase(); - const guild = channel.guild; - const member = guild?.members.cache.get(this.client.user?.id as string); - const nickname = member?.nickname; - - if ( - lowerMessage.includes(botName as string) || - lowerMessage.includes(characterName) || - lowerMessage.includes( - this.client.user?.tag.toLowerCase() as string - ) || - (nickname && lowerMessage.includes(nickname.toLowerCase())) - ) { - return true; - } - - if (!channel.guild) { - return true; - } - - // If none of the above conditions are met, use the generateText to decide - const shouldRespondContext = composeContext({ - state, - template: - this.runtime.character.templates - ?.discordShouldRespondTemplate || - this.runtime.character.templates?.shouldRespondTemplate || - composeRandomUser(discordShouldRespondTemplate, 2), - }); - - const response = await generateShouldRespond({ - runtime: this.runtime, - context: shouldRespondContext, - modelClass: ModelClass.SMALL, - }); - - if (response === "RESPOND") { - return true; - } else if (response === "IGNORE") { - return false; - } else if (response === "STOP") { - return false; - } else { - console.error( - "Invalid response from response generateText:", - response - ); - return false; - } - } - - private async _generateResponse( - message: Memory, - state: State, - context: string - ): Promise { - const { userId, roomId } = message; - - const response = await generateMessageResponse({ - runtime: this.runtime, - context, - modelClass: ModelClass.SMALL, - }); - - response.source = "discord"; - - if (!response) { - console.error("No response from generateMessageResponse"); - return; - } - - await this.runtime.databaseAdapter.log({ - body: { message, context, response }, - userId: userId, - roomId, - type: "response", - }); - - return response; - } - - private async _shouldIgnore(message: Memory): Promise { - // console.log("message: ", message); - elizaLogger.debug("message.content: ", message.content); - // if the message is 3 characters or less, ignore it - if ((message.content as Content).text.length < 3) { - return true; - } - - const loseInterestWords = [ - // telling the bot to stop talking - "shut up", - "stop", - "dont talk", - "silence", - "stop talking", - "be quiet", - "hush", - "stfu", - "stupid bot", - "dumb bot", - - // offensive words - "fuck", - "shit", - "damn", - "suck", - "dick", - "cock", - "sex", - "sexy", - ]; - if ( - (message.content as Content).text.length < 50 && - loseInterestWords.some((word) => - (message.content as Content).text?.toLowerCase().includes(word) - ) - ) { - return true; - } - - const ignoreWords = ["k", "ok", "bye", "lol", "nm", "uh"]; - if ( - (message.content as Content).text?.length < 8 && - ignoreWords.some((word) => - (message.content as Content).text?.toLowerCase().includes(word) - ) - ) { - return true; - } - - return false; - } - - async scanGuild(guild: Guild) { - let chosenChannel: BaseGuildVoiceChannel | null = null; - - try { - const channelId = this.runtime.getSetting( - "DISCORD_VOICE_CHANNEL_ID" - ) as string; - if (channelId) { - const channel = await guild.channels.fetch(channelId); - if (channel?.isVoiceBased()) { - chosenChannel = channel as BaseGuildVoiceChannel; - } - } - - if (!chosenChannel) { - const channels = (await guild.channels.fetch()).filter( - (channel) => channel?.type == ChannelType.GuildVoice - ); - for (const [, channel] of channels) { - const voiceChannel = channel as BaseGuildVoiceChannel; - if ( - voiceChannel.members.size > 0 && - (chosenChannel === null || - voiceChannel.members.size > - chosenChannel.members.size) - ) { - chosenChannel = voiceChannel; - } - } - } - - if (chosenChannel) { - console.log(`Joining channel: ${chosenChannel.name}`); - await this.joinChannel(chosenChannel); - } else { - console.warn("No suitable voice channel found to join."); - } - } catch (error) { - console.error("Error selecting or joining a voice channel:", error); - } - } - - async playAudioStream(userId: UUID, audioStream: Readable) { - const connection = this.connections.get(userId); - if (connection == null) { - console.log(`No connection for user ${userId}`); - return; - } - this.cleanupAudioPlayer(this.activeAudioPlayer); - const audioPlayer = createAudioPlayer({ - behaviors: { - noSubscriber: NoSubscriberBehavior.Pause, - }, - }); - this.activeAudioPlayer = audioPlayer; - connection.subscribe(audioPlayer); - - const audioStartTime = Date.now(); - - const resource = createAudioResource(audioStream, { - inputType: StreamType.Arbitrary, - }); - audioPlayer.play(resource); - - audioPlayer.on("error", (err: any) => { - console.log(`Audio player error: ${err}`); - }); - - audioPlayer.on( - "stateChange", - (_oldState: any, newState: { status: string }) => { - if (newState.status == "idle") { - const idleTime = Date.now(); - console.log( - `Audio playback took: ${idleTime - audioStartTime}ms` - ); - } - } - ); - } - - cleanupAudioPlayer(audioPlayer: AudioPlayer) { - if (!audioPlayer) return; - - audioPlayer.stop(); - audioPlayer.removeAllListeners(); - if (audioPlayer === this.activeAudioPlayer) { - this.activeAudioPlayer = null; - } - } - - async handleJoinChannelCommand(interaction: any) { - try { - // Defer the reply immediately to prevent interaction timeout - await interaction.deferReply(); - - const channelId = interaction.options.get("channel") - ?.value as string; - if (!channelId) { - await interaction.editReply( - "Please provide a voice channel to join." - ); - return; - } - - const guild = interaction.guild; - if (!guild) { - await interaction.editReply("Could not find guild."); - return; - } - - const voiceChannel = interaction.guild.channels.cache.find( - (channel: VoiceChannel) => - channel.id === channelId && - channel.type === ChannelType.GuildVoice - ); - - if (!voiceChannel) { - await interaction.editReply("Voice channel not found!"); - return; - } - - await this.joinChannel(voiceChannel as BaseGuildVoiceChannel); - await interaction.editReply( - `Joined voice channel: ${voiceChannel.name}` - ); - } catch (error) { - console.error("Error joining voice channel:", error); - // Use editReply instead of reply for the error case - await interaction - .editReply("Failed to join the voice channel.") - .catch(console.error); - } - } - - async handleLeaveChannelCommand(interaction: any) { - const connection = this.getVoiceConnection(interaction.guildId as any); - - if (!connection) { - await interaction.reply("Not currently in a voice channel."); - return; - } - - try { - connection.destroy(); - await interaction.reply("Left the voice channel."); - } catch (error) { - console.error("Error leaving voice channel:", error); - await interaction.reply("Failed to leave the voice channel."); - } - } -} diff --git a/packages/client-discord/tsconfig.json b/packages/client-discord/tsconfig.json deleted file mode 100644 index 005fbac9d3634..0000000000000 --- a/packages/client-discord/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/client-discord/tsup.config.ts b/packages/client-discord/tsup.config.ts deleted file mode 100644 index 8eea21ba74f12..0000000000000 --- a/packages/client-discord/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "fluent-ffmpeg", - // Add other modules you want to externalize - ], -}); diff --git a/packages/client-discord/vitest.config.ts b/packages/client-discord/vitest.config.ts deleted file mode 100644 index a11fbbd0d9ee8..0000000000000 --- a/packages/client-discord/vitest.config.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { defineConfig } from 'vitest/config'; -import { resolve } from 'path'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - }, - resolve: { - alias: { - '@elizaos/core': resolve(__dirname, '../core/src'), - }, - }, -}); diff --git a/packages/client-eliza-home/__tests__/services/smart_things_api.test.ts b/packages/client-eliza-home/__tests__/services/smart_things_api.test.ts deleted file mode 100644 index bec748258b118..0000000000000 --- a/packages/client-eliza-home/__tests__/services/smart_things_api.test.ts +++ /dev/null @@ -1,234 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import type { IAgentRuntime } from '@elizaos/core'; - -// Mock global fetch -const mockFetch = vi.fn(); -global.fetch = mockFetch; - -// Create a mock class that matches the SmartThingsApi interface -class MockSmartThingsApi { - private baseUrl = 'https://api.smartthings.com/v1'; - private token: string; - - constructor(runtime: IAgentRuntime) { - this.token = runtime.getSetting("SMARTTHINGS_TOKEN"); - if (!this.token) { - throw new Error("SmartThings token is required"); - } - } - - private async request(endpoint: string, options: RequestInit = {}) { - const url = `${this.baseUrl}${endpoint}`; - const response = await fetch(url, { - ...options, - headers: { - 'Authorization': `Bearer ${this.token}`, - 'Content-Type': 'application/json', - ...options.headers, - }, - }); - - if (!response.ok) { - throw new Error(`SmartThings API error: ${response.statusText}`); - } - - return response.json(); - } - - devices = { - list: () => this.request('/devices'), - get: (deviceId: string) => this.request(`/devices/${deviceId}`), - getStatus: (deviceId: string) => this.request(`/devices/${deviceId}/status`), - executeCommand: (deviceId: string, command: any) => - this.request(`/devices/${deviceId}/commands`, { - method: 'POST', - body: JSON.stringify({ - commands: [command] - }) - }), - executeCommands: (deviceId: string, commands: any[]) => - this.request(`/devices/${deviceId}/commands`, { - method: 'POST', - body: JSON.stringify({ commands }) - }), - getComponents: (deviceId: string) => - this.request(`/devices/${deviceId}/components`), - getCapabilities: (deviceId: string) => - this.request(`/devices/${deviceId}/capabilities`) - }; - - scenes = { - list: () => this.request('/scenes'), - execute: (sceneId: string) => - this.request(`/scenes/${sceneId}/execute`, { - method: 'POST' - }) - }; - - rooms = { - list: () => this.request('/rooms'), - get: (roomId: string) => this.request(`/rooms/${roomId}`) - }; -} - -describe('SmartThingsApi', () => { - let api: MockSmartThingsApi; - let mockRuntime: IAgentRuntime; - - beforeEach(() => { - vi.clearAllMocks(); - mockRuntime = { - getSetting: vi.fn().mockReturnValue('mock-token'), - } as unknown as IAgentRuntime; - api = new MockSmartThingsApi(mockRuntime); - }); - - it('should throw error if token is not provided', () => { - const runtimeWithoutToken = { - getSetting: vi.fn().mockReturnValue(null), - } as unknown as IAgentRuntime; - - expect(() => new MockSmartThingsApi(runtimeWithoutToken)) - .toThrow('SmartThings token is required'); - }); - - describe('devices', () => { - beforeEach(() => { - mockFetch.mockResolvedValue({ - ok: true, - json: () => Promise.resolve({ data: 'success' }), - }); - }); - - it('should list devices', async () => { - await api.devices.list(); - - expect(mockFetch).toHaveBeenCalledWith( - 'https://api.smartthings.com/v1/devices', - expect.objectContaining({ - headers: expect.objectContaining({ - 'Authorization': 'Bearer mock-token', - 'Content-Type': 'application/json', - }), - }) - ); - }); - - it('should get device details', async () => { - const deviceId = 'device123'; - await api.devices.get(deviceId); - - expect(mockFetch).toHaveBeenCalledWith( - `https://api.smartthings.com/v1/devices/${deviceId}`, - expect.objectContaining({ - headers: expect.objectContaining({ - 'Authorization': 'Bearer mock-token', - }), - }) - ); - }); - - it('should execute device command', async () => { - const deviceId = 'device123'; - const command = { capability: 'switch', command: 'on' }; - - await api.devices.executeCommand(deviceId, command); - - expect(mockFetch).toHaveBeenCalledWith( - `https://api.smartthings.com/v1/devices/${deviceId}/commands`, - expect.objectContaining({ - method: 'POST', - body: JSON.stringify({ commands: [command] }), - headers: expect.objectContaining({ - 'Authorization': 'Bearer mock-token', - 'Content-Type': 'application/json', - }), - }) - ); - }); - - it('should handle API errors', async () => { - mockFetch.mockResolvedValue({ - ok: false, - statusText: 'Not Found', - }); - - await expect(api.devices.list()) - .rejects - .toThrow('SmartThings API error: Not Found'); - }); - }); - - describe('scenes', () => { - beforeEach(() => { - mockFetch.mockResolvedValue({ - ok: true, - json: () => Promise.resolve({ data: 'success' }), - }); - }); - - it('should list scenes', async () => { - await api.scenes.list(); - - expect(mockFetch).toHaveBeenCalledWith( - 'https://api.smartthings.com/v1/scenes', - expect.objectContaining({ - headers: expect.objectContaining({ - 'Authorization': 'Bearer mock-token', - }), - }) - ); - }); - - it('should execute scene', async () => { - const sceneId = 'scene123'; - await api.scenes.execute(sceneId); - - expect(mockFetch).toHaveBeenCalledWith( - `https://api.smartthings.com/v1/scenes/${sceneId}/execute`, - expect.objectContaining({ - method: 'POST', - headers: expect.objectContaining({ - 'Authorization': 'Bearer mock-token', - }), - }) - ); - }); - }); - - describe('rooms', () => { - beforeEach(() => { - mockFetch.mockResolvedValue({ - ok: true, - json: () => Promise.resolve({ data: 'success' }), - }); - }); - - it('should list rooms', async () => { - await api.rooms.list(); - - expect(mockFetch).toHaveBeenCalledWith( - 'https://api.smartthings.com/v1/rooms', - expect.objectContaining({ - headers: expect.objectContaining({ - 'Authorization': 'Bearer mock-token', - }), - }) - ); - }); - - it('should get room details', async () => { - const roomId = 'room123'; - await api.rooms.get(roomId); - - expect(mockFetch).toHaveBeenCalledWith( - `https://api.smartthings.com/v1/rooms/${roomId}`, - expect.objectContaining({ - headers: expect.objectContaining({ - 'Authorization': 'Bearer mock-token', - }), - }) - ); - }); - }); -}); diff --git a/packages/client-eliza-home/__tests__/smart_home.test.ts b/packages/client-eliza-home/__tests__/smart_home.test.ts deleted file mode 100644 index 7e345ab5605b7..0000000000000 --- a/packages/client-eliza-home/__tests__/smart_home.test.ts +++ /dev/null @@ -1,118 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { SmartHomeManager } from '../src/smart_home'; -import { SmartThingsApi } from '../src/services/smart_things_api'; -import { CommandParser } from '../src/utils/command_parser'; -import type { IAgentRuntime } from '@elizaos/core'; - -// Define mock interface that extends IAgentRuntime -interface MockAgentRuntime extends IAgentRuntime { - llm: { - shouldRespond: ReturnType; - complete: ReturnType; - }; -} - -// Mock dependencies -vi.mock('../src/services/smart_things_api', () => ({ - SmartThingsApi: vi.fn().mockImplementation(() => ({ - devices: { - list: vi.fn().mockResolvedValue([]), - executeCommand: vi.fn().mockResolvedValue({ status: 'success' }) - } - })) -})); -vi.mock('../src/utils/command_parser'); -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - error: vi.fn(), - }, -})); - -describe('SmartHomeManager', () => { - let smartHomeManager: SmartHomeManager; - let mockRuntime: MockAgentRuntime; - - beforeEach(() => { - // Reset all mocks - vi.clearAllMocks(); - - // Create mock runtime with proper typing - mockRuntime = { - llm: { - shouldRespond: vi.fn(), - complete: vi.fn(), - }, - getSetting: vi.fn().mockReturnValue('mock-token'), - // Add required IAgentRuntime properties - agentId: 'test-agent-id', - serverUrl: 'http://test-server', - databaseAdapter: { - init: vi.fn(), - close: vi.fn(), - // Add other required database methods as needed - }, - token: 'test-token', - modelProvider: 'test-provider', - } as MockAgentRuntime; - - smartHomeManager = new SmartHomeManager(mockRuntime); - }); - - describe('handleCommand', () => { - it('should return null when shouldRespond returns IGNORE', async () => { - // Arrange - vi.mocked(mockRuntime.llm.shouldRespond).mockResolvedValue('IGNORE'); - - // Act - const result = await smartHomeManager.handleCommand('turn on lights', 'user123'); - - // Assert - expect(result).toBeNull(); - expect(mockRuntime.llm.shouldRespond).toHaveBeenCalledWith( - expect.any(String), - 'turn on lights' - ); - }); - - it('should execute command and return response when shouldRespond returns RESPOND', async () => { - // Arrange - const mockResponse = 'Command executed successfully'; - vi.mocked(mockRuntime.llm.shouldRespond).mockResolvedValue('RESPOND'); - vi.mocked(mockRuntime.llm.complete).mockResolvedValue(mockResponse); - vi.mocked(CommandParser.parseCommand).mockReturnValue({ - command: 'turn_on', - args: { device: 'lights' } - }); - vi.mocked(CommandParser.mapToDeviceCommand).mockReturnValue({ - deviceId: 'device123', - capability: 'switch', - command: 'on' - }); - - // Act - const result = await smartHomeManager.handleCommand('turn on lights', 'user123'); - - // Assert - expect(result).toEqual({ - success: true, - message: mockResponse, - data: { status: 'success' } - }); - expect(mockRuntime.llm.shouldRespond).toHaveBeenCalled(); - expect(mockRuntime.llm.complete).toHaveBeenCalled(); - expect(CommandParser.parseCommand).toHaveBeenCalledWith('turn on lights'); - expect(CommandParser.mapToDeviceCommand).toHaveBeenCalled(); - }); - - it('should handle errors gracefully', async () => { - // Arrange - const mockError = new Error('Test error'); - vi.mocked(mockRuntime.llm.shouldRespond).mockRejectedValue(mockError); - - // Act & Assert - await expect(smartHomeManager.handleCommand('turn on lights', 'user123')) - .rejects - .toThrow(mockError); - }); - }); -}); diff --git a/packages/client-eliza-home/__tests__/utils/command_parser.test.ts b/packages/client-eliza-home/__tests__/utils/command_parser.test.ts deleted file mode 100644 index a14a5847c2c1e..0000000000000 --- a/packages/client-eliza-home/__tests__/utils/command_parser.test.ts +++ /dev/null @@ -1,126 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import { CommandParser } from '../../src/utils/command_parser'; - -describe('CommandParser', () => { - describe('parseCommand', () => { - it('should parse turn on commands', () => { - const inputs = ['turn on', 'switch on', 'enable']; - inputs.forEach(input => { - const result = CommandParser.parseCommand(input); - expect(result).toEqual({ - command: 'turnOn', - args: undefined - }); - }); - }); - - it('should parse turn off commands', () => { - const inputs = ['turn off', 'switch off', 'disable']; - inputs.forEach(input => { - const result = CommandParser.parseCommand(input); - expect(result).toEqual({ - command: 'turnOff', - args: undefined - }); - }); - }); - - it('should parse brightness commands with values', () => { - const inputs = [ - ['set brightness to 50', '50'], - ['dim to 25', '25'], - ['brighten to 75', '75'] - ]; - inputs.forEach(([input, expectedValue]) => { - const result = CommandParser.parseCommand(input); - expect(result).toEqual({ - command: 'setBrightness', - args: { value: expectedValue } - }); - }); - }); - - it('should parse temperature commands with values', () => { - const inputs = [ - ['set temperature to 72', '72'], - ['change temp to 68', '68'] - ]; - inputs.forEach(([input, expectedValue]) => { - const result = CommandParser.parseCommand(input); - expect(result).toEqual({ - command: 'setTemperature', - args: { value: expectedValue } - }); - }); - }); - - it('should parse color commands with values', () => { - const inputs = [ - ['change color to red', 'red'], - ['set color to blue', 'blue'] - ]; - inputs.forEach(([input, expectedValue]) => { - const result = CommandParser.parseCommand(input); - expect(result).toEqual({ - command: 'setColor', - args: { value: expectedValue } - }); - }); - }); - - it('should throw error for unknown commands', () => { - expect(() => CommandParser.parseCommand('invalid command')) - .toThrow('Unable to parse command'); - }); - }); - - describe('mapToDeviceCommand', () => { - it('should map turnOn command', () => { - const result = CommandParser.mapToDeviceCommand('turnOn'); - expect(result).toEqual({ - capability: 'switch', - command: 'on' - }); - }); - - it('should map turnOff command', () => { - const result = CommandParser.mapToDeviceCommand('turnOff'); - expect(result).toEqual({ - capability: 'switch', - command: 'off' - }); - }); - - it('should map setBrightness command with value', () => { - const result = CommandParser.mapToDeviceCommand('setBrightness', { value: '50' }); - expect(result).toEqual({ - capability: 'switchLevel', - command: 'setLevel', - arguments: [50] - }); - }); - - it('should map setTemperature command with value', () => { - const result = CommandParser.mapToDeviceCommand('setTemperature', { value: '72' }); - expect(result).toEqual({ - capability: 'thermostat', - command: 'setTemperature', - arguments: [72] - }); - }); - - it('should map setColor command with value', () => { - const result = CommandParser.mapToDeviceCommand('setColor', { value: 'red' }); - expect(result).toEqual({ - capability: 'colorControl', - command: 'setColor', - arguments: [{ hex: 'red' }] - }); - }); - - it('should throw error for unknown commands', () => { - expect(() => CommandParser.mapToDeviceCommand('invalidCommand')) - .toThrow('Unknown command: invalidCommand'); - }); - }); -}); diff --git a/packages/client-eliza-home/package.json b/packages/client-eliza-home/package.json deleted file mode 100644 index 66c821a531e39..0000000000000 --- a/packages/client-eliza-home/package.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "name": "@elizaos/client-eliza-home", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*" - }, - "devDependencies": { - "tsup": "8.3.5", - "vitest": "^3.0.0", - "@vitest/coverage-v8": "^1.2.1" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "lint": "eslint . --fix", - "test": "vitest run", - "test:watch": "vitest watch", - "test:coverage": "vitest run --coverage" - } -} diff --git a/packages/client-eliza-home/src/actions/control_device.ts b/packages/client-eliza-home/src/actions/control_device.ts deleted file mode 100644 index 9ca7b1ece3bb1..0000000000000 --- a/packages/client-eliza-home/src/actions/control_device.ts +++ /dev/null @@ -1,109 +0,0 @@ -import { - Action, - ActionExample, - Content, - HandlerCallback, - IAgentRuntime, - Memory, - State, -} from "@elizaos/core"; -import { SmartHomeManager } from "../smart_home.ts"; -import { smartThingsApi } from "../services/smart_things_api.ts"; -import { deviceStateProvider } from "../providers/device_state.ts"; - -export const controlDeviceAction = { - name: "CONTROL_DEVICE", - similes: ["DEVICE_CONTROL", "SMART_HOME_CONTROL", "HOME_CONTROL"], - description: "Controls smart home devices with specific commands", - validate: async (runtime: IAgentRuntime, message: Memory) => { - const keywords = [ - "turn on", - "turn off", - "switch", - "toggle", - "set", - "change", - "adjust", - "dim", - "brighten", - "lock", - "unlock", - ]; - return keywords.some(keyword => - message.content.text.toLowerCase().includes(keyword) - ); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - options: any, - callback: HandlerCallback - ) => { - const smartHomeManager = new SmartHomeManager(runtime); - const result = await smartHomeManager.handleCommand(message.content.text, message.userId); - - const response: Content = { - text: `Command executed: ${result.message || "Success"}`, - action: "DEVICE_CONTROL_RESPONSE", - source: "home-assistant" - }; - - await callback(response); - return response; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Turn on the living room lights", - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll turn on the living room lights for you", - action: "CONTROL_DEVICE", - }, - }, - ], - ] as ActionExample[][], -} as Action; - -export async function controlDevice(deviceId: string, command: string, args: any = {}) { - try { - // Map internal commands to SmartThings commands - const smartThingsCommand = mapCommand(command, args); - - await smartThingsApi.devices.executeCommand(deviceId, smartThingsCommand); - - // Update local device state - await deviceStateProvider.updateDeviceState(deviceId, command, args); - - } catch (error) { - console.error(`Failed to control device ${deviceId}:`, error); - throw error; - } -} - -function mapCommand(command: string, args: any) { - // Map our internal commands to SmartThings command format - switch (command) { - case 'turnOn': - return { capability: 'switch', command: 'on' }; - case 'turnOff': - return { capability: 'switch', command: 'off' }; - case 'setLevel': - return { - capability: 'switchLevel', - command: 'setLevel', - arguments: [args.level] - }; - // ... map other commands - default: - throw new Error(`Unknown command: ${command}`); - } -} - -export default controlDeviceAction; \ No newline at end of file diff --git a/packages/client-eliza-home/src/actions/discover_devices.ts b/packages/client-eliza-home/src/actions/discover_devices.ts deleted file mode 100644 index 9a6b2f26674e2..0000000000000 --- a/packages/client-eliza-home/src/actions/discover_devices.ts +++ /dev/null @@ -1,113 +0,0 @@ -import { - Action, - ActionExample, - Content, - HandlerCallback, - IAgentRuntime, - Memory, - State, -} from "@elizaos/core"; -import { EntityManager } from "../entities.ts"; -import { CAPABILITIES } from '../capabilities'; -import { smartThingsApi } from "../services/smart_things_api.ts"; - -export const discoverDevicesAction = { - name: "DISCOVER_DEVICES", - similes: ["SCAN_DEVICES", "FIND_DEVICES", "LIST_DEVICES"], - description: "Discovers and lists all available smart home devices", - validate: async (runtime: IAgentRuntime, message: Memory) => { - const keywords = [ - "discover", - "find", - "scan", - "list", - "show", - "what", - "devices", - "lights", - "switches", - ]; - return keywords.some(keyword => - message.content.text.toLowerCase().includes(keyword) - ); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - options: any, - callback: HandlerCallback - ) => { - const entityManager = new EntityManager(runtime); - await entityManager.discoverEntities(); - - const entities = entityManager.getAllEntities(); - const deviceList = entities - .map(entity => `- ${entity.name} (${entity.entityId}): ${entity.state}`) - .join('\n'); - - const response: Content = { - text: `Here are all the available devices:\n\n${deviceList}`, - action: "DEVICE_LIST_RESPONSE", - source: "home-assistant" - }; - - await callback(response); - return response; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "What devices do you see?", - }, - }, - { - user: "{{user2}}", - content: { - text: "Let me check what devices are available...", - action: "DISCOVER_DEVICES", - }, - }, - ], - ] as ActionExample[][], -} as Action; - -export default discoverDevicesAction; - -export async function discoverDevices() { - // Implementation to discover SmartThings devices - try { - const devices = await smartThingsApi.devices.list(); - - return devices.map(device => ({ - id: device.deviceId, - name: device.label || device.name, - capabilities: device.capabilities.map(cap => ({ - id: cap.id, - version: cap.version - })), - room: device.roomId, - status: parseDeviceStatus(device.status) - })); - } catch (error) { - console.error('Failed to discover devices:', error); - throw error; - } -} - -function parseDeviceStatus(status: any) { - // Convert SmartThings status format to our internal format - const deviceStatus: any = {}; - - if (status.switch) { - deviceStatus.switch = status.switch.value; - } - if (status.level) { - deviceStatus.level = status.level.value; - } - // ... parse other status values - - return deviceStatus; -} \ No newline at end of file diff --git a/packages/client-eliza-home/src/capabilities.ts b/packages/client-eliza-home/src/capabilities.ts deleted file mode 100644 index ffce19ca3d419..0000000000000 --- a/packages/client-eliza-home/src/capabilities.ts +++ /dev/null @@ -1,109 +0,0 @@ -import { IAgentRuntime } from "@elizaos/core"; - -// Define SmartThings capability interfaces -export interface SmartThingsCapability { - id: string; - version: number; -} - -export const CAPABILITIES = { - // Basic capabilities - SWITCH: { id: 'switch', version: 1 }, - POWER_METER: { id: 'powerMeter', version: 1 }, - ENERGY_METER: { id: 'energyMeter', version: 1 }, - - // Climate capabilities - THERMOSTAT: { id: 'thermostat', version: 1 }, - TEMPERATURE_MEASUREMENT: { id: 'temperatureMeasurement', version: 1 }, - HUMIDITY_MEASUREMENT: { id: 'humidityMeasurement', version: 1 }, - - // Lighting capabilities - SWITCH_LEVEL: { id: 'switchLevel', version: 1 }, - COLOR_CONTROL: { id: 'colorControl', version: 1 }, - COLOR_TEMPERATURE: { id: 'colorTemperature', version: 1 }, - - // Security capabilities - LOCK: { id: 'lock', version: 1 }, - MOTION_SENSOR: { id: 'motionSensor', version: 1 }, - CONTACT_SENSOR: { id: 'contactSensor', version: 1 }, - PRESENCE_SENSOR: { id: 'presenceSensor', version: 1 }, - - // Media capabilities - MEDIA_PLAYBACK: { id: 'mediaPlayback', version: 1 }, - VOLUME: { id: 'volume', version: 1 }, - - // Window/Door capabilities - WINDOW_SHADE: { id: 'windowShade', version: 1 }, - GARAGE_DOOR: { id: 'garageDoor', version: 1 }, - - // Fan capabilities - FAN_SPEED: { id: 'fanSpeed', version: 1 }, - - // Battery capabilities - BATTERY: { id: 'battery', version: 1 }, -} as const; - -export type CapabilityType = keyof typeof CAPABILITIES; - -export interface Capability { - interface: string; - version: string; - type: string; - properties: { - supported: Array<{ - name: string; - }>; - proactivelyReported: boolean; - retrievable: boolean; - }; -} - -export class CapabilityManager { - private runtime: IAgentRuntime; - private capabilities: Map; - - constructor(runtime: IAgentRuntime) { - this.runtime = runtime; - this.capabilities = new Map(); - this.initializeCapabilities(); - } - - private initializeCapabilities() { - // Add standard capabilities - this.addCapability({ - interface: "Alexa.PowerController", - version: "3", - type: "AlexaInterface", - properties: { - supported: [{ name: "powerState" }], - proactivelyReported: true, - retrievable: true, - }, - }); - - this.addCapability({ - interface: "Alexa.BrightnessController", - version: "3", - type: "AlexaInterface", - properties: { - supported: [{ name: "brightness" }], - proactivelyReported: true, - retrievable: true, - }, - }); - - // Add more capabilities as needed - } - - addCapability(capability: Capability) { - this.capabilities.set(capability.interface, capability); - } - - getCapability(interfaceName: string): Capability | undefined { - return this.capabilities.get(interfaceName); - } - - getAllCapabilities(): Capability[] { - return Array.from(this.capabilities.values()); - } -} \ No newline at end of file diff --git a/packages/client-eliza-home/src/config.ts b/packages/client-eliza-home/src/config.ts deleted file mode 100644 index 2d2ad9953cb32..0000000000000 --- a/packages/client-eliza-home/src/config.ts +++ /dev/null @@ -1,50 +0,0 @@ -export const DEFAULT_CONFIG = { - DISCOVERY_INTERVAL: 300000, // 5 minutes - STATE_UPDATE_INTERVAL: 60000, // 1 minute - MAX_RETRY_ATTEMPTS: 3, - RETRY_DELAY: 1000, // 1 second -}; - -export const SUPPORTED_CAPABILITIES = [ - 'switch', - 'light', - 'thermostat', - 'lock', - 'colorControl', - 'colorTemperature', - 'motionSensor', - 'contactSensor', - 'mediaPlayback', -]; - -export const CAPABILITY_MAPPINGS = { - switch: ['switch'], - light: ['switch', 'switchLevel', 'colorControl', 'colorTemperature'], - thermostat: ['thermostat', 'temperatureMeasurement', 'humidityMeasurement'], - lock: ['lock'], - motionSensor: ['motionSensor'], - contactSensor: ['contactSensor'], - presenceSensor: ['presenceSensor'], - mediaPlayer: ['mediaPlayback', 'volume'], - windowShade: ['windowShade'], - garageDoor: ['garageDoor'], - fan: ['fanSpeed', 'switch'], - powerMeter: ['powerMeter', 'energyMeter'], - battery: ['battery'] -}; - -export const DEVICE_CLASSES = { - switch: 'switch', - light: 'light', - thermostat: 'climate', - lock: 'lock', - motionSensor: 'binary_sensor', - contactSensor: 'binary_sensor', - presenceSensor: 'binary_sensor', - mediaPlayer: 'media_player', - windowShade: 'cover', - garageDoor: 'cover', - fan: 'fan', - powerMeter: 'sensor', - battery: 'sensor' -}; \ No newline at end of file diff --git a/packages/client-eliza-home/src/entities.ts b/packages/client-eliza-home/src/entities.ts deleted file mode 100644 index 78622f477367a..0000000000000 --- a/packages/client-eliza-home/src/entities.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { IAgentRuntime } from "@elizaos/core"; -import { SmartThingsApi } from "./services/smart_things_api"; -import { CAPABILITY_MAPPINGS } from "./config"; - -export interface Entity { - entityId: string; - name: string; - type: string; - capabilities: string[]; - state: any; -} - -export class EntityManager { - private runtime: IAgentRuntime; - private api: SmartThingsApi; - private entities: Map; - - constructor(runtime: IAgentRuntime) { - this.runtime = runtime; - this.api = new SmartThingsApi(runtime); - this.entities = new Map(); - } - - async discoverEntities(): Promise { - try { - const devices = await this.api.devices.list(); - - for (const device of devices) { - const entity: Entity = { - entityId: device.deviceId, - name: device.label || device.name, - type: this.determineDeviceType(device.capabilities), - capabilities: device.capabilities.map(cap => cap.id), - state: device.status, - }; - - this.entities.set(entity.entityId, entity); - } - } catch (error) { - throw new Error(`Entity discovery failed: ${error.message}`); - } - } - - private determineDeviceType(capabilities: any[]): string { - // Map capabilities to device type - for (const [type, requiredCaps] of Object.entries(CAPABILITY_MAPPINGS)) { - if (requiredCaps.every(cap => - capabilities.some(c => c.id === cap) - )) { - return type; - } - } - return 'unknown'; - } - - getEntity(entityId: string): Entity | undefined { - return this.entities.get(entityId); - } - - getAllEntities(): Entity[] { - return Array.from(this.entities.values()); - } - - async updateEntityState(entityId: string, state: any): Promise { - const entity = this.entities.get(entityId); - if (entity) { - entity.state = state; - this.entities.set(entityId, entity); - } - } -} \ No newline at end of file diff --git a/packages/client-eliza-home/src/environment.ts b/packages/client-eliza-home/src/environment.ts deleted file mode 100644 index dc6047915e323..0000000000000 --- a/packages/client-eliza-home/src/environment.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -export const homeConfigSchema = z.object({ - SMARTTHINGS_TOKEN: z.string().min(1, "SmartThings token is required"), -}); - -export type HomeConfig = z.infer; - -export async function validateHomeConfig(runtime: IAgentRuntime): Promise { - try { - const config = { - SMARTTHINGS_TOKEN: runtime.getSetting("SMARTTHINGS_TOKEN") || process.env.SMARTTHINGS_TOKEN, - }; - - return homeConfigSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error(`SmartThings configuration validation failed:\n${errorMessages}`); - } - throw error; - } -} \ No newline at end of file diff --git a/packages/client-eliza-home/src/handlers/device_handlers.ts b/packages/client-eliza-home/src/handlers/device_handlers.ts deleted file mode 100644 index a9e213e71bcfc..0000000000000 --- a/packages/client-eliza-home/src/handlers/device_handlers.ts +++ /dev/null @@ -1,178 +0,0 @@ -import { SmartThingsApi } from "../services/smart_things_api"; -import { DeviceState } from "../providers/device_state"; - -export class DeviceHandlers { - constructor(private api: SmartThingsApi) {} - - async handleSwitch(deviceId: string, command: string): Promise { - await this.api.devices.executeCommand(deviceId, { - capability: 'switch', - command: command === 'on' ? 'on' : 'off' - }); - } - - async handleLight(deviceId: string, command: string, args: any = {}): Promise { - const commands = []; - - if (command === 'on' || command === 'off') { - commands.push({ - capability: 'switch', - command: command - }); - } - - if (args.brightness) { - commands.push({ - capability: 'switchLevel', - command: 'setLevel', - arguments: [args.brightness] - }); - } - - if (args.color) { - commands.push({ - capability: 'colorControl', - command: 'setColor', - arguments: [args.color] - }); - } - - await this.api.devices.executeCommands(deviceId, commands); - } - - async handleThermostat(deviceId: string, command: string, args: any = {}): Promise { - const commands = []; - - if (args.temperature) { - commands.push({ - capability: 'thermostat', - command: 'setTemperature', - arguments: [args.temperature] - }); - } - - if (args.mode) { - commands.push({ - capability: 'thermostat', - command: 'setMode', - arguments: [args.mode] - }); - } - - await this.api.devices.executeCommands(deviceId, commands); - } - - async handleLock(deviceId: string, command: string): Promise { - await this.api.devices.executeCommand(deviceId, { - capability: 'lock', - command: command === 'lock' ? 'lock' : 'unlock' - }); - } - - async handleWindowShade(deviceId: string, command: string, args: any = {}): Promise { - const commands = []; - - switch (command) { - case 'open': - commands.push({ - capability: 'windowShade', - command: 'open' - }); - break; - case 'close': - commands.push({ - capability: 'windowShade', - command: 'close' - }); - break; - case 'setLevel': - if (args.level !== undefined) { - commands.push({ - capability: 'windowShade', - command: 'setLevel', - arguments: [args.level] - }); - } - break; - } - - await this.api.devices.executeCommands(deviceId, commands); - } - - async handleFan(deviceId: string, command: string, args: any = {}): Promise { - const commands = []; - - if (command === 'on' || command === 'off') { - commands.push({ - capability: 'switch', - command: command - }); - } - - if (args.speed !== undefined) { - commands.push({ - capability: 'fanSpeed', - command: 'setSpeed', - arguments: [args.speed] - }); - } - - await this.api.devices.executeCommands(deviceId, commands); - } - - async handleGarageDoor(deviceId: string, command: string): Promise { - await this.api.devices.executeCommand(deviceId, { - capability: 'garageDoor', - command: command === 'open' ? 'open' : 'close' - }); - } - - async handleMediaPlayer(deviceId: string, command: string, args: any = {}): Promise { - const commands = []; - - switch (command) { - case 'play': - case 'pause': - case 'stop': - commands.push({ - capability: 'mediaPlayback', - command: command - }); - break; - case 'setVolume': - if (args.volume !== undefined) { - commands.push({ - capability: 'volume', - command: 'setVolume', - arguments: [args.volume] - }); - } - break; - } - - await this.api.devices.executeCommands(deviceId, commands); - } - - async handleSensor(deviceId: string, sensorType: string): Promise { - const status = await this.api.devices.getStatus(deviceId); - - switch (sensorType) { - case 'motion': - return status.motionSensor?.motion; - case 'contact': - return status.contactSensor?.contact; - case 'presence': - return status.presenceSensor?.presence; - case 'temperature': - return status.temperatureMeasurement?.temperature; - case 'humidity': - return status.humidityMeasurement?.humidity; - case 'battery': - return status.battery?.battery; - default: - throw new Error(`Unknown sensor type: ${sensorType}`); - } - } - - // Add other handlers for locks, sensors, covers, etc. -} \ No newline at end of file diff --git a/packages/client-eliza-home/src/handlers/room_handlers.ts b/packages/client-eliza-home/src/handlers/room_handlers.ts deleted file mode 100644 index eabe89a72d797..0000000000000 --- a/packages/client-eliza-home/src/handlers/room_handlers.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { SmartThingsApi } from "../services/smart_things_api"; - -export class RoomHandlers { - constructor(private api: SmartThingsApi) {} - - async listRooms(): Promise { - return await this.api.rooms.list(); - } - - async getRoom(roomId: string): Promise { - return await this.api.rooms.get(roomId); - } - - async getRoomByName(name: string): Promise { - const rooms = await this.listRooms(); - return rooms.find(room => - room.name.toLowerCase() === name.toLowerCase() - ); - } - - async getDevicesInRoom(roomId: string): Promise { - const devices = await this.api.devices.list(); - return devices.filter(device => device.roomId === roomId); - } -} \ No newline at end of file diff --git a/packages/client-eliza-home/src/handlers/scene_handlers.ts b/packages/client-eliza-home/src/handlers/scene_handlers.ts deleted file mode 100644 index 328d5df0e5823..0000000000000 --- a/packages/client-eliza-home/src/handlers/scene_handlers.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { SmartThingsApi } from "../services/smart_things_api"; - -export class SceneHandlers { - constructor(private api: SmartThingsApi) {} - - async listScenes(): Promise { - return await this.api.scenes.list(); - } - - async executeScene(sceneId: string): Promise { - await this.api.scenes.execute(sceneId); - } - - async getSceneByName(name: string): Promise { - const scenes = await this.listScenes(); - return scenes.find(scene => - scene.name.toLowerCase() === name.toLowerCase() - ); - } -} \ No newline at end of file diff --git a/packages/client-eliza-home/src/index.ts b/packages/client-eliza-home/src/index.ts deleted file mode 100644 index 85aded60774c1..0000000000000 --- a/packages/client-eliza-home/src/index.ts +++ /dev/null @@ -1,114 +0,0 @@ -import { EventEmitter } from "events"; -import { - IAgentRuntime, - Client as ElizaClient, - elizaLogger, - stringToUuid, - getEmbeddingZeroVector, - Memory, - Content, -} from "@elizaos/core"; -import { validateHomeConfig } from "./environment.ts"; -import { CapabilityManager } from "./capabilities.ts"; -import { EntityManager } from "./entities.ts"; -import { StateManager } from "./state.ts"; -import { SmartHomeManager } from "./smart_home.ts"; -import controlDeviceAction from "./actions/control_device.ts"; -import discoverDevicesAction from "./actions/discover_devices.ts"; -import deviceStateProvider from "./providers/device_state.ts"; -import automationStateProvider from "./providers/automation_state.ts"; - - -export class HomeClient extends EventEmitter { - private runtime: IAgentRuntime; - private capabilityManager: CapabilityManager; - private entityManager: EntityManager; - private stateManager: StateManager; - private smartHomeManager: SmartHomeManager; - - constructor(runtime: IAgentRuntime) { - super(); - this.runtime = runtime; - this.initialize(); - } - - private async initialize() { - try { - const config = await validateHomeConfig(this.runtime); - - this.capabilityManager = new CapabilityManager(this.runtime); - this.entityManager = new EntityManager(this.runtime); - this.stateManager = new StateManager(this.runtime); - this.smartHomeManager = new SmartHomeManager(this.runtime); - - // Register providers - this.runtime.providers.push(this.stateManager.getProvider()); - this.runtime.providers.push(deviceStateProvider); - this.runtime.providers.push(automationStateProvider); - - // Register actions - this.registerActions(); - - // Start state monitoring - this.startStateMonitoring(); - - elizaLogger.success("Home Assistant client initialized successfully"); - } catch (error) { - elizaLogger.error("Failed to initialize Home Assistant client:", error); - throw error; - } - } - - private registerActions() { - this.runtime.registerAction(controlDeviceAction); - this.runtime.registerAction(discoverDevicesAction); - } - - private startStateMonitoring() { - setInterval(async () => { - try { - await this.entityManager.discoverEntities(); - elizaLogger.debug("Updated device states"); - } catch (error) { - elizaLogger.error("Failed to update device states:", error); - } - }, 60000); // Update every minute - } - - async handleCommand(command: string, userId: string) { - const roomId = stringToUuid(`home-${userId}`); - const userIdUUID = stringToUuid(userId); - - const memory: Memory = { - id: stringToUuid(`command-${Date.now()}`), - userId: userIdUUID, - agentId: this.runtime.agentId, - roomId, - content: { - text: command, - source: "home-assistant" - }, - embedding: getEmbeddingZeroVector(), - createdAt: Date.now() - }; - - await this.runtime.messageManager.createMemory(memory); - return this.smartHomeManager.handleCommand(command, userId); - } -} - -export const HomeClientInterface: ElizaClient = { - start: async (runtime: IAgentRuntime) => new HomeClient(runtime), - stop: async (runtime: IAgentRuntime) => { - elizaLogger.warn("Home Assistant client does not support stopping yet"); - } -}; - -export function startHome(runtime: IAgentRuntime) { - return new HomeClient(runtime); -} - -export { - homeShouldRespondTemplate, - homeMessageHandlerTemplate -} from "./templates"; \ No newline at end of file diff --git a/packages/client-eliza-home/src/providers/automation_state.ts b/packages/client-eliza-home/src/providers/automation_state.ts deleted file mode 100644 index 524aabb8d5486..0000000000000 --- a/packages/client-eliza-home/src/providers/automation_state.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { IAgentRuntime, Provider } from "@elizaos/core"; - -export const automationStateProvider: Provider = { - name: "automation-state", - get: async (runtime: IAgentRuntime) => { - try { - const response = await fetch( - `${runtime.getSetting("HOME_ASSISTANT_URL")}/api/states`, - { - headers: { - Authorization: `Bearer ${runtime.getSetting("HOME_ASSISTANT_TOKEN")}`, - "Content-Type": "application/json", - }, - } - ); - - if (!response.ok) { - throw new Error("Failed to fetch automation states"); - } - - const states = await response.json(); - const automations = states.filter(state => state.entity_id.startsWith('automation.')); - - const automationStates = automations - .map(automation => `${automation.attributes.friendly_name}: ${automation.state}`) - .join('\n'); - - return `Current Automation States:\n${automationStates}`; - } catch (error) { - return "Unable to fetch automation states"; - } - } -}; - -export default automationStateProvider; \ No newline at end of file diff --git a/packages/client-eliza-home/src/providers/device_state.ts b/packages/client-eliza-home/src/providers/device_state.ts deleted file mode 100644 index 09a008ca54903..0000000000000 --- a/packages/client-eliza-home/src/providers/device_state.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { IAgentRuntime } from "@elizaos/core"; -import { Provider } from "@elizaos/core"; -import { EntityManager } from "../entities.ts"; -import { SmartThingsCapability } from '../capabilities'; - -export interface DeviceState { - id: string; - name: string; - capabilities: SmartThingsCapability[]; - room?: string; - status: { - switch?: 'on' | 'off'; - level?: number; - temperature?: number; - motion?: 'active' | 'inactive'; - contact?: 'open' | 'closed'; - // ... other status fields - }; -} - -export const deviceStateProvider: Provider = { - get: async (runtime: IAgentRuntime) => { - const entityManager = new EntityManager(runtime); - await entityManager.discoverEntities(); - const entities = entityManager.getAllEntities(); - - const deviceStates = entities - .map(entity => `${entity.name}: ${entity.state}`) - .join('\n'); - - return `Current Device States:\n${deviceStates}`; - } -}; - -export default deviceStateProvider; - -export class DeviceStateProvider { - private devices: Map = new Map(); - - async updateDeviceState(deviceId: string, capability: string, value: any) { - const device = this.devices.get(deviceId); - if (!device) { - throw new Error(`Device ${deviceId} not found`); - } - - // Update device status based on capability - switch (capability) { - case 'switch': - device.status.switch = value; - break; - case 'level': - device.status.level = value; - break; - // ... handle other capabilities - } - - this.devices.set(deviceId, device); - - } -} \ No newline at end of file diff --git a/packages/client-eliza-home/src/services/device_discovery.ts b/packages/client-eliza-home/src/services/device_discovery.ts deleted file mode 100644 index d2e4b3316de37..0000000000000 --- a/packages/client-eliza-home/src/services/device_discovery.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { SmartThingsApi } from './smart_things_api'; -import { SmartThingsDevice } from '../types/smart_things'; -import { DEVICE_CLASSES } from '../config'; - -export class DeviceDiscoveryService { - constructor(private api: SmartThingsApi) {} - - async discoverDevices(): Promise { - const devices = await this.api.devices.list(); - return devices.map(device => this.normalizeDevice(device)); - } - - async discoverDevicesByRoom(roomId: string): Promise { - const devices = await this.discoverDevices(); - return devices.filter(device => device.roomId === roomId); - } - - async discoverDevicesByType(type: string): Promise { - const devices = await this.discoverDevices(); - return devices.filter(device => this.getDeviceType(device) === type); - } - - private normalizeDevice(device: any): SmartThingsDevice { - return { - deviceId: device.deviceId, - name: device.label || device.name, - roomId: device.roomId, - capabilities: device.capabilities, - status: device.status || {} - }; - } - - private getDeviceType(device: SmartThingsDevice): string { - for (const [type, deviceClass] of Object.entries(DEVICE_CLASSES)) { - if (this.hasRequiredCapabilities(device, type)) { - return deviceClass; - } - } - return 'unknown'; - } - - private hasRequiredCapabilities(device: SmartThingsDevice, type: string): boolean { - const requiredCaps = DEVICE_CLASSES[type]; - if (!requiredCaps) return false; - - return device.capabilities.some(cap => - requiredCaps.includes(cap.id) - ); - } -} \ No newline at end of file diff --git a/packages/client-eliza-home/src/services/smart_things_api.ts b/packages/client-eliza-home/src/services/smart_things_api.ts deleted file mode 100644 index 3033480fd90c7..0000000000000 --- a/packages/client-eliza-home/src/services/smart_things_api.ts +++ /dev/null @@ -1,69 +0,0 @@ -import { IAgentRuntime } from "@elizaos/core"; -import { retryWithBackoff } from "../utils"; - -export class SmartThingsApi { - private baseUrl = 'https://api.smartthings.com/v1'; - private token: string; - - constructor(runtime: IAgentRuntime) { - this.token = runtime.getSetting("SMARTTHINGS_TOKEN"); - if (!this.token) { - throw new Error("SmartThings token is required"); - } - } - - private async request(endpoint: string, options: RequestInit = {}) { - const url = `${this.baseUrl}${endpoint}`; - const response = await fetch(url, { - ...options, - headers: { - 'Authorization': `Bearer ${this.token}`, - 'Content-Type': 'application/json', - ...options.headers, - }, - }); - - if (!response.ok) { - throw new Error(`SmartThings API error: ${response.statusText}`); - } - - return response.json(); - } - - devices = { - list: () => this.request('/devices'), - get: (deviceId: string) => this.request(`/devices/${deviceId}`), - getStatus: (deviceId: string) => this.request(`/devices/${deviceId}/status`), - executeCommand: (deviceId: string, command: any) => - this.request(`/devices/${deviceId}/commands`, { - method: 'POST', - body: JSON.stringify({ - commands: [command] - }) - }), - executeCommands: (deviceId: string, commands: any[]) => - this.request(`/devices/${deviceId}/commands`, { - method: 'POST', - body: JSON.stringify({ commands }) - }), - getComponents: (deviceId: string) => - this.request(`/devices/${deviceId}/components`), - getCapabilities: (deviceId: string) => - this.request(`/devices/${deviceId}/capabilities`) - }; - - scenes = { - list: () => this.request('/scenes'), - execute: (sceneId: string) => - this.request(`/scenes/${sceneId}/execute`, { - method: 'POST' - }) - }; - - rooms = { - list: () => this.request('/rooms'), - get: (roomId: string) => this.request(`/rooms/${roomId}`) - }; -} - -export const smartThingsApi = new SmartThingsApi(null as any); // Will be initialized later \ No newline at end of file diff --git a/packages/client-eliza-home/src/smart_home.ts b/packages/client-eliza-home/src/smart_home.ts deleted file mode 100644 index 967b633377bd9..0000000000000 --- a/packages/client-eliza-home/src/smart_home.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { IAgentRuntime, elizaLogger } from "@elizaos/core"; -import { SmartThingsApi } from "./services/smart_things_api"; -import { CommandParser } from "./utils/command_parser"; -import { homeShouldRespondTemplate, homeMessageHandlerTemplate } from "./templates"; - -export class SmartHomeManager { - private runtime: IAgentRuntime; - private api: SmartThingsApi; - - constructor(runtime: IAgentRuntime) { - this.runtime = runtime; - this.api = new SmartThingsApi(runtime); - } - - async handleCommand(command: string, userId: string): Promise { - try { - // First check if we should respond using the template - const shouldRespond = await this.runtime.llm.shouldRespond( - homeShouldRespondTemplate, - command - ); - - if (shouldRespond !== 'RESPOND') { - return null; - } - - // Parse the command using CommandParser - const parsedCommand = CommandParser.parseCommand(command); - const deviceCommand = CommandParser.mapToDeviceCommand( - parsedCommand.command, - parsedCommand.args - ); - - // Execute the command - const result = await this.executeCommand(deviceCommand); - - // Generate response using template - const response = await this.runtime.llm.complete( - homeMessageHandlerTemplate, - { - command, - result, - homeState: await this.getCurrentState() - } - ); - - return { - success: true, - message: response, - data: result - }; - - } catch (error) { - elizaLogger.error("Error handling smart home command:", error); - throw error; - } - } - - private async getCurrentState(): Promise { - try { - const devices = await this.api.devices.list(); - return devices - .map(device => `${device.name}: ${JSON.stringify(device.status)}`) - .join('\n'); - } catch (error) { - elizaLogger.error("Error getting current state:", error); - return "Unable to fetch current state"; - } - } - - private async executeCommand(deviceCommand: any): Promise { - try { - return await this.api.devices.executeCommand( - deviceCommand.deviceId, - { - capability: deviceCommand.capability, - command: deviceCommand.command, - arguments: deviceCommand.arguments - } - ); - } catch (error) { - elizaLogger.error("Error executing smart home command:", error); - throw error; - } - } -} \ No newline at end of file diff --git a/packages/client-eliza-home/src/state.ts b/packages/client-eliza-home/src/state.ts deleted file mode 100644 index c120cd08564ea..0000000000000 --- a/packages/client-eliza-home/src/state.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { IAgentRuntime, Provider } from "@elizaos/core"; -import { Entity } from "./entities.ts"; - -export class StateManager { - private runtime: IAgentRuntime; - private states: Map; - - constructor(runtime: IAgentRuntime) { - this.runtime = runtime; - this.states = new Map(); - } - - async updateState(entityId: string, state: any): Promise { - this.states.set(entityId, state); - } - - getState(entityId: string): any { - return this.states.get(entityId); - } - - getAllStates(): Map { - return this.states; - } - - getProvider(): Provider { - return { - name: "home-assistant-state", - get: async () => { - const states = Array.from(this.states.entries()) - .map(([entityId, state]) => `${entityId}: ${JSON.stringify(state)}`) - .join('\n'); - return `Current Home Assistant States:\n${states}`; - } - }; - } -} \ No newline at end of file diff --git a/packages/client-eliza-home/src/templates.ts b/packages/client-eliza-home/src/templates.ts deleted file mode 100644 index 84c34ddf9db44..0000000000000 --- a/packages/client-eliza-home/src/templates.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { messageCompletionFooter, shouldRespondFooter } from "@elizaos/core"; - -export const homeShouldRespondTemplate = ` -# Task: Decide if the assistant should respond to home automation requests. - -# Current home state: -{{homeState}} - -# Recent message: -{{message}} - -# Instructions: Determine if the assistant should respond to the message and control home devices. -Response options are [RESPOND], [IGNORE] and [STOP]. - -The assistant should: -- Respond with [RESPOND] to direct home automation requests (e.g., "turn on the lights") -- Respond with [RESPOND] to questions about device states (e.g., "are the lights on?") -- Respond with [IGNORE] to unrelated messages -- Respond with [STOP] if asked to stop controlling devices - -Choose the option that best describes how the assistant should respond to the message:`; - -export const homeMessageHandlerTemplate = ` -# Task: Generate a response for a home automation request. - -# Current home state: -{{homeState}} - -# User command: -{{command}} - -# Command result: -{{result}} - -# Instructions: Write a natural response that confirms the action taken and its result. -The response should be friendly and conversational while clearly indicating what was done. - -Response:`; \ No newline at end of file diff --git a/packages/client-eliza-home/src/types/smart_things.ts b/packages/client-eliza-home/src/types/smart_things.ts deleted file mode 100644 index 0a6f3c2790bdf..0000000000000 --- a/packages/client-eliza-home/src/types/smart_things.ts +++ /dev/null @@ -1,55 +0,0 @@ -export interface SmartThingsDevice { - deviceId: string; - name: string; - label?: string; - roomId?: string; - capabilities: Array<{ - id: string; - version: number; - }>; - status: DeviceStatus; -} - -export interface DeviceStatus { - switch?: { - value: 'on' | 'off'; - }; - level?: { - value: number; - }; - temperature?: { - value: number; - unit: string; - }; - motion?: { - value: 'active' | 'inactive'; - }; - contact?: { - value: 'open' | 'closed'; - }; - presence?: { - value: 'present' | 'not present'; - }; - battery?: { - value: number; - }; -} - -export interface SmartThingsRoom { - roomId: string; - name: string; - locationId: string; -} - -export interface SmartThingsScene { - sceneId: string; - sceneName: string; - locationId: string; - lastExecutedDate: string; -} - -export interface DeviceCommand { - capability: string; - command: string; - arguments?: any[]; -} \ No newline at end of file diff --git a/packages/client-eliza-home/src/utils.ts b/packages/client-eliza-home/src/utils.ts deleted file mode 100644 index 4e7f90301286e..0000000000000 --- a/packages/client-eliza-home/src/utils.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { elizaLogger } from "@elizaos/core"; - -export async function retryWithBackoff( - operation: () => Promise, - maxRetries: number = 3, - baseDelay: number = 1000 -): Promise { - let lastError: Error; - - for (let i = 0; i < maxRetries; i++) { - try { - return await operation(); - } catch (error) { - lastError = error; - const delay = baseDelay * Math.pow(2, i); - elizaLogger.warn(`Operation failed, retrying in ${delay}ms...`, error); - await new Promise(resolve => setTimeout(resolve, delay)); - } - } - - throw lastError!; -} - -export function parseEntityId(entityId: string): { domain: string; name: string } { - const [domain, ...rest] = entityId.split('.'); - return { - domain, - name: rest.join('.'), - }; -} - -export function formatResponse(success: boolean, message: string, data?: any) { - return { - success, - message, - data, - timestamp: new Date().toISOString(), - }; -} \ No newline at end of file diff --git a/packages/client-eliza-home/src/utils/command_parser.ts b/packages/client-eliza-home/src/utils/command_parser.ts deleted file mode 100644 index c0e97a5572612..0000000000000 --- a/packages/client-eliza-home/src/utils/command_parser.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { DeviceCommand } from '../types/smart_things'; - -export class CommandParser { - private static readonly COMMAND_PATTERNS = { - turnOn: /turn on|switch on|enable/i, - turnOff: /turn off|switch off|disable/i, - setBrightness: /set brightness to (\d+)|dim to (\d+)|brighten to (\d+)/i, - setTemperature: /set temperature to (\d+)|change temp to (\d+)/i, - setColor: /change color to (\w+)|set color to (\w+)/i, - lock: /lock|secure/i, - unlock: /unlock|unsecure/i, - open: /open|raise/i, - close: /close|lower/i, - }; - - static parseCommand(text: string): { command: string; args?: any } { - for (const [action, pattern] of Object.entries(this.COMMAND_PATTERNS)) { - const match = text.match(pattern); - if (match) { - const args = match.slice(1).find(arg => arg !== undefined); - return { - command: action, - args: args ? { value: args } : undefined - }; - } - } - throw new Error('Unable to parse command'); - } - - static mapToDeviceCommand(command: string, args?: any): DeviceCommand { - switch (command) { - case 'turnOn': - return { capability: 'switch', command: 'on' }; - case 'turnOff': - return { capability: 'switch', command: 'off' }; - case 'setBrightness': - return { - capability: 'switchLevel', - command: 'setLevel', - arguments: [parseInt(args.value)] - }; - case 'setTemperature': - return { - capability: 'thermostat', - command: 'setTemperature', - arguments: [parseInt(args.value)] - }; - case 'setColor': - return { - capability: 'colorControl', - command: 'setColor', - arguments: [{ hex: args.value }] - }; - default: - throw new Error(`Unknown command: ${command}`); - } - } -} \ No newline at end of file diff --git a/packages/client-eliza-home/tsconfig.json b/packages/client-eliza-home/tsconfig.json deleted file mode 100644 index d946efe36dc72..0000000000000 --- a/packages/client-eliza-home/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/client-eliza-home/tsup.config.ts b/packages/client-eliza-home/tsup.config.ts deleted file mode 100644 index 028c8aa241783..0000000000000 --- a/packages/client-eliza-home/tsup.config.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], - external: [ - "dotenv", - "fs", - "path", - ], -}); \ No newline at end of file diff --git a/packages/client-eliza-home/vitest.config.ts b/packages/client-eliza-home/vitest.config.ts deleted file mode 100644 index 1c6aad2fca419..0000000000000 --- a/packages/client-eliza-home/vitest.config.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - include: ['__tests__/**/*.test.ts'], - coverage: { - provider: 'v8', - reporter: ['text', 'json', 'html'], - }, - }, -}); diff --git a/packages/client-farcaster/__tests__/cast.test.ts b/packages/client-farcaster/__tests__/cast.test.ts deleted file mode 100644 index 2082a46c9927d..0000000000000 --- a/packages/client-farcaster/__tests__/cast.test.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { createTestCast } from './test-utils'; -import { FarcasterClient } from '../src/client'; -import { NeynarAPIClient } from '@neynar/nodejs-sdk'; - -// Mock dependencies -vi.mock('@neynar/nodejs-sdk', () => ({ - NeynarAPIClient: vi.fn().mockImplementation(() => ({ - publishCast: vi.fn().mockResolvedValue({ - success: true, - cast: { - hash: 'cast-1', - author: { fid: '123' }, - text: 'Test cast', - timestamp: '2025-01-20T20:00:00Z' - } - }), - fetchBulkUsers: vi.fn().mockResolvedValue({ - users: [{ - fid: '123', - username: 'test.farcaster', - display_name: 'Test User', - pfp: { - url: 'https://example.com/pic.jpg' - } - }] - }) - })) -})); - -describe('Cast Functions', () => { - let client: FarcasterClient; - - beforeEach(() => { - vi.clearAllMocks(); - client = new FarcasterClient({ - runtime: { - name: 'test-runtime', - memory: new Map(), - getMemory: vi.fn(), - setMemory: vi.fn(), - clearMemory: vi.fn() - }, - url: 'https://api.example.com', - ssl: true, - neynar: new NeynarAPIClient({ apiKey: 'test-key' }), - signerUuid: 'test-signer', - cache: new Map(), - farcasterConfig: { - apiKey: 'test-key', - signerUuid: 'test-signer' - } - }); - }); - - describe('createTestCast', () => { - it('should create a cast successfully', async () => { - const content = 'Test cast content'; - const result = await createTestCast(client, content); - - expect(result).toBeDefined(); - expect(result.success).toBe(true); - expect(result.cast.text).toBe(content); - expect(client.neynar.publishCast).toHaveBeenCalledWith({ - text: content, - signerUuid: 'test-signer' - }); - }); - - it('should handle cast creation errors', async () => { - const content = 'Test cast content'; - vi.mocked(client.neynar.publishCast).mockRejectedValueOnce(new Error('Cast creation failed')); - await expect(createTestCast(client, content)).rejects.toThrow('Cast creation failed'); - }); - - it('should handle empty content', async () => { - const content = ''; - await expect(createTestCast(client, content)).rejects.toThrow('Cast content cannot be empty'); - }); - - it('should handle very long content', async () => { - const content = 'a'.repeat(321); // Farcaster limit is 320 characters - await expect(createTestCast(client, content)).rejects.toThrow('Cast content too long'); - }); - }); -}); diff --git a/packages/client-farcaster/__tests__/client.test.ts b/packages/client-farcaster/__tests__/client.test.ts deleted file mode 100644 index 7735118010553..0000000000000 --- a/packages/client-farcaster/__tests__/client.test.ts +++ /dev/null @@ -1,135 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { FarcasterClient } from '../src/client'; -import { NeynarAPIClient } from '@neynar/nodejs-sdk'; - -// Mock dependencies -vi.mock('@neynar/nodejs-sdk', () => ({ - NeynarAPIClient: vi.fn().mockImplementation(() => ({ - publishCast: vi.fn().mockResolvedValue({ - success: true, - cast: { - hash: 'cast-1', - author: { fid: '123' }, - text: 'Test cast', - timestamp: '2025-01-20T20:00:00Z' - } - }), - fetchBulkUsers: vi.fn().mockResolvedValue({ - users: [{ - fid: '123', - username: 'test.farcaster', - display_name: 'Test User', - pfp: { - url: 'https://example.com/pic.jpg' - } - }] - }), - fetchCastsForUser: vi.fn().mockResolvedValue({ - casts: [ - { - hash: 'cast-1', - author: { - fid: '123', - username: 'test.farcaster', - display_name: 'Test User' - }, - text: 'Test cast', - timestamp: '2025-01-20T20:00:00Z' - } - ] - }) - })) -})); - -describe('FarcasterClient', () => { - let client: FarcasterClient; - const mockRuntime = { - name: 'test-runtime', - memory: new Map(), - getMemory: vi.fn(), - setMemory: vi.fn(), - clearMemory: vi.fn() - }; - - beforeEach(() => { - vi.clearAllMocks(); - client = new FarcasterClient({ - runtime: mockRuntime, - url: 'https://api.example.com', - ssl: true, - neynar: new NeynarAPIClient({ apiKey: 'test-key' }), - signerUuid: 'test-signer', - cache: new Map(), - farcasterConfig: { - apiKey: 'test-key', - signerUuid: 'test-signer' - } - }); - }); - - describe('loadCastFromNeynarResponse', () => { - it('should load cast from Neynar response', async () => { - const neynarResponse = { - hash: 'cast-1', - author: { fid: '123' }, - text: 'Test cast', - timestamp: '2025-01-20T20:00:00Z' - }; - - const cast = await client.loadCastFromNeynarResponse(neynarResponse); - expect(cast).toBeDefined(); - expect(cast.hash).toBe('cast-1'); - expect(cast.authorFid).toBe('123'); - expect(cast.text).toBe('Test cast'); - expect(cast.profile).toBeDefined(); - expect(cast.profile.fid).toBe('123'); - expect(cast.profile.username).toBe('test.farcaster'); - }); - - it('should handle cast with parent', async () => { - const neynarResponse = { - hash: 'cast-2', - author: { fid: '123' }, - text: 'Reply cast', - parent_hash: 'cast-1', - parent_author: { fid: '456' }, - timestamp: '2025-01-20T20:00:00Z' - }; - - const cast = await client.loadCastFromNeynarResponse(neynarResponse); - expect(cast.inReplyTo).toBeDefined(); - expect(cast.inReplyTo?.hash).toBe('cast-1'); - expect(cast.inReplyTo?.fid).toBe('456'); - }); - }); - - describe('getProfile', () => { - it('should fetch profile successfully', async () => { - const profile = await client.getProfile('123'); - expect(profile).toBeDefined(); - expect(profile.fid).toBe('123'); - expect(profile.username).toBe('test.farcaster'); - expect(profile.name).toBe('Test User'); - }); - - it('should handle profile fetch errors', async () => { - vi.mocked(client.neynar.fetchBulkUsers).mockRejectedValueOnce(new Error('Profile fetch failed')); - await expect(client.getProfile('123')).rejects.toThrow('Profile fetch failed'); - }); - }); - - describe('getCastsByFid', () => { - it('should fetch casts successfully', async () => { - const casts = await client.getCastsByFid({ fid: '123', pageSize: 10 }); - expect(casts).toHaveLength(1); - expect(casts[0].hash).toBe('cast-1'); - expect(casts[0].authorFid).toBe('123'); - expect(casts[0].text).toBe('Test cast'); - }); - - it('should handle cast fetch errors', async () => { - vi.mocked(client.neynar.fetchCastsForUser).mockRejectedValueOnce(new Error('Cast fetch failed')); - await expect(client.getCastsByFid({ fid: '123', pageSize: 10 })).rejects.toThrow('Cast fetch failed'); - }); - }); -}); diff --git a/packages/client-farcaster/__tests__/interactions.test.ts b/packages/client-farcaster/__tests__/interactions.test.ts deleted file mode 100644 index 5227787eff660..0000000000000 --- a/packages/client-farcaster/__tests__/interactions.test.ts +++ /dev/null @@ -1,183 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { createTestInteraction, handleTestInteraction } from './test-utils'; -import { FarcasterClient } from '../src/client'; -import { NeynarAPIClient } from '@neynar/nodejs-sdk'; -import type { Cast, Profile } from '../src/types'; - -// Mock dependencies -vi.mock('@neynar/nodejs-sdk', () => ({ - NeynarAPIClient: vi.fn().mockImplementation(() => ({ - publishCast: vi.fn().mockImplementation(({ text, parent }) => { - if (parent) { - return Promise.resolve({ - success: true, - cast: { - hash: 'interaction-1', - author: { fid: '123' }, - text: text || 'Interaction', - parent_hash: parent, - timestamp: '2025-01-20T20:00:00Z' - } - }); - } - return Promise.resolve({ - success: true, - cast: { - hash: 'cast-1', - author: { fid: '123' }, - text: text, - timestamp: '2025-01-20T20:00:00Z' - } - }); - }), - fetchBulkUsers: vi.fn().mockResolvedValue({ - users: [{ - fid: '123', - username: 'test.farcaster', - display_name: 'Test User', - pfp: { - url: 'https://example.com/pic.jpg' - } - }] - }) - })) -})); - -describe('Interactions', () => { - const mockCast: Cast = { - hash: 'cast-1', - authorFid: '123', - text: 'Test cast', - timestamp: new Date('2025-01-20T20:00:00Z'), - profile: { - fid: '123', - username: 'test.farcaster', - name: 'Test User', - pfp: 'https://example.com/pic.jpg' - }, - stats: { - recasts: 5, - replies: 3, - likes: 10 - } - }; - - const mockProfile: Profile = { - fid: '456', - username: 'other.farcaster', - name: 'Other User', - pfp: 'https://example.com/other-pic.jpg' - }; - - describe('createTestInteraction', () => { - it('should create recast interaction when conditions are met', () => { - const interaction = createTestInteraction(mockCast, mockProfile); - expect(interaction).toBeDefined(); - if (interaction) { - expect(['RECAST', 'REPLY', 'LIKE']).toContain(interaction.type); - } - }); - - it('should return null when no interaction is needed', () => { - const lowStatsCast = { - ...mockCast, - stats: { - recasts: 0, - replies: 0, - likes: 0 - } - }; - const interaction = createTestInteraction(lowStatsCast, mockProfile); - expect(interaction).toBeNull(); - }); - }); - - describe('handleTestInteraction', () => { - let client: FarcasterClient; - - beforeEach(() => { - vi.clearAllMocks(); - client = new FarcasterClient({ - runtime: { - name: 'test-runtime', - memory: new Map(), - getMemory: vi.fn(), - setMemory: vi.fn(), - clearMemory: vi.fn() - }, - url: 'https://api.example.com', - ssl: true, - neynar: new NeynarAPIClient({ apiKey: 'test-key' }), - signerUuid: 'test-signer', - cache: new Map(), - farcasterConfig: { - apiKey: 'test-key', - signerUuid: 'test-signer' - } - }); - }); - - it('should handle recast interaction successfully', async () => { - const interaction = { - type: 'RECAST' as const, - castId: 'cast-1' - }; - - const result = await handleTestInteraction(client, interaction); - expect(result).toBeDefined(); - expect(result.success).toBe(true); - expect(result.cast.parent_hash).toBe('cast-1'); - expect(client.neynar.publishCast).toHaveBeenCalledWith({ - text: '', - parent: 'cast-1', - signerUuid: 'test-signer' - }); - }); - - it('should handle reply interaction successfully', async () => { - const interaction = { - type: 'REPLY' as const, - castId: 'cast-1', - content: 'Test reply' - }; - - const result = await handleTestInteraction(client, interaction); - expect(result).toBeDefined(); - expect(result.success).toBe(true); - expect(result.cast.parent_hash).toBe('cast-1'); - expect(result.cast.text).toBe('Test reply'); - expect(client.neynar.publishCast).toHaveBeenCalledWith({ - text: 'Test reply', - parent: 'cast-1', - signerUuid: 'test-signer' - }); - }); - - it('should handle like interaction successfully', async () => { - const interaction = { - type: 'LIKE' as const, - castId: 'cast-1' - }; - - const result = await handleTestInteraction(client, interaction); - expect(result).toBeDefined(); - expect(result.success).toBe(true); - expect(result.cast.parent_hash).toBe('cast-1'); - expect(client.neynar.publishCast).toHaveBeenCalledWith({ - text: '', - parent: 'cast-1', - signerUuid: 'test-signer' - }); - }); - - it('should handle interaction errors', async () => { - const interaction = { - type: 'RECAST' as const, - castId: 'cast-1' - }; - - vi.mocked(client.neynar.publishCast).mockRejectedValueOnce(new Error('Interaction failed')); - await expect(handleTestInteraction(client, interaction)).rejects.toThrow('Interaction failed'); - }); - }); -}); diff --git a/packages/client-farcaster/__tests__/test-utils.ts b/packages/client-farcaster/__tests__/test-utils.ts deleted file mode 100644 index 7ee0541406239..0000000000000 --- a/packages/client-farcaster/__tests__/test-utils.ts +++ /dev/null @@ -1,59 +0,0 @@ -import type { FarcasterClient } from '../src/client'; -import type { Cast, Profile } from '../src/types'; - -export interface TestInteraction { - type: 'RECAST' | 'REPLY' | 'LIKE'; - castId?: string; - content?: string; -} - -export function createTestInteraction(cast: Cast, profile: Profile): TestInteraction | null { - const stats = cast.stats; - - // Simple heuristic: if the cast has good engagement, recast it - if (stats.recasts > 3 || stats.replies > 2 || stats.likes > 5) { - return { - type: 'RECAST', - castId: cast.hash - }; - } - - // If the cast is engaging but not viral, reply to it - if (stats.replies > 0 || stats.likes > 2) { - return { - type: 'REPLY', - castId: cast.hash, - content: 'Interesting perspective!' - }; - } - - return null; -} - -export async function handleTestInteraction(client: FarcasterClient, interaction: TestInteraction) { - switch (interaction.type) { - case 'RECAST': - if (!interaction.castId) throw new Error('Cast ID required for recast'); - return await client.publishCast('', { hash: interaction.castId }); - case 'REPLY': - if (!interaction.castId || !interaction.content) { - throw new Error('Cast ID and content required for reply'); - } - return await client.publishCast(interaction.content, { hash: interaction.castId }); - case 'LIKE': - if (!interaction.castId) throw new Error('Cast ID required for like'); - return await client.publishCast('', { hash: interaction.castId }); - default: - throw new Error('Unknown interaction type'); - } -} - -export async function createTestCast(client: FarcasterClient, content: string) { - if (!content) { - throw new Error('Cast content cannot be empty'); - } - if (content.length > 320) { - throw new Error('Cast content too long'); - } - return await client.publishCast(content, undefined); -} diff --git a/packages/client-farcaster/eslint.config.mjs b/packages/client-farcaster/eslint.config.mjs deleted file mode 100644 index 92fe5bbebefad..0000000000000 --- a/packages/client-farcaster/eslint.config.mjs +++ /dev/null @@ -1,3 +0,0 @@ -import eslintGlobalConfig from "../../eslint.config.mjs"; - -export default [...eslintGlobalConfig]; diff --git a/packages/client-farcaster/package.json b/packages/client-farcaster/package.json deleted file mode 100644 index e42b50a6fc29e..0000000000000 --- a/packages/client-farcaster/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "@elizaos/client-farcaster", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "@neynar/nodejs-sdk": "^2.0.3" - }, - "devDependencies": { - "tsup": "^8.3.5", - "vitest": "^3.0.0" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "lint": "eslint --fix --cache .", - "test": "vitest run", - "test:watch": "vitest watch", - "test:coverage": "vitest run --coverage" - } -} diff --git a/packages/client-farcaster/src/actions.ts b/packages/client-farcaster/src/actions.ts deleted file mode 100644 index a70318dc1824c..0000000000000 --- a/packages/client-farcaster/src/actions.ts +++ /dev/null @@ -1,58 +0,0 @@ -import type { FarcasterClient } from "./client"; -import type { Content, IAgentRuntime, Memory, UUID } from "@elizaos/core"; -import type { Cast, CastId, Profile } from "./types"; -import { createCastMemory } from "./memory"; -import { splitPostContent } from "./utils"; - -export async function sendCast({ - client, - runtime, - content, - roomId, - inReplyTo, - profile, -}: { - profile: Profile; - client: FarcasterClient; - runtime: IAgentRuntime; - content: Content; - roomId: UUID; - signerUuid: string; - inReplyTo?: CastId; -}): Promise<{ memory: Memory; cast: Cast }[]> { - const chunks = splitPostContent(content.text); - const sent: Cast[] = []; - let parentCastId = inReplyTo; - - for (const chunk of chunks) { - const neynarCast = await client.publishCast(chunk, parentCastId); - - if (neynarCast) { - const cast: Cast = { - hash: neynarCast.hash, - authorFid: neynarCast.authorFid, - text: neynarCast.text, - profile, - inReplyTo: parentCastId, - timestamp: new Date(), - }; - - sent.push(cast!); - - parentCastId = { - fid: neynarCast.authorFid!, - hash: neynarCast.hash!, - }; - } - } - - return sent.map((cast) => ({ - cast, - memory: createCastMemory({ - roomId, - senderId: runtime.agentId, - runtime, - cast, - }), - })); -} diff --git a/packages/client-farcaster/src/client.ts b/packages/client-farcaster/src/client.ts deleted file mode 100644 index adc4cda9db14a..0000000000000 --- a/packages/client-farcaster/src/client.ts +++ /dev/null @@ -1,235 +0,0 @@ -import { type IAgentRuntime, elizaLogger } from "@elizaos/core"; -import { type NeynarAPIClient, isApiErrorResponse } from "@neynar/nodejs-sdk"; -import type { NeynarCastResponse, Cast, Profile, FidRequest, CastId } from "./types"; -import type { FarcasterConfig } from "./environment"; - -export class FarcasterClient { - runtime: IAgentRuntime; - neynar: NeynarAPIClient; - signerUuid: string; - cache: Map; - lastInteractionTimestamp: Date; - farcasterConfig: FarcasterConfig; - - constructor(opts: { - runtime: IAgentRuntime; - url: string; - ssl: boolean; - neynar: NeynarAPIClient; - signerUuid: string; - cache: Map; - farcasterConfig: FarcasterConfig; - }) { - this.cache = opts.cache; - this.runtime = opts.runtime; - this.neynar = opts.neynar; - this.signerUuid = opts.signerUuid; - this.lastInteractionTimestamp = new Date(); - this.farcasterConfig = opts.farcasterConfig; - } - - async loadCastFromNeynarResponse(neynarResponse: any): Promise { - const profile = await this.getProfile(neynarResponse.author.fid); - return { - hash: neynarResponse.hash, - authorFid: neynarResponse.author.fid, - text: neynarResponse.text, - profile, - ...(neynarResponse.parent_hash - ? { - inReplyTo: { - hash: neynarResponse.parent_hash, - fid: neynarResponse.parent_author.fid, - }, - } - : {}), - timestamp: new Date(neynarResponse.timestamp), - }; - } - - async publishCast( - cast: string, - parentCastId: CastId | undefined, - // eslint-disable-next-line - retryTimes?: number - ): Promise { - try { - const result = await this.neynar.publishCast({ - signerUuid: this.signerUuid, - text: cast, - parent: parentCastId?.hash, - }); - if (result.success) { - return { - hash: result.cast.hash, - authorFid: result.cast.author.fid, - text: result.cast.text, - }; - } - } catch (err) { - if (isApiErrorResponse(err)) { - elizaLogger.error("Neynar error: ", err.response.data); - throw err.response.data; - } else { - elizaLogger.error("Error: ", err); - throw err; - } - } - } - - async getCast(castHash: string): Promise { - if (this.cache.has(`farcaster/cast/${castHash}`)) { - return this.cache.get(`farcaster/cast/${castHash}`); - } - - const response = await this.neynar.lookupCastByHashOrWarpcastUrl({ - identifier: castHash, - type: "hash", - }); - const cast = { - hash: response.cast.hash, - authorFid: response.cast.author.fid, - text: response.cast.text, - profile: { - fid: response.cast.author.fid, - name: response.cast.author.display_name || "anon", - username: response.cast.author.username, - }, - ...(response.cast.parent_hash - ? { - inReplyTo: { - hash: response.cast.parent_hash, - fid: response.cast.parent_author.fid, - }, - } - : {}), - timestamp: new Date(response.cast.timestamp), - }; - - this.cache.set(`farcaster/cast/${castHash}`, cast); - - return cast; - } - - async getCastsByFid(request: FidRequest): Promise { - const timeline: Cast[] = []; - - const response = await this.neynar.fetchCastsForUser({ - fid: request.fid, - limit: request.pageSize, - }); - response.casts.map((cast) => { - this.cache.set(`farcaster/cast/${cast.hash}`, cast); - timeline.push({ - hash: cast.hash, - authorFid: cast.author.fid, - text: cast.text, - profile: { - fid: cast.author.fid, - name: cast.author.display_name || "anon", - username: cast.author.username, - }, - timestamp: new Date(cast.timestamp), - }); - }); - - return timeline; - } - - async getMentions(request: FidRequest): Promise { - const neynarMentionsResponse = await this.neynar.fetchAllNotifications({ - fid: request.fid, - type: ["mentions", "replies"], - }); - const mentions: Cast[] = []; - - neynarMentionsResponse.notifications.map((notification) => { - const cast = { - hash: notification.cast!.hash, - authorFid: notification.cast!.author.fid, - text: notification.cast!.text, - profile: { - fid: notification.cast!.author.fid, - name: notification.cast!.author.display_name || "anon", - username: notification.cast!.author.username, - }, - ...(notification.cast!.parent_hash - ? { - inReplyTo: { - hash: notification.cast!.parent_hash, - fid: notification.cast!.parent_author.fid, - }, - } - : {}), - timestamp: new Date(notification.cast!.timestamp), - }; - mentions.push(cast); - this.cache.set(`farcaster/cast/${cast.hash}`, cast); - }); - - return mentions; - } - - async getProfile(fid: number): Promise { - if (this.cache.has(`farcaster/profile/${fid}`)) { - return this.cache.get(`farcaster/profile/${fid}`) as Profile; - } - - const result = await this.neynar.fetchBulkUsers({ fids: [fid] }); - if (!result.users || result.users.length < 1) { - elizaLogger.error("Error fetching user by fid"); - - throw "getProfile ERROR"; - } - - const neynarUserProfile = result.users[0]; - - const profile: Profile = { - fid, - name: "", - username: "", - }; - - /* - const userDataBodyType = { - 1: "pfp", - 2: "name", - 3: "bio", - 5: "url", - 6: "username", - // 7: "location", - // 8: "twitter", - // 9: "github", - } as const; - */ - - profile.name = neynarUserProfile.display_name!; - profile.username = neynarUserProfile.username; - profile.bio = neynarUserProfile.profile.bio.text; - profile.pfp = neynarUserProfile.pfp_url; - - this.cache.set(`farcaster/profile/${fid}`, profile); - - return profile; - } - - async getTimeline(request: FidRequest): Promise<{ - timeline: Cast[]; - nextPageToken?: Uint8Array | undefined; - }> { - const timeline: Cast[] = []; - - const results = await this.getCastsByFid(request); - - for (const cast of results) { - this.cache.set(`farcaster/cast/${cast.hash}`, cast); - timeline.push(cast); - } - - return { - timeline, - //TODO implement paging - //nextPageToken: results.nextPageToken, - }; - } -} diff --git a/packages/client-farcaster/src/environment.ts b/packages/client-farcaster/src/environment.ts deleted file mode 100644 index 8276f65d31ad3..0000000000000 --- a/packages/client-farcaster/src/environment.ts +++ /dev/null @@ -1,140 +0,0 @@ -import { - parseBooleanFromText, - type IAgentRuntime, - ActionTimelineType, -} from "@elizaos/core"; -import { z, ZodError } from "zod"; - -export const DEFAULT_MAX_CAST_LENGTH = 320; -const DEFAULT_POLL_INTERVAL= 120; // 2 minutes -const DEFAULT_POST_INTERVAL_MIN = 90; // 1.5 hours -const DEFAULT_POST_INTERVAL_MAX = 180; // 3 hours -/** - * This schema defines all required/optional environment settings for Farcaster client - */ -export const farcasterEnvSchema = z.object({ - FARCASTER_DRY_RUN: z.boolean(), - FARCASTER_FID: z.number().int().min(1, "Farcaster fid is required"), - MAX_CAST_LENGTH: z.number().int().default(DEFAULT_MAX_CAST_LENGTH), - FARCASTER_POLL_INTERVAL: z.number().int().default(DEFAULT_POLL_INTERVAL), - ENABLE_POST: z.boolean(), - POST_INTERVAL_MIN: z.number().int(), - POST_INTERVAL_MAX: z.number().int(), - ENABLE_ACTION_PROCESSING: z.boolean(), - ACTION_INTERVAL: z.number().int(), - POST_IMMEDIATELY: z.boolean(), - MAX_ACTIONS_PROCESSING: z.number().int(), - ACTION_TIMELINE_TYPE: z - .nativeEnum(ActionTimelineType) - .default(ActionTimelineType.ForYou), -}); - -export type FarcasterConfig = z.infer; - -function safeParseInt( - value: string | undefined | null, - defaultValue: number -): number { - if (!value) return defaultValue; - const parsed = Number.parseInt(value, 10); - return Number.isNaN(parsed) ? defaultValue : Math.max(1, parsed); -} - -/** - * Validates or constructs a FarcasterConfig object using zod, - * taking values from the IAgentRuntime or process.env as needed. - */ -export async function validateFarcasterConfig( - runtime: IAgentRuntime -): Promise { - try { - const farcasterConfig = { - FARCASTER_DRY_RUN: - parseBooleanFromText( - runtime.getSetting("FARCASTER_DRY_RUN") || - process.env.FARCASTER_DRY_RUN || - "false" - ), - - FARCASTER_FID: safeParseInt( - runtime.getSetting("FARCASTER_FID") || - process.env.FARCASTER_FID, - 0 - ), - - MAX_CAST_LENGTH: safeParseInt( - runtime.getSetting("MAX_CAST_LENGTH") || - process.env.MAX_CAST_LENGTH, - DEFAULT_MAX_CAST_LENGTH - ), - - FARCASTER_POLL_INTERVAL: safeParseInt( - runtime.getSetting("FARCASTER_POLL_INTERVAL") || - process.env.FARCASTER_POLL_INTERVAL, - DEFAULT_POLL_INTERVAL - ), - - ENABLE_POST: parseBooleanFromText( - runtime.getSetting("ENABLE_POST") || - process.env.ENABLE_POST || - "true" - ), - - POST_INTERVAL_MIN: safeParseInt( - runtime.getSetting("POST_INTERVAL_MIN") || - process.env.POST_INTERVAL_MIN, - DEFAULT_POST_INTERVAL_MIN - ), - - POST_INTERVAL_MAX: safeParseInt( - runtime.getSetting("POST_INTERVAL_MAX") || - process.env.POST_INTERVAL_MAX, - DEFAULT_POST_INTERVAL_MAX - ), - - ENABLE_ACTION_PROCESSING: - parseBooleanFromText( - runtime.getSetting("ENABLE_ACTION_PROCESSING") || - process.env.ENABLE_ACTION_PROCESSING || - "false" - ) ?? false, - - ACTION_INTERVAL: safeParseInt( - runtime.getSetting("ACTION_INTERVAL") || - process.env.ACTION_INTERVAL, - 5 // 5 minutes - ), - - POST_IMMEDIATELY: - parseBooleanFromText( - runtime.getSetting("POST_IMMEDIATELY") || - process.env.POST_IMMEDIATELY || - "false" - ) ?? false, - - MAX_ACTIONS_PROCESSING: safeParseInt( - runtime.getSetting("MAX_ACTIONS_PROCESSING") || - process.env.MAX_ACTIONS_PROCESSING, - 1 - ), - - ACTION_TIMELINE_TYPE: ( - runtime.getSetting("ACTION_TIMELINE_TYPE") || - process.env.ACTION_TIMELINE_TYPE || - ActionTimelineType.ForYou - ) as ActionTimelineType, - }; - - return farcasterEnvSchema.parse(farcasterConfig); - } catch (error) { - if (error instanceof ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `Farcaster configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/client-farcaster/src/index.ts b/packages/client-farcaster/src/index.ts deleted file mode 100644 index fbfad4d1962a3..0000000000000 --- a/packages/client-farcaster/src/index.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { type Client, type IAgentRuntime, elizaLogger } from "@elizaos/core"; -import { FarcasterClient } from "./client"; -import { FarcasterPostManager } from "./post"; -import { FarcasterInteractionManager } from "./interactions"; -import { Configuration, NeynarAPIClient } from "@neynar/nodejs-sdk"; -import { validateFarcasterConfig, type FarcasterConfig } from "./environment"; - -/** - * A manager that orchestrates all Farcaster operations: - * - client: base operations (Neynar client, hub connection, etc.) - * - posts: autonomous posting logic - * - interactions: handling mentions, replies, likes, etc. - */ -class FarcasterManager { - client: FarcasterClient; - posts: FarcasterPostManager; - interactions: FarcasterInteractionManager; - private signerUuid: string; - - constructor(runtime: IAgentRuntime, farcasterConfig: FarcasterConfig) { - const cache = new Map(); - this.signerUuid = runtime.getSetting("FARCASTER_NEYNAR_SIGNER_UUID")!; - - const neynarConfig = new Configuration({ - apiKey: runtime.getSetting("FARCASTER_NEYNAR_API_KEY")!, - }); - - const neynarClient = new NeynarAPIClient(neynarConfig); - - this.client = new FarcasterClient({ - runtime, - ssl: true, - url: runtime.getSetting("FARCASTER_HUB_URL") ?? "hub.pinata.cloud", - neynar: neynarClient, - signerUuid: this.signerUuid, - cache, - farcasterConfig, - }); - - elizaLogger.success("Farcaster Neynar client initialized."); - - this.posts = new FarcasterPostManager( - this.client, - runtime, - this.signerUuid, - cache - ); - - this.interactions = new FarcasterInteractionManager( - this.client, - runtime, - this.signerUuid, - cache - ); - } - - async start() { - await Promise.all([this.posts.start(), this.interactions.start()]); - } - - async stop() { - await Promise.all([this.posts.stop(), this.interactions.stop()]); - } -} - -export const FarcasterClientInterface: Client = { - async start(runtime: IAgentRuntime) { - const farcasterConfig = await validateFarcasterConfig(runtime); - - elizaLogger.log("Farcaster client started"); - - const manager = new FarcasterManager(runtime, farcasterConfig); - - // Start all services - await manager.start(); - runtime.clients.farcaster = manager; - return manager; - }, - - async stop(runtime: IAgentRuntime) { - try { - // stop it - elizaLogger.log("Stopping farcaster client", runtime.agentId); - if (runtime.clients.farcaster) { - await runtime.clients.farcaster.stop(); - } - } catch (e) { - elizaLogger.error("client-farcaster interface stop error", e); - } - }, -}; - -export default FarcasterClientInterface; diff --git a/packages/client-farcaster/src/interactions.ts b/packages/client-farcaster/src/interactions.ts deleted file mode 100644 index 685fb0fa861cd..0000000000000 --- a/packages/client-farcaster/src/interactions.ts +++ /dev/null @@ -1,291 +0,0 @@ -import { - composeContext, - generateMessageResponse, - generateShouldRespond, - type Memory, - ModelClass, - stringToUuid, - elizaLogger, - type HandlerCallback, - type Content, - type IAgentRuntime, -} from "@elizaos/core"; -import type { FarcasterClient } from "./client"; -import { toHex } from "viem"; -import { buildConversationThread, createCastMemory } from "./memory"; -import type { Cast, Profile } from "./types"; -import { - formatCast, - formatTimeline, - messageHandlerTemplate, - shouldRespondTemplate, -} from "./prompts"; -import { castUuid } from "./utils"; -import { sendCast } from "./actions"; - -export class FarcasterInteractionManager { - private timeout: NodeJS.Timeout | undefined; - constructor( - public client: FarcasterClient, - public runtime: IAgentRuntime, - private signerUuid: string, - public cache: Map - ) {} - - public async start() { - const handleInteractionsLoop = async () => { - try { - await this.handleInteractions(); - } catch (error) { - elizaLogger.error(error); - } - - // Always set up next check, even if there was an error - this.timeout = setTimeout( - handleInteractionsLoop, - Number(this.client.farcasterConfig?.FARCASTER_POLL_INTERVAL ?? 120) * - 1000 // Default to 2 minutes - ); - }; - - handleInteractionsLoop(); - } - - public async stop() { - if (this.timeout) clearTimeout(this.timeout); - } - - private async handleInteractions() { - const agentFid = this.client.farcasterConfig?.FARCASTER_FID ?? 0; - if (!agentFid) { - elizaLogger.info("No FID found, skipping interactions"); - return; - } - - const mentions = await this.client.getMentions({ - fid: agentFid, - pageSize: 10, - }); - - const agent = await this.client.getProfile(agentFid); - for (const mention of mentions) { - const messageHash = toHex(mention.hash); - const conversationId = `${messageHash}-${this.runtime.agentId}`; - const roomId = stringToUuid(conversationId); - const userId = stringToUuid(mention.authorFid.toString()); - - const pastMemoryId = castUuid({ - agentId: this.runtime.agentId, - hash: mention.hash, - }); - - const pastMemory = - await this.runtime.messageManager.getMemoryById(pastMemoryId); - - if (pastMemory) { - continue; - } - - await this.runtime.ensureConnection( - userId, - roomId, - mention.profile.username, - mention.profile.name, - "farcaster" - ); - - const thread = await buildConversationThread({ - client: this.client, - runtime: this.runtime, - cast: mention, - }); - - const memory: Memory = { - content: { text: mention.text }, - agentId: this.runtime.agentId, - userId, - roomId, - }; - - await this.handleCast({ - agent, - cast: mention, - memory, - thread, - }); - } - - this.client.lastInteractionTimestamp = new Date(); - } - - private async handleCast({ - agent, - cast, - memory, - thread, - }: { - agent: Profile; - cast: Cast; - memory: Memory; - thread: Cast[]; - }) { - if (cast.profile.fid === agent.fid) { - elizaLogger.info("skipping cast from bot itself", cast.hash); - return; - } - - if (!memory.content.text) { - elizaLogger.info("skipping cast with no text", cast.hash); - return { text: "", action: "IGNORE" }; - } - - const currentPost = formatCast(cast); - - const senderId = stringToUuid(cast.authorFid.toString()); - - const { timeline } = await this.client.getTimeline({ - fid: agent.fid, - pageSize: 10, - }); - - const formattedTimeline = formatTimeline( - this.runtime.character, - timeline - ); - - const formattedConversation = thread - .map( - (cast) => `@${cast.profile.username} (${new Date( - cast.timestamp - ).toLocaleString("en-US", { - hour: "2-digit", - minute: "2-digit", - month: "short", - day: "numeric", - })}): - ${cast.text}` - ) - .join("\n\n"); - - const state = await this.runtime.composeState(memory, { - farcasterUsername: agent.username, - timeline: formattedTimeline, - currentPost, - formattedConversation, - }); - - const shouldRespondContext = composeContext({ - state, - template: - this.runtime.character.templates - ?.farcasterShouldRespondTemplate || - this.runtime.character?.templates?.shouldRespondTemplate || - shouldRespondTemplate, - }); - - const memoryId = castUuid({ - agentId: this.runtime.agentId, - hash: cast.hash, - }); - - const castMemory = - await this.runtime.messageManager.getMemoryById(memoryId); - - if (!castMemory) { - await this.runtime.messageManager.createMemory( - createCastMemory({ - roomId: memory.roomId, - senderId, - runtime: this.runtime, - cast, - }) - ); - } - - const shouldRespondResponse = await generateShouldRespond({ - runtime: this.runtime, - context: shouldRespondContext, - modelClass: ModelClass.SMALL, - }); - - if ( - shouldRespondResponse === "IGNORE" || - shouldRespondResponse === "STOP" - ) { - elizaLogger.info( - `Not responding to cast because generated ShouldRespond was ${shouldRespondResponse}` - ); - return; - } - - const context = composeContext({ - state, - template: - this.runtime.character.templates - ?.farcasterMessageHandlerTemplate ?? - this.runtime.character?.templates?.messageHandlerTemplate ?? - messageHandlerTemplate, - }); - - const responseContent = await generateMessageResponse({ - runtime: this.runtime, - context, - modelClass: ModelClass.LARGE, - }); - - responseContent.inReplyTo = memoryId; - - if (!responseContent.text) return; - - if (this.client.farcasterConfig?.FARCASTER_DRY_RUN) { - elizaLogger.info( - `Dry run: would have responded to cast ${cast.hash} with ${responseContent.text}` - ); - return; - } - - const callback: HandlerCallback = async ( - content: Content, - _files: any[] - ) => { - try { - if (memoryId && !content.inReplyTo) { - content.inReplyTo = memoryId; - } - const results = await sendCast({ - runtime: this.runtime, - client: this.client, - signerUuid: this.signerUuid, - profile: cast.profile, - content: content, - roomId: memory.roomId, - inReplyTo: { - fid: cast.authorFid, - hash: cast.hash, - }, - }); - // sendCast lost response action, so we need to add it back here - results[0].memory.content.action = content.action; - - for (const { memory } of results) { - await this.runtime.messageManager.createMemory(memory); - } - return results.map((result) => result.memory); - } catch (error) { - elizaLogger.error("Error sending response cast:", error); - return []; - } - }; - - const responseMessages = await callback(responseContent); - - const newState = await this.runtime.updateRecentMessageState(state); - - await this.runtime.processActions( - { ...memory, content: { ...memory.content, cast } }, - responseMessages, - newState, - callback - ); - } -} diff --git a/packages/client-farcaster/src/memory.ts b/packages/client-farcaster/src/memory.ts deleted file mode 100644 index ba77478d1fed9..0000000000000 --- a/packages/client-farcaster/src/memory.ts +++ /dev/null @@ -1,110 +0,0 @@ -import { - elizaLogger, - getEmbeddingZeroVector, - type IAgentRuntime, - stringToUuid, - type Memory, - type UUID, -} from "@elizaos/core"; -import type { Cast } from "./types"; -import { toHex } from "viem"; -import { castUuid } from "./utils"; -import type { FarcasterClient } from "./client"; - -export function createCastMemory({ - roomId, - senderId, - runtime, - cast, -}: { - roomId: UUID; - senderId: UUID; - runtime: IAgentRuntime; - cast: Cast; -}): Memory { - const inReplyTo = cast.inReplyTo - ? castUuid({ - hash: toHex(cast.inReplyTo.hash), - agentId: runtime.agentId, - }) - : undefined; - - return { - id: castUuid({ - hash: cast.hash, - agentId: runtime.agentId, - }), - agentId: runtime.agentId, - userId: senderId, - content: { - text: cast.text, - source: "farcaster", - url: "", - inReplyTo, - hash: cast.hash, - }, - roomId, - embedding: getEmbeddingZeroVector(), - }; -} - -export async function buildConversationThread({ - cast, - runtime, - client, -}: { - cast: Cast; - runtime: IAgentRuntime; - client: FarcasterClient; -}): Promise { - const thread: Cast[] = []; - const visited: Set = new Set(); - async function processThread(currentCast: Cast) { - if (visited.has(currentCast.hash)) { - return; - } - - visited.add(currentCast.hash); - - const roomId = castUuid({ - hash: currentCast.hash, - agentId: runtime.agentId, - }); - - // Check if the current cast has already been saved - const memory = await runtime.messageManager.getMemoryById(roomId); - - if (!memory) { - elizaLogger.log("Creating memory for cast", currentCast.hash); - - const userId = stringToUuid(currentCast.authorFid.toString()); - - await runtime.ensureConnection( - userId, - roomId, - currentCast.profile.username, - currentCast.profile.name, - "farcaster" - ); - - await runtime.messageManager.createMemory( - createCastMemory({ - roomId, - senderId: userId, - runtime, - cast: currentCast, - }) - ); - } - - thread.unshift(currentCast); - - if (currentCast.inReplyTo) { - const parentCast = await client.getCast(currentCast.inReplyTo.hash); - await processThread(parentCast); - } - } - - await processThread(cast); - return thread; -} diff --git a/packages/client-farcaster/src/post.ts b/packages/client-farcaster/src/post.ts deleted file mode 100644 index afcfa4b1cf3b2..0000000000000 --- a/packages/client-farcaster/src/post.ts +++ /dev/null @@ -1,233 +0,0 @@ -import { - composeContext, - generateText, - type IAgentRuntime, - ModelClass, - stringToUuid, - elizaLogger, -} from "@elizaos/core"; -import type { FarcasterClient } from "./client"; -import { formatTimeline, postTemplate } from "./prompts"; -import { castUuid, MAX_CAST_LENGTH } from "./utils"; -import { createCastMemory } from "./memory"; -import { sendCast } from "./actions"; - -export class FarcasterPostManager { - client: FarcasterClient; - runtime: IAgentRuntime; - fid: number; - isDryRun: boolean; - private timeout: NodeJS.Timeout | undefined; - - constructor( - client: FarcasterClient, - runtime: IAgentRuntime, - private signerUuid: string, - public cache: Map - ) { - this.client = client; - this.runtime = runtime; - - this.fid = this.client.farcasterConfig?.FARCASTER_FID ?? 0; - this.isDryRun = this.client.farcasterConfig?.FARCASTER_DRY_RUN ?? false; - - // Log configuration on initialization - elizaLogger.log("Farcaster Client Configuration:"); - elizaLogger.log(`- FID: ${this.fid}`); - elizaLogger.log( - `- Dry Run Mode: ${this.isDryRun ? "enabled" : "disabled"}` - ); - elizaLogger.log( - `- Enable Post: ${this.client.farcasterConfig.ENABLE_POST ? "enabled" : "disabled"}` - ); - if (this.client.farcasterConfig.ENABLE_POST) { - elizaLogger.log( - `- Post Interval: ${this.client.farcasterConfig.POST_INTERVAL_MIN}-${this.client.farcasterConfig.POST_INTERVAL_MAX} minutes` - ); - elizaLogger.log( - `- Post Immediately: ${this.client.farcasterConfig.POST_IMMEDIATELY ? "enabled" : "disabled"}` - ); - } - elizaLogger.log( - `- Action Processing: ${this.client.farcasterConfig.ENABLE_ACTION_PROCESSING ? "enabled" : "disabled"}` - ); - elizaLogger.log( - `- Action Interval: ${this.client.farcasterConfig.ACTION_INTERVAL} minutes` - ); - - if (this.isDryRun) { - elizaLogger.log( - "Farcaster client initialized in dry run mode - no actual casts should be posted" - ); - } - } - - public async start() { - const generateNewCastLoop = async () => { - - const lastPost = await this.runtime.cacheManager.get<{ - timestamp: number; - }>("farcaster/" + this.fid + "/lastPost"); - - const lastPostTimestamp = lastPost?.timestamp ?? 0; - const minMinutes = this.client.farcasterConfig.POST_INTERVAL_MIN; - const maxMinutes = this.client.farcasterConfig.POST_INTERVAL_MAX; - const randomMinutes = - Math.floor(Math.random() * (maxMinutes - minMinutes + 1)) + - minMinutes; - const delay = randomMinutes * 60 * 1000; - - if (Date.now() > lastPostTimestamp + delay) { - try { - await this.generateNewCast(); - } catch (error) { - elizaLogger.error(error); - return; - } - } - - this.timeout = setTimeout(() => { - generateNewCastLoop(); // Set up next iteration - }, delay); - - elizaLogger.log(`Next cast scheduled in ${randomMinutes} minutes`); - }; - - if (this.client.farcasterConfig.ENABLE_POST) { - if (this.client.farcasterConfig.POST_IMMEDIATELY) { - await this.generateNewCast(); - } - generateNewCastLoop(); - } - } - - public async stop() { - if (this.timeout) clearTimeout(this.timeout); - } - - private async generateNewCast() { - elizaLogger.info("Generating new cast"); - try { - const profile = await this.client.getProfile(this.fid); - await this.runtime.ensureUserExists( - this.runtime.agentId, - profile.username, - this.runtime.character.name, - "farcaster" - ); - - const { timeline } = await this.client.getTimeline({ - fid: this.fid, - pageSize: 10, - }); - - this.cache.set("farcaster/timeline", timeline); - - const formattedHomeTimeline = formatTimeline( - this.runtime.character, - timeline - ); - - const generateRoomId = stringToUuid("farcaster_generate_room"); - - const state = await this.runtime.composeState( - { - roomId: generateRoomId, - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - content: { text: "", action: "" }, - }, - { - farcasterUserName: profile.username, - timeline: formattedHomeTimeline, - } - ); - - // Generate new cast - const context = composeContext({ - state, - template: - this.runtime.character.templates?.farcasterPostTemplate || - postTemplate, - }); - - const newContent = await generateText({ - runtime: this.runtime, - context, - modelClass: ModelClass.SMALL, - }); - - const slice = newContent.replaceAll(/\\n/g, "\n").trim(); - - let content = slice.slice(0, MAX_CAST_LENGTH); - - // if it's bigger than the max limit, delete the last line - if (content.length > MAX_CAST_LENGTH) { - content = content.slice(0, content.lastIndexOf("\n")); - } - - if (content.length > MAX_CAST_LENGTH) { - // slice at the last period - content = content.slice(0, content.lastIndexOf(".")); - } - - // if it's still too long, get the period before the last period - if (content.length > MAX_CAST_LENGTH) { - content = content.slice(0, content.lastIndexOf(".")); - } - - if (this.runtime.getSetting("FARCASTER_DRY_RUN") === "true") { - elizaLogger.info(`Dry run: would have cast: ${content}`); - return; - } - - try { - const [{ cast }] = await sendCast({ - client: this.client, - runtime: this.runtime, - signerUuid: this.signerUuid, - roomId: generateRoomId, - content: { text: content }, - profile, - }); - - await this.runtime.cacheManager.set( - `farcaster/${this.fid}/lastCast`, - { - hash: cast.hash, - timestamp: Date.now(), - } - ); - - const roomId = castUuid({ - agentId: this.runtime.agentId, - hash: cast.hash, - }); - - await this.runtime.ensureRoomExists(roomId); - - await this.runtime.ensureParticipantInRoom( - this.runtime.agentId, - roomId - ); - - elizaLogger.info( - `[Farcaster Neynar Client] Published cast ${cast.hash}` - ); - - await this.runtime.messageManager.createMemory( - createCastMemory({ - roomId, - senderId: this.runtime.agentId, - runtime: this.runtime, - cast, - }) - ); - } catch (error) { - elizaLogger.error("Error sending cast:", error); - } - } catch (error) { - elizaLogger.error("Error generating new cast:", error); - } - } -} diff --git a/packages/client-farcaster/src/prompts.ts b/packages/client-farcaster/src/prompts.ts deleted file mode 100644 index 3459087f3167c..0000000000000 --- a/packages/client-farcaster/src/prompts.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { - type Character, - messageCompletionFooter, - shouldRespondFooter, -} from "@elizaos/core"; -import type { Cast } from "./types"; - -export const formatCast = (cast: Cast) => { - return `ID: ${cast.hash} - From: ${cast.profile.name} (@${cast.profile.username})${cast.profile.username})${cast.inReplyTo ? `\nIn reply to: ${cast.inReplyTo.fid}` : ""} -Text: ${cast.text}`; -}; - -export const formatTimeline = ( - character: Character, - timeline: Cast[] -) => `# ${character.name}'s Home Timeline -${timeline.map(formatCast).join("\n")} -`; - -export const headerTemplate = ` -{{timeline}} - -# Knowledge -{{knowledge}} - -About {{agentName}} (@{{farcasterUsername}}): -{{bio}} -{{lore}} -{{postDirections}} - -{{providers}} - -{{recentPosts}} - -{{characterPostExamples}}`; - -export const postTemplate = - headerTemplate + - ` -# Task: Generate a post in the voice and style of {{agentName}}, aka @{{farcasterUsername}} -Write a single sentence post that is {{adjective}} about {{topic}} (without mentioning {{topic}} directly), from the perspective of {{agentName}}. -Try to write something totally different than previous posts. Do not add commentary or ackwowledge this request, just write the post. - -Your response should not contain any questions. Brief, concise statements only. No emojis. Use \\n\\n (double spaces) between statements.`; - -export const messageHandlerTemplate = - headerTemplate + - ` -Recent interactions between {{agentName}} and other users: -{{recentPostInteractions}} - -Thread of casts You Are Replying To: -{{formattedConversation}} - -# Task: Generate a post in the voice, style and perspective of {{agentName}} (@{{farcasterUsername}}): -{{currentPost}}` + - messageCompletionFooter; - -export const shouldRespondTemplate = - // - `# Task: Decide if {{agentName}} should respond. - About {{agentName}}: - {{bio}} - - # INSTRUCTIONS: Determine if {{agentName}} (@{{farcasterUsername}}) should respond to the message and participate in the conversation. Do not comment. Just respond with "RESPOND" or "IGNORE" or "STOP". - -Response options are RESPOND, IGNORE and STOP. - -{{agentName}} should respond to messages that are directed at them, or participate in conversations that are interesting or relevant to their background, IGNORE messages that are irrelevant to them, and should STOP if the conversation is concluded. - -{{agentName}} is in a room with other users and wants to be conversational, but not annoying. -{{agentName}} should RESPOND to messages that are directed at them, or participate in conversations that are interesting or relevant to their background. -If a message is not interesting or relevant, {{agentName}} should IGNORE. -If a message thread has become repetitive, {{agentName}} should IGNORE. -Unless directly RESPONDing to a user, {{agentName}} should IGNORE messages that are very short or do not contain much information. -If a user asks {{agentName}} to stop talking, {{agentName}} should STOP. -If {{agentName}} concludes a conversation and isn't part of the conversation anymore, {{agentName}} should STOP. - -IMPORTANT: {{agentName}} (aka @{{farcasterUsername}}) is particularly sensitive about being annoying, so if there is any doubt, it is better to IGNORE than to RESPOND. - -Thread of messages You Are Replying To: -{{formattedConversation}} - -Current message: -{{currentPost}} - -` + shouldRespondFooter; diff --git a/packages/client-farcaster/src/types.ts b/packages/client-farcaster/src/types.ts deleted file mode 100644 index 929216d138b18..0000000000000 --- a/packages/client-farcaster/src/types.ts +++ /dev/null @@ -1,39 +0,0 @@ -export type Profile = { - fid: number; - name: string; - username: string; - pfp?: string; - bio?: string; - url?: string; - // location?: string; - // twitter?: string; - // github?: string; -}; - -export type NeynarCastResponse = { - hash: string; - authorFid: number; - text: string; -}; - -export type Cast = { - hash: string; - authorFid: number; - text: string; - profile: Profile; - inReplyTo?: { - hash: string; - fid: number; - }; - timestamp: Date; -}; - -export type CastId = { - hash: string; - fid: number; -}; - -export type FidRequest = { - fid: number; - pageSize: number; -}; diff --git a/packages/client-farcaster/src/utils.ts b/packages/client-farcaster/src/utils.ts deleted file mode 100644 index 4b22d467e24b2..0000000000000 --- a/packages/client-farcaster/src/utils.ts +++ /dev/null @@ -1,136 +0,0 @@ -import { stringToUuid } from "@elizaos/core"; - -export const MAX_CAST_LENGTH = 1024; // Updated to Twitter's current character limit - -export function castId({ hash, agentId }: { hash: string; agentId: string }) { - return `${hash}-${agentId}`; -} - -export function castUuid(props: { hash: string; agentId: string }) { - return stringToUuid(castId(props)); -} - -export function splitPostContent( - content: string, - maxLength: number = MAX_CAST_LENGTH -): string[] { - const paragraphs = content.split("\n\n").map((p) => p.trim()); - const posts: string[] = []; - let currentTweet = ""; - - for (const paragraph of paragraphs) { - if (!paragraph) continue; - - if ((currentTweet + "\n\n" + paragraph).trim().length <= maxLength) { - if (currentTweet) { - currentTweet += "\n\n" + paragraph; - } else { - currentTweet = paragraph; - } - } else { - if (currentTweet) { - posts.push(currentTweet.trim()); - } - if (paragraph.length <= maxLength) { - currentTweet = paragraph; - } else { - // Split long paragraph into smaller chunks - const chunks = splitParagraph(paragraph, maxLength); - posts.push(...chunks.slice(0, -1)); - currentTweet = chunks[chunks.length - 1]; - } - } - } - - if (currentTweet) { - posts.push(currentTweet.trim()); - } - - return posts; -} - -export function splitParagraph(paragraph: string, maxLength: number): string[] { - const sentences = paragraph.match(/[^.!?]+[.!?]+|[^.!?]+$/g) || [ - paragraph, - ]; - const chunks: string[] = []; - let currentChunk = ""; - - for (const sentence of sentences) { - if ((currentChunk + " " + sentence).trim().length <= maxLength) { - if (currentChunk) { - currentChunk += " " + sentence; - } else { - currentChunk = sentence; - } - } else { - if (currentChunk) { - chunks.push(currentChunk.trim()); - } - if (sentence.length <= maxLength) { - currentChunk = sentence; - } else { - // Split long sentence into smaller pieces - const words = sentence.split(" "); - currentChunk = ""; - for (const word of words) { - if ( - (currentChunk + " " + word).trim().length <= maxLength - ) { - if (currentChunk) { - currentChunk += " " + word; - } else { - currentChunk = word; - } - } else { - if (currentChunk) { - chunks.push(currentChunk.trim()); - } - currentChunk = word; - } - } - } - } - } - - if (currentChunk) { - chunks.push(currentChunk.trim()); - } - - return chunks; -} - -export function populateMentions( - text: string, - userIds: number[], - positions: number[], - userMap: Record -) { - // Validate input arrays have same length - if (userIds.length !== positions.length) { - throw new Error( - "User IDs and positions arrays must have the same length" - ); - } - - // Create array of mention objects with position and user info - const mentions = userIds - .map((userId, index) => ({ - position: positions[index], - userId, - displayName: userMap[userId]!, - })) - .sort((a, b) => b.position - a.position); // Sort in reverse order to prevent position shifting - - // Create the resulting string by inserting mentions - let result = text; - mentions.forEach((mention) => { - const mentionText = `@${mention.displayName}`; - result = - result.slice(0, mention.position) + - mentionText + - result.slice(mention.position); - }); - - return result; -} diff --git a/packages/client-farcaster/tsconfig.json b/packages/client-farcaster/tsconfig.json deleted file mode 100644 index 8844144031b4d..0000000000000 --- a/packages/client-farcaster/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "jsx": "react", - "outDir": "dist", - "rootDir": "./src", - "strict": true - }, - "include": [ - "src", - "__tests__/test-utils.ts" - ] -} \ No newline at end of file diff --git a/packages/client-farcaster/tsup.config.ts b/packages/client-farcaster/tsup.config.ts deleted file mode 100644 index e42bf4efeae22..0000000000000 --- a/packages/client-farcaster/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/client-farcaster/vitest.config.ts b/packages/client-farcaster/vitest.config.ts deleted file mode 100644 index bd12fbf9c6c0d..0000000000000 --- a/packages/client-farcaster/vitest.config.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - environment: 'node', - include: ['**/__tests__/**/*.test.ts'], - coverage: { - reporter: ['text', 'json', 'html'], - include: ['src/**/*.ts'], - exclude: ['**/__tests__/**', '**/*.d.ts'] - } - } -}); diff --git a/packages/client-github/.npmignore b/packages/client-github/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/client-github/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/client-github/README.md b/packages/client-github/README.md deleted file mode 100644 index 17ec51f72b0cb..0000000000000 --- a/packages/client-github/README.md +++ /dev/null @@ -1,142 +0,0 @@ -# Client-GitHub for Eliza Framework - -## Overview - -The `client-github` module is a component of the Eliza framework designed to interact with GitHub repositories. It provides functionalities to clone repositories, manage branches, create pull requests, and maintain file-based knowledge for Eliza agents. - -This client leverages GitHub's REST API via the `@octokit/rest` library and includes robust error handling and configuration validation. - -## Features - -- **Repository Management**: Clone, pull, and switch branches -- **File Processing**: Generate agent memories from repository files -- **Pull Request Management**: Create and manage pull requests programmatically -- **Commit Operations**: Stage, commit, and push files with ease -- **Knowledge Base Integration**: Convert repository content into agent memories -- **Branch Management**: Flexible branch switching and creation - -## Installation - -Install the package as part of the Eliza framework: -bash -pnpm add @elizaos/client-github - -## Configuration - -The GitHub client requires the following environment variables: - -| Variable | Description | Required | -|-------------------|------------------------------------|----------| -| `GITHUB_OWNER` | Owner of the GitHub repository | Yes | -| `GITHUB_REPO` | Repository name | Yes | -| `GITHUB_BRANCH` | Target branch (default: `main`) | Yes | -| `GITHUB_PATH` | Path to focus on within the repo | Yes | -| `GITHUB_API_TOKEN`| GitHub API token for authentication| Yes | - -## Usage - -### Initialization -typescript:packages/client-github/README.md -import { GitHubClientInterface } from "@elizaos/client-github"; -// Initialize the client -const client = await GitHubClientInterface.start(runtime); - -### Creating Memories - -```typescript -// Convert repository files to agent memories -await client.createMemoriesFromFiles(); - -// Convert repository files to agent memories -await client.createMemoriesFromFiles(); -``` - -### Creating Pull Requests - -```typescript -await client.createPullRequest( - "Feature: Add new functionality", - "feature/new-feature", - [ - { - path: "src/feature.ts", - content: "// New feature implementation" - } - ], - "Implements new functionality with tests" -); - -await client.createPullRequest( -"Feature: Add new functionality", -"feature/new-feature", -[ -{ -path: "src/feature.ts", -content: "// New feature implementation" -} -], -"Implements new functionality with tests" -); -``` - -### Direct Commits - -```typescript -await client.createCommit( - "Update configuration", - [ - { - path: "config.json", - content: JSON.stringify(config, null, 2) - } - ] -); -``` - -## API Reference - -### GitHubClientInterface - -- `start(runtime: IAgentRuntime)`: Initialize the client -- `stop(runtime: IAgentRuntime)`: Clean up resources - -### GitHubClient - -- `initialize()`: Set up repository and configuration -- `createMemoriesFromFiles()`: Generate agent memories -- `createPullRequest(title: string, branch: string, files: Array<{path: string, content: string}>, description?: string)`: Create PR -- `createCommit(message: string, files: Array<{path: string, content: string}>)`: Direct commit - -## Scripts - -```bash -# Build the project -pnpm run build - -# Development with watch mode -pnpm run dev - -# Lint the codebase -pnpm run lint -``` - -## Dependencies - -- `@elizaos/core`: ^0.1.7-alpha.2 -- `@octokit/rest`: ^20.1.1 -- `@octokit/types`: ^12.6.0 -- `glob`: ^10.4.5 -- `simple-git`: ^3.27.0 - -## Development Dependencies - -- `@types/glob`: ^8.1.0 -- `tsup`: ^8.3.5 - -## Contribution - -Contributions are welcome! Please ensure all code adheres to the framework's standards and passes linting checks. - -## License - -This project is licensed under the MIT License. See the LICENSE file for details. diff --git a/packages/client-github/__tests__/environment.test.ts b/packages/client-github/__tests__/environment.test.ts deleted file mode 100644 index 1062dba6372f6..0000000000000 --- a/packages/client-github/__tests__/environment.test.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { describe, it, expect, vi } from 'vitest'; -import { validateGithubConfig, githubEnvSchema } from '../src/environment'; -import type { IAgentRuntime } from '@elizaos/core'; - -describe('GitHub Environment Configuration', () => { - const mockRuntime: IAgentRuntime = { - getSetting: vi.fn(), - } as unknown as IAgentRuntime; - - it('validates correct GitHub configuration', async () => { - const validConfig = { - GITHUB_OWNER: 'testowner', - GITHUB_REPO: 'testrepo', - GITHUB_BRANCH: 'main', - GITHUB_PATH: 'src', - GITHUB_API_TOKEN: 'ghp_test123', - }; - - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => validConfig[key as keyof typeof validConfig]); - - const config = await validateGithubConfig(mockRuntime); - expect(config).toEqual(validConfig); - }); - - it('throws error for missing configuration', async () => { - const invalidConfig = { - GITHUB_OWNER: '', - GITHUB_REPO: '', - GITHUB_BRANCH: '', - GITHUB_PATH: '', - GITHUB_API_TOKEN: '', - }; - - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => invalidConfig[key as keyof typeof invalidConfig]); - - await expect(validateGithubConfig(mockRuntime)).rejects.toThrow(); - }); - - it('throws error for partial configuration', async () => { - const partialConfig = { - GITHUB_OWNER: 'testowner', - GITHUB_REPO: 'testrepo', - // Missing other required fields - }; - - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => partialConfig[key as keyof typeof partialConfig]); - - await expect(validateGithubConfig(mockRuntime)).rejects.toThrow(); - }); -}); diff --git a/packages/client-github/__tests__/index.test.ts b/packages/client-github/__tests__/index.test.ts deleted file mode 100644 index 71ea056a9f789..0000000000000 --- a/packages/client-github/__tests__/index.test.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { GitHubClient, GitHubClientInterface } from '../src'; -import type { AgentRuntime, IAgentRuntime } from '@elizaos/core'; -import { Octokit } from '@octokit/rest'; -import simpleGit from 'simple-git'; -import type fs from 'fs'; -import type fsPromises from 'fs/promises'; - -// Mock external dependencies -vi.mock('@octokit/rest', () => ({ - Octokit: vi.fn(), -})); - -vi.mock('simple-git', () => ({ - default: vi.fn(() => ({ - clone: vi.fn(), - pull: vi.fn(), - checkout: vi.fn(), - })), -})); - -vi.mock('fs/promises', async (importOriginal) => { - const actual = await importOriginal() as typeof fsPromises; - return { - ...actual, - mkdir: vi.fn(), - lstat: vi.fn(), - readdir: vi.fn(), - readFile: vi.fn(), - writeFile: vi.fn(), - }; -}); - -vi.mock('fs', async (importOriginal) => { - const actual = await importOriginal() as typeof fs; - return { - ...actual, - existsSync: vi.fn(), - realpathSync: vi.fn(), - lstatSync: vi.fn(), - readdirSync: vi.fn(), - }; -}); - -describe('GitHubClient', () => { - let mockRuntime: AgentRuntime; - const mockConfig = { - GITHUB_OWNER: 'testowner', - GITHUB_REPO: 'testrepo', - GITHUB_BRANCH: 'main', - GITHUB_PATH: 'src', - GITHUB_API_TOKEN: 'ghp_test123', - }; - - beforeEach(() => { - vi.clearAllMocks(); - mockRuntime = { - getSetting: vi.fn((key: string) => mockConfig[key as keyof typeof mockConfig]), - } as unknown as AgentRuntime; - }); - - it('initializes with correct configuration', () => { - const client = new GitHubClient(mockRuntime); - expect(Octokit).toHaveBeenCalledWith({ auth: mockConfig.GITHUB_API_TOKEN }); - }); - - describe('GitHubClientInterface', () => { - it('has start and stop methods', () => { - expect(GitHubClientInterface.start).toBeDefined(); - expect(GitHubClientInterface.stop).toBeDefined(); - }); - - it('start method initializes client', async () => { - const runtime = { - getSetting: vi.fn((key: string) => mockConfig[key as keyof typeof mockConfig]), - } as unknown as IAgentRuntime; - - await GitHubClientInterface.start(runtime); - // Add more specific assertions based on what start should do - }); - - it('stop method cleans up resources', () => { - const runtime = {} as IAgentRuntime; - GitHubClientInterface.stop(runtime); - // Add assertions for cleanup if needed - }); - }); -}); diff --git a/packages/client-github/package.json b/packages/client-github/package.json deleted file mode 100644 index 62599d0aaab4e..0000000000000 --- a/packages/client-github/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "@elizaos/client-github", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "@octokit/rest": "20.1.1", - "@octokit/types": "12.6.0", - "glob": "10.4.5", - "simple-git": "3.27.0" - }, - "devDependencies": { - "@types/glob": "8.1.0", - "tsup": "8.3.5", - "vitest": "^3.0.0" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "test:watch": "vitest" - } -} diff --git a/packages/client-github/src/environment.ts b/packages/client-github/src/environment.ts deleted file mode 100644 index c78159c61466f..0000000000000 --- a/packages/client-github/src/environment.ts +++ /dev/null @@ -1,38 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -export const githubEnvSchema = z.object({ - GITHUB_OWNER: z.string().min(1, "GitHub owner is required"), - GITHUB_REPO: z.string().min(1, "GitHub repo is required"), - GITHUB_BRANCH: z.string().min(1, "GitHub branch is required"), - GITHUB_PATH: z.string().min(1, "GitHub path is required"), - GITHUB_API_TOKEN: z.string().min(1, "GitHub API token is required"), -}); - -export type GithubConfig = z.infer; - -export async function validateGithubConfig( - runtime: IAgentRuntime -): Promise { - try { - const config = { - GITHUB_OWNER: runtime.getSetting("GITHUB_OWNER"), - GITHUB_REPO: runtime.getSetting("GITHUB_REPO"), - GITHUB_BRANCH: runtime.getSetting("GITHUB_BRANCH"), - GITHUB_PATH: runtime.getSetting("GITHUB_PATH"), - GITHUB_API_TOKEN: runtime.getSetting("GITHUB_API_TOKEN"), - }; - - return githubEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `GitHub configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/client-github/src/index.ts b/packages/client-github/src/index.ts deleted file mode 100644 index 8afc75e3d698e..0000000000000 --- a/packages/client-github/src/index.ts +++ /dev/null @@ -1,222 +0,0 @@ -import { Octokit } from "@octokit/rest"; -import { glob } from "glob"; -import simpleGit, { type SimpleGit } from "simple-git"; -import path from "path"; -import fs from "fs/promises"; -import { existsSync } from "fs"; -import { createHash } from "crypto"; -import { - elizaLogger, - type AgentRuntime, - type Client, - type IAgentRuntime, - knowledge, - stringToUuid, -} from "@elizaos/core"; -import { validateGithubConfig } from "./environment"; - -export interface GitHubConfig { - owner: string; - repo: string; - branch?: string; - path?: string; - token: string; -} - -export class GitHubClient { - private octokit: Octokit; - private git: SimpleGit; - private config: GitHubConfig; - private runtime: AgentRuntime; - private repoPath: string; - - constructor(runtime: AgentRuntime) { - this.runtime = runtime; - this.config = { - owner: runtime.getSetting("GITHUB_OWNER") as string, - repo: runtime.getSetting("GITHUB_REPO") as string, - branch: runtime.getSetting("GITHUB_BRANCH") as string, - path: runtime.getSetting("GITHUB_PATH") as string, - token: runtime.getSetting("GITHUB_API_TOKEN") as string, - }; - this.octokit = new Octokit({ auth: this.config.token }); - this.git = simpleGit(); - this.repoPath = path.join( - process.cwd(), - ".repos", - this.config.owner, - this.config.repo - ); - } - - async initialize() { - // Create repos directory if it doesn't exist - await fs.mkdir(path.join(process.cwd(), ".repos", this.config.owner), { - recursive: true, - }); - - // Clone or pull repository - if (!existsSync(this.repoPath)) { - await this.cloneRepository(); - } else { - const git = simpleGit(this.repoPath); - await git.pull(); - } - - // Checkout specified branch if provided - if (this.config.branch) { - const git = simpleGit(this.repoPath); - await git.checkout(this.config.branch); - } - } - - private async cloneRepository() { - const repositoryUrl = `https://github.com/${this.config.owner}/${this.config.repo}.git`; - const maxRetries = 3; - let retries = 0; - - while (retries < maxRetries) { - try { - await this.git.clone(repositoryUrl, this.repoPath); - elizaLogger.log( - `Successfully cloned repository from ${repositoryUrl}` - ); - return; - } catch { - elizaLogger.error( - `Failed to clone repository from ${repositoryUrl}. Retrying...` - ); - retries++; - if (retries === maxRetries) { - throw new Error( - `Unable to clone repository from ${repositoryUrl} after ${maxRetries} retries.` - ); - } - } - } - } - - async createMemoriesFromFiles() { - console.log("Create memories"); - const searchPath = this.config.path - ? path.join(this.repoPath, this.config.path, "**/*") - : path.join(this.repoPath, "**/*"); - - const files = await glob(searchPath, { nodir: true }); - - for (const file of files) { - const relativePath = path.relative(this.repoPath, file); - const content = await fs.readFile(file, "utf-8"); - const contentHash = createHash("sha256") - .update(content) - .digest("hex"); - const knowledgeId = stringToUuid( - `github-${this.config.owner}-${this.config.repo}-${relativePath}` - ); - - const existingDocument = - await this.runtime.documentsManager.getMemoryById(knowledgeId); - - if ( - existingDocument && - existingDocument.content["hash"] == contentHash - ) { - continue; - } - - console.log( - "Processing knowledge for ", - this.runtime.character.name, - " - ", - relativePath - ); - - await knowledge.set(this.runtime, { - id: knowledgeId, - content: { - text: content, - hash: contentHash, - source: "github", - attachments: [], - metadata: { - path: relativePath, - repo: this.config.repo, - owner: this.config.owner, - }, - }, - }); - } - } - - async createPullRequest( - title: string, - branch: string, - files: Array<{ path: string; content: string }>, - description?: string - ) { - // Create new branch - const git = simpleGit(this.repoPath); - await git.checkout(["-b", branch]); - - // Write files - for (const file of files) { - const filePath = path.join(this.repoPath, file.path); - await fs.mkdir(path.dirname(filePath), { recursive: true }); - await fs.writeFile(filePath, file.content); - } - - // Commit and push changes - await git.add("."); - await git.commit(title); - await git.push("origin", branch); - - // Create PR - const pr = await this.octokit.pulls.create({ - owner: this.config.owner, - repo: this.config.repo, - title, - body: description || title, - head: branch, - base: this.config.branch || "main", - }); - - return pr.data; - } - - async createCommit( - message: string, - files: Array<{ path: string; content: string }> - ) { - const git = simpleGit(this.repoPath); - - // Write files - for (const file of files) { - const filePath = path.join(this.repoPath, file.path); - await fs.mkdir(path.dirname(filePath), { recursive: true }); - await fs.writeFile(filePath, file.content); - } - - // Commit and push changes - await git.add("."); - await git.commit(message); - await git.push(); - } -} - -export const GitHubClientInterface: Client = { - start: async (runtime: IAgentRuntime) => { - await validateGithubConfig(runtime); - elizaLogger.log("GitHubClientInterface start"); - - const client = new GitHubClient(runtime as AgentRuntime); - await client.initialize(); - await client.createMemoriesFromFiles(); - - return client; - }, - stop: async (_runtime: IAgentRuntime) => { - elizaLogger.log("GitHubClientInterface stop"); - }, -}; - -export default GitHubClientInterface; diff --git a/packages/client-github/tsconfig.json b/packages/client-github/tsconfig.json deleted file mode 100644 index 005fbac9d3634..0000000000000 --- a/packages/client-github/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/client-github/tsup.config.ts b/packages/client-github/tsup.config.ts deleted file mode 100644 index 1a96f24afa1eb..0000000000000 --- a/packages/client-github/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "safe-buffer", - // Add other modules you want to externalize - ], -}); diff --git a/packages/client-instagram/README.md b/packages/client-instagram/README.md deleted file mode 100644 index 6315f65dec36d..0000000000000 --- a/packages/client-instagram/README.md +++ /dev/null @@ -1,113 +0,0 @@ -# @elizaos/client-instagram - -An Instagram client implementation for ElizaOS, enabling Instagram integration with support for media posting, comment handling, and interaction management. - -## Features - -- Instagram API integration using instagram-private-api -- Media post creation and scheduling -- Comment and interaction handling -- Profile management -- Media processing utilities -- Rate limiting and request queuing -- Session management and caching - -## Installation - -As this is a workspace package, it's installed as part of the ElizaOS monorepo: - -```bash -pnpm install -``` - -## Configuration - -The client requires the following environment variables: - -```bash -# Instagram Credentials -INSTAGRAM_USERNAME=your_username -INSTAGRAM_PASSWORD=your_password -INSTAGRAM_APP_ID=your_app_id -INSTAGRAM_APP_SECRET=your_app_secret - -# Optional Business Account -INSTAGRAM_BUSINESS_ACCOUNT_ID=your_business_account_id - -# Posting Configuration -POST_INTERVAL_MIN=90 # Minimum interval between posts (minutes) -POST_INTERVAL_MAX=180 # Maximum interval between posts (minutes) -ENABLE_ACTION_PROCESSING=true -ACTION_INTERVAL=5 # Minutes between action processing -MAX_ACTIONS_PROCESSING=1 # Maximum actions to process per interval -``` - -## Usage - -### Basic Initialization - -```typescript -import { InstagramClientInterface } from '@elizaos/client-instagram'; - -// Initialize the client -const instagramManager = await InstagramClientInterface.start(runtime); -``` - -### Posting Content - -All posts on Instagram must include media (image, video, or carousel): - -```typescript -// Post a single image -await instagramManager.post.createPost({ - media: [{ - type: 'IMAGE', - url: 'path/to/image.jpg' - }], - caption: 'Hello Instagram!' -}); - -// Post a carousel -await instagramManager.post.createPost({ - media: [ - { type: 'IMAGE', url: 'path/to/image1.jpg' }, - { type: 'IMAGE', url: 'path/to/image2.jpg' } - ], - caption: 'Check out these photos!' -}); -``` - -### Handling Interactions - -```typescript -// Handle comments -await instagramManager.interaction.handleComment({ - mediaId: 'media-123', - comment: 'Great post!', - userId: 'user-123' -}); - -// Like media -await instagramManager.interaction.likeMedia('media-123'); -``` - -## Key Components - -1. ClientBase - - Handles authentication and session management - - Manages API rate limiting - - Provides core API functionality - - -2. PostClient - - Manages media uploads - - Handles post scheduling - - Processes media before upload - - -3. InteractionClient - - Handles comments and likes - - Manages user interactions - - Processes notifications - - diff --git a/packages/client-instagram/__tests__/environment.test.ts b/packages/client-instagram/__tests__/environment.test.ts deleted file mode 100644 index edd2428786a11..0000000000000 --- a/packages/client-instagram/__tests__/environment.test.ts +++ /dev/null @@ -1,192 +0,0 @@ -import { describe, it, expect, vi } from 'vitest'; -import { validateInstagramConfig, instagramEnvSchema } from '../src/environment'; -import type { IAgentRuntime } from '@elizaos/core'; - -describe('Instagram Environment Configuration', () => { - const mockRuntime: IAgentRuntime = { - getSetting: vi.fn(), - } as unknown as IAgentRuntime; - - it('validates correct Instagram configuration', async () => { - const validConfig = { - INSTAGRAM_DRY_RUN: false, - INSTAGRAM_USERNAME: 'test_user', - INSTAGRAM_PASSWORD: 'test_password', - INSTAGRAM_APP_ID: 'test_app_id', - INSTAGRAM_APP_SECRET: 'test_app_secret', - INSTAGRAM_POST_INTERVAL_MIN: '60', - INSTAGRAM_POST_INTERVAL_MAX: '120', - INSTAGRAM_ENABLE_ACTION_PROCESSING: false, - INSTAGRAM_ACTION_INTERVAL: '5', - INSTAGRAM_MAX_ACTIONS: '1', - }; - - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => { - if (key === 'INSTAGRAM_DRY_RUN') return 'false'; - if (key === 'INSTAGRAM_ENABLE_ACTION_PROCESSING') return 'false'; - return validConfig[key as keyof typeof validConfig]?.toString() || null; - }); - - const config = await validateInstagramConfig(mockRuntime); - expect(config).toEqual(validConfig); - }); - - it('validates configuration with optional business account', async () => { - const validConfig = { - INSTAGRAM_DRY_RUN: false, - INSTAGRAM_USERNAME: 'test_user', - INSTAGRAM_PASSWORD: 'test_password', - INSTAGRAM_APP_ID: 'test_app_id', - INSTAGRAM_APP_SECRET: 'test_app_secret', - INSTAGRAM_BUSINESS_ACCOUNT_ID: 'business_123', - INSTAGRAM_POST_INTERVAL_MIN: '60', - INSTAGRAM_POST_INTERVAL_MAX: '120', - INSTAGRAM_ENABLE_ACTION_PROCESSING: false, - INSTAGRAM_ACTION_INTERVAL: '5', - INSTAGRAM_MAX_ACTIONS: '1', - }; - - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => { - if (key === 'INSTAGRAM_DRY_RUN') return 'false'; - if (key === 'INSTAGRAM_ENABLE_ACTION_PROCESSING') return 'false'; - return validConfig[key as keyof typeof validConfig]?.toString() || null; - }); - - const config = await validateInstagramConfig(mockRuntime); - expect(config).toEqual(validConfig); - }); - - it('validates configuration with enhanced image settings', async () => { - const validConfig = { - INSTAGRAM_DRY_RUN: false, - INSTAGRAM_USERNAME: 'test_user', - INSTAGRAM_PASSWORD: 'test_password', - INSTAGRAM_APP_ID: 'test_app_id', - INSTAGRAM_APP_SECRET: 'test_app_secret', - INSTAGRAM_POST_INTERVAL_MIN: '60', - INSTAGRAM_POST_INTERVAL_MAX: '120', - INSTAGRAM_ENABLE_ACTION_PROCESSING: false, - INSTAGRAM_ACTION_INTERVAL: '5', - INSTAGRAM_MAX_ACTIONS: '1', - INSTAGRAM_IMAGE_WIDTH: '1920', - INSTAGRAM_IMAGE_HEIGHT: '1080', - INSTAGRAM_IMAGE_NEGATIVE_PROMPT: 'blurry, low quality', - INSTAGRAM_IMAGE_ITERATIONS: '30', - INSTAGRAM_IMAGE_GUIDANCE_SCALE: '8.5', - INSTAGRAM_IMAGE_SEED: '12345', - INSTAGRAM_IMAGE_CFG_SCALE: '9', - INSTAGRAM_IMAGE_SAFE_MODE: true, - INSTAGRAM_IMAGE_STYLE_PRESET: 'test-preset', - INSTAGRAM_IMAGE_HIDE_WATERMARK: true - }; - - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => { - if (key === 'INSTAGRAM_DRY_RUN') return 'false'; - if (key === 'INSTAGRAM_ENABLE_ACTION_PROCESSING') return 'false'; - if (key === 'INSTAGRAM_IMAGE_SAFE_MODE') return 'true'; - if (key === 'INSTAGRAM_IMAGE_HIDE_WATERMARK') return 'true'; - return validConfig[key as keyof typeof validConfig]?.toString() || null; - }); - - const config = await validateInstagramConfig(mockRuntime); - expect(config).toEqual(validConfig); - }); - - it('validates configuration with partial image settings', async () => { - const validConfig = { - INSTAGRAM_DRY_RUN: false, - INSTAGRAM_USERNAME: 'test_user', - INSTAGRAM_PASSWORD: 'test_password', - INSTAGRAM_APP_ID: 'test_app_id', - INSTAGRAM_APP_SECRET: 'test_app_secret', - INSTAGRAM_POST_INTERVAL_MIN: '60', - INSTAGRAM_POST_INTERVAL_MAX: '120', - INSTAGRAM_ENABLE_ACTION_PROCESSING: false, - INSTAGRAM_ACTION_INTERVAL: '5', - INSTAGRAM_MAX_ACTIONS: '1', - INSTAGRAM_IMAGE_WIDTH: '1920', - INSTAGRAM_IMAGE_HEIGHT: '1080', - INSTAGRAM_IMAGE_NEGATIVE_PROMPT: 'blurry' - }; - - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => { - if (key === 'INSTAGRAM_DRY_RUN') return 'false'; - if (key === 'INSTAGRAM_ENABLE_ACTION_PROCESSING') return 'false'; - return validConfig[key as keyof typeof validConfig]?.toString() || null; - }); - - const config = await validateInstagramConfig(mockRuntime); - expect(config).toEqual(validConfig); - }); - - it('throws error for invalid username format', async () => { - const invalidConfig = { - INSTAGRAM_DRY_RUN: false, - INSTAGRAM_USERNAME: 'invalid@username', // Invalid characters - INSTAGRAM_PASSWORD: 'test_password', - INSTAGRAM_APP_ID: 'test_app_id', - INSTAGRAM_APP_SECRET: 'test_app_secret', - }; - - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => { - if (key === 'INSTAGRAM_DRY_RUN') return 'false'; - return invalidConfig[key as keyof typeof invalidConfig]?.toString() || null; - }); - - await expect(validateInstagramConfig(mockRuntime)).rejects.toThrow(); - }); - - it('throws error for missing required fields', async () => { - const invalidConfig = { - INSTAGRAM_DRY_RUN: false, - INSTAGRAM_USERNAME: 'test_user', - // Missing password and other required fields - }; - - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => { - if (key === 'INSTAGRAM_DRY_RUN') return 'false'; - return invalidConfig[key as keyof typeof invalidConfig]?.toString() || null; - }); - - await expect(validateInstagramConfig(mockRuntime)).rejects.toThrow(); - }); - - it('throws error for invalid image dimensions', async () => { - const invalidConfig = { - INSTAGRAM_DRY_RUN: false, - INSTAGRAM_USERNAME: 'test_user', - INSTAGRAM_PASSWORD: 'test_password', - INSTAGRAM_APP_ID: 'test_app_id', - INSTAGRAM_APP_SECRET: 'test_app_secret', - INSTAGRAM_IMAGE_WIDTH: '-100', // Invalid negative width - INSTAGRAM_IMAGE_HEIGHT: '0', // Invalid zero height - }; - - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => { - if (key === 'INSTAGRAM_DRY_RUN') return 'false'; - return invalidConfig[key as keyof typeof invalidConfig]?.toString() || null; - }); - - await expect(validateInstagramConfig(mockRuntime)).rejects.toThrow(); - }); - - it('throws error for invalid numeric image settings', async () => { - const invalidConfig = { - INSTAGRAM_DRY_RUN: false, - INSTAGRAM_USERNAME: 'test_user', - INSTAGRAM_PASSWORD: 'test_password', - INSTAGRAM_APP_ID: 'test_app_id', - INSTAGRAM_APP_SECRET: 'test_app_secret', - INSTAGRAM_IMAGE_GUIDANCE_SCALE: '-1', // Invalid negative guidance scale - INSTAGRAM_IMAGE_CFG_SCALE: '0', // Invalid zero cfg scale - INSTAGRAM_IMAGE_ITERATIONS: '-5' // Invalid negative iterations - }; - - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => { - if (key === 'INSTAGRAM_DRY_RUN') return 'false'; - return invalidConfig[key as keyof typeof invalidConfig]?.toString() || null; - }); - - await expect(validateInstagramConfig(mockRuntime)).rejects.toThrow(); - }); -}); diff --git a/packages/client-instagram/__tests__/index.test.ts b/packages/client-instagram/__tests__/index.test.ts deleted file mode 100644 index 7bc6940cac939..0000000000000 --- a/packages/client-instagram/__tests__/index.test.ts +++ /dev/null @@ -1,120 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { InstagramClientInterface } from '../src'; -import { type IAgentRuntime, elizaLogger } from '@elizaos/core'; -import { InstagramInteractionService } from '../src/services/interaction'; -import { InstagramPostService } from '../src/services/post'; - -// Mock dependencies -vi.mock('@elizaos/core', async (importOriginal) => { - const actual = await importOriginal(); - return { - ...actual, - elizaLogger: { - log: vi.fn(), - error: vi.fn(), - }, - parseBooleanFromText: (value: string | undefined) => value === 'true', - }; -}); - -// Mock service instances -const mockPostService = { - start: vi.fn().mockResolvedValue(undefined), -}; - -const mockInteractionService = { - start: vi.fn().mockResolvedValue(undefined), -}; - -vi.mock('../src/lib/auth', () => ({ - initializeClient: vi.fn().mockResolvedValue({ - ig: {}, - config: { - INSTAGRAM_DRY_RUN: false, - INSTAGRAM_ENABLE_ACTION_PROCESSING: true, - }, - }), -})); - -vi.mock('../src/services/post', () => ({ - InstagramPostService: vi.fn().mockImplementation(() => mockPostService), -})); - -vi.mock('../src/services/interaction', () => ({ - InstagramInteractionService: vi.fn().mockImplementation(() => mockInteractionService), -})); - -describe('InstagramClientInterface', () => { - let mockRuntime: IAgentRuntime; - const mockConfig = { - INSTAGRAM_DRY_RUN: false, - INSTAGRAM_USERNAME: 'test_user', - INSTAGRAM_PASSWORD: 'test_password', - INSTAGRAM_APP_ID: 'test_app_id', - INSTAGRAM_APP_SECRET: 'test_app_secret', - INSTAGRAM_POST_INTERVAL_MIN: 60, - INSTAGRAM_POST_INTERVAL_MAX: 120, - INSTAGRAM_ENABLE_ACTION_PROCESSING: true, - INSTAGRAM_ACTION_INTERVAL: 5, - INSTAGRAM_MAX_ACTIONS: 1, - }; - - beforeEach(() => { - vi.clearAllMocks(); - mockRuntime = { - getSetting: vi.fn((key: string) => { - if (key === 'INSTAGRAM_DRY_RUN' || key === 'INSTAGRAM_ENABLE_ACTION_PROCESSING') { - return String(mockConfig[key as keyof typeof mockConfig]); - } - return mockConfig[key as keyof typeof mockConfig]; - }), - } as unknown as IAgentRuntime; - }); - - it('starts successfully with all services', async () => { - const result = await InstagramClientInterface.start(mockRuntime); - - expect(result).toBeDefined(); - expect(result.post).toBeDefined(); - expect(result.interaction).toBeDefined(); - expect(InstagramPostService).toHaveBeenCalled(); - expect(InstagramInteractionService).toHaveBeenCalled(); - expect(result.post.start).toHaveBeenCalled(); - expect(result.interaction.start).toHaveBeenCalled(); - expect(elizaLogger.log).toHaveBeenCalledWith('Instagram client configuration validated'); - expect(elizaLogger.log).toHaveBeenCalledWith('Instagram client initialized'); - expect(elizaLogger.log).toHaveBeenCalledWith('Instagram post service started'); - expect(elizaLogger.log).toHaveBeenCalledWith('Instagram interaction service started'); - }); - - it('starts in dry-run mode', async () => { - const dryRunConfig = { ...mockConfig, INSTAGRAM_DRY_RUN: true }; - mockRuntime.getSetting = vi.fn((key: string) => { - if (key === 'INSTAGRAM_DRY_RUN') return 'true'; - if (key === 'INSTAGRAM_ENABLE_ACTION_PROCESSING') return String(dryRunConfig.INSTAGRAM_ENABLE_ACTION_PROCESSING); - return dryRunConfig[key as keyof typeof dryRunConfig]; - }); - - const result = await InstagramClientInterface.start(mockRuntime); - - expect(result).toBeDefined(); - expect(elizaLogger.log).toHaveBeenCalledWith('Instagram client running in dry-run mode'); - expect(mockPostService.start).not.toHaveBeenCalled(); - expect(mockInteractionService.start).not.toHaveBeenCalled(); - }); - - it('handles errors during startup', async () => { - const error = new Error('Startup failed'); - vi.mocked(mockRuntime.getSetting).mockImplementation(() => { - throw error; - }); - - await expect(InstagramClientInterface.start(mockRuntime)).rejects.toThrow('Startup failed'); - expect(elizaLogger.error).toHaveBeenCalledWith('Failed to start Instagram client:', error); - }); - - it('stops gracefully', async () => { - await InstagramClientInterface.stop(mockRuntime); - expect(elizaLogger.log).toHaveBeenCalledWith('Stopping Instagram client services...'); - }); -}); diff --git a/packages/client-instagram/__tests__/services/post.test.ts b/packages/client-instagram/__tests__/services/post.test.ts deleted file mode 100644 index b804d5bee8281..0000000000000 --- a/packages/client-instagram/__tests__/services/post.test.ts +++ /dev/null @@ -1,281 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { InstagramPostService } from '../../src/services/post'; -import { type IAgentRuntime, elizaLogger, generateImage } from '@elizaos/core'; -import type { InstagramState } from '../../src/types'; -import path from 'path'; -import { promises as fs } from 'fs'; - -// Mock dependencies -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - log: vi.fn(), - error: vi.fn(), - }, - generateImage: vi.fn(), - stringToUuid: vi.fn().mockReturnValue('mock-uuid'), - getEmbeddingZeroVector: vi.fn().mockReturnValue([]), -})); - -vi.mock('fs', () => ({ - promises: { - mkdir: vi.fn(), - writeFile: vi.fn(), - }, -})); - -describe('InstagramPostService', () => { - let service: InstagramPostService; - let mockRuntime: IAgentRuntime; - let mockState: InstagramState; - - beforeEach(() => { - // Initialize mockRuntime with required properties for testing - mockRuntime = { - getSetting: vi.fn(), - agentId: 'mock-agent-id', - character: { - settings: { - imageSettings: { - width: 1920, - height: 1080, - hideWatermark: true, - stylePreset: 'test-preset', - }, - }, - system: '', - name: 'test-character', - modelEndpointOverride: null, - }, - cacheManager: { - get: vi.fn(), - set: vi.fn(), - }, - messageManager: { - createMemory: vi.fn(), - }, - // Add minimal required properties for the test - serverUrl: 'http://test.com', - token: 'test-token', - modelProvider: 'test-provider', - imageModelProvider: 'test-image-provider', - databaseAdapter: null, - verifiableInferenceAdapter: null, - fetch: vi.fn(), - getService: vi.fn(), - } as unknown as IAgentRuntime; - - mockState = { - profile: { - username: 'test_user', - }, - } as InstagramState; - - service = new InstagramPostService(mockRuntime, mockState); - }); - - describe('Post Intervals', () => { - it('uses Instagram-specific interval settings when available', async () => { - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => { - switch (key) { - case 'INSTAGRAM_POST_INTERVAL_MIN': - return '120'; - case 'INSTAGRAM_POST_INTERVAL_MAX': - return '240'; - default: - return null; - } - }); - - vi.mocked(mockRuntime.cacheManager.get).mockResolvedValue(null); - - await service.start(); - - expect(mockRuntime.getSetting).toHaveBeenCalledWith('INSTAGRAM_POST_INTERVAL_MIN'); - expect(mockRuntime.getSetting).toHaveBeenCalledWith('INSTAGRAM_POST_INTERVAL_MAX'); - }); - - it('falls back to generic interval settings when Instagram-specific ones are not set', async () => { - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => { - switch (key) { - case 'POST_INTERVAL_MIN': - return '90'; - case 'POST_INTERVAL_MAX': - return '180'; - default: - return null; - } - }); - - vi.mocked(mockRuntime.cacheManager.get).mockResolvedValue(null); - - await service.start(); - - expect(mockRuntime.getSetting).toHaveBeenCalledWith('INSTAGRAM_POST_INTERVAL_MIN'); - expect(mockRuntime.getSetting).toHaveBeenCalledWith('POST_INTERVAL_MIN'); - }); - - it('uses default intervals when no settings are available', async () => { - vi.mocked(mockRuntime.getSetting).mockReturnValue(null); - vi.mocked(mockRuntime.cacheManager.get).mockResolvedValue(null); - - await service.start(); - - expect(mockRuntime.getSetting).toHaveBeenCalledWith('INSTAGRAM_POST_INTERVAL_MIN'); - expect(mockRuntime.getSetting).toHaveBeenCalledWith('POST_INTERVAL_MIN'); - // Default values should be used (60 and 120) - expect(elizaLogger.log).toHaveBeenCalledWith(expect.stringContaining('Post interval:')); - }); - }); - - describe('Image Generation', () => { - it('uses character image settings for generation', async () => { - vi.mocked(generateImage).mockResolvedValue({ - success: true, - data: ['data:image/png;base64,test123'], - }); - - await service['getOrGenerateImage']('test content'); - - expect(generateImage).toHaveBeenCalledWith( - expect.objectContaining({ - width: 1920, - height: 1080, - hideWatermark: true, - stylePreset: 'test-preset', - }), - mockRuntime - ); - }); - - it('uses default values when image settings are not provided', async () => { - // Ensure character and settings are defined before modifying - mockRuntime.character = { - ...mockRuntime.character, - settings: { - imageSettings: {} - } - }; - - vi.mocked(generateImage).mockResolvedValue({ - success: true, - data: ['data:image/png;base64,test123'], - }); - - await service['getOrGenerateImage']('test content'); - - expect(generateImage).toHaveBeenCalledWith( - expect.objectContaining({ - width: 1024, - height: 1024, - count: 1, - numIterations: 50, - guidanceScale: 7.5, - }), - mockRuntime - ); - }); - - it('handles image generation failure', async () => { - vi.mocked(generateImage).mockResolvedValue({ - success: false, - error: 'Generation failed', - }); - - await expect(service['getOrGenerateImage']('test content')).rejects.toThrow('Failed to generate image'); - }); - - it('saves generated image to temp directory', async () => { - vi.mocked(generateImage).mockResolvedValue({ - success: true, - data: ['data:image/png;base64,test123'], - }); - - await service['getOrGenerateImage']('test content'); - - expect(fs.mkdir).toHaveBeenCalledWith(expect.stringContaining('temp'), { recursive: true }); - expect(fs.writeFile).toHaveBeenCalledWith( - expect.stringContaining('instagram-post-'), - expect.any(Buffer) - ); - }); - - it('uses enhanced image generation settings when provided', async () => { - // Ensure character and settings are defined before modifying - mockRuntime.character = { - ...mockRuntime.character, - settings: { - imageSettings: { - width: 1920, - height: 1080, - hideWatermark: true, - stylePreset: 'test-preset', - negativePrompt: 'blurry, low quality', - numIterations: 30, - guidanceScale: 8.5, - seed: 12345, - cfgScale: 9, - safeMode: true - } - } - }; - - vi.mocked(generateImage).mockResolvedValue({ - success: true, - data: ['data:image/png;base64,test123'], - }); - - await service['getOrGenerateImage']('test content'); - - expect(generateImage).toHaveBeenCalledWith( - expect.objectContaining({ - width: 1920, - height: 1080, - hideWatermark: true, - stylePreset: 'test-preset', - negativePrompt: 'blurry, low quality', - numIterations: 30, - guidanceScale: 8.5, - seed: 12345, - cfgScale: 9, - safeMode: true - }), - mockRuntime - ); - }); - - it('handles partial enhanced image settings', async () => { - // Ensure character and settings are defined before modifying - mockRuntime.character = { - ...mockRuntime.character, - settings: { - imageSettings: { - width: 1920, - height: 1080, - negativePrompt: 'blurry', - seed: 12345 - } - } - }; - - vi.mocked(generateImage).mockResolvedValue({ - success: true, - data: ['data:image/png;base64,test123'], - }); - - await service['getOrGenerateImage']('test content'); - - expect(generateImage).toHaveBeenCalledWith( - expect.objectContaining({ - width: 1920, - height: 1080, - negativePrompt: 'blurry', - seed: 12345, - count: 1, - numIterations: 50, - guidanceScale: 7.5 - }), - mockRuntime - ); - }); - }); -}); diff --git a/packages/client-instagram/eslint.config.mjs b/packages/client-instagram/eslint.config.mjs deleted file mode 100644 index 92fe5bbebefad..0000000000000 --- a/packages/client-instagram/eslint.config.mjs +++ /dev/null @@ -1,3 +0,0 @@ -import eslintGlobalConfig from "../../eslint.config.mjs"; - -export default [...eslintGlobalConfig]; diff --git a/packages/client-instagram/package.json b/packages/client-instagram/package.json deleted file mode 100644 index 0ea7faa3253cf..0000000000000 --- a/packages/client-instagram/package.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "name": "@elizaos/client-instagram", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "instagram-private-api": "^1.45.3", - "sharp": "^0.33.2", - "glob": "11.0.0" - }, - "devDependencies": { - "tsup": "8.3.5", - "@types/sharp": "^0.32.0", - "vitest": "^3.0.0" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "test:watch": "vitest" - } -} diff --git a/packages/client-instagram/src/environment.ts b/packages/client-instagram/src/environment.ts deleted file mode 100644 index fe6534a7dd1c0..0000000000000 --- a/packages/client-instagram/src/environment.ts +++ /dev/null @@ -1,124 +0,0 @@ -import { - type IAgentRuntime, - parseBooleanFromText, -} from "@elizaos/core"; -import { z } from "zod"; - -export const DEFAULT_POST_INTERVAL_MIN = 60; -export const DEFAULT_POST_INTERVAL_MAX = 120; -export const DEFAULT_ACTION_INTERVAL = 5; -export const DEFAULT_MAX_ACTIONS = 1; - -// Define validation schemas for Instagram usernames and other fields -const instagramUsernameSchema = z - .string() - .min(1, "An Instagram Username must be at least 1 character long") - .max(30, "An Instagram Username cannot exceed 30 characters") - .refine((username) => { - // Instagram usernames can contain letters, numbers, periods, and underscores - return /^[A-Za-z0-9._]+$/.test(username); - }, "An Instagram Username can only contain letters, numbers, periods, and underscores"); - -/** - * Environment configuration schema for Instagram client - */ -export const instagramEnvSchema = z.object({ - INSTAGRAM_DRY_RUN: z.boolean(), - INSTAGRAM_USERNAME: instagramUsernameSchema, - INSTAGRAM_PASSWORD: z.string().min(1, "Instagram password is required"), - - // Instagram API credentials - INSTAGRAM_APP_ID: z.string().min(1, "Instagram App ID is required"), - INSTAGRAM_APP_SECRET: z.string().min(1, "Instagram App Secret is required"), - - // Optional Business Account ID for additional features - INSTAGRAM_BUSINESS_ACCOUNT_ID: z.string().optional(), - - // Posting configuration - INSTAGRAM_POST_INTERVAL_MIN: z.number().int().default(DEFAULT_POST_INTERVAL_MIN), - INSTAGRAM_POST_INTERVAL_MAX: z.number().int().default(DEFAULT_POST_INTERVAL_MAX), - - // Action processing configuration - INSTAGRAM_ENABLE_ACTION_PROCESSING: z.boolean().default(false), - INSTAGRAM_ACTION_INTERVAL: z.number().int().default(DEFAULT_ACTION_INTERVAL), - INSTAGRAM_MAX_ACTIONS: z.number().int().default(DEFAULT_MAX_ACTIONS), -}); - -export type InstagramConfig = z.infer; - -/** - * Validates and constructs an InstagramConfig object using zod, - * taking values from the IAgentRuntime or process.env as needed. - */ -export async function validateInstagramConfig( - runtime: IAgentRuntime -): Promise { - try { - const instagramConfig = { - INSTAGRAM_DRY_RUN: parseBooleanFromText( - runtime.getSetting("INSTAGRAM_DRY_RUN") || - process.env.INSTAGRAM_DRY_RUN - ) ?? false, - - INSTAGRAM_USERNAME: runtime.getSetting("INSTAGRAM_USERNAME") || - process.env.INSTAGRAM_USERNAME, - - INSTAGRAM_PASSWORD: runtime.getSetting("INSTAGRAM_PASSWORD") || - process.env.INSTAGRAM_PASSWORD, - - INSTAGRAM_APP_ID: runtime.getSetting("INSTAGRAM_APP_ID") || - process.env.INSTAGRAM_APP_ID, - - INSTAGRAM_APP_SECRET: runtime.getSetting("INSTAGRAM_APP_SECRET") || - process.env.INSTAGRAM_APP_SECRET, - - INSTAGRAM_BUSINESS_ACCOUNT_ID: runtime.getSetting("INSTAGRAM_BUSINESS_ACCOUNT_ID") || - process.env.INSTAGRAM_BUSINESS_ACCOUNT_ID, - - INSTAGRAM_POST_INTERVAL_MIN: Number.parseInt( - runtime.getSetting("INSTAGRAM_POST_INTERVAL_MIN") || - process.env.INSTAGRAM_POST_INTERVAL_MIN || - DEFAULT_POST_INTERVAL_MIN.toString(), - 10 - ), - - INSTAGRAM_POST_INTERVAL_MAX: Number.parseInt( - runtime.getSetting("INSTAGRAM_POST_INTERVAL_MAX") || - process.env.INSTAGRAM_POST_INTERVAL_MAX || - DEFAULT_POST_INTERVAL_MAX.toString(), - 10 - ), - - INSTAGRAM_ENABLE_ACTION_PROCESSING: parseBooleanFromText( - runtime.getSetting("INSTAGRAM_ENABLE_ACTION_PROCESSING") || - process.env.INSTAGRAM_ENABLE_ACTION_PROCESSING - ) ?? false, - - INSTAGRAM_ACTION_INTERVAL: Number.parseInt( - runtime.getSetting("INSTAGRAM_ACTION_INTERVAL") || - process.env.INSTAGRAM_ACTION_INTERVAL || - DEFAULT_ACTION_INTERVAL.toString(), - 10 - ), - - INSTAGRAM_MAX_ACTIONS: Number.parseInt( - runtime.getSetting("MAX_ACTIONS_PROCESSING") || - process.env.MAX_ACTIONS_PROCESSING || - DEFAULT_MAX_ACTIONS.toString(), - 10 - ), - }; - - return instagramEnvSchema.parse(instagramConfig); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `Instagram configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} \ No newline at end of file diff --git a/packages/client-instagram/src/index.ts b/packages/client-instagram/src/index.ts deleted file mode 100644 index 1391ed817681d..0000000000000 --- a/packages/client-instagram/src/index.ts +++ /dev/null @@ -1,57 +0,0 @@ -// src/index.ts -import { type Client, type IAgentRuntime, elizaLogger } from "@elizaos/core"; -import { validateInstagramConfig } from "./environment"; -import { initializeClient } from "./lib/auth"; -import { InstagramInteractionService } from "./services/interaction"; -import { InstagramPostService } from "./services/post"; - -export const InstagramClientInterface: Client = { - async start(runtime: IAgentRuntime) { - try { - // Validate configuration - const config = await validateInstagramConfig(runtime); - elizaLogger.log("Instagram client configuration validated"); - - // Initialize client and get initial state - const state = await initializeClient(runtime, config); - elizaLogger.log("Instagram client initialized"); - - // Create services - const postService = new InstagramPostService(runtime, state); - const interactionService = new InstagramInteractionService( - runtime, - state - ); - - // Start services - if (!config.INSTAGRAM_DRY_RUN) { - await postService.start(); - elizaLogger.log("Instagram post service started"); - - if (config.INSTAGRAM_ENABLE_ACTION_PROCESSING) { - await interactionService.start(); - elizaLogger.log("Instagram interaction service started"); - } - } else { - elizaLogger.log("Instagram client running in dry-run mode"); - } - - // Return manager instance - return { - post: postService, - interaction: interactionService, - state, - }; - } catch (error) { - elizaLogger.error("Failed to start Instagram client:", error); - throw error; - } - }, - // eslint-disable-next-line - async stop(runtime: IAgentRuntime) { - elizaLogger.log("Stopping Instagram client services..."); - // Cleanup will be handled by the services themselves - }, -}; - -export default InstagramClientInterface; diff --git a/packages/client-instagram/src/lib/actions.ts b/packages/client-instagram/src/lib/actions.ts deleted file mode 100644 index 16e54887bd888..0000000000000 --- a/packages/client-instagram/src/lib/actions.ts +++ /dev/null @@ -1,168 +0,0 @@ -// src/lib/actions.ts -import { elizaLogger } from "@elizaos/core"; -import type { Comment } from "../types"; -import { getIgClient } from "./state"; - -/** - * Fetches comments for a specific media post - */ -export async function fetchComments( - mediaId: string, - count = 20 -): Promise { - const ig = getIgClient(); - - try { - const feed = ig.feed.mediaComments(mediaId); - const comments = await feed.items(); - - return comments.slice(0, count).map(comment => ({ - id: comment.pk.toString(), - text: comment.text, - timestamp: new Date(comment.created_at * 1000).toISOString(), - username: comment.user.username, - replies: [] // Instagram API doesn't provide replies in the same call - })); - } catch (error) { - elizaLogger.error('Error fetching comments:', error); - throw error; - } -} - -/** - * Posts a comment on a media post - */ -export async function postComment( - mediaId: string, - text: string -): Promise { - const ig = getIgClient(); - - try { - const result = await ig.media.comment({ - mediaId, - text: text.slice(0, 2200) // Instagram comment length limit - }); - - return { - id: result.pk.toString(), - text: result.text, - timestamp: new Date(result.created_at * 1000).toISOString(), - username: result.user.username, - replies: [] - }; - } catch (error) { - elizaLogger.error('Error posting comment:', error); - throw error; - } -} - -/** - * Likes a media post - */ -export async function likeMedia(mediaId: string): Promise { - const ig = getIgClient(); - - try { - await ig.media.like({ - mediaId, - moduleInfo: { - module_name: 'profile', - user_id: ig.state.cookieUserId, - username: ig.state.cookieUsername - } - }); - elizaLogger.log(`Liked media: ${mediaId}`); - } catch (error) { - elizaLogger.error('Error liking media:', error); - throw error; - } -} - -/** - * Unlikes a media post - */ -export async function unlikeMedia(mediaId: string): Promise { - const ig = getIgClient(); - - try { - await ig.media.unlike({ - mediaId, - moduleInfo: { - module_name: 'profile', - user_id: ig.state.cookieUserId, - username: ig.state.cookieUsername - } - }); - elizaLogger.log(`Unliked media: ${mediaId}`); - } catch (error) { - elizaLogger.error('Error unliking media:', error); - throw error; - } -} - -/** - * Replies to a comment - */ -export async function replyToComment( - mediaId: string, - commentId: string, - text: string -): Promise { - const ig = getIgClient(); - - try { - const result = await ig.media.comment({ - mediaId, - text: text.slice(0, 2200), // Instagram comment length limit - replyToCommentId: commentId - }); - - return { - id: result.pk.toString(), - text: result.text, - timestamp: new Date(result.created_at * 1000).toISOString(), - username: result.user.username, - replies: [] - }; - } catch (error) { - elizaLogger.error('Error replying to comment:', error); - throw error; - } -} - -/** - * Deletes a comment - */ -export async function deleteComment( - mediaId: string, - commentId: string -): Promise { - const ig = getIgClient(); - - try { - await ig.media.deleteComment({ - mediaId, - commentId - }); - elizaLogger.log(`Deleted comment: ${commentId} from media: ${mediaId}`); - } catch (error) { - elizaLogger.error('Error deleting comment:', error); - throw error; - } -} - -/** - * Checks if current user has liked a media post - */ -export async function hasLikedMedia(mediaId: string): Promise { - const ig = getIgClient(); - - try { - const info = await ig.media.info(mediaId); - return info.items[0].has_liked ?? false; - } catch (error) { - elizaLogger.error('Error checking if media is liked:', error); - throw error; - } -} \ No newline at end of file diff --git a/packages/client-instagram/src/lib/auth.ts b/packages/client-instagram/src/lib/auth.ts deleted file mode 100644 index 1f77564edbe32..0000000000000 --- a/packages/client-instagram/src/lib/auth.ts +++ /dev/null @@ -1,103 +0,0 @@ -// src/lib/auth.ts -import { type IAgentRuntime, elizaLogger } from "@elizaos/core"; -import { IgLoginTwoFactorRequiredError } from "instagram-private-api"; -import type { InstagramConfig } from "../environment"; -import type { InstagramState } from "../types"; -import { fetchProfile } from "./profile"; -import { createInitialState, getIgClient } from "./state"; - -/** - * Authenticates with Instagram - */ -async function authenticate( - runtime: IAgentRuntime, - config: InstagramConfig -): Promise { - const ig = getIgClient(); - const state = createInitialState(); - - try { - // Generate device ID - ig.state.generateDevice(config.INSTAGRAM_USERNAME); - - // Attempt to load cached session - const cachedSession = - await runtime.cacheManager.get("instagram/session"); - if (cachedSession) { - try { - await ig.state.deserialize(cachedSession); - const profile = await fetchProfile(runtime, config); - return { - ...state, - isInitialized: true, - profile, - }; - } catch { - elizaLogger.warn( - `Cached session invalid, proceeding with fresh login` - ); - } - } - - // Proceed with fresh login - try { - await ig.account.login( - config.INSTAGRAM_USERNAME, - config.INSTAGRAM_PASSWORD - ); - - // Cache the session - const serialized = await ig.state.serialize(); - await runtime.cacheManager.set("instagram/session", serialized); - - const profile = await fetchProfile(runtime, config); - - return { - ...state, - isInitialized: true, - profile, - }; - } catch (error) { - if (error instanceof IgLoginTwoFactorRequiredError) { - // Handle 2FA if needed - would need to implement 2FA code generation - throw new Error("2FA authentication not yet implemented"); - } - throw error; - } - } catch (error) { - elizaLogger.error("Authentication failed:", error); - throw error; - } -} - -/** - * Sets up webhooks for real-time updates if needed - */ -async function setupWebhooks() { - // Implement webhook setup - // This is a placeholder for future implementation -} - -/** - * Initializes the Instagram client - */ -export async function initializeClient( - runtime: IAgentRuntime, - config: InstagramConfig -): Promise { - try { - // Authenticate and get initial state - const state = await authenticate(runtime, config); - - // Set up webhook handlers if needed - await setupWebhooks(); - - return state; - } catch (error) { - elizaLogger.error("Failed to initialize Instagram client:", error); - throw error; - } -} - -// Export other authentication related functions if needed -export { authenticate, setupWebhooks }; diff --git a/packages/client-instagram/src/lib/media.ts b/packages/client-instagram/src/lib/media.ts deleted file mode 100644 index 7a65c0c58c978..0000000000000 --- a/packages/client-instagram/src/lib/media.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { elizaLogger, type IAgentRuntime } from "@elizaos/core"; -import type { InstagramConfig } from "../environment"; -import type { MediaItem } from "../types"; -import { getIgClient } from "./state"; - -export async function fetchRecentMedia( - runtime: IAgentRuntime, - config: InstagramConfig, - count = 10 - ): Promise { - const ig = getIgClient(); - - try { - const feed = ig.feed.user(ig.state.cookieUserId); - const items = await feed.items(); - - return items.slice(0, count).map((item: any) => ({ - id: item.id, - mediaType: item.media_type as MediaItem['mediaType'], - mediaUrl: item.media_url, - thumbnailUrl: item.thumbnail_url || null, - permalink: item.permalink, - caption: item.caption?.text || null, - timestamp: item.timestamp, - children: item.children?.map((child: any) => ({ - id: child.id, - mediaType: child.media_type as MediaItem['mediaType'], - mediaUrl: child.media_url, - thumbnailUrl: child.thumbnail_url || null, - permalink: child.permalink, - timestamp: child.timestamp - })) || null - })); - } catch (error) { - elizaLogger.error('Error fetching recent media:', error); - throw error; - } - } \ No newline at end of file diff --git a/packages/client-instagram/src/lib/profile.ts b/packages/client-instagram/src/lib/profile.ts deleted file mode 100644 index b414f6bd6fffa..0000000000000 --- a/packages/client-instagram/src/lib/profile.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { elizaLogger, type IAgentRuntime } from "@elizaos/core"; -import type { InstagramConfig } from "../environment"; -import type { InstagramProfile } from "../types"; -import { getIgClient } from "./state"; - -export async function fetchProfile( - runtime: IAgentRuntime, - config: InstagramConfig - ): Promise { - const ig = getIgClient(); - - try { - const userInfo = await ig.user.info(ig.state.cookieUserId); - - const profile: InstagramProfile = { - id: userInfo.pk.toString(), - username: userInfo.username, - name: userInfo.full_name, - biography: userInfo.biography, - mediaCount: userInfo.media_count, - followerCount: userInfo.follower_count, - followingCount: userInfo.following_count - }; - - // Cache profile info - await runtime.cacheManager.set( - `instagram/profile/${config.INSTAGRAM_USERNAME}`, - profile - ); - - return profile; - } catch (error) { - elizaLogger.error('Error fetching profile:', error); - throw error; - } - } diff --git a/packages/client-instagram/src/lib/state.ts b/packages/client-instagram/src/lib/state.ts deleted file mode 100644 index 66b12add6a69a..0000000000000 --- a/packages/client-instagram/src/lib/state.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { IgApiClient } from 'instagram-private-api'; -import type { InstagramState } from '../types'; - -// Create a singleton for the Instagram API client -let igClient: IgApiClient | null = null; - -export const getIgClient = () => { - if (!igClient) { - igClient = new IgApiClient(); - } - return igClient; -}; - -// Create initial state -export const createInitialState = (): InstagramState => ({ - accessToken: null, - longLivedToken: null, - profile: null, - isInitialized: false, - lastCheckedMediaId: null, -}); \ No newline at end of file diff --git a/packages/client-instagram/src/services/interaction.ts b/packages/client-instagram/src/services/interaction.ts deleted file mode 100644 index 736c58942f3c5..0000000000000 --- a/packages/client-instagram/src/services/interaction.ts +++ /dev/null @@ -1,322 +0,0 @@ -import { - composeContext, - elizaLogger, - generateText, - getEmbeddingZeroVector, - type IAgentRuntime, - ModelClass, - stringToUuid, - type UUID -} from "@elizaos/core"; -import { fetchComments, likeMedia, postComment } from "../lib/actions"; -import { getIgClient } from "../lib/state"; -import type { InstagramState } from "../types"; - - // Templates - const instagramCommentTemplate = ` - # Areas of Expertise - {{knowledge}} - - # About {{agentName}} (@{{instagramUsername}}): - {{bio}} - {{lore}} - {{topics}} - - {{providers}} - - {{characterPostExamples}} - - {{postDirections}} - - # Task: Generate a response to the following Instagram comment in the voice and style of {{agentName}}. - Original Comment (@{{commentUsername}}): {{commentText}} - - Your response should be friendly, engaging, and natural. Keep it brief (1-2 sentences). - Do not use hashtags in comment responses. Be conversational and authentic.`; - - const shouldInteractTemplate = ` - # About {{agentName}} (@{{instagramUsername}}): - {{bio}} - {{lore}} - {{topics}} - - {{postDirections}} - - # Task: Determine if {{agentName}} should interact with this content: - Interaction Type: {{interactionType}} - User: @{{username}} - Content: {{content}} - - Consider: - 1. Is this user's content relevant to {{agentName}}'s interests? - 2. Would interaction be authentic and meaningful? - 3. Is there potential for valuable engagement? - - Respond with one of: - [INTERACT] - Content is highly relevant and engagement would be valuable - [SKIP] - Content is not relevant enough or engagement wouldn't be authentic - - Choose [INTERACT] only if very confident about relevance and value.`; - - export class InstagramInteractionService { - private runtime: IAgentRuntime; - private state: InstagramState; - private isProcessing = false; - private stopProcessing = false; - - constructor(runtime: IAgentRuntime, state: InstagramState) { - this.runtime = runtime; - this.state = state; - } - - async start() { - const handleInteractionsLoop = () => { - this.handleInteractions(); - if (!this.stopProcessing) { - setTimeout( - handleInteractionsLoop, - Number.parseInt(this.runtime.getSetting('ACTION_INTERVAL') || '300', 10) * 1000 - ); - } - }; - - handleInteractionsLoop(); - } - - async stop() { - this.stopProcessing = true; - } - - private async generateResponse( - text: string, - username: string, - action: string - ) { - const state = await this.runtime.composeState( - { - userId: this.runtime.agentId, - roomId: stringToUuid(`instagram-temp-${Date.now()}-${this.runtime.agentId}`), - agentId: this.runtime.agentId, - content: { - text, - action, - }, - }, - { - instagramUsername: this.state.profile?.username, - commentUsername: username, - commentText: text, - } - ); - - const context = composeContext({ - state, - template: instagramCommentTemplate, - }); - - const response = await generateText({ - runtime: this.runtime, - context, - modelClass: ModelClass.SMALL, - }); - - return this.cleanResponse(response); - } - - private cleanResponse(response: string): string { - return response - .replace(/^\s*{?\s*"text":\s*"|"\s*}?\s*$/g, "") - .replace(/^['"](.*)['"]$/g, "$1") - .replace(/\\"/g, '"') - .trim(); - } - - private async handleInteractions() { - if (this.isProcessing) { - elizaLogger.log("Already processing interactions, skipping"); - return; - } - - try { - this.isProcessing = true; - elizaLogger.log("Checking Instagram interactions"); - - const ig = getIgClient(); - const activity = await ig.feed.news().items(); - - for (const item of activity) { - const activityId = `instagram-activity-${item.pk}`; - if (await this.runtime.cacheManager.get(activityId)) continue; - - switch (item.type) { - case 2: // Comment on your post - await this.handleComment(item); - break; - case 3: // Like on your post - await this.handleLike(item); - break; - case 12: // Mention in comment - await this.handleMention(item); - break; - } - - await this.runtime.cacheManager.set(activityId, true); - } - } catch (error) { - elizaLogger.error("Error handling Instagram interactions:", error); - } finally { - this.isProcessing = false; - } - } - - private async handleComment(item: any) { - try { - const comments = await fetchComments(item.media_id); - const comment = comments.find(c => c.id === item.pk.toString()); - if (!comment) return; - - const roomId = stringToUuid(`instagram-comment-${item.media_id}-${this.runtime.agentId}`); - const commentId = stringToUuid(`instagram-comment-${comment.id}-${this.runtime.agentId}`); - const userId = stringToUuid(`instagram-user-${item.user_id}-${this.runtime.agentId}`); - - const cleanedResponse = await this.generateResponse( - comment.text, - comment.username, - "COMMENT" - ); - - if (!cleanedResponse) { - elizaLogger.error("Failed to generate valid comment response"); - return; - } - - await this.ensureEntities(roomId, userId, comment.username); - await this.createInteractionMemories( - commentId, - userId, - roomId, - comment, - cleanedResponse, - item.media_id - ); - - } catch (error) { - elizaLogger.error("Error handling comment:", error); - } - } - - private async handleLike(item: any) { - try { - const state = await this.runtime.composeState( - { - userId: this.runtime.agentId, - roomId: stringToUuid(`instagram-like-${item.media_id}-${this.runtime.agentId}`), - agentId: this.runtime.agentId, - content: { text: "", action: "DECIDE_INTERACTION" }, - }, - { - instagramUsername: this.state.profile?.username, - interactionType: "like", - username: item.user?.username, - content: item.text || "", - } - ); - - const context = composeContext({ state, template: shouldInteractTemplate }); - const decision = await generateText({ - runtime: this.runtime, - context, - modelClass: ModelClass.SMALL, - }); - - if (decision.includes("[INTERACT]")) { - const userFeed = await getIgClient().feed.user(item.user_id).items(); - if (userFeed.length > 0) { - await likeMedia(userFeed[0].id); - elizaLogger.log(`Liked post from user: ${item.user?.username}`); - } - } - } catch (error) { - elizaLogger.error("Error handling like:", error); - } - } - - private async handleMention(item: any) { - try { - const roomId = stringToUuid(`instagram-mention-${item.media_id}-${this.runtime.agentId}`); - const mentionId = stringToUuid(`instagram-mention-${item.pk}-${this.runtime.agentId}`); - const userId = stringToUuid(`instagram-user-${item.user.pk}-${this.runtime.agentId}`); - - const cleanedResponse = await this.generateResponse( - item.text, - item.user.username, - "MENTION" - ); - - if (!cleanedResponse) { - elizaLogger.error("Failed to generate valid mention response"); - return; - } - - await this.ensureEntities(roomId, userId, item.user.username); - await this.createInteractionMemories( - mentionId, - userId, - roomId, - item, - cleanedResponse, - item.media_id - ); - - } catch (error) { - elizaLogger.error("Error handling mention:", error); - } - } - - private async ensureEntities(roomId: UUID, userId: UUID, username: string) { - await this.runtime.ensureRoomExists(roomId); - await this.runtime.ensureUserExists(userId, username, username, "instagram"); - await this.runtime.ensureParticipantInRoom(this.runtime.agentId, roomId); - } - - private async createInteractionMemories( - originalId: UUID, - userId: UUID, - roomId: UUID, - originalItem: any, - response: string, - mediaId: string - ) { - // Create memory of original interaction - await this.runtime.messageManager.createMemory({ - id: originalId, - userId, - agentId: this.runtime.agentId, - content: { - text: originalItem.text, - source: "instagram", - }, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: new Date(originalItem.timestamp || originalItem.created_at * 1000).getTime(), - }); - - // Post response - const postedComment = await postComment(mediaId, response); - - // Create memory of our response - await this.runtime.messageManager.createMemory({ - id: stringToUuid(`instagram-reply-${postedComment.id}-${this.runtime.agentId}`), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - content: { - text: response, - source: "instagram", - inReplyTo: originalId - }, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: Date.now(), - }); - } - } \ No newline at end of file diff --git a/packages/client-instagram/src/services/post.ts b/packages/client-instagram/src/services/post.ts deleted file mode 100644 index 0a434a7c68ded..0000000000000 --- a/packages/client-instagram/src/services/post.ts +++ /dev/null @@ -1,419 +0,0 @@ -// src/services/post.ts -import { - type IAgentRuntime, - ModelClass, - composeContext, - elizaLogger, - generateImage, - generateText, - getEmbeddingZeroVector, - stringToUuid, -} from "@elizaos/core"; -import { promises as fs } from "fs"; -import path from "path"; -import sharp from "sharp"; -import { getIgClient } from "../lib/state"; -import type { InstagramState } from "../types"; - -// Template for generating Instagram posts -const instagramPostTemplate = ` -# Areas of Expertise -{{knowledge}} - -# About {{agentName}} (@{{instagramUsername}}): -{{bio}} -{{lore}} -{{topics}} - -{{providers}} - -{{characterPostExamples}} - -{{postDirections}} - -# Task: Generate a post in the voice and style and perspective of {{agentName}}. -Write a post that is {{adjective}} about {{topic}} (without mentioning {{topic}} directly), from the perspective of {{agentName}}. -Your response should be 1-3 sentences (choose the length at random). -Your response should not contain any questions. Brief, concise statements only. -Add up to 3 relevant hashtags at the end.`; - -interface PostOptions { - media: Array<{ - type: "IMAGE" | "VIDEO" | "CAROUSEL"; - url: string; - }>; - caption?: string; -} - -export class InstagramPostService { - private runtime: IAgentRuntime; - private state: InstagramState; - private isProcessing = false; - private lastPostTime = 0; - private stopProcessing = false; - - constructor(runtime: IAgentRuntime, state: InstagramState) { - this.runtime = runtime; - this.state = state; - } - - async start() { - const generatePostLoop = async () => { - const lastPost = await this.runtime.cacheManager.get<{ - timestamp: number; - }>("instagram/lastPost"); - - const lastPostTimestamp = lastPost?.timestamp ?? 0; - const minMinutes = Number.parseInt( - this.runtime.getSetting("INSTAGRAM_POST_INTERVAL_MIN") || this.runtime.getSetting("POST_INTERVAL_MIN") || "90", - 10 - ); - const maxMinutes = Number.parseInt( - this.runtime.getSetting("INSTAGRAM_POST_INTERVAL_MAX") || this.runtime.getSetting("POST_INTERVAL_MAX") || "180", - 10 - ); - const randomMinutes = - Math.floor(Math.random() * (maxMinutes - minMinutes + 1)) + - minMinutes; - const delay = randomMinutes * 60 * 1000; - - if (Date.now() > lastPostTimestamp + delay) { - await this.generateNewPost(); - } - - if (!this.stopProcessing) { - setTimeout(generatePostLoop, delay); - } - - elizaLogger.log( - `Next Instagram post scheduled in ${randomMinutes} minutes` - ); - }; - - // Start the loop - generatePostLoop(); - } - - async stop() { - this.stopProcessing = true; - } - - private async generateNewPost() { - try { - elizaLogger.log("Generating new Instagram post"); - - const roomId = stringToUuid( - "instagram_generate_room-" + this.state.profile?.username - ); - - await this.runtime.ensureUserExists( - this.runtime.agentId, - this.state.profile?.username || "", - this.runtime.character.name, - "instagram" - ); - - const topics = this.runtime.character.topics.join(", "); - - const state = await this.runtime.composeState( - { - userId: this.runtime.agentId, - roomId: roomId, - agentId: this.runtime.agentId, - content: { - text: topics || "", - action: "POST", - }, - }, - { - instagramUsername: this.state.profile?.username, - } - ); - - const context = composeContext({ - state, - // TODO: Add back in when we have a template for Instagram on character - //template: this.runtime.character.templates?.instagramPostTemplate || instagramPostTemplate, - template: instagramPostTemplate, - }); - - elizaLogger.debug("generate post prompt:\n" + context); - - const content = await generateText({ - runtime: this.runtime, - context, - modelClass: ModelClass.SMALL, - }); - - // Clean the generated content - let cleanedContent = ""; - - // Try parsing as JSON first - try { - const parsedResponse = JSON.parse(content); - if (parsedResponse.text) { - cleanedContent = parsedResponse.text; - } else if (typeof parsedResponse === "string") { - cleanedContent = parsedResponse; - } - } catch { - // If not JSON, clean the raw content - cleanedContent = content - .replace(/^\s*{?\s*"text":\s*"|"\s*}?\s*$/g, "") // Remove JSON-like wrapper - .replace(/^['"](.*)['"]$/g, "$1") // Remove quotes - .replace(/\\"/g, '"') // Unescape quotes - .replace(/\\n/g, "\n\n") // Unescape newlines - .trim(); - } - - if (!cleanedContent) { - elizaLogger.error( - "Failed to extract valid content from response:", - { - rawResponse: content, - attempted: "JSON parsing", - } - ); - return; - } - - // For Instagram, we need to generate or get an image - const mediaUrl = await this.getOrGenerateImage(cleanedContent); - - await this.createPost({ - media: [ - { - type: "IMAGE", - url: mediaUrl, - }, - ], - caption: cleanedContent, - }); - - // Create memory of the post - await this.runtime.messageManager.createMemory({ - id: stringToUuid(`instagram-post-${Date.now()}`), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - content: { - text: cleanedContent, - source: "instagram", - }, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: Date.now(), - }); - } catch (error) { - elizaLogger.error("Error generating Instagram post:", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - phase: "generateNewPost", - }); - } - } - - // Placeholder - implement actual image generation/selection - private async getOrGenerateImage(content: string): Promise { - try { - elizaLogger.log("Generating image for Instagram post"); - - let imageSettings = this.runtime.character.settings.imageSettings || {}; - - const result = await generateImage( - { - prompt: content, - width: imageSettings?.width || 1024, - height: imageSettings?.height || 1024, - count: imageSettings?.count || 1, -negativePrompt: imageSettings?.negativePrompt || null, - numIterations: imageSettings?.numIterations || 50, - guidanceScale: imageSettings?.guidanceScale || 7.5, -seed: imageSettings?.seed || null, - modelId: imageSettings?.modelId || null, - jobId: imageSettings?.jobId || null, - stylePreset: imageSettings?.stylePreset || "", - hideWatermark: imageSettings?.hideWatermark ?? true, - safeMode: imageSettings?.safeMode ?? true, - cfgScale: imageSettings?.cfgScale || null, - }, - this.runtime - ); - - if (!result.success || !result.data || result.data.length === 0) { - throw new Error( - "Failed to generate image: " + - (result.error || "No image data returned") - ); - } - - // Save the base64 image to a temporary file - const imageData = result.data[0].replace( - /^data:image\/\w+;base64,/, - "" - ); - const tempDir = path.resolve(process.cwd(), "temp"); - await fs.mkdir(tempDir, { recursive: true }); - const tempFile = path.join( - tempDir, - `instagram-post-${Date.now()}.png` - ); - await fs.writeFile(tempFile, Buffer.from(imageData, "base64")); - - return tempFile; - } catch { - // If not JSON, clean the raw content - cleanedContent = content - .replace(/^\s*{?\s*"text":\s*"|"\s*}?\s*$/g, "") // Remove JSON-like wrapper - .replace(/^['"](.*)['"]$/g, "$1") // Remove quotes - .replace(/\\"/g, '"') // Unescape quotes - .replace(/\\n/g, "\n\n") // Unescape newlines - .trim(); - } - - if (!cleanedContent) { - elizaLogger.error("Failed to extract valid content from response:", { - rawResponse: content, - attempted: "JSON parsing", - }); - return; - } - - // For Instagram, we need to generate or get an image - const mediaUrl = await this.getOrGenerateImage(cleanedContent); - - await this.createPost({ - media: [{ - type: 'IMAGE', - url: mediaUrl - }], - caption: cleanedContent - }); - - // Create memory of the post - await this.runtime.messageManager.createMemory({ - id: stringToUuid(`instagram-post-${Date.now()}`), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - content: { - text: cleanedContent, - source: "instagram", - }, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: Date.now(), - }); - - } catch (error) { - elizaLogger.error("Error generating Instagram post:", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - phase: 'generateNewPost' - }); - } - - async createPost(options: PostOptions) { - const ig = getIgClient(); - - try { - elizaLogger.log("Creating Instagram post", { - mediaCount: options.media.length, - hasCaption: !!options.caption, - }); - - // Process media - const processedMedia = await Promise.all( - options.media.map(async (media) => { - const buffer = await this.processMedia(media); - return { - ...media, - buffer, - }; - }) - ); - - // Handle different post types - if (processedMedia.length > 1) { - // Create carousel post - await ig.publish.album({ - items: processedMedia.map((media) => ({ - file: media.buffer, - caption: options.caption, - })), - }); - } else { - // Single image/video post - const media = processedMedia[0]; - if (media.type === "VIDEO") { - await ig.publish.video({ - video: media.buffer, - caption: options.caption, - coverImage: media.buffer, - }); - } else { - await ig.publish.photo({ - file: media.buffer, - caption: options.caption, - }); - } - } - - // Update last post time - this.lastPostTime = Date.now(); - await this.runtime.cacheManager.set("instagram/lastPost", { - timestamp: this.lastPostTime, - }); - - elizaLogger.log("Instagram post created successfully"); - } catch (error) { - elizaLogger.error("Error creating Instagram post:", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - phase: "createPost", - mediaCount: options.media.length, - hasCaption: !!options.caption, - }); - throw error; - } - } - - private async processMedia(media: { - type: string; - url: string; - }): Promise { - try { - elizaLogger.log("Processing media", { - type: media.type, - url: media.url, - }); - - // Read file directly from filesystem instead of using fetch - const buffer = await fs.readFile(media.url); - - if (media.type === "IMAGE") { - // Process image with sharp - return await sharp(buffer) - .resize(1080, 1080, { - fit: "inside", - withoutEnlargement: true, - }) - .jpeg({ - quality: 85, - progressive: true, - }) - .toBuffer(); - } - - // For other types, return original buffer - return buffer; - } catch (error) { - elizaLogger.error("Error processing media:", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - phase: "processMedia", - mediaType: media.type, - url: media.url, - }); - throw error; - } - } -} diff --git a/packages/client-instagram/src/types.ts b/packages/client-instagram/src/types.ts deleted file mode 100644 index e34a3a8c24cdc..0000000000000 --- a/packages/client-instagram/src/types.ts +++ /dev/null @@ -1,37 +0,0 @@ - -export interface InstagramState { - accessToken: string | null; - longLivedToken: string | null; - profile: InstagramProfile | null; - isInitialized: boolean; - lastCheckedMediaId: string | null; -} - -export interface InstagramProfile { - id: string; - username: string; - name: string; - biography: string; - mediaCount: number; - followerCount: number; - followingCount: number; -} - -export interface MediaItem { - id: string; - mediaType: 'IMAGE' | 'VIDEO' | 'CAROUSEL_ALBUM'; - mediaUrl: string; - thumbnailUrl?: string; - permalink: string; - caption?: string; - timestamp: string; - children?: MediaItem[]; -} - -export interface Comment { - id: string; - text: string; - timestamp: string; - username: string; - replies?: Comment[]; -} \ No newline at end of file diff --git a/packages/client-instagram/tsconfig.json b/packages/client-instagram/tsconfig.json deleted file mode 100644 index 73993deaaf7cb..0000000000000 --- a/packages/client-instagram/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/client-instagram/tsup.config.ts b/packages/client-instagram/tsup.config.ts deleted file mode 100644 index 8cba0c208915d..0000000000000 --- a/packages/client-instagram/tsup.config.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], - external: [ - "sharp", - "fs", - "path", - "instagram-private-api", - // Add other externals as needed - ], -}); \ No newline at end of file diff --git a/packages/client-lens/__tests__/client.test.ts b/packages/client-lens/__tests__/client.test.ts deleted file mode 100644 index fea813db75939..0000000000000 --- a/packages/client-lens/__tests__/client.test.ts +++ /dev/null @@ -1,124 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { LensClient } from '../src/client'; -import { LensClient as LensClientCore, LimitType, PublicationType } from '@lens-protocol/client'; - -// Mock dependencies -vi.mock('@lens-protocol/client', async () => { - const actual = await vi.importActual('@lens-protocol/client'); - return { - ...actual, - LensClient: vi.fn().mockImplementation(() => ({ - authentication: { - generateChallenge: vi.fn().mockResolvedValue({ id: 'challenge-id', text: 'challenge-text' }), - authenticate: vi.fn().mockResolvedValue({ accessToken: 'mock-token', refreshToken: 'mock-refresh' }) - }, - profile: { - fetch: vi.fn().mockResolvedValue({ - id: '0x01', - handle: { localName: 'test.lens' }, - metadata: { - displayName: 'Test User', - bio: 'Test bio', - picture: { - uri: 'https://example.com/pic-raw.jpg' - } - } - }) - }, - publication: { - fetchAll: vi.fn().mockResolvedValue({ - items: [ - { - id: 'pub-1', - metadata: { content: 'Test post' }, - stats: { reactions: 10 } - } - ] - }) - } - })) - }; -}); - -describe('LensClient', () => { - let client: LensClient; - const mockRuntime = { - name: 'test-runtime', - memory: new Map(), - getMemory: vi.fn(), - setMemory: vi.fn(), - clearMemory: vi.fn() - }; - const mockAccount = { - address: '0x123' as `0x${string}`, - privateKey: '0xabc' as `0x${string}`, - signMessage: vi.fn().mockResolvedValue('signed-message'), - signTypedData: vi.fn() - }; - - beforeEach(() => { - vi.clearAllMocks(); - client = new LensClient({ - runtime: mockRuntime, - cache: new Map(), - account: mockAccount, - profileId: '0x01' as `0x${string}` - }); - }); - - describe('authenticate', () => { - it('should authenticate successfully', async () => { - await client.authenticate(); - expect(client['authenticated']).toBe(true); - expect(client['core'].authentication.generateChallenge).toHaveBeenCalledWith({ - signedBy: mockAccount.address, - for: '0x01' - }); - expect(mockAccount.signMessage).toHaveBeenCalledWith({ message: 'challenge-text' }); - }); - - it('should handle authentication errors', async () => { - const mockError = new Error('Auth failed'); - vi.mocked(client['core'].authentication.generateChallenge).mockRejectedValueOnce(mockError); - - await expect(client.authenticate()).rejects.toThrow('Auth failed'); - expect(client['authenticated']).toBe(false); - }); - }); - - describe('getPublicationsFor', () => { - it('should fetch publications successfully', async () => { - const publications = await client.getPublicationsFor('0x123'); - expect(publications).toHaveLength(1); - expect(publications[0].id).toBe('pub-1'); - expect(client['core'].publication.fetchAll).toHaveBeenCalledWith({ - limit: LimitType.Fifty, - where: { - from: ['0x123'], - publicationTypes: [PublicationType.Post] - } - }); - }); - - it('should handle fetch errors', async () => { - vi.mocked(client['core'].publication.fetchAll).mockRejectedValueOnce(new Error('Fetch failed')); - await expect(client.getPublicationsFor('0x123')).rejects.toThrow('Fetch failed'); - }); - }); - - describe('getProfile', () => { - it('should fetch profile successfully', async () => { - const profile = await client.getProfile('0x123'); - expect(profile).toBeDefined(); - expect(profile.id).toBe('0x01'); - expect(profile.handle).toBe('test.lens'); - expect(profile.pfp).toBe('https://example.com/pic-raw.jpg'); - expect(client['core'].profile.fetch).toHaveBeenCalledWith({ forProfileId: '0x123' }); - }); - - it('should handle profile fetch errors', async () => { - vi.mocked(client['core'].profile.fetch).mockRejectedValueOnce(new Error('Profile fetch failed')); - await expect(client.getProfile('0x123')).rejects.toThrow('Profile fetch failed'); - }); - }); -}); diff --git a/packages/client-lens/__tests__/interactions.test.ts b/packages/client-lens/__tests__/interactions.test.ts deleted file mode 100644 index 83ad995766787..0000000000000 --- a/packages/client-lens/__tests__/interactions.test.ts +++ /dev/null @@ -1,121 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { createTestInteraction, handleTestInteraction } from './test-utils'; -import { LensClient } from '../src/client'; -import type { AnyPublicationFragment, ProfileFragment } from '@lens-protocol/client'; - -// Mock LensClient -vi.mock('../src/client', () => ({ - LensClient: vi.fn().mockImplementation(() => ({ - authenticate: vi.fn().mockResolvedValue(undefined), - mirror: vi.fn().mockResolvedValue({ id: 'mirror-1' }), - comment: vi.fn().mockResolvedValue({ id: 'comment-1' }), - like: vi.fn().mockResolvedValue({ id: 'like-1' }), - follow: vi.fn().mockResolvedValue({ id: 'follow-1' }) - })) -})); - -describe('Interactions', () => { - const mockPublication = { - id: 'pub-1', - metadata: { - content: 'Test publication' - }, - stats: { - totalAmountOfMirrors: 5, - totalAmountOfComments: 3, - totalUpvotes: 10 - } - } as unknown as AnyPublicationFragment; - - const mockProfile = { - id: '0x01', - handle: 'test.lens', - stats: { - totalFollowers: 100, - totalFollowing: 50 - } - } as unknown as ProfileFragment; - - describe('createTestInteraction', () => { - it('should create mirror interaction when conditions are met', () => { - const interaction = createTestInteraction(mockPublication, mockProfile); - expect(interaction).toBeDefined(); - if (interaction) { - expect(['MIRROR', 'COMMENT', 'LIKE', 'FOLLOW']).toContain(interaction.type); - } - }); - - it('should return null when no interaction is needed', () => { - const lowStatsPublication = { - ...mockPublication, - stats: { - totalAmountOfMirrors: 0, - totalAmountOfComments: 0, - totalUpvotes: 0 - } - } as unknown as AnyPublicationFragment; - const interaction = createTestInteraction(lowStatsPublication, mockProfile); - expect(interaction).toBeNull(); - }); - }); - - describe('handleTestInteraction', () => { - let client: LensClient; - - beforeEach(() => { - vi.clearAllMocks(); - client = new LensClient({ - runtime: { - name: 'test-runtime', - memory: new Map(), - getMemory: vi.fn(), - setMemory: vi.fn(), - clearMemory: vi.fn() - }, - cache: new Map(), - account: { - address: '0x123' as `0x${string}`, - privateKey: '0xabc' as `0x${string}`, - signMessage: vi.fn(), - signTypedData: vi.fn() - }, - profileId: '0x01' as `0x${string}` - }); - }); - - it('should handle mirror interaction successfully', async () => { - const interaction = { - type: 'MIRROR' as const, - publicationId: 'pub-1' - }; - - const result = await handleTestInteraction(client, interaction); - expect(result).toBeDefined(); - expect(result.id).toBe('mirror-1'); - expect(client.mirror).toHaveBeenCalledWith('pub-1'); - }); - - it('should handle comment interaction successfully', async () => { - const interaction = { - type: 'COMMENT' as const, - publicationId: 'pub-1', - content: 'Test comment' - }; - - const result = await handleTestInteraction(client, interaction); - expect(result).toBeDefined(); - expect(result.id).toBe('comment-1'); - expect(client.comment).toHaveBeenCalledWith('pub-1', 'Test comment'); - }); - - it('should handle interaction errors', async () => { - const interaction = { - type: 'MIRROR' as const, - publicationId: 'pub-1' - }; - - vi.mocked(client.mirror).mockRejectedValueOnce(new Error('Mirror failed')); - await expect(handleTestInteraction(client, interaction)).rejects.toThrow('Mirror failed'); - }); - }); -}); diff --git a/packages/client-lens/__tests__/post.test.ts b/packages/client-lens/__tests__/post.test.ts deleted file mode 100644 index 5c1e6c45b3d20..0000000000000 --- a/packages/client-lens/__tests__/post.test.ts +++ /dev/null @@ -1,64 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { createTestPost } from './test-utils'; -import { LensClient } from '../src/client'; - -// Mock dependencies -vi.mock('../src/client', () => ({ - LensClient: vi.fn().mockImplementation(() => ({ - authenticate: vi.fn().mockResolvedValue(undefined), - post: vi.fn().mockResolvedValue({ id: 'post-1' }) - })) -})); - -describe('Post Functions', () => { - let client: LensClient; - - beforeEach(() => { - vi.clearAllMocks(); - client = new LensClient({ - runtime: { - name: 'test-runtime', - memory: new Map(), - getMemory: vi.fn(), - setMemory: vi.fn(), - clearMemory: vi.fn() - }, - cache: new Map(), - account: { - address: '0x123' as `0x${string}`, - privateKey: '0xabc' as `0x${string}`, - signMessage: vi.fn(), - signTypedData: vi.fn() - }, - profileId: '0x01' as `0x${string}` - }); - }); - - describe('createTestPost', () => { - it('should create a post successfully', async () => { - const content = 'Test post content'; - const result = await createTestPost(client, content); - - expect(result).toBeDefined(); - expect(result.id).toBe('post-1'); - expect(client.post).toHaveBeenCalledWith(content); - }); - - it('should handle post creation errors', async () => { - const content = 'Test post content'; - vi.mocked(client.post).mockRejectedValueOnce(new Error('Post creation failed')); - - await expect(createTestPost(client, content)).rejects.toThrow('Post creation failed'); - }); - - it('should handle empty content', async () => { - const content = ''; - await expect(createTestPost(client, content)).rejects.toThrow('Post content cannot be empty'); - }); - - it('should handle very long content', async () => { - const content = 'a'.repeat(5001); // Assuming max length is 5000 - await expect(createTestPost(client, content)).rejects.toThrow('Post content too long'); - }); - }); -}); diff --git a/packages/client-lens/__tests__/test-utils.ts b/packages/client-lens/__tests__/test-utils.ts deleted file mode 100644 index 0af8a0941fda1..0000000000000 --- a/packages/client-lens/__tests__/test-utils.ts +++ /dev/null @@ -1,63 +0,0 @@ -import type { AnyPublicationFragment } from "@lens-protocol/client"; -import type { LensClient } from "../src/client"; -import type { Profile } from "../src/types"; - -export interface TestInteraction { - type: 'MIRROR' | 'COMMENT' | 'LIKE' | 'FOLLOW'; - publicationId?: string; - content?: string; -} - -export function createTestInteraction(publication: AnyPublicationFragment, profile: Profile): TestInteraction | null { - const stats = publication.stats; - - // Simple heuristic: if the publication has good engagement, mirror it - if (stats.totalAmountOfMirrors > 3 || stats.totalAmountOfComments > 2 || stats.totalUpvotes > 5) { - return { - type: 'MIRROR', - publicationId: publication.id - }; - } - - // If the publication is engaging but not viral, comment on it - if (stats.totalAmountOfComments > 0 || stats.totalUpvotes > 2) { - return { - type: 'COMMENT', - publicationId: publication.id, - content: 'Interesting perspective!' - }; - } - - return null; -} - -export async function handleTestInteraction(client: LensClient, interaction: TestInteraction) { - switch (interaction.type) { - case 'MIRROR': - if (!interaction.publicationId) throw new Error('Publication ID required for mirror'); - return await client.mirror(interaction.publicationId); - case 'COMMENT': - if (!interaction.publicationId || !interaction.content) { - throw new Error('Publication ID and content required for comment'); - } - return await client.comment(interaction.publicationId, interaction.content); - case 'LIKE': - if (!interaction.publicationId) throw new Error('Publication ID required for like'); - return await client.like(interaction.publicationId); - case 'FOLLOW': - if (!interaction.publicationId) throw new Error('Profile ID required for follow'); - return await client.follow(interaction.publicationId); - default: - throw new Error('Unknown interaction type'); - } -} - -export async function createTestPost(client: LensClient, content: string) { - if (!content) { - throw new Error('Post content cannot be empty'); - } - if (content.length > 5000) { - throw new Error('Post content too long'); - } - return await client.post(content); -} diff --git a/packages/client-lens/eslint.config.mjs b/packages/client-lens/eslint.config.mjs deleted file mode 100644 index 92fe5bbebefad..0000000000000 --- a/packages/client-lens/eslint.config.mjs +++ /dev/null @@ -1,3 +0,0 @@ -import eslintGlobalConfig from "../../eslint.config.mjs"; - -export default [...eslintGlobalConfig]; diff --git a/packages/client-lens/package.json b/packages/client-lens/package.json deleted file mode 100644 index d08e3b333256b..0000000000000 --- a/packages/client-lens/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "@elizaos/client-lens", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "lint": "eslint --fix --cache .", - "test": "vitest run", - "test:watch": "vitest watch", - "test:coverage": "vitest run --coverage" - }, - "dependencies": { - "@elizaos/core": "workspace:*", - "@lens-protocol/client": "2.2.0", - "@lens-protocol/metadata": "1.2.0", - "axios": "^1.7.9" - }, - "devDependencies": { - "tsup": "^8.3.5", - "vitest": "^3.0.0", - "@vitest/coverage-v8": "^1.2.1" - }, - "peerDependencies": { - "@elizaos/core": "workspace:*" - } -} diff --git a/packages/client-lens/src/actions.ts b/packages/client-lens/src/actions.ts deleted file mode 100644 index 0d359be4a8443..0000000000000 --- a/packages/client-lens/src/actions.ts +++ /dev/null @@ -1,50 +0,0 @@ -import type { LensClient } from "./client"; -import type { - Content, - IAgentRuntime, - Memory, - UUID, -} from "@elizaos/core"; -import { textOnly } from "@lens-protocol/metadata"; -import { createPublicationMemory } from "./memory"; -import type { AnyPublicationFragment } from "@lens-protocol/client"; -import type StorjProvider from "./providers/StorjProvider"; - -export async function sendPublication({ - client, - runtime, - content, - roomId, - commentOn, - ipfs, -}: { - client: LensClient; - runtime: IAgentRuntime; - content: Content; - roomId: UUID; - commentOn?: string; - ipfs: StorjProvider; -}): Promise<{ memory?: Memory; publication?: AnyPublicationFragment }> { - // TODO: arweave provider for content hosting - const metadata = textOnly({ content: content.text }); - const contentURI = await ipfs.pinJson(metadata); - - const publication = await client.createPublication( - contentURI, - false, // TODO: support collectable settings - commentOn - ); - - if (publication) { - return { - publication, - memory: createPublicationMemory({ - roomId, - runtime, - publication: publication as AnyPublicationFragment, - }), - }; - } - - return {}; -} diff --git a/packages/client-lens/src/client.ts b/packages/client-lens/src/client.ts deleted file mode 100644 index 511200c23f9c3..0000000000000 --- a/packages/client-lens/src/client.ts +++ /dev/null @@ -1,418 +0,0 @@ -import { type IAgentRuntime, elizaLogger } from "@elizaos/core"; -import { - type AnyPublicationFragment, - LensClient as LensClientCore, - production, - LensTransactionStatusType, - LimitType, - NotificationType, - type ProfileFragment, - PublicationType, - FeedEventItemType, -} from "@lens-protocol/client"; -import type { Profile, BroadcastResult } from "./types"; -import type { PrivateKeyAccount } from "viem"; -import { getProfilePictureUri, handleBroadcastResult, omit } from "./utils"; - -export class LensClient { - runtime: IAgentRuntime; - account: PrivateKeyAccount; - cache: Map; - lastInteractionTimestamp: Date; - profileId: `0x${string}`; - - private authenticated: boolean; - private authenticatedProfile: ProfileFragment | null; - private core: LensClientCore; - - constructor(opts: { - runtime: IAgentRuntime; - cache: Map; - account: PrivateKeyAccount; - profileId: `0x${string}`; - }) { - this.cache = opts.cache; - this.runtime = opts.runtime; - this.account = opts.account; - this.core = new LensClientCore({ - environment: production, - }); - this.lastInteractionTimestamp = new Date(); - this.profileId = opts.profileId; - this.authenticated = false; - this.authenticatedProfile = null; - } - - async authenticate(): Promise { - try { - const { id, text } = - await this.core.authentication.generateChallenge({ - signedBy: this.account.address, - for: this.profileId, - }); - - const signature = await this.account.signMessage({ - message: text, - }); - - await this.core.authentication.authenticate({ id, signature }); - this.authenticatedProfile = await this.core.profile.fetch({ - forProfileId: this.profileId, - }); - - this.authenticated = true; - } catch (error) { - elizaLogger.error("client-lens::client error: ", error); - throw error; - } - } - - async createPublication( - contentURI: string, - onchain = false, - commentOn?: string - ): Promise { - try { - if (!this.authenticated) { - await this.authenticate(); - elizaLogger.log("done authenticating"); - } - let broadcastResult; - - if (commentOn) { - broadcastResult = onchain - ? await this.createCommentOnchain(contentURI, commentOn) - : await this.createCommentMomoka(contentURI, commentOn); - } else { - broadcastResult = onchain - ? await this.createPostOnchain(contentURI) - : await this.createPostMomoka(contentURI); - } - - elizaLogger.log("broadcastResult", broadcastResult); - - if (broadcastResult.id) { - return await this.core.publication.fetch({ - forId: broadcastResult.id, - }); - } - - const completion = await this.core.transaction.waitUntilComplete({ - forTxHash: broadcastResult.txHash, - }); - - if (completion?.status === LensTransactionStatusType.Complete) { - return await this.core.publication.fetch({ - forTxHash: completion?.txHash, - }); - } - } catch (error) { - elizaLogger.error("client-lens::client error: ", error); - throw error; - } - } - - async getPublication( - pubId: string - ): Promise { - if (this.cache.has(`lens/publication/${pubId}`)) { - return this.cache.get(`lens/publication/${pubId}`); - } - - const publication = await this.core.publication.fetch({ forId: pubId }); - - if (publication) - this.cache.set(`lens/publication/${pubId}`, publication); - - return publication; - } - - async getPublicationsFor( - profileId: string, - limit = 50 - ): Promise { - const timeline: AnyPublicationFragment[] = []; - let next: any | undefined = undefined; - - do { - const { items, next: newNext } = next - ? await next() - : await this.core.publication.fetchAll({ - limit: LimitType.Fifty, - where: { - from: [profileId], - publicationTypes: [PublicationType.Post], - }, - }); - - items.forEach((publication) => { - this.cache.set( - `lens/publication/${publication.id}`, - publication - ); - timeline.push(publication); - }); - - next = newNext; - } while (next && timeline.length < limit); - - return timeline; - } - - async getMentions(): Promise<{ - mentions: AnyPublicationFragment[]; - next?: () => object; - }> { - if (!this.authenticated) { - await this.authenticate(); - } - // TODO: we should limit to new ones or at least latest n - const result = await this.core.notifications.fetch({ - where: { - highSignalFilter: false, // true, - notificationTypes: [ - NotificationType.Mentioned, - NotificationType.Commented, - ], - }, - }); - const mentions: AnyPublicationFragment[] = []; - - const { items, next } = result.unwrap(); - - items.map((notification) => { - let item; - if ('publication' in notification) { - item = notification.publication; - } else if ('comment' in notification) { - item = notification.comment; - } else { - return; // Skip notifications without the relevant properties - } - if (!item.isEncrypted) { - mentions.push(item); - this.cache.set(`lens/publication/${item.id}`, item); - } - }); - - return { mentions, next }; - } - - async getProfile(profileId: string): Promise { - if (this.cache.has(`lens/profile/${profileId}`)) { - return this.cache.get(`lens/profile/${profileId}`) as Profile; - } - - const result = await this.core.profile.fetch({ - forProfileId: profileId, - }); - if (!result?.id) { - elizaLogger.error("Error fetching user by profileId"); - - throw "getProfile ERROR"; - } - - const profile: Profile = { - id: "", - profileId, - name: "", - handle: "", - }; - - profile.id = result.id; - profile.name = result.metadata?.displayName; - profile.handle = result.handle?.localName; - profile.bio = result.metadata?.bio; - profile.pfp = getProfilePictureUri(result.metadata?.picture); - - this.cache.set(`lens/profile/${profileId}`, profile); - - return profile; - } - - async getTimeline( - profileId: string, - limit = 10 - ): Promise { - try { - if (!this.authenticated) { - await this.authenticate(); - } - const timeline: AnyPublicationFragment[] = []; - let next: any | undefined = undefined; - - do { - const result = next - ? await next() - : await this.core.feed.fetch({ - where: { - for: profileId, - feedEventItemTypes: [FeedEventItemType.Post], - }, - }); - - const data = result.unwrap(); - - data.items.forEach((item) => { - // private posts in orb clubs are encrypted - if (timeline.length < limit && !item.root.isEncrypted) { - this.cache.set( - `lens/publication/${item.id}`, - item.root - ); - timeline.push(item.root as AnyPublicationFragment); - } - }); - - next = data.pageInfo.next; - } while (next && timeline.length < limit); - - return timeline; - } catch (error) { - elizaLogger.error(error); - throw new Error("client-lens:: getTimeline"); - } - } - - private async createPostOnchain( - contentURI: string - ): Promise { - // gasless + signless if they enabled the lens profile manager - if (this.authenticatedProfile?.signless) { - const broadcastResult = await this.core.publication.postOnchain({ - contentURI, - openActionModules: [], // TODO: if collectable - }); - return handleBroadcastResult(broadcastResult); - } - - // gasless with signed type data - const typedDataResult = - await this.core.publication.createOnchainPostTypedData({ - contentURI, - openActionModules: [], // TODO: if collectable - }); - const { id, typedData } = typedDataResult.unwrap(); - - const signedTypedData = await this.account.signTypedData({ - domain: omit(typedData.domain as any, "__typename"), - types: omit(typedData.types, "__typename"), - primaryType: "Post", - message: omit(typedData.value, "__typename"), - }); - - const broadcastResult = await this.core.transaction.broadcastOnchain({ - id, - signature: signedTypedData, - }); - return handleBroadcastResult(broadcastResult); - } - - private async createPostMomoka( - contentURI: string - ): Promise { - elizaLogger.log("createPostMomoka"); - // gasless + signless if they enabled the lens profile manager - if (this.authenticatedProfile?.signless) { - const broadcastResult = await this.core.publication.postOnMomoka({ - contentURI, - }); - return handleBroadcastResult(broadcastResult); - } - - // gasless with signed type data - const typedDataResult = - await this.core.publication.createMomokaPostTypedData({ - contentURI, - }); - elizaLogger.log("typedDataResult", typedDataResult); - const { id, typedData } = typedDataResult.unwrap(); - - const signedTypedData = await this.account.signTypedData({ - domain: omit(typedData.domain as any, "__typename"), - types: omit(typedData.types, "__typename"), - primaryType: "Post", - message: omit(typedData.value, "__typename"), - }); - - const broadcastResult = await this.core.transaction.broadcastOnMomoka({ - id, - signature: signedTypedData, - }); - return handleBroadcastResult(broadcastResult); - } - - private async createCommentOnchain( - contentURI: string, - commentOn: string - ): Promise { - // gasless + signless if they enabled the lens profile manager - if (this.authenticatedProfile?.signless) { - const broadcastResult = await this.core.publication.commentOnchain({ - commentOn, - contentURI, - }); - return handleBroadcastResult(broadcastResult); - } - - // gasless with signed type data - const typedDataResult = - await this.core.publication.createOnchainCommentTypedData({ - commentOn, - contentURI, - }); - - const { id, typedData } = typedDataResult.unwrap(); - - const signedTypedData = await this.account.signTypedData({ - domain: omit(typedData.domain as any, "__typename"), - types: omit(typedData.types, "__typename"), - primaryType: "Comment", - message: omit(typedData.value, "__typename"), - }); - - const broadcastResult = await this.core.transaction.broadcastOnchain({ - id, - signature: signedTypedData, - }); - return handleBroadcastResult(broadcastResult); - } - - private async createCommentMomoka( - contentURI: string, - commentOn: string - ): Promise { - // gasless + signless if they enabled the lens profile manager - if (this.authenticatedProfile?.signless) { - const broadcastResult = await this.core.publication.commentOnMomoka( - { - commentOn, - contentURI, - } - ); - return handleBroadcastResult(broadcastResult); - } - - // gasless with signed type data - const typedDataResult = - await this.core.publication.createMomokaCommentTypedData({ - commentOn, - contentURI, - }); - - const { id, typedData } = typedDataResult.unwrap(); - - const signedTypedData = await this.account.signTypedData({ - domain: omit(typedData.domain as any, "__typename"), - types: omit(typedData.types, "__typename"), - primaryType: "Comment", - message: omit(typedData.value, "__typename"), - }); - - const broadcastResult = await this.core.transaction.broadcastOnMomoka({ - id, - signature: signedTypedData, - }); - return handleBroadcastResult(broadcastResult); - } -} diff --git a/packages/client-lens/src/index.ts b/packages/client-lens/src/index.ts deleted file mode 100644 index 05049d2e4fc0e..0000000000000 --- a/packages/client-lens/src/index.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { type Client, type IAgentRuntime, elizaLogger } from "@elizaos/core"; -import { privateKeyToAccount } from "viem/accounts"; -import { LensClient } from "./client"; -import { LensPostManager } from "./post"; -import { LensInteractionManager } from "./interactions"; -import StorjProvider from "./providers/StorjProvider"; - -export class LensAgentClient implements Client { - client: LensClient; - posts: LensPostManager; - interactions: LensInteractionManager; - - private profileId: `0x${string}`; - private ipfs: StorjProvider; - - constructor(public runtime: IAgentRuntime) { - const cache = new Map(); - - const privateKey = runtime.getSetting( - "EVM_PRIVATE_KEY" - ) as `0x${string}`; - if (!privateKey) { - throw new Error("EVM_PRIVATE_KEY is missing"); - } - const account = privateKeyToAccount(privateKey); - - this.profileId = runtime.getSetting( - "LENS_PROFILE_ID" - )! as `0x${string}`; - - this.client = new LensClient({ - runtime: this.runtime, - account, - cache, - profileId: this.profileId, - }); - - elizaLogger.info("Lens client initialized."); - - this.ipfs = new StorjProvider(runtime); - - this.posts = new LensPostManager( - this.client, - this.runtime, - this.profileId, - cache, - this.ipfs - ); - - this.interactions = new LensInteractionManager( - this.client, - this.runtime, - this.profileId, - cache, - this.ipfs - ); - } - - async start() { - await Promise.all([this.posts.start(), this.interactions.start()]); - } - - async stop() { - await Promise.all([this.posts.stop(), this.interactions.stop()]); - } -} diff --git a/packages/client-lens/src/interactions.ts b/packages/client-lens/src/interactions.ts deleted file mode 100644 index af1fbc114fcba..0000000000000 --- a/packages/client-lens/src/interactions.ts +++ /dev/null @@ -1,310 +0,0 @@ -import { - composeContext, - generateMessageResponse, - generateShouldRespond, - type Memory, - ModelClass, - stringToUuid, - elizaLogger, - type HandlerCallback, - type Content, - type IAgentRuntime, -} from "@elizaos/core"; -import type { LensClient } from "./client"; -import { toHex } from "viem"; -import { buildConversationThread, createPublicationMemory } from "./memory"; -import { - formatPublication, - formatTimeline, - messageHandlerTemplate, - shouldRespondTemplate, -} from "./prompts"; -import { publicationUuid } from "./utils"; -import { sendPublication } from "./actions"; -import type { AnyPublicationFragment } from "@lens-protocol/client"; -import type { Profile } from "./types"; -import type StorjProvider from "./providers/StorjProvider"; - -export class LensInteractionManager { - private timeout: NodeJS.Timeout | undefined; - constructor( - public client: LensClient, - public runtime: IAgentRuntime, - private profileId: string, - public cache: Map, - private ipfs: StorjProvider - ) {} - - public async start() { - const handleInteractionsLoop = async () => { - try { - await this.handleInteractions(); - } catch (error) { - elizaLogger.error(error); - return; - } - - this.timeout = setTimeout( - handleInteractionsLoop, - Number(this.runtime.getSetting("LENS_POLL_INTERVAL") || 120) * - 1000 // Default to 2 minutes - ); - }; - - handleInteractionsLoop(); - } - - public async stop() { - if (this.timeout) clearTimeout(this.timeout); - } - - private async handleInteractions() { - elizaLogger.info("Handle Lens interactions"); - // TODO: handle next() for pagination - const { mentions } = await this.client.getMentions(); - - const agent = await this.client.getProfile(this.profileId); - for (const mention of mentions) { - const messageHash = toHex(mention.id); - const conversationId = `${messageHash}-${this.runtime.agentId}`; - const roomId = stringToUuid(conversationId); - const userId = stringToUuid(mention.by.id); - - const pastMemoryId = publicationUuid({ - agentId: this.runtime.agentId, - pubId: mention.id, - }); - - const pastMemory = - await this.runtime.messageManager.getMemoryById(pastMemoryId); - - if (pastMemory) { - continue; - } - - await this.runtime.ensureConnection( - userId, - roomId, - mention.by.id, - mention.by.metadata?.displayName || - mention.by.handle?.localName, - "lens" - ); - - const thread = await buildConversationThread({ - client: this.client, - runtime: this.runtime, - publication: mention, - }); - - function hasContent(metadata: any): metadata is { content: string } { - return metadata && typeof metadata.content === 'string'; - } - - let memory: Memory; - if ( - (mention.__typename === 'Post' || mention.__typename === 'Comment' || mention.__typename === 'Quote') && - hasContent(mention.metadata) - ) { - memory = { - content: { text: mention.metadata.content, hash: mention.id }, - agentId: this.runtime.agentId, - userId, - roomId, - }; - } else { - memory = { - content: { text: '[No Content]', hash: mention.id }, - agentId: this.runtime.agentId, - userId, - roomId, - }; - } - - await this.handlePublication({ - agent, - publication: mention, - memory, - thread, - }); - } - - this.client.lastInteractionTimestamp = new Date(); - } - - private async handlePublication({ - agent, - publication, - memory, - thread, - }: { - agent: Profile; - publication: AnyPublicationFragment; - memory: Memory; - thread: AnyPublicationFragment[]; - }) { - if (publication.by.id === agent.id) { - elizaLogger.info("skipping cast from bot itself", publication.id); - return; - } - - if (!memory.content.text) { - elizaLogger.info("skipping cast with no text", publication.id); - return { text: "", action: "IGNORE" }; - } - - const currentPost = formatPublication(publication); - - const timeline = await this.client.getTimeline(this.profileId); - - const formattedTimeline = formatTimeline( - this.runtime.character, - timeline - ); - - function hasContent(metadata: any): metadata is { content: string } { - return metadata && typeof metadata.content === 'string'; - } - - const formattedConversation = thread - .map((pub) => { - if ('metadata' in pub && hasContent(pub.metadata)) { - const content = pub.metadata.content; - return `@${pub.by.handle?.localName} (${new Date( - pub.createdAt - ).toLocaleString("en-US", { - hour: "2-digit", - minute: "2-digit", - month: "short", - day: "numeric", - })}): - ${content}`; - } - return `@${pub.by.handle?.localName} (${new Date( - pub.createdAt - ).toLocaleString("en-US", { - hour: "2-digit", - minute: "2-digit", - month: "short", - day: "numeric", - })}): - [No Content Available]`; - }) - .join("\n\n"); - - const state = await this.runtime.composeState(memory, { - lensHandle: agent.handle, - timeline: formattedTimeline, - currentPost, - formattedConversation, - }); - - const shouldRespondContext = composeContext({ - state, - template: - this.runtime.character.templates?.lensShouldRespondTemplate || - this.runtime.character?.templates?.shouldRespondTemplate || - shouldRespondTemplate, - }); - - const memoryId = publicationUuid({ - agentId: this.runtime.agentId, - pubId: publication.id, - }); - - const castMemory = - await this.runtime.messageManager.getMemoryById(memoryId); - - if (!castMemory) { - await this.runtime.messageManager.createMemory( - createPublicationMemory({ - roomId: memory.roomId, - runtime: this.runtime, - publication, - }) - ); - } - - const shouldRespondResponse = await generateShouldRespond({ - runtime: this.runtime, - context: shouldRespondContext, - modelClass: ModelClass.SMALL, - }); - - if ( - shouldRespondResponse === "IGNORE" || - shouldRespondResponse === "STOP" - ) { - elizaLogger.info( - `Not responding to publication because generated ShouldRespond was ${shouldRespondResponse}` - ); - return; - } - - const context = composeContext({ - state, - template: - this.runtime.character.templates?.lensMessageHandlerTemplate ?? - this.runtime.character?.templates?.messageHandlerTemplate ?? - messageHandlerTemplate, - }); - - const responseContent = await generateMessageResponse({ - runtime: this.runtime, - context, - modelClass: ModelClass.LARGE, - }); - - responseContent.inReplyTo = memoryId; - - if (!responseContent.text) return; - - if (this.runtime.getSetting("LENS_DRY_RUN") === "true") { - elizaLogger.info( - `Dry run: would have responded to publication ${publication.id} with ${responseContent.text}` - ); - return; - } - - const callback: HandlerCallback = async ( - content: Content, - _files: any[] - ) => { - try { - if (memoryId && !content.inReplyTo) { - content.inReplyTo = memoryId; - } - const result = await sendPublication({ - runtime: this.runtime, - client: this.client, - content: content, - roomId: memory.roomId, - commentOn: publication.id, - ipfs: this.ipfs, - }); - if (!result.publication?.id) - throw new Error("publication not sent"); - - // sendPublication lost response action, so we need to add it back here? - result.memory!.content.action = content.action; - - await this.runtime.messageManager.createMemory(result.memory!); - return [result.memory!]; - } catch (error) { - console.error("Error sending response cast:", error); - return []; - } - }; - - const responseMessages = await callback(responseContent); - - const newState = await this.runtime.updateRecentMessageState(state); - - await this.runtime.processActions( - memory, - responseMessages, - newState, - callback - ); - } -} diff --git a/packages/client-lens/src/memory.ts b/packages/client-lens/src/memory.ts deleted file mode 100644 index 26b8c57ebe776..0000000000000 --- a/packages/client-lens/src/memory.ts +++ /dev/null @@ -1,112 +0,0 @@ -import { - elizaLogger, - getEmbeddingZeroVector, - type IAgentRuntime, - stringToUuid, - type Memory, - type UUID, -} from "@elizaos/core"; -import { publicationUuid } from "./utils"; -import type { LensClient } from "./client"; -import type { AnyPublicationFragment } from "@lens-protocol/client"; - -export function createPublicationMemory({ - roomId, - runtime, - publication, -}: { - roomId: UUID; - runtime: IAgentRuntime; - publication: AnyPublicationFragment; -}): Memory { - const commentOn = publication.commentOn - ? publicationUuid({ - pubId: publication.commentOn.id, - agentId: runtime.agentId, - }) - : undefined; - - return { - id: publicationUuid({ - pubId: publication.id, - agentId: runtime.agentId, - }), - agentId: runtime.agentId, - userId: runtime.agentId, - content: { - text: publication.metadata.content, - source: "lens", - url: "", - commentOn, - id: publication.id, - }, - roomId, - embedding: getEmbeddingZeroVector(), - }; -} - -export async function buildConversationThread({ - publication, - runtime, - client, -}: { - publication: AnyPublicationFragment; - runtime: IAgentRuntime; - client: LensClient; -}): Promise { - const thread: AnyPublicationFragment[] = []; - const visited: Set = new Set(); - async function processThread(currentPublication: AnyPublicationFragment) { - if (visited.has(currentPublication.id)) { - return; - } - - visited.add(currentPublication.id); - - const roomId = publicationUuid({ - pubId: currentPublication.id, - agentId: runtime.agentId, - }); - - // Check if the current cast has already been saved - const memory = await runtime.messageManager.getMemoryById(roomId); - - if (!memory) { - elizaLogger.log( - "Creating memory for publication", - currentPublication.id - ); - - const userId = stringToUuid(currentPublication.by.id); - - await runtime.ensureConnection( - userId, - roomId, - currentPublication.by.id, - currentPublication.by.metadata?.displayName || - currentPublication.by.handle?.localName, - "lens" - ); - - await runtime.messageManager.createMemory( - createPublicationMemory({ - roomId, - runtime, - publication: currentPublication, - }) - ); - } - - thread.unshift(currentPublication); - - if (currentPublication.commentOn) { - const parentPublication = await client.getPublication( - currentPublication.commentOn.id - ); - if (parentPublication) await processThread(parentPublication); - } - } - - await processThread(publication); - return thread; -} diff --git a/packages/client-lens/src/post.ts b/packages/client-lens/src/post.ts deleted file mode 100644 index a7ffdb7f59f9b..0000000000000 --- a/packages/client-lens/src/post.ts +++ /dev/null @@ -1,141 +0,0 @@ -import { - composeContext, - generateText, - type IAgentRuntime, - ModelClass, - stringToUuid, - elizaLogger, -} from "@elizaos/core"; -import type { LensClient } from "./client"; -import { formatTimeline, postTemplate } from "./prompts"; -import { publicationUuid } from "./utils"; -import { createPublicationMemory } from "./memory"; -import { sendPublication } from "./actions"; -import type StorjProvider from "./providers/StorjProvider"; - -export class LensPostManager { - private timeout: NodeJS.Timeout | undefined; - - constructor( - public client: LensClient, - public runtime: IAgentRuntime, - private profileId: string, - public cache: Map, - private ipfs: StorjProvider - ) {} - - public async start() { - const generateNewPubLoop = async () => { - try { - await this.generateNewPublication(); - } catch (error) { - elizaLogger.error(error); - return; - } - - this.timeout = setTimeout( - generateNewPubLoop, - (Math.floor(Math.random() * (4 - 1 + 1)) + 1) * 60 * 60 * 1000 - ); // Random interval between 1 and 4 hours - }; - - generateNewPubLoop(); - } - - public async stop() { - if (this.timeout) clearTimeout(this.timeout); - } - - private async generateNewPublication() { - elizaLogger.info("Generating new publication"); - try { - const profile = await this.client.getProfile(this.profileId); - await this.runtime.ensureUserExists( - this.runtime.agentId, - profile.handle!, - this.runtime.character.name, - "lens" - ); - - const timeline = await this.client.getTimeline(this.profileId); - - // this.cache.set("lens/timeline", timeline); - - const formattedHomeTimeline = formatTimeline( - this.runtime.character, - timeline - ); - - const generateRoomId = stringToUuid("lens_generate_room"); - - const state = await this.runtime.composeState( - { - roomId: generateRoomId, - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - content: { text: "", action: "" }, - }, - { - lensHandle: profile.handle, - timeline: formattedHomeTimeline, - } - ); - - const context = composeContext({ - state, - template: - this.runtime.character.templates?.lensPostTemplate || - postTemplate, - }); - - const content = await generateText({ - runtime: this.runtime, - context, - modelClass: ModelClass.SMALL, - }); - - if (this.runtime.getSetting("LENS_DRY_RUN") === "true") { - elizaLogger.info(`Dry run: would have posted: ${content}`); - return; - } - - try { - const { publication } = await sendPublication({ - client: this.client, - runtime: this.runtime, - roomId: generateRoomId, - content: { text: content }, - ipfs: this.ipfs, - }); - - if (!publication) throw new Error("failed to send publication"); - - const roomId = publicationUuid({ - agentId: this.runtime.agentId, - pubId: publication.id, - }); - - await this.runtime.ensureRoomExists(roomId); - - await this.runtime.ensureParticipantInRoom( - this.runtime.agentId, - roomId - ); - - elizaLogger.info(`[Lens Client] Published ${publication.id}`); - - await this.runtime.messageManager.createMemory( - createPublicationMemory({ - roomId, - runtime: this.runtime, - publication, - }) - ); - } catch (error) { - elizaLogger.error("Error sending publication:", error); - } - } catch (error) { - elizaLogger.error("Error generating new publication:", error); - } - } -} diff --git a/packages/client-lens/src/prompts.ts b/packages/client-lens/src/prompts.ts deleted file mode 100644 index f9a0ccc06616c..0000000000000 --- a/packages/client-lens/src/prompts.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { - type Character, - messageCompletionFooter, - shouldRespondFooter, -} from "@elizaos/core"; -import type { AnyPublicationFragment } from "@lens-protocol/client"; - -export const formatPublication = (publication: AnyPublicationFragment) => { - return `ID: ${publication.id} - From: ${publication.by.metadata?.displayName} (@${publication.by.handle?.localName})${publication.by.handle?.localName})${publication.commentOn ? `\nIn reply to: @${publication.commentOn.by.handle?.localName}` : ""} -Text: ${publication.metadata.content}`; -}; - -export const formatTimeline = ( - character: Character, - timeline: AnyPublicationFragment[] -) => `# ${character.name}'s Home Timeline -${timeline.map(formatPublication).join("\n")} -`; - -export const headerTemplate = ` -{{timeline}} - -# Knowledge -{{knowledge}} - -About {{agentName}} (@{{lensHandle}}): -{{bio}} -{{lore}} -{{postDirections}} - -{{providers}} - -{{recentPosts}} - -{{characterPostExamples}}`; - -export const postTemplate = - headerTemplate + - ` -# Task: Generate a post in the voice and style of {{agentName}}, aka @{{lensHandle}} -Write a single sentence post that is {{adjective}} about {{topic}} (without mentioning {{topic}} directly), from the perspective of {{agentName}}. -Try to write something totally different than previous posts. Do not add commentary or ackwowledge this request, just write the post. - -Your response should not contain any questions. Brief, concise statements only. No emojis. Use \\n\\n (double spaces) between statements.`; - -export const messageHandlerTemplate = - headerTemplate + - ` -Recent interactions between {{agentName}} and other users: -{{recentPostInteractions}} - -Thread of publications You Are Replying To: -{{formattedConversation}} - -# Task: Generate a post in the voice, style and perspective of {{agentName}} (@{{lensHandle}}): -{{currentPost}}` + - messageCompletionFooter; - -export const shouldRespondTemplate = - // - `# Task: Decide if {{agentName}} should respond. - About {{agentName}}: - {{bio}} - - # INSTRUCTIONS: Determine if {{agentName}} (@{{lensHandle}}) should respond to the message and participate in the conversation. Do not comment. Just respond with "RESPOND" or "IGNORE" or "STOP". - -Response options are RESPOND, IGNORE and STOP. - -{{agentName}} should respond to messages that are directed at them, or participate in conversations that are interesting or relevant to their background, IGNORE messages that are irrelevant to them, and should STOP if the conversation is concluded. - -{{agentName}} is in a room with other users and wants to be conversational, but not annoying. -{{agentName}} should RESPOND to messages that are directed at them, or participate in conversations that are interesting or relevant to their background. -If a message is not interesting or relevant, {{agentName}} should IGNORE. -If a message thread has become repetitive, {{agentName}} should IGNORE. -Unless directly RESPONDing to a user, {{agentName}} should IGNORE messages that are very short or do not contain much information. -If a user asks {{agentName}} to stop talking, {{agentName}} should STOP. -If {{agentName}} concludes a conversation and isn't part of the conversation anymore, {{agentName}} should STOP. - -IMPORTANT: {{agentName}} (aka @{{lensHandle}}) is particularly sensitive about being annoying, so if there is any doubt, it is better to IGNORE than to RESPOND. - -Thread of messages You Are Replying To: -{{formattedConversation}} - -Current message: -{{currentPost}} - -` + shouldRespondFooter; diff --git a/packages/client-lens/src/providers/StorjProvider.ts b/packages/client-lens/src/providers/StorjProvider.ts deleted file mode 100644 index bdd2aa5ede13d..0000000000000 --- a/packages/client-lens/src/providers/StorjProvider.ts +++ /dev/null @@ -1,84 +0,0 @@ -import axios, { type AxiosInstance } from "axios"; -import FormData from "form-data"; -import type { IAgentRuntime } from "@elizaos/core"; - -// ipfs pinning service: https://storj.dev/dcs/api/storj-ipfs-pinning -class StorjProvider { - private STORJ_API_URL = "https://www.storj-ipfs.com"; - private STORJ_API_USERNAME: string; - private STORJ_API_PASSWORD: string; - private baseURL: string; - private client: AxiosInstance; - - constructor(runtime: IAgentRuntime) { - this.STORJ_API_USERNAME = runtime.getSetting("STORJ_API_USERNAME")!; - this.STORJ_API_PASSWORD = runtime.getSetting("STORJ_API_PASSWORD")!; - this.baseURL = `${this.STORJ_API_URL}/api/v0`; - this.client = this.createClient(); - } - - private createClient(): AxiosInstance { - return axios.create({ - baseURL: this.baseURL, - auth: { - username: this.STORJ_API_USERNAME, - password: this.STORJ_API_PASSWORD, - }, - }); - } - - private hash(uriOrHash: string): string { - return typeof uriOrHash === "string" && uriOrHash.startsWith("ipfs://") - ? uriOrHash.split("ipfs://")[1] - : uriOrHash; - } - - public gatewayURL(uriOrHash: string): string { - return `${this.STORJ_API_URL}/ipfs/${this.hash(uriOrHash)}`; - } - - public async pinJson(json: any): Promise { - if (typeof json !== "string") { - json = JSON.stringify(json); - } - const formData = new FormData(); - formData.append("path", Buffer.from(json, "utf-8").toString()); - - const headers = { - "Content-Type": "multipart/form-data", - ...formData.getHeaders(), - }; - - const { data } = await this.client.post( - "add?cid-version=1", - formData.getBuffer(), - { headers } - ); - - return this.gatewayURL(data.Hash); - } - - public async pinFile(file: { - buffer: Buffer; - originalname: string; - mimetype: string; - }): Promise { - const formData = new FormData(); - formData.append("file", file.buffer, { - filename: file.originalname, - contentType: file.mimetype, - }); - - const response = await this.client.post("add?cid-version=1", formData, { - headers: { - "Content-Type": `multipart/form-data; boundary=${formData.getBoundary()}`, - }, - maxContentLength: Number.POSITIVE_INFINITY, - maxBodyLength: Number.POSITIVE_INFINITY, - }); - - return this.gatewayURL(response.data.Hash); - } -} - -export default StorjProvider; diff --git a/packages/client-lens/src/types.ts b/packages/client-lens/src/types.ts deleted file mode 100644 index ef22c5dff4b4a..0000000000000 --- a/packages/client-lens/src/types.ts +++ /dev/null @@ -1,14 +0,0 @@ -export type Profile = { - id: string; - profileId: string; - name?: string | null; - handle?: string; - pfp?: string; - bio?: string | null; - url?: string; -}; - -export type BroadcastResult = { - id?: string; - txId?: string; -}; diff --git a/packages/client-lens/src/utils.ts b/packages/client-lens/src/utils.ts deleted file mode 100644 index d9818a00a8570..0000000000000 --- a/packages/client-lens/src/utils.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { stringToUuid } from "@elizaos/core"; -import type { BroadcastResult } from "./types"; - -export function publicationId({ - pubId, - agentId, -}: { - pubId: string; - agentId: string; -}) { - return `${pubId}-${agentId}`; -} - -export function publicationUuid(props: { pubId: string; agentId: string }) { - return stringToUuid(publicationId(props)); -} - -export function populateMentions( - text: string, - userIds: number[], - positions: number[], - userMap: Record -) { - // Validate input arrays have same length - if (userIds.length !== positions.length) { - throw new Error( - "User IDs and positions arrays must have the same length" - ); - } - - // Create array of mention objects with position and user info - const mentions = userIds - .map((userId, index) => ({ - position: positions[index], - userId, - displayName: userMap[userId]!, - })) - .sort((a, b) => b.position - a.position); // Sort in reverse order to prevent position shifting - - // Create the resulting string by inserting mentions - let result = text; - mentions.forEach((mention) => { - const mentionText = `@${mention.displayName}`; - result = - result.slice(0, mention.position) + - mentionText + - result.slice(mention.position); - }); - - return result; -} - -export const handleBroadcastResult = ( - broadcastResult: any -): BroadcastResult | undefined => { - const broadcastValue = broadcastResult.unwrap(); - - if ("id" in broadcastValue || "txId" in broadcastValue) { - return broadcastValue; - } else { - throw new Error(); - } -}; - -export const getProfilePictureUri = (picture: any): string | undefined => { - if ("optimized" in picture) { - return picture.optimized?.uri || picture.raw?.uri || picture.uri; - } else { - return picture.uri; - } -}; - -export function omit( - obj: T, - key: K -): Omit { - const result: any = {}; - Object.keys(obj).forEach((currentKey) => { - if (currentKey !== key) { - result[currentKey] = obj[currentKey]; - } - }); - return result; -} diff --git a/packages/client-lens/tsconfig.json b/packages/client-lens/tsconfig.json deleted file mode 100644 index fbd90f995d367..0000000000000 --- a/packages/client-lens/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "jsx": "react", - "outDir": "dist", - "rootDir": "./src", - "strict": true - }, - "include": [ - "src/**/*.ts", - "__tests__/test-utils.ts" - ] -} \ No newline at end of file diff --git a/packages/client-lens/tsup.config.ts b/packages/client-lens/tsup.config.ts deleted file mode 100644 index a2fbfc4a0f6d2..0000000000000 --- a/packages/client-lens/tsup.config.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "util", - "form-data", - "axios", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/client-lens/vitest.config.ts b/packages/client-lens/vitest.config.ts deleted file mode 100644 index c69c01120e3ba..0000000000000 --- a/packages/client-lens/vitest.config.ts +++ /dev/null @@ -1,17 +0,0 @@ -/// -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - coverage: { - provider: 'v8', - reporter: ['text', 'json', 'html'], - include: ['src/**/*.ts'], - exclude: ['**/*.d.ts', '**/*.test.ts', '**/types.ts'] - }, - setupFiles: [], - testTimeout: 10000 - } -}); diff --git a/packages/client-simsai/.npmignore b/packages/client-simsai/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/client-simsai/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/client-simsai/eslint.config.mjs b/packages/client-simsai/eslint.config.mjs deleted file mode 100644 index 92fe5bbebefad..0000000000000 --- a/packages/client-simsai/eslint.config.mjs +++ /dev/null @@ -1,3 +0,0 @@ -import eslintGlobalConfig from "../../eslint.config.mjs"; - -export default [...eslintGlobalConfig]; diff --git a/packages/client-simsai/package.json b/packages/client-simsai/package.json deleted file mode 100644 index d69f6e9b96e40..0000000000000 --- a/packages/client-simsai/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "@elizaos/client-simsai", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "glob": "11.0.0" - }, - "devDependencies": { - "tsup": "8.3.5" - }, - "peerDependencies": { - "@elizaos/core": "workspace:*" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "lint": "eslint --fix" - } -} diff --git a/packages/client-simsai/src/index.ts b/packages/client-simsai/src/index.ts deleted file mode 100644 index 1c1f9a7825e80..0000000000000 --- a/packages/client-simsai/src/index.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { JeeterPostClient } from "./jeeter/post.ts"; -import { JeeterSearchClient } from "./jeeter/search.ts"; -import { JeeterInteractionClient } from "./jeeter/interactions.ts"; -import { IAgentRuntime, Client, elizaLogger } from "@elizaos/core"; -import { validateJeeterConfig } from "./jeeter/environment.ts"; -import { ClientBase } from "./jeeter/base.ts"; - -class SimsAIManager { - client: ClientBase; - post: JeeterPostClient; - search: JeeterSearchClient; - interaction: JeeterInteractionClient; - - constructor(runtime: IAgentRuntime) { - this.client = new ClientBase(runtime); - this.post = new JeeterPostClient(this.client, runtime); - this.search = new JeeterSearchClient(this.client, runtime); - this.interaction = new JeeterInteractionClient(this.client, runtime); - } -} - -let activeManager: SimsAIManager | null = null; - -export const JeeterClientInterface: Client = { - async start(runtime: IAgentRuntime) { - if (activeManager) { - elizaLogger.warn("SimsAI client already started"); - return activeManager; - } - - await validateJeeterConfig(runtime); - - elizaLogger.log("SimsAI client started"); - - activeManager = new SimsAIManager(runtime); - - await activeManager.client.init(); - - await activeManager.post.start(); - - await activeManager.search.start(); - - await activeManager.interaction.start(); - - return activeManager; - }, - async stop(_runtime: IAgentRuntime) { - elizaLogger.log("Stopping SimsAI client"); - if (activeManager) { - try { - await activeManager.interaction.stop(); - await activeManager.search.stop(); - await activeManager.post.stop(); - activeManager = null; - elizaLogger.log("SimsAI client stopped successfully"); - } catch (error) { - elizaLogger.error("Error stopping SimsAI client:", error); - throw error; - } - } - elizaLogger.log("SimsAI client stopped"); - }, -}; - -export default JeeterClientInterface; diff --git a/packages/client-simsai/src/jeeter/base.ts b/packages/client-simsai/src/jeeter/base.ts deleted file mode 100644 index df7a534b82fd3..0000000000000 --- a/packages/client-simsai/src/jeeter/base.ts +++ /dev/null @@ -1,515 +0,0 @@ -import { - Content, - IAgentRuntime, - IImageDescriptionService, - Memory, - State, - UUID, - getEmbeddingZeroVector, - elizaLogger, - stringToUuid, -} from "@elizaos/core"; -import { Agent, Jeet, JeetResponse, Pagination, SimsAIProfile } from "./types"; -import { EventEmitter } from "events"; -import { SimsAIClient } from "./client"; - -export function extractAnswer(text: string): string { - const startIndex = text.indexOf("Answer: ") + 8; - const endIndex = text.indexOf("<|endoftext|>", 11); - return text.slice(startIndex, endIndex); -} - -class RequestQueue { - private queue: (() => Promise)[] = []; - private processing: boolean = false; - - async add(request: () => Promise): Promise { - return new Promise((resolve, reject) => { - this.queue.push(async () => { - try { - const result = await request(); - resolve(result); - } catch (error) { - reject(error); - } - }); - this.processQueue(); - }); - } - - private async processQueue(): Promise { - if (this.processing || this.queue.length === 0) { - return; - } - this.processing = true; - - while (this.queue.length > 0) { - const request = this.queue.shift()!; - try { - await request(); - } catch (error) { - console.error("Error processing request:", error); - this.queue.unshift(request); - await this.exponentialBackoff(this.queue.length); - } - await this.randomDelay(); - } - - this.processing = false; - } - - private async exponentialBackoff(retryCount: number): Promise { - const delay = Math.pow(2, retryCount) * 1000; - await new Promise((resolve) => setTimeout(resolve, delay)); - } - - private async randomDelay(): Promise { - const delay = Math.floor(Math.random() * 2000) + 1500; - await new Promise((resolve) => setTimeout(resolve, delay)); - } -} - -export class ClientBase extends EventEmitter { - static _simsAIClients: { [accountIdentifier: string]: SimsAIClient } = {}; - simsAIClient: SimsAIClient; - runtime: IAgentRuntime; - directions: string; - lastCheckedJeetId: string | null = null; - imageDescriptionService: IImageDescriptionService; - temperature: number = 0.5; - - requestQueue: RequestQueue = new RequestQueue(); - profile: Agent | null; - - callback: (self: ClientBase) => any = () => {}; - - constructor(runtime: IAgentRuntime) { - super(); - this.runtime = runtime; - const userId = this.runtime.getSetting("SIMSAI_AGENT_ID"); - - if (ClientBase._simsAIClients[userId]) { - this.simsAIClient = ClientBase._simsAIClients[userId]; - } else { - const apiKey = this.runtime.getSetting("SIMSAI_API_KEY"); - if (!apiKey) { - throw new Error("SimsAI API key not configured"); - } - this.simsAIClient = new SimsAIClient(apiKey, userId); - ClientBase._simsAIClients[userId] = this.simsAIClient; - } - - this.directions = - "- " + - this.runtime.character.style.all.join("\n- ") + - "- " + - this.runtime.character.style.post.join(); - } - - async init() { - const userId = this.runtime.getSetting("SIMSAI_AGENT_ID"); - if (!userId) { - throw new Error("SimsAI userId not configured"); - } - - elizaLogger.log("Initializing SimsAI client"); - this.profile = await this.fetchProfile(userId); - - if (this.profile) { - elizaLogger.log("SimsAI user ID:", this.profile.id); - const simsaiProfile: SimsAIProfile = { - id: this.profile.id, - username: this.profile.username, - screenName: this.profile.name, - bio: this.profile.bio, - }; - - this.runtime.character.simsaiProfile = simsaiProfile; - this.simsAIClient.updateProfile(simsaiProfile); - } else { - throw new Error("Failed to load profile"); - } - - await this.loadLatestCheckedJeetId(); - await this.populateTimeline(); - } - - async cacheJeet(jeet: Jeet): Promise { - if (!jeet) { - console.warn("Jeet is undefined, skipping cache"); - return; - } - await this.runtime.cacheManager.set(`jeeter/jeets/${jeet.id}`, jeet); - } - - async getCachedJeet(jeetId: string): Promise { - return await this.runtime.cacheManager.get( - `jeeter/jeets/${jeetId}` - ); - } - - async getJeet(jeetId: string): Promise { - const cachedJeet = await this.getCachedJeet(jeetId); - if (cachedJeet) return cachedJeet; - - const jeet = await this.requestQueue.add(() => - this.simsAIClient.getJeet(jeetId) - ); - - await this.cacheJeet(jeet); - return jeet; - } - - async fetchHomeTimeline(count: number): Promise { - elizaLogger.debug("fetching home timeline"); - const response = await this.simsAIClient.getHomeTimeline(count); - return response.jeets || []; - } - - async fetchDiscoveryTimeline(count: number): Promise { - elizaLogger.debug("fetching discovery timeline"); - const response = await this.simsAIClient.getDiscoveryTimeline(count); - return response.jeets || []; - } - - async fetchSearchJeets( - query: string, - maxResults: number = 20, - startTime?: string, - endTime?: string - ): Promise<{ jeets: Jeet[]; pagination: Pagination }> { - try { - const timeoutPromise = new Promise((resolve) => - setTimeout( - () => - resolve({ - jeets: [], - nextCursor: "", - }), - 10000 - ) - ); - - const result = await this.requestQueue.add( - async () => - await Promise.race([ - this.simsAIClient.searchJeets(query, maxResults), - timeoutPromise, - ]) - ); - - return { - jeets: result.jeets || [], - pagination: { - next_cursor: result.nextCursor || "", - has_more: Boolean(result.nextCursor), - }, - }; - } catch (error) { - elizaLogger.error("Error fetching search jeets:", error); - return { - jeets: [], - pagination: { next_cursor: "", has_more: false }, - }; - } - } - - private async populateTimeline() { - elizaLogger.debug("populating timeline..."); - - const cachedTimeline = await this.getCachedTimeline(); - - if (cachedTimeline) { - const existingMemories = - await this.getExistingMemories(cachedTimeline); - const existingMemoryIds = new Set( - existingMemories.map((memory) => memory.id.toString()) - ); - - if ( - await this.processCachedTimeline( - cachedTimeline, - existingMemoryIds - ) - ) { - return; - } - } - - const timeline = await this.fetchHomeTimeline(cachedTimeline ? 10 : 50); - - // Get mentions - const mentionsResponse = await this.requestQueue.add(async () => { - const mentions = await this.simsAIClient.getMentions(20); - - // Get full Jeet objects - const mentionJeets = await Promise.all( - (mentions.jeets || []).map(async (jeet) => { - try { - return await this.getJeet(jeet.id); - } catch (error) { - elizaLogger.error( - `Error fetching jeet ${jeet.id}:`, - error - ); - return null; - } - }) - ); - - const validMentionJeets = mentionJeets.filter( - (jeet): jeet is Jeet => jeet !== null - ); - - return { - jeets: validMentionJeets, - }; - }); - - const allJeets = [...timeline, ...(mentionsResponse.jeets || [])]; - await this.processNewJeets(allJeets); - - // Cache results - await this.cacheTimeline(timeline); - await this.cacheMentions(mentionsResponse.jeets); - } - - private async getExistingMemories(jeets: Jeet[]) { - return await this.runtime.messageManager.getMemoriesByRoomIds({ - roomIds: jeets.map((jeet) => - stringToUuid(jeet.id + "-" + this.runtime.agentId) - ), - }); - } - - private async processCachedTimeline( - timeline: Jeet[], - existingMemoryIds: Set - ): Promise { - const jeetsToSave = timeline.filter( - (jeet) => - !existingMemoryIds.has( - stringToUuid(jeet.id + "-" + this.runtime.agentId) - ) - ); - - if (jeetsToSave.length > 0) { - await this.processNewJeets(jeetsToSave); - elizaLogger.log( - `Populated ${jeetsToSave.length} missing jeets from cache.` - ); - return true; - } - - return false; - } - - private async processNewJeets(jeets: Jeet[]) { - const validJeets = jeets.filter((jeet) => jeet && jeet.id); - - const roomIds = new Set(); - validJeets.forEach((jeet) => { - if (jeet.id) { - roomIds.add(stringToUuid(jeet.id + "-" + this.runtime.agentId)); - } - }); - - const existingMemories = - await this.runtime.messageManager.getMemoriesByRoomIds({ - roomIds: Array.from(roomIds), - }); - - const existingMemoryIds = new Set( - existingMemories.map((memory) => memory.id) - ); - - const jeetsToSave = validJeets.filter( - (jeet) => - jeet.id && - !existingMemoryIds.has( - stringToUuid(jeet.id + "-" + this.runtime.agentId) - ) - ); - - if (this.profile?.id) { - await this.runtime.ensureUserExists( - this.runtime.agentId, - this.profile.id, - this.runtime.character.name, - "simsai" - ); - } - - for (const jeet of jeetsToSave) { - await this.saveJeetAsMemory(jeet); - } - } - - private async saveJeetAsMemory(jeet: Jeet) { - if (!jeet.id) { - elizaLogger.error("No valid ID found for jeet:", jeet); - return; - } - - const roomId = stringToUuid(jeet.id + "-" + this.runtime.agentId); - const userId = stringToUuid(jeet.agentId || jeet.userId); - - if (jeet.agent) { - await this.runtime.ensureConnection( - userId, - roomId, - jeet.agent.username, - jeet.agent.name, - "jeeter" - ); - } - - const content: Content = { - text: jeet.text || "", - url: jeet.permanentUrl, - source: "simsai", - inReplyTo: jeet.inReplyToStatusId - ? stringToUuid( - jeet.inReplyToStatusId + "-" + this.runtime.agentId - ) - : undefined, - }; - - await this.runtime.messageManager.createMemory({ - id: stringToUuid(jeet.id + "-" + this.runtime.agentId), - userId, - content, - agentId: this.runtime.agentId, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: jeet.createdAt - ? new Date(jeet.createdAt).getTime() - : Date.now(), - }); - - await this.cacheJeet(jeet); - } - - async saveRequestMessage(message: Memory, state: State) { - if (message.content.text) { - const recentMessage = await this.runtime.messageManager.getMemories( - { - roomId: message.roomId, - count: 1, - unique: false, - } - ); - - if ( - recentMessage.length > 0 && - recentMessage[0].content === message.content - ) { - elizaLogger.debug("Message already saved", recentMessage[0].id); - } else { - await this.runtime.messageManager.createMemory({ - ...message, - embedding: getEmbeddingZeroVector(), - }); - } - - await this.runtime.evaluate(message, { - ...state, - simsAIClient: this.simsAIClient, - }); - } - } - - async loadLatestCheckedJeetId(): Promise { - this.lastCheckedJeetId = await this.runtime.cacheManager.get( - `jeeter/${this.profile?.id}/latest_checked_jeet_id` - ); - } - - async cacheLatestCheckedJeetId() { - if (this.lastCheckedJeetId && this.profile?.id) { - await this.runtime.cacheManager.set( - `jeeter/${this.profile.id}/latest_checked_jeet_id`, - this.lastCheckedJeetId - ); - } - } - - async getCachedTimeline(): Promise { - return this.profile?.id - ? await this.runtime.cacheManager.get( - `jeeter/${this.profile.id}/timeline` - ) - : undefined; - } - - async cacheTimeline(timeline: Jeet[]) { - if (this.profile?.id) { - await this.runtime.cacheManager.set( - `jeeter/${this.profile.id}/timeline`, - timeline, - { expires: 10 * 1000 } - ); - } - } - - async cacheMentions(mentions: Jeet[]) { - if (this.profile?.id) { - await this.runtime.cacheManager.set( - `jeeter/${this.profile.id}/mentions`, - mentions, - { expires: 10 * 1000 } - ); - } - } - - async getCachedProfile(userId: string) { - return await this.runtime.cacheManager.get( - `jeeter/${userId}/profile` - ); - } - - async cacheProfile(profile: Agent) { - await this.runtime.cacheManager.set( - `jeeter/${profile.id}/profile`, - profile - ); - } - - async fetchProfile(userId: string): Promise { - const cached = await this.getCachedProfile(userId); - if (cached) return cached; - - try { - const profile = await this.requestQueue.add(async () => { - const response = await this.simsAIClient.getAgent(userId); - const agent: Agent = { - id: response.id, - builder_id: response.builder_id, - username: response.username, - name: response.name || this.runtime.character.name, - bio: - response.bio || - (typeof this.runtime.character.bio === "string" - ? this.runtime.character.bio - : this.runtime.character.bio[0] || ""), - avatar_url: response.avatar_url, - created_at: response.created_at, - updated_at: response.updated_at, - }; - return agent; - }); - - await this.cacheProfile(profile); - return profile; - } catch (error) { - elizaLogger.error("Error fetching SimsAI profile:", error); - throw error; - } - } - - onReady() { - throw new Error( - "Not implemented in base class, please call from subclass" - ); - } -} diff --git a/packages/client-simsai/src/jeeter/client.ts b/packages/client-simsai/src/jeeter/client.ts deleted file mode 100644 index 73ceaf6d3fa40..0000000000000 --- a/packages/client-simsai/src/jeeter/client.ts +++ /dev/null @@ -1,280 +0,0 @@ -import { EventEmitter } from "events"; -import { SIMSAI_API_URL } from "./constants"; -import { elizaLogger } from "@elizaos/core"; -import { - Agent, - ApiLikeResponse, - ApiRejeetResponse, - ApiSearchResponse, - ApiConversationResponse, - Jeet, - JeetResponse, - SimsAIProfile, - ApiError, - ApiPostJeetResponse, -} from "./types"; -import { wait } from "./utils"; - -export class SimsAIClient extends EventEmitter { - private apiKey: string; - private baseUrl: string; - private agentId: string; - profile: SimsAIProfile; - - constructor(apiKey: string, agentId: string, profile?: SimsAIProfile) { - super(); - this.apiKey = apiKey; - this.agentId = agentId; - this.baseUrl = SIMSAI_API_URL.replace(/\/$/, ""); - this.profile = profile; - } - - private isRateLimitError(error: any): boolean { - return error?.statusCode === 429; - } - - private async makeRequest( - endpoint: string, - options: RequestInit = {} - ): Promise { - const url = `${this.baseUrl}${endpoint}`; - const maxRetries = 3; - let attempt = 0; - while (attempt < maxRetries) { - try { - const response = await fetch(url, { - ...options, - headers: { - Authorization: `Bearer ${this.apiKey}`, - "Content-Type": "application/json", - ...options.headers, - }, - credentials: "include", - }); - - if (!response.ok) { - const error = new Error( - `SimsAI API error: ${response.statusText} (${response.status})` - ) as ApiError; - error.statusCode = response.status; - error.endpoint = endpoint; - throw error; - } - - return (await response.json()) as T; - } catch (error) { - elizaLogger.error(`Error in makeRequest to ${endpoint}:`, { - message: error.message, - stack: error.stack, - endpoint, - options, - }); - - if (error && this.isRateLimitError(error)) { - const waitTime = Math.pow(2, attempt) * 1000; - elizaLogger.warn( - `Rate limit hit for endpoint ${endpoint}, retrying in ${waitTime}ms` - ); - await wait(waitTime); - attempt++; - continue; - } - throw error; - } - } - } - - updateProfile(profile: SimsAIProfile) { - this.profile = profile; - } - - async getAgent(agentId: string): Promise { - return await this.makeRequest(`/agents/${agentId}`); - } - - async getJeet(jeetId: string): Promise { - return await this.makeRequest(`/public/jeets/${jeetId}`); - } - - async getJeetConversation(jeetId: string): Promise { - const response = await this.makeRequest( - `/jeets/${jeetId}/conversation` - ); - - return response.data.map((jeet) => { - const author = response.includes.users.find( - (user) => user.id === jeet.author_id - ); - - return { - id: jeet.id, - text: jeet.text, - createdAt: jeet.created_at, - agentId: jeet.author_id, - inReplyToStatusId: jeet.in_reply_to_status_id, - agent: author - ? { - id: author.id, - name: author.name, - username: author.username, - type: author.type, - avatar_url: author.avatar_url, - } - : undefined, - public_metrics: jeet.public_metrics, - media: [], - hashtags: [], - mentions: [], - photos: [], - thread: [], - urls: [], - videos: [], - }; - }); - } - - async getHomeTimeline( - count: number, - cursor?: string - ): Promise { - return await this.makeRequest( - `/public/agents/${this.agentId}/jeets?limit=${count}${cursor ? `&cursor=${cursor}` : ""}` - ); - } - - async getDiscoveryTimeline(count: number): Promise { - return await this.makeRequest( - `/public/timeline?limit=${count}` - ); - } - - async searchJeets( - query: string, - maxResults: number = 10 - ): Promise { - const params = new URLSearchParams({ - query, - max_results: Math.min(maxResults, 100).toString(), - }); - - const response = await this.makeRequest( - `/jeets/search/recent?${params.toString()}` - ); - - const jeets: Jeet[] = response.data.map((jeet) => { - const author = response.includes.users.find( - (user) => user.id === jeet.author_id - ); - - return { - id: jeet.id, - text: jeet.text, - type: "jeet", - createdAt: jeet.created_at, - agentId: jeet.author_id, - agent: author - ? { - id: author.id, - name: author.name, - username: author.username, - type: author.type, - avatar_url: author.avatar_url, - } - : undefined, - public_metrics: jeet.public_metrics, - media: [], - hashtags: [], - mentions: [], - photos: [], - thread: [], - urls: [], - videos: [], - }; - }); - - return { - jeets, - nextCursor: - response.meta?.result_count > maxResults - ? response.data[response.data.length - 1]?.created_at - : undefined, - }; - } - - async getMentions(maxResults: number = 20): Promise { - try { - return await this.searchJeets( - `@${this.profile.username}`, - maxResults - ); - } catch (error) { - elizaLogger.error("Error fetching mentions:", error); - return { jeets: [] }; - } - } - - async postJeet( - text: string, - inReplyToJeetId?: string, - mediaUrls?: string[], - quoteJeetId?: string - ): Promise { - const payload = { - text, - ...(inReplyToJeetId && { - reply: { - in_reply_to_jeet_id: inReplyToJeetId, - }, - }), - ...(mediaUrls?.length && { media_urls: mediaUrls }), - ...(quoteJeetId && { quote_jeet_id: quoteJeetId }), - }; - - return await this.makeRequest("/jeets", { - method: "POST", - body: JSON.stringify(payload), - }); - } - - async likeJeet(jeetId: string): Promise { - const response = await this.makeRequest("/likes", { - method: "POST", - body: JSON.stringify({ jeetId }), - }); - - return response.data.liked; - } - - async rejeetJeet(jeetId: string): Promise { - const response = await this.makeRequest( - `/jeets/${jeetId}/rejeets`, - { - method: "POST", - } - ); - - return { - id: response.data.id, - createdAt: response.data.created_at, - agentId: response.data.author_id, - type: "rejeet", - media: [], - hashtags: [], - mentions: [], - photos: [], - thread: [], - urls: [], - videos: [], - }; - } - - async quoteRejeet(jeetId: string, text: string): Promise { - return await this.makeRequest("/jeets", { - method: "POST", - body: JSON.stringify({ - text, - quote_jeet_id: jeetId, - }), - }); - } -} diff --git a/packages/client-simsai/src/jeeter/constants.ts b/packages/client-simsai/src/jeeter/constants.ts deleted file mode 100644 index 0f313f4903437..0000000000000 --- a/packages/client-simsai/src/jeeter/constants.ts +++ /dev/null @@ -1,246 +0,0 @@ -const DEFAULT_SIMSAI_API_URL = "https://api.jeeter.social/2/"; -const DEFAULT_JEETER_API_URL = "https://jeeter.social"; - -export const SIMSAI_API_URL = - process.env.SIMSAI_API_URL || DEFAULT_SIMSAI_API_URL; -export const JEETER_API_URL = - process.env.JEETER_API_URL || DEFAULT_JEETER_API_URL; - -export const MAX_JEET_LENGTH = 280; -export const MAX_COMMENT_LENGTH = 280; - -export const MIN_INTERVAL = parseInt(process.env.MIN_INTERVAL || "120000", 10); // Default: 2 minutes -export const MAX_INTERVAL = parseInt(process.env.MAX_INTERVAL || "300000", 10); // Default: 5 minutes - -// Base template for deciding whether to respond to interactions -export const JEETER_SHOULD_RESPOND_BASE = `# INSTRUCTIONS: Determine if {{agentName}} (@{{jeeterUserName}}) should respond to the message and participate in the conversation. - -Response options are RESPOND, IGNORE and STOP. - -RESPONSE CRITERIA: -- RESPOND if you can add unique value or perspective to the conversation -- RESPOND to direct questions or mentions that warrant engagement -- IGNORE if you would just be repeating others or have nothing unique to add -- IGNORE messages that are irrelevant or where you can't contribute meaningfully -- STOP if the conversation has reached its natural conclusion -- STOP if further interaction would be redundant - -{{agentName}} should be conversational but selective, prioritizing quality interactions over quantity. -If there's any doubt about having meaningful value to add, choose IGNORE over RESPOND. - -{{recentPosts}} - -Thread of Jeets You Are Replying To: -{{formattedConversation}} - -Current Post: -{{currentPost}} - -# INSTRUCTIONS: Respond with [RESPOND], [IGNORE], or [STOP] based on whether you can make a unique, valuable contribution to this conversation.`; - -// Base template for search-based engagement -// In constants.ts - -export const JEETER_SEARCH_BASE = `{{timeline}} - -{{providers}} - -Recent interactions between {{agentName}} and other users: -{{recentPostInteractions}} - -About {{agentName}} (@{{jeeterUserName}}): -{{bio}} -{{lore}} -{{topics}} - -{{postDirections}} - -{{recentPosts}} - -# Task: As {{agentName}}, evaluate the post and create a response that builds upon it with your unique expertise and perspective. - -Key Requirements: -1. Identify what you can uniquely add based on your expertise -2. Share a specific insight or relevant experience that expands the discussion -3. Build on the core point without repeating it -4. Connect it to your knowledge and experience - -AVOID: -- Restating or paraphrasing the original post -- Generic agreement or disagreement -- Surface-level observations - -Current Post to Evaluate: -{{currentPost}}`; - -// Base template for handling direct interactions -export const JEETER_INTERACTION_BASE = `{{timeline}} - -{{providers}} - -Recent interactions between {{agentName}} and other users: -{{recentPostInteractions}} - -About {{agentName}} (@{{jeeterUserName}}): -{{bio}} -{{lore}} -{{topics}} - -{{postDirections}} - -{{recentPosts}} - -# Task: Respond as {{agentName}} to this conversation in a way that moves it forward with your unique expertise. - -Current Context: -{{currentPost}} - -Thread Context: -{{formattedConversation}} - -Key Guidelines: -1. Connect this topic to your unique knowledge or experience -2. Share a concrete example or specific insight others haven't mentioned -3. Move the conversation in a productive direction -4. Make a point that hasn't been made yet - -Remember: -- Directly address the core topic while expanding it -- Draw from your expertise to provide unique value -- Focus on quality of insight over agreement/disagreement -- Be concise and clear`; - -// Base template for standard message handling -export const JEETER_MESSAGE_HANDLER_BASE = `{{timeline}} - -# Knowledge -{{knowledge}} - -About {{agentName}} (@{{jeeterUserName}}): -{{bio}} -{{lore}} -{{topics}} - -{{providers}} - -{{characterPostExamples}} - -{{postDirections}} - -Recent interactions: -{{recentPostInteractions}} - -{{recentPosts}} - -# Task: Generate a unique and meaningful response as {{agentName}} that advances the conversation. - -Current Context: -{{currentPost}} - -Thread History: -{{formattedConversation}} - -RESPONSE REQUIREMENTS: -1. Add new perspectives or insights -2. Never repeat or rephrase existing content -3. Build upon previous points meaningfully -4. Consider full conversation context -5. Maintain character voice while adding value - -{{actions}}`; - -// Footer template for interaction responses -export const JEETER_INTERACTION_MESSAGE_COMPLETION_FOOTER = ` -Your response MUST be in this JSON format: - -\`\`\`json -{ - "text": "your perspective that expands the discussion with new information", - "action": "CONTINUE" or "END" or "IGNORE", - "shouldLike": true or false, - "interactions": [ - { - "type": "reply" | "rejeet" | "quote" | "none", - "text": "response that introduces new information or insights" - } - ] -} -\`\`\` - -For each interaction, ask yourself: -- What new information am I adding? -- How does this expand on the topic? -- What unique perspective am I providing? - -FOR REPLIES: -- Must share new information or examples -- Build on the topic, don't just agree/disagree -- Connect to your specific knowledge/experience - -FOR QUOTES: -- Must add substantial new context -- Explain why this connects to your expertise -- Expand the discussion in a new direction - -FOR REJEETS: -- Only use when you can add expert context -- Include your own analysis or insight -- Make clear why you're amplifying this - -FOR LIKES: -- Use when content aligns with your expertise -- No need for additional commentary -- Save for genuinely valuable content - -Choose "none" if you can't materially expand the discussion.`; - -// Footer template specifically for search interactions -export const JEETER_SEARCH_MESSAGE_COMPLETION_FOOTER = ` -Response must be in this JSON format: - -\`\`\`json -{ - "text": "your unique insight or perspective that builds on the discussion", - "action": "CONTINUE" or "END" or "IGNORE", - "shouldLike": true or false, - "interactions": [ - { - "type": "reply" | "rejeet" | "quote" | "none", - "text": "your response that adds new information or perspective" - } - ] -} -\`\`\` - -Before responding, ask yourself: -1. What unique perspective can I add from my expertise? -2. What specific example or insight can I share? -3. How does this advance the conversation? - -Response Requirements: -- Replies: Must add new information or perspective -- Quotes: Must contribute additional insight -- Rejeets: Only for content where you can add expert context -- Likes: Use for good content that doesn't need expansion - -Choose "none" if you cannot add meaningful value to the discussion.`; - -export const JEETER_POST_TEMPLATE = `{{timeline}} - -# Knowledge -{{knowledge}} - -About {{agentName}} (@{{jeeterUserName}}): -{{bio}} -{{lore}} -{{postDirections}} - -{{providers}} - -{{recentPosts}} - -{{characterPostExamples}} - -# Task: Generate a post in the voice and style of {{agentName}}, aka @{{jeeterUserName}} -Write a single sentence post that is {{adjective}} about {{topic}} (without mentioning {{topic}} directly), from the perspective of {{agentName}}. Try to write something totally different than previous posts. Do not add commentary or acknowledge this request, just write the post. -Your response should not contain any questions. Brief, concise statements only. No emojis. Use \\n\\n (double spaces) between statements.`; diff --git a/packages/client-simsai/src/jeeter/environment.ts b/packages/client-simsai/src/jeeter/environment.ts deleted file mode 100644 index baf351ffb1acd..0000000000000 --- a/packages/client-simsai/src/jeeter/environment.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { elizaLogger, IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -export const jeeterEnvSchema = z.object({ - SIMSAI_USERNAME: z.string().min(1, "SimsAI username is required"), - SIMSAI_AGENT_ID: z.string().min(1, "SimsAI agent ID is required"), - SIMSAI_API_KEY: z.string().min(1, "SimsAI API key is required"), - SIMSAI_DRY_RUN: z - .string() - .optional() - .default("false") - .transform((val) => val.toLowerCase() === "true" || val === "1"), -}); - -export type JeeterConfig = z.infer; - -export async function validateJeeterConfig( - runtime: IAgentRuntime -): Promise { - // Validate environment variables early - const requiredEnvVars = [ - "SIMSAI_USERNAME", - "SIMSAI_AGENT_ID", - "SIMSAI_API_KEY", - ]; - const missingEnvVars = requiredEnvVars.filter( - (envVar) => !(runtime.getSetting(envVar) || process.env[envVar]) - ); - if (missingEnvVars.length > 0) { - throw new Error( - `Missing required environment variables: ${missingEnvVars.join(", ")}` - ); - } - - try { - const config = { - SIMSAI_DRY_RUN: - runtime.getSetting("SIMSAI_DRY_RUN") || - process.env.SIMSAI_DRY_RUN, - SIMSAI_USERNAME: - runtime.getSetting("SIMSAI_USERNAME") || - process.env.SIMSAI_USERNAME, - SIMSAI_AGENT_ID: - runtime.getSetting("SIMSAI_AGENT_ID") || - process.env.SIMSAI_AGENT_ID, - SIMSAI_API_KEY: - runtime.getSetting("SIMSAI_API_KEY") || - process.env.SIMSAI_API_KEY, - }; - - return jeeterEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - elizaLogger.error( - `SimsAI configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/client-simsai/src/jeeter/interactions.ts b/packages/client-simsai/src/jeeter/interactions.ts deleted file mode 100644 index d3f2983d28c80..0000000000000 --- a/packages/client-simsai/src/jeeter/interactions.ts +++ /dev/null @@ -1,654 +0,0 @@ -import { - composeContext, - generateMessageResponse, - generateShouldRespond, - shouldRespondFooter, - IAgentRuntime, - Memory, - ModelClass, - stringToUuid, - elizaLogger, -} from "@elizaos/core"; -import { ClientBase } from "./base"; -import { buildConversationThread, sendJeet, wait } from "./utils"; -import { Jeet, EnhancedResponseContent, JeetInteraction } from "./types"; -import { - JEETER_SHOULD_RESPOND_BASE, - JEETER_INTERACTION_MESSAGE_COMPLETION_FOOTER, - MAX_INTERVAL, - MIN_INTERVAL, - JEETER_INTERACTION_BASE, -} from "./constants"; - -export const jeeterMessageHandlerTemplate = - JEETER_INTERACTION_BASE + JEETER_INTERACTION_MESSAGE_COMPLETION_FOOTER; -export const jeeterShouldRespondTemplate = - JEETER_SHOULD_RESPOND_BASE + shouldRespondFooter; - -export class JeeterInteractionClient { - private likedJeets: Set = new Set(); - private rejeetedJeets: Set = new Set(); - private quotedJeets: Set = new Set(); - private repliedJeets: Set = new Set(); - private isRunning: boolean = false; - private timeoutHandle?: NodeJS.Timeout; - - constructor( - private client: ClientBase, - private runtime: IAgentRuntime - ) {} - - private async hasInteracted( - jeetId: string, - type: JeetInteraction["type"], - inReplyToStatusId?: string - ): Promise { - // If this is a reply to our jeet, always allow the agent to decide whether to respond - if (type === "reply" && inReplyToStatusId) { - const parentJeet = await this.client.getJeet(inReplyToStatusId); - if (parentJeet?.agentId === this.client.profile.id) { - return false; // Let the agent decide through generateResponse - } - } - - // For other interactions, check if we've already done them - switch (type) { - case "like": - return this.likedJeets.has(jeetId); - case "rejeet": - return this.rejeetedJeets.has(jeetId); - case "quote": - return this.quotedJeets.has(jeetId); - case "reply": - return this.repliedJeets.has(jeetId); - default: - return false; - } - } - - private recordInteraction(jeetId: string, type: JeetInteraction["type"]) { - switch (type) { - case "like": - this.likedJeets.add(jeetId); - break; - case "rejeet": - this.rejeetedJeets.add(jeetId); - break; - case "quote": - this.quotedJeets.add(jeetId); - break; - case "reply": - this.repliedJeets.add(jeetId); - break; - } - } - - async start() { - if (this.isRunning) { - elizaLogger.warn("JeeterInteractionClient is already running"); - return; - } - - this.isRunning = true; - elizaLogger.log("Starting Jeeter Interaction Client"); - - const handleJeeterInteractionsLoop = async () => { - if (!this.isRunning) { - elizaLogger.log("JeeterInteractionClient has been stopped"); - return; - } - - try { - await this.handleJeeterInteractions().catch((error) => { - elizaLogger.error("Error in interaction loop:", error); - }); - - const nextInterval = - Math.floor( - Math.random() * (MAX_INTERVAL - MIN_INTERVAL + 1) - ) + MIN_INTERVAL; - - elizaLogger.log( - `Next check scheduled in ${nextInterval / 1000} seconds` - ); - - // Store the timeout handle so we can clear it when stopping - this.timeoutHandle = setTimeout(() => { - handleJeeterInteractionsLoop(); - }, nextInterval); - } catch (error) { - elizaLogger.error("Error in loop scheduling:", error); - if (this.isRunning) { - this.timeoutHandle = setTimeout( - () => { - handleJeeterInteractionsLoop(); - }, - 5 * 60 * 1000 - ); - } - } - }; - - // Start the loop - handleJeeterInteractionsLoop(); - } - - public async stop() { - elizaLogger.log("Stopping JeeterInteractionClient..."); - this.isRunning = false; - - // Clear any pending timeout - if (this.timeoutHandle) { - clearTimeout(this.timeoutHandle); - this.timeoutHandle = undefined; - } - - // Clear interaction sets - this.likedJeets.clear(); - this.rejeetedJeets.clear(); - this.quotedJeets.clear(); - this.repliedJeets.clear(); - - // Wait for any ongoing operations to complete - await new Promise((resolve) => setTimeout(resolve, 1000)); - - elizaLogger.log("JeeterInteractionClient stopped successfully"); - } - - async handleJeeterInteractions() { - elizaLogger.log("Checking Jeeter interactions"); - - try { - const { username: jeeterUsername } = this.client.profile; - elizaLogger.log( - `Fetching mentions and comments for @${jeeterUsername}` - ); - - // Fetch mentions - const searchResponse = await this.client.fetchSearchJeets( - `@${jeeterUsername}`, - 20 - ); - - // Fetch user's own posts - const homeTimeline = await this.getHomeTimeline(); - - // Get comments on user's posts - const commentsOnPosts = await this.getCommentsOnPosts(homeTimeline); - - // Combine mentions and comments, remove duplicates - const allInteractions = [ - ...(searchResponse?.jeets || []), - ...commentsOnPosts, - ]; - - const uniqueJeets = Array.from( - new Map(allInteractions.map((jeet) => [jeet.id, jeet])).values() - ) - .sort((a, b) => a.id.localeCompare(b.id)) - .filter((jeet) => jeet.agentId !== this.client.profile.id); - - elizaLogger.log( - `Found ${uniqueJeets.length} unique interactions to process` - ); - - const interactionPromises = uniqueJeets.map(async (jeet) => { - if (!this.isRunning) { - elizaLogger.log( - "Stopping jeet processing due to client stop" - ); - return; - } - - elizaLogger.log( - "Processing interaction:", - JSON.stringify(jeet) - ); - - if (!jeet.id) { - elizaLogger.warn("Skipping interaction without ID"); - return; - } - - if ( - this.client.lastCheckedJeetId && - parseInt(jeet.id) <= parseInt(this.client.lastCheckedJeetId) - ) { - elizaLogger.log( - `Skipping already processed interaction ${jeet.id}` - ); - return; - } - - try { - const roomId = stringToUuid( - `${jeet.conversationId ?? jeet.id}-${this.runtime.agentId}` - ); - const userIdUUID = stringToUuid(jeet.agentId); - - elizaLogger.log( - `Ensuring connection for user ${jeet.agent?.username}` - ); - await this.runtime.ensureConnection( - userIdUUID, - roomId, - jeet.agent?.username || "", - jeet.agent?.name || "", - "jeeter" - ); - - elizaLogger.log( - `Building conversation thread for interaction ${jeet.id}` - ); - const thread = await buildConversationThread( - jeet, - this.client - ); - - const message: Memory = { - content: { text: jeet.text }, - agentId: this.runtime.agentId, - userId: userIdUUID, - roomId, - }; - - elizaLogger.log(`Handling interaction ${jeet.id}`); - await this.handleJeet({ - jeet, - message, - thread, - }); - - this.client.lastCheckedJeetId = jeet.id; - - elizaLogger.log( - `Successfully processed interaction ${jeet.id}` - ); - } catch (error) { - elizaLogger.error( - `Error processing interaction ${jeet.id}:`, - error - ); - if (error instanceof Error) { - elizaLogger.error("Error details:", { - message: error.message, - stack: error.stack, - }); - } - } - }); - - await Promise.all(interactionPromises); - - await this.client.cacheLatestCheckedJeetId(); - elizaLogger.log("Finished checking Jeeter interactions"); - } catch (error) { - elizaLogger.error("Error in handleJeeterInteractions:", error); - if (error instanceof Error) { - elizaLogger.error("Error details:", { - message: error.message, - stack: error.stack, - }); - } - } - } - - private async getCommentsOnPosts(posts: Jeet[]): Promise { - const comments: Jeet[] = []; - - for (const post of posts) { - try { - if (!post.public_metrics?.reply_count) { - continue; - } - - elizaLogger.log(`Fetching conversation for post ${post.id}`); - const conversation = - await this.client.simsAIClient.getJeetConversation(post.id); - - if (conversation) { - // Filter out the original post and the agent's own replies - const validComments = conversation - .filter( - (reply) => - reply.id !== post.id && // Not the original post - reply.agentId !== this.client.profile.id && // Not our own replies - !reply.isRejeet // Not a rejeet - ) - .sort((a, b) => { - const timeA = new Date(a.createdAt || 0).getTime(); - const timeB = new Date(b.createdAt || 0).getTime(); - return timeB - timeA; // Newest first - }); - - comments.push(...validComments); - } - - await wait(1000, 2000); // Rate limiting delay - } catch (error) { - elizaLogger.error( - `Error fetching comments for post ${post.id}:`, - error - ); - } - } - - return comments; - } - - private async handleJeet({ - jeet, - message, - thread, - }: { - jeet: Jeet; - message: Memory; - thread: Jeet[]; - }): Promise { - elizaLogger.log(`Starting handleJeet for ${jeet.id}`); - - // If dry run is enabled, skip processing - if (this.runtime.getSetting("SIMSAI_DRY_RUN") === "true") { - elizaLogger.info(`Dry run: would have handled jeet: ${jeet.id}`); - return { - text: "", - shouldLike: false, - interactions: [], - action: "IGNORE", - } as EnhancedResponseContent; - } - - try { - if (!message.content.text) { - elizaLogger.log(`Skipping jeet ${jeet.id} - no text content`); - return { - text: "", - shouldLike: false, - interactions: [], - action: "IGNORE", - } as EnhancedResponseContent; - } - - const homeTimeline = await this.getHomeTimeline(); - - const formatJeet = (j: Jeet) => - `ID: ${j.id}\nFrom: ${j.agent?.name || "Unknown"} (@${ - j.agent?.username || "Unknown" - })\nText: ${j.text}`; - - const formattedHomeTimeline = homeTimeline - .map((j) => `${formatJeet(j)}\n---\n`) - .join("\n"); - - const formattedConversation = thread - .map( - (j) => - `@${j.agent?.username || "unknown"} (${new Date( - j.createdAt - ? new Date(j.createdAt).getTime() - : Date.now() - ).toLocaleString()}): ${j.text}` - ) - .join("\n\n"); - - elizaLogger.log("Composing state"); - let state = await this.runtime.composeState(message, { - jeeterClient: this.client.simsAIClient, - jeeterUserName: this.client.profile.username, - currentPost: formatJeet(jeet), - formattedConversation, - timeline: `# ${this.runtime.character.name}'s Home Timeline\n\n${formattedHomeTimeline}`, - }); - - elizaLogger.log("Checking if should respond"); - const shouldRespondContext = composeContext({ - state, - template: - this.runtime.character?.templates - ?.jeeterShouldRespondTemplate || - jeeterShouldRespondTemplate, - }); - - const shouldRespond = await generateShouldRespond({ - runtime: this.runtime, - context: shouldRespondContext, - modelClass: ModelClass.MEDIUM, - }); - - if (shouldRespond !== "RESPOND") { - elizaLogger.log(`Not responding to jeet ${jeet.id}`); - return { - text: "Response Decision:", - shouldLike: false, - interactions: [], - action: shouldRespond, - } as EnhancedResponseContent; - } - - // Only create memory and process interaction if we're going to respond - const jeetId = stringToUuid(jeet.id + "-" + this.runtime.agentId); - elizaLogger.log(`Checking if memory exists for jeetId: ${jeetId}`); - const jeetExists = - await this.runtime.messageManager.getMemoryById(jeetId); - elizaLogger.log(`Memory exists: ${jeetExists}`); - - if (!jeetExists) { - elizaLogger.log(`Creating new memory for jeetId: ${jeetId}`); - const memoryMessage = { - id: jeetId, - agentId: this.runtime.agentId, - content: { - text: jeet.text, - inReplyTo: jeet.inReplyToStatusId - ? stringToUuid( - jeet.inReplyToStatusId + - "-" + - this.runtime.agentId - ) - : undefined, - }, - userId: stringToUuid(jeet.agentId), - roomId: message.roomId, - createdAt: jeet.createdAt - ? new Date(jeet.createdAt).getTime() - : Date.now(), - }; - await this.client.saveRequestMessage(memoryMessage, state); - } else { - elizaLogger.log( - `Already have memory interacting with this jeet: ${jeetId}` - ); - } - - const context = composeContext({ - state, - template: - this.runtime.character.templates - ?.jeeterMessageHandlerTemplate || - this.runtime.character?.templates?.messageHandlerTemplate || - jeeterMessageHandlerTemplate, - }); - - const response = (await generateMessageResponse({ - runtime: this.runtime, - context, - modelClass: ModelClass.MEDIUM, - })) as EnhancedResponseContent; - - response.interactions = response.interactions || []; - - // Process interactions - if (response.interactions.length > 0) { - for (const interaction of response.interactions) { - try { - if ( - await this.hasInteracted( - jeet.id, - interaction.type, - jeet.inReplyToStatusId - ) - ) { - elizaLogger.log( - `Skipping ${interaction.type} for jeet ${jeet.id} - already performed` - ); - continue; - } - - switch (interaction.type) { - case "like": - try { - await this.client.simsAIClient.likeJeet( - jeet.id - ); - - this.recordInteraction(jeet.id, "like"); - } catch (error) { - elizaLogger.error( - `Error liking interaction ${jeet.id}:`, - error - ); - } - break; - - case "rejeet": - try { - const rejeetResult = - await this.client.simsAIClient.rejeetJeet( - jeet.id - ); - if (rejeetResult?.id) { - elizaLogger.log( - `Rejeeted jeet ${jeet.id}` - ); - this.recordInteraction( - jeet.id, - "rejeet" - ); - } else { - elizaLogger.error( - `Failed to rejeet jeet ${jeet.id}: Invalid response` - ); - } - } catch (error) { - elizaLogger.error( - `Error rejeeting jeet ${jeet.id}:`, - error - ); - } - break; - - case "quote": - if (interaction.text) { - await this.client.simsAIClient.quoteRejeet( - jeet.id, - interaction.text - ); - elizaLogger.log( - `Quote rejeeted jeet ${jeet.id}` - ); - this.recordInteraction(jeet.id, "quote"); - } - break; - - case "reply": - if (interaction.text) { - const replyResponse = { - ...response, - text: interaction.text, - }; - - const responseMessages = await sendJeet( - this.client, - replyResponse, - message.roomId, - this.client.profile.username, - jeet.id - ); - - state = - await this.runtime.updateRecentMessageState( - state - ); - - for (const [ - idx, - responseMessage, - ] of responseMessages.entries()) { - responseMessage.content.action = - idx === responseMessages.length - 1 - ? response.action - : "CONTINUE"; - await this.runtime.messageManager.createMemory( - responseMessage - ); - } - - await this.runtime.evaluate(message, state); - await this.runtime.processActions( - message, - responseMessages, - state - ); - - this.recordInteraction(jeet.id, "reply"); - } - break; - - case "none": - elizaLogger.log( - `Chose not to interact with jeet ${jeet.id}` - ); - break; - } - } catch (error) { - elizaLogger.error( - `Error processing interaction ${interaction.type} for jeet ${jeet.id}:`, - error - ); - } - } - } - - const responseInfo = `Context:\n\n${context}\n\nSelected Post: ${ - jeet.id - } - @${jeet.agent?.username || "unknown"}: ${ - jeet.text - }\nAgent's Output:\n${JSON.stringify(response)}`; - - await this.runtime.cacheManager.set( - `jeeter/jeet_generation_${jeet.id}.txt`, - responseInfo - ); - - await wait(); - - const interactionSummary = { - jeetId: jeet.id, - liked: response.shouldLike, - interactions: response.interactions.map((i) => i.type), - replyText: response.text, - quoteTexts: response.interactions - .filter((i) => i.type === "quote") - .map((i) => i.text), - }; - elizaLogger.debug( - `Interaction summary: ${JSON.stringify(interactionSummary)}` - ); - - return response; - } catch (error) { - elizaLogger.error(`Error generating/sending response: ${error}`); - throw error; - } - } - - private async getHomeTimeline(): Promise { - let homeTimeline = await this.client.getCachedTimeline(); - if (!homeTimeline) { - elizaLogger.log("Fetching home timeline"); - homeTimeline = await this.client.fetchHomeTimeline(50); - await this.client.cacheTimeline(homeTimeline); - } - return homeTimeline; - } -} diff --git a/packages/client-simsai/src/jeeter/post.ts b/packages/client-simsai/src/jeeter/post.ts deleted file mode 100644 index 051a6d8bde7a4..0000000000000 --- a/packages/client-simsai/src/jeeter/post.ts +++ /dev/null @@ -1,325 +0,0 @@ -import { Jeet, ApiPostJeetResponse } from "./types"; -import { - composeContext, - generateText, - getEmbeddingZeroVector, - IAgentRuntime, - ModelClass, - stringToUuid, - elizaLogger, -} from "@elizaos/core"; -import { ClientBase } from "./base"; -import { JEETER_API_URL, MAX_JEET_LENGTH } from "./constants"; -import { truncateToCompleteSentence } from "./utils"; -import { JEETER_POST_TEMPLATE } from "./constants"; - -export class JeeterPostClient { - private client: ClientBase; - private runtime: IAgentRuntime; - private isRunning: boolean = false; - private timeoutHandle?: NodeJS.Timeout; - - constructor(client: ClientBase, runtime: IAgentRuntime) { - this.client = client; - this.runtime = runtime; - } - - async start(postImmediately: boolean = false) { - if (this.isRunning) { - elizaLogger.warn("JeeterPostClient is already running"); - return; - } - - this.isRunning = true; - - if (!this.client.profile) { - await this.client.init(); - } - - const generateNewJeetLoop = async () => { - if (!this.isRunning) { - elizaLogger.log("JeeterPostClient has been stopped"); - return; - } - - try { - const lastPost = await this.runtime.cacheManager.get<{ - timestamp: number; - }>(`jeeter/${this.client.profile.username}/lastPost`); - const lastPostTimestamp = lastPost?.timestamp ?? 0; - const minMinutes = - parseInt(this.runtime.getSetting("POST_INTERVAL_MIN")) || - 90; - const maxMinutes = - parseInt(this.runtime.getSetting("POST_INTERVAL_MAX")) || - 180; - const randomMinutes = - Math.floor(Math.random() * (maxMinutes - minMinutes + 1)) + - minMinutes; - const targetInterval = randomMinutes * 60 * 1000; - - // Calculate the actual delay needed to reach next post time - const timeElapsed = Date.now() - lastPostTimestamp; - const delay = Math.max(0, targetInterval - timeElapsed); - - // Post immediately if we're past the target interval - if (timeElapsed >= targetInterval) { - await this.generateNewJeet(); - // Schedule next post with full interval - if (this.isRunning) { - this.timeoutHandle = setTimeout(() => { - generateNewJeetLoop(); - }, targetInterval); - elizaLogger.log( - `Next jeet scheduled in ${randomMinutes} minutes` - ); - } - } else { - // Schedule for the remaining time until next post - if (this.isRunning) { - this.timeoutHandle = setTimeout(() => { - generateNewJeetLoop(); - }, delay); - elizaLogger.log( - `Next jeet scheduled in ${Math.round(delay / 60000)} minutes` - ); - } - } - } catch (error) { - elizaLogger.error("Error in generateNewJeetLoop:", error); - if (this.isRunning) { - this.timeoutHandle = setTimeout( - () => { - generateNewJeetLoop(); - }, - 5 * 60 * 1000 - ); - } - } - }; - - if (postImmediately) { - await this.generateNewJeet(); - } - - generateNewJeetLoop(); - } - - public async stop() { - elizaLogger.log("Stopping JeeterPostClient..."); - this.isRunning = false; - - if (this.timeoutHandle) { - clearTimeout(this.timeoutHandle); - this.timeoutHandle = undefined; - } - - // Wait for any ongoing operations to complete - await new Promise((resolve) => setTimeout(resolve, 1000)); - - elizaLogger.log("JeeterPostClient stopped successfully"); - } - - private async getHomeTimeline(): Promise { - const cachedTimeline = await this.client.getCachedTimeline(); - if (cachedTimeline) { - return cachedTimeline; - } - const homeTimeline = await this.client.fetchHomeTimeline(50); - await this.client.cacheTimeline(homeTimeline); - return homeTimeline; - } - - private formatHomeTimeline(homeTimeline: Jeet[]): string { - return ( - `# ${this.runtime.character.name}'s Home Timeline\n\n` + - homeTimeline - .map((jeet) => { - const timestamp = jeet.createdAt - ? new Date(jeet.createdAt).toDateString() - : new Date().toDateString(); - return `#${jeet.id} -${jeet.agent?.name || "Unknown"} (@${jeet.agent?.username || "Unknown"})${ - jeet.inReplyToStatusId - ? `\nIn reply to: ${jeet.inReplyToStatusId}` - : "" - } -${timestamp}\n\n${jeet.text}\n---\n`; - }) - .join("\n") - ); - } - - private async generateJeetContent(): Promise { - const topics = this.runtime.character.topics.join(", "); - const homeTimeline = await this.getHomeTimeline(); - const formattedHomeTimeline = this.formatHomeTimeline(homeTimeline); - const state = await this.runtime.composeState( - { - userId: this.runtime.agentId, - roomId: stringToUuid("SIMSAI_generate_room"), - agentId: this.runtime.agentId, - content: { - text: topics, - action: "", - }, - }, - { - jeeterUserName: this.client.profile.username, - timeline: formattedHomeTimeline, - } - ); - - const context = composeContext({ - state, - template: - this.runtime.character.templates?.jeeterPostTemplate || - JEETER_POST_TEMPLATE, - }); - - elizaLogger.debug("generate post prompt:\n" + context); - const newJeetContent = await generateText({ - runtime: this.runtime, - context, - modelClass: ModelClass.SMALL, - }); - - // Replace \n with proper line breaks and trim excess spaces - const formattedJeet = newJeetContent.replace(/\\n/g, "\n").trim(); - // Use the helper function to truncate to complete sentence - return truncateToCompleteSentence(formattedJeet, MAX_JEET_LENGTH); - } - - private async createMemoryForJeet( - jeet: Jeet, - content: string - ): Promise { - const roomId = stringToUuid(jeet.id + "-" + this.runtime.agentId); - await this.runtime.ensureRoomExists(roomId); - await this.runtime.ensureParticipantInRoom( - this.runtime.agentId, - roomId - ); - await this.runtime.messageManager.createMemory({ - id: stringToUuid(jeet.id + "-" + this.runtime.agentId), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - content: { - text: content, - url: jeet.permanentUrl, - source: "jeeter", - }, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: new Date(jeet.createdAt).getTime(), - }); - } - - private async postJeet(content: string): Promise { - const response = await this.client.requestQueue.add(async () => { - const result = await this.client.simsAIClient.postJeet(content); - return result as unknown as ApiPostJeetResponse; - }); - - if (!response?.data?.id) { - throw new Error( - `Failed to get valid response from postJeet: ${JSON.stringify(response)}` - ); - } - - elizaLogger.log(`Jeet posted with ID: ${response.data.id}`); - - // Extract the author information from includes - const author = response.includes.users.find( - (user) => user.id === response.data.author_id - ); - - // Construct the jeet from the response data - return { - id: response.data.id, - text: response.data.text, - createdAt: response.data.created_at, - agentId: response.data.author_id, - agent: author, - permanentUrl: `${JEETER_API_URL}/${this.client.profile.username}/status/${response.data.id}`, - public_metrics: response.data.public_metrics, - hashtags: [], - mentions: [], - photos: [], - thread: [], - urls: [], - videos: [], - media: [], - type: response.data.type, - }; - } - - private async generateNewJeet() { - if (!this.isRunning) { - elizaLogger.log("Skipping jeet generation - client is stopped"); - return; - } - - elizaLogger.log("Generating new jeet"); - try { - await this.runtime.ensureUserExists( - this.runtime.agentId, - this.client.profile.username, - this.runtime.character.name, - "jeeter" - ); - - const content = await this.generateJeetContent(); - - const dryRun = ( - this.runtime.getSetting("SIMSAI_DRY_RUN") || "false" - ).toLowerCase(); - if (dryRun === "true" || dryRun === "1") { - elizaLogger.info(`Dry run: would have posted jeet: ${content}`); - return; - } - - try { - if (!this.isRunning) { - elizaLogger.log( - "Skipping jeet posting - client is stopped" - ); - return; - } - - elizaLogger.log(`Posting new jeet:\n ${content}`); - const jeet = await this.postJeet(content); - await this.runtime.cacheManager.set( - `jeeter/${this.client.profile.username}/lastPost`, - { - id: jeet.id, - timestamp: Date.now(), - } - ); - await this.client.cacheJeet(jeet); - const homeTimeline = await this.getHomeTimeline(); - homeTimeline.push(jeet); - await this.client.cacheTimeline(homeTimeline); - elizaLogger.log(`Jeet posted at: ${jeet.permanentUrl}`); - await this.createMemoryForJeet(jeet, content); - } catch (error) { - elizaLogger.error("Error sending jeet:", error); - if (error instanceof Error) { - elizaLogger.error("Error details:", { - message: error.message, - stack: error.stack, - }); - } - throw error; // Re-throw to be handled by outer try-catch - } - } catch (error) { - elizaLogger.error("Error generating new jeet:", error); - if (error instanceof Error) { - elizaLogger.error("Error details:", { - message: error.message, - stack: error.stack, - }); - } - } - } -} diff --git a/packages/client-simsai/src/jeeter/search.ts b/packages/client-simsai/src/jeeter/search.ts deleted file mode 100644 index ebbe753f10108..0000000000000 --- a/packages/client-simsai/src/jeeter/search.ts +++ /dev/null @@ -1,902 +0,0 @@ -import { - composeContext, - elizaLogger, - generateMessageResponse, - generateText, - IAgentRuntime, - IImageDescriptionService, - ModelClass, - ServiceType, - State, - stringToUuid, -} from "@elizaos/core"; -import { buildConversationThread, sendJeet, wait } from "./utils"; -import { - EnhancedResponseContent, - Jeet, - JeetInteraction, - JeetResponse, -} from "./types"; -import { ClientBase } from "./base"; -import { - JEETER_SEARCH_BASE, - JEETER_SEARCH_MESSAGE_COMPLETION_FOOTER, - MAX_INTERVAL, - MIN_INTERVAL, -} from "./constants"; - -export const jeeterSearchTemplate = - JEETER_SEARCH_BASE + JEETER_SEARCH_MESSAGE_COMPLETION_FOOTER; - -export class JeeterSearchClient { - private repliedJeets: Set = new Set(); - private likedJeets: Set = new Set(); - private rejeetedJeets: Set = new Set(); - private quotedJeets: Set = new Set(); - private isRunning: boolean = false; - private timeoutHandle?: NodeJS.Timeout; - - constructor( - private client: ClientBase, - private runtime: IAgentRuntime - ) {} - - private async hasInteracted( - jeetId: string, - type: JeetInteraction["type"] - ): Promise { - switch (type) { - case "reply": - return this.repliedJeets.has(jeetId); - case "like": - return this.likedJeets.has(jeetId); - case "rejeet": - return this.rejeetedJeets.has(jeetId); - case "quote": - return this.quotedJeets.has(jeetId); - default: - return false; - } - } - - private recordInteraction(jeetId: string, type: JeetInteraction["type"]) { - switch (type) { - case "reply": - this.repliedJeets.add(jeetId); - break; - case "like": - this.likedJeets.add(jeetId); - break; - case "rejeet": - this.rejeetedJeets.add(jeetId); - break; - case "quote": - this.quotedJeets.add(jeetId); - break; - } - } - - async start() { - if (this.isRunning) { - elizaLogger.warn("JeeterSearchClient is already running"); - return; - } - - this.isRunning = true; - elizaLogger.log("Starting JeeterSearchClient"); - - const handleJeeterInteractionsLoop = async () => { - if (!this.isRunning) { - elizaLogger.log("JeeterSearchClient has been stopped"); - return; - } - - try { - await this.engageWithSearchTerms(); - } catch (error) { - elizaLogger.error("Error in engagement loop:", error); - } - - if (this.isRunning) { - this.timeoutHandle = setTimeout( - handleJeeterInteractionsLoop, - Math.floor( - Math.random() * (MAX_INTERVAL - MIN_INTERVAL + 1) - ) + MIN_INTERVAL - ); - } - }; - - // Start the loop - handleJeeterInteractionsLoop(); - } - - public async stop() { - elizaLogger.log("Stopping JeeterSearchClient..."); - this.isRunning = false; - - // Clear any pending timeout - if (this.timeoutHandle) { - clearTimeout(this.timeoutHandle); - this.timeoutHandle = undefined; - } - - // Clear interaction sets - this.repliedJeets.clear(); - this.likedJeets.clear(); - this.rejeetedJeets.clear(); - this.quotedJeets.clear(); - - // Wait for any ongoing operations to complete - await new Promise((resolve) => setTimeout(resolve, 1000)); - - elizaLogger.log("JeeterSearchClient stopped successfully"); - } - - private async engageWithSearchTerms() { - if (!this.isRunning) { - elizaLogger.log( - "Skipping search terms engagement - client is stopped" - ); - return; - } - - elizaLogger.log("Engaging with search terms"); - try { - if (!this.runtime.character.topics?.length) { - elizaLogger.log("No topics available for search"); - return; - } - - const searchTerm = [...this.runtime.character.topics][ - Math.floor(Math.random() * this.runtime.character.topics.length) - ]; - - elizaLogger.log("Fetching search jeets"); - await wait(5000); - - let searchResponse: JeetResponse = { jeets: [] }; - try { - searchResponse = await this.client.simsAIClient.searchJeets( - searchTerm, - 20 - ); - if (!searchResponse?.jeets?.length) { - elizaLogger.log( - `No jeets found for search term: "${searchTerm}"` - ); - } - } catch (error) { - elizaLogger.error("Error fetching search jeets:", error); - } - - if (!this.isRunning) return; - - const discoveryTimeline = - await this.client.simsAIClient.getDiscoveryTimeline(50); - if (!discoveryTimeline) { - elizaLogger.log("No discovery timeline available"); - return; - } - - await this.client.cacheTimeline(discoveryTimeline.jeets || []); - - const formattedTimeline = this.formatDiscoveryTimeline( - discoveryTimeline.jeets || [] - ); - - // Get combined jeets and rank them - const jeetsToProcess = - (searchResponse.jeets?.length ?? 0) > 0 - ? searchResponse.jeets - : discoveryTimeline.jeets || []; - - if (!this.isRunning) return; - - // Use our new ranking method - elizaLogger.log("Ranking jeets for engagement"); - const rankedJeets = await this.filterAndRankJeets(jeetsToProcess); - - if (rankedJeets.length === 0) { - elizaLogger.log("No valid jeets found for processing"); - return; - } - - elizaLogger.log( - `Found ${rankedJeets.length} ranked jeets to consider` - ); - const prompt = this.generateSelectionPrompt( - rankedJeets, - searchTerm - ); - - if (!this.isRunning) return; - - const mostInterestingJeetResponse = await generateText({ - runtime: this.runtime, - context: prompt, - modelClass: ModelClass.SMALL, - }); - - const jeetId = mostInterestingJeetResponse.trim(); - const selectedJeet = rankedJeets.find( - (jeet) => - jeet.id.toString().includes(jeetId) || - jeetId.includes(jeet.id.toString()) - ); - - if (!selectedJeet) { - elizaLogger.log("No matching jeet found for ID:", jeetId); - return; - } - - if (!this.isRunning) return; - - elizaLogger.log(`Selected jeet ${selectedJeet.id} for interaction`); - - const previousInteractions = { - replied: await this.hasInteracted(selectedJeet.id, "reply"), - liked: await this.hasInteracted(selectedJeet.id, "like"), - rejeeted: await this.hasInteracted(selectedJeet.id, "rejeet"), - quoted: await this.hasInteracted(selectedJeet.id, "quote"), - }; - - // Skip if we've already interacted with this jeet - if (Object.values(previousInteractions).some((v) => v)) { - elizaLogger.log( - `Already interacted with jeet ${selectedJeet.id}, skipping` - ); - return; - } - - if (!this.isRunning) return; - - await this.processSelectedJeet( - selectedJeet, - formattedTimeline, - previousInteractions - ); - } catch (error) { - elizaLogger.error("Error engaging with search terms:", error); - if (error instanceof Error && error.stack) { - elizaLogger.error("Stack trace:", error.stack); - } - } - } - - private formatDiscoveryTimeline(jeets: Jeet[]): string { - if (!jeets?.length) - return `# ${this.runtime.character.name}'s Home Timeline\n\nNo jeets available`; - - return ( - `# ${this.runtime.character.name}'s Home Timeline\n\n` + - jeets - .map((jeet) => { - return `ID: ${jeet.id} -From: ${jeet.agent?.name || "Unknown"} (@${jeet.agent?.username || "Unknown"}) -Text: ${jeet.text} ----`; - }) - .join("\n\n") - ); - } - - private generateSelectionPrompt(jeets: Jeet[], searchTerm: string): string { - return ` - Here are some jeets related to "${searchTerm}". As ${this.runtime.character.name}, you're looking for jeets that would benefit from your engagement and expertise. - - ${jeets - .map( - (jeet) => ` - ID: ${jeet.id} - From: ${jeet.agent?.name || "Unknown"} (@${jeet.agent?.username || "Unknown"}) - Text: ${jeet.text} - Metrics: ${JSON.stringify(jeet.public_metrics || {})}` - ) - .join("\n---\n")} - - Which jeet would be most valuable to respond to as ${this.runtime.character.name}? Consider: - - Posts that raise questions or points you can meaningfully contribute to - - Posts that align with your expertise - - Posts that could start a productive discussion - - Posts in English without excessive hashtags/links - - Avoid already heavily discussed posts or simple announcements - - Avoid rejeets when possible - - Please ONLY respond with the ID of the single most promising jeet to engage with.`; - } - - private scoreJeetForEngagement(jeet: Jeet): number { - let score = 0; - - // Prefer jeets without too many replies already - if (jeet.public_metrics?.reply_count < 3) score += 3; - else if (jeet.public_metrics?.reply_count < 5) score += 1; - - // Avoid heavily rejeeted/quoted content - if (jeet.public_metrics?.rejeet_count > 10) score -= 2; - if (jeet.public_metrics?.quote_count > 5) score -= 1; - - // Prefer original content over rejeets - if (jeet.isRejeet) score -= 3; - - // Avoid jeets with lots of hashtags/links - const hashtagCount = (jeet.text?.match(/#/g) || []).length; - const urlCount = (jeet.text?.match(/https?:\/\//g) || []).length; - score -= hashtagCount + urlCount; - - // Prefer jeets with meaningful length (not too short, not too long) - const textLength = jeet.text?.length || 0; - if (textLength > 50 && textLength < 200) score += 2; - - // Prefer jeets that seem to ask questions or invite discussion - if (jeet.text?.includes("?")) score += 2; - const discussionWords = [ - "thoughts", - "opinion", - "what if", - "how about", - "discuss", - ]; - if ( - discussionWords.some((word) => - jeet.text?.toLowerCase().includes(word) - ) - ) - score += 2; - - return score; - } - - private async filterAndRankJeets(jeets: Jeet[]): Promise { - if (!this.isRunning) return []; - - // First filter out basic invalid jeets - const basicValidJeets = jeets.filter( - (jeet) => - jeet?.text && - jeet.agent?.username !== - this.runtime.getSetting("SIMSAI_USERNAME") - ); - - // Then check ALL interaction types before processing - const validJeets = []; - for (const jeet of basicValidJeets) { - if (!this.isRunning) return []; - - const hasReplied = await this.hasInteracted(jeet.id, "reply"); - const hasLiked = await this.hasInteracted(jeet.id, "like"); - const hasRejeeted = await this.hasInteracted(jeet.id, "rejeet"); - const hasQuoted = await this.hasInteracted(jeet.id, "quote"); - - // Only include jeets we haven't interacted with at all - if (!hasReplied && !hasLiked && !hasRejeeted && !hasQuoted) { - validJeets.push(jeet); - } - } - - // Score and sort jeets - const scoredJeets = validJeets - .map((jeet) => ({ - jeet, - score: this.scoreJeetForEngagement(jeet), - })) - .sort((a, b) => b.score - a.score); - - // Take top 20 and add slight randomization while maintaining general score order - const topJeets = scoredJeets - .slice(0, 20) - .map(({ jeet }, index) => ({ - jeet, - randomScore: Math.random() * 0.3 + (1 - index / 20), - })) - .sort((a, b) => b.randomScore - a.randomScore); - - return topJeets.map(({ jeet }) => jeet); - } - - private async processSelectedJeet( - selectedJeet: Jeet, - formattedTimeline: string, - previousInteractions: { - replied: boolean; - liked: boolean; - rejeeted: boolean; - quoted: boolean; - } - ) { - if (!this.isRunning) return; - - // If dry run is enabled, skip processing - if (this.runtime.getSetting("SIMSAI_DRY_RUN") === "true") { - elizaLogger.info( - `Dry run: would have processed jeet: ${selectedJeet.id}` - ); - return; - } - - const roomId = stringToUuid( - `${selectedJeet.conversationId || selectedJeet.id}-${this.runtime.agentId}` - ); - const userIdUUID = stringToUuid(selectedJeet.agentId); - - await this.runtime.ensureConnection( - userIdUUID, - roomId, - selectedJeet.agent?.username || "", - selectedJeet.agent?.name || "", - "jeeter" - ); - - if (!this.isRunning) return; - - const thread = await buildConversationThread(selectedJeet, this.client); - elizaLogger.log( - `Retrieved conversation thread with ${thread.length} messages:`, - { - messages: thread.map((t) => ({ - id: t.id, - username: t.agent?.username, - text: - t.text?.slice(0, 50) + - (t.text?.length > 50 ? "..." : ""), - timestamp: t.createdAt, - })), - } - ); - - // Sort thread chronologically and handle timestamps - const sortedThread = thread.sort((a, b) => { - const timeA = new Date(a.createdAt || 0).getTime(); - const timeB = new Date(b.createdAt || 0).getTime(); - return timeA - timeB; - }); - - if (!this.isRunning) return; - - // Enhanced formatting of conversation context with clear conversation flow - const formattedConversation = sortedThread - .map((j, index) => { - const timestamp = j.createdAt - ? new Date(j.createdAt).getTime() - : Date.now(); - const isCurrentJeet = j.id === selectedJeet.id; - const arrow = index > 0 ? "↪ " : ""; // Show reply chain - return `[${new Date(timestamp).toLocaleString()}] ${arrow}@${ - j.agent?.username || "unknown" - }${isCurrentJeet ? " (current message)" : ""}: ${j.text}`; - }) - .join("\n\n"); - - // Log conversation context for debugging - elizaLogger.log("Conversation context:", { - originalJeet: selectedJeet.id, - totalMessages: thread.length, - participants: [...new Set(thread.map((j) => j.agent?.username))], - timespan: - thread.length > 1 - ? { - first: new Date( - Math.min( - ...thread.map((j) => - new Date(j.createdAt || 0).getTime() - ) - ) - ), - last: new Date( - Math.max( - ...thread.map((j) => - new Date(j.createdAt || 0).getTime() - ) - ) - ), - } - : null, - }); - - const message = { - id: stringToUuid(selectedJeet.id + "-" + this.runtime.agentId), - agentId: this.runtime.agentId, - content: { - text: selectedJeet.text, - inReplyTo: undefined, - }, - userId: userIdUUID, - roomId, - createdAt: selectedJeet.createdAt - ? new Date(selectedJeet.createdAt).getTime() - : Date.now(), - }; - - if (!message.content.text) { - return { text: "", action: "IGNORE" }; - } - - if (!this.isRunning) return; - - await this.handleJeetInteractions( - message, - selectedJeet, - formattedTimeline, - previousInteractions, - formattedConversation, - thread - ); - } - - private async handleJeetInteractions( - message: any, - selectedJeet: Jeet, - formattedTimeline: string, - previousInteractions: { - replied: boolean; - liked: boolean; - rejeeted: boolean; - quoted: boolean; - }, - formattedConversation: string, - thread: Jeet[] - ) { - if (!this.isRunning) return; - - try { - elizaLogger.log(`Composing state for jeet ${selectedJeet.id}`); - let state = await this.runtime.composeState(message, { - jeeterClient: this.client, - jeeterUserName: this.runtime.getSetting("SIMSAI_USERNAME"), - timeline: formattedTimeline, - jeetContext: await this.buildJeetContext(selectedJeet), - formattedConversation, - conversationContext: { - messageCount: thread.length, - participants: [ - ...new Set(thread.map((j) => j.agent?.username)), - ], - timespan: - thread.length > 1 - ? { - start: new Date( - Math.min( - ...thread.map((j) => - new Date( - j.createdAt || 0 - ).getTime() - ) - ) - ).toISOString(), - end: new Date( - Math.max( - ...thread.map((j) => - new Date( - j.createdAt || 0 - ).getTime() - ) - ) - ).toISOString(), - } - : null, - }, - previousInteractions, - }); - - if (!this.isRunning) return; - - elizaLogger.log( - `Saving request message for jeet ${selectedJeet.id}` - ); - await this.client.saveRequestMessage(message, state as State); - - const context = composeContext({ - state, - template: - this.runtime.character.templates?.jeeterSearchTemplate || - jeeterSearchTemplate, - }); - - if (!this.isRunning) return; - - elizaLogger.log( - `Generating message response for jeet ${selectedJeet.id}` - ); - const rawResponse = (await generateMessageResponse({ - runtime: this.runtime, - context, - modelClass: ModelClass.SMALL, - })) as EnhancedResponseContent; - - elizaLogger.debug("Raw response:", rawResponse); - const response = { - text: rawResponse.text, - action: rawResponse.action, - shouldLike: rawResponse.shouldLike, - interactions: rawResponse.interactions || [], - }; - - if (!response.interactions) { - throw new TypeError("Response interactions are undefined"); - } - - if (!this.isRunning) return; - - if (response.interactions.length > 0) { - for (const interaction of response.interactions) { - if (!this.isRunning) return; - - try { - if ( - (interaction.type === "reply" && - previousInteractions.replied) || - (interaction.type === "rejeet" && - previousInteractions.rejeeted) || - (interaction.type === "quote" && - previousInteractions.quoted) || - (interaction.type === "like" && - previousInteractions.liked) - ) { - elizaLogger.log( - `Skipping ${interaction.type} for jeet ${selectedJeet.id} - already performed` - ); - continue; - } - - elizaLogger.log( - `Attempting ${interaction.type} interaction for jeet ${selectedJeet.id}` - ); - - switch (interaction.type) { - case "rejeet": - try { - if (!this.isRunning) return; - const rejeetResult = - await this.client.simsAIClient.rejeetJeet( - selectedJeet.id - ); - if (rejeetResult?.id) { - elizaLogger.log( - `Rejeeted jeet ${selectedJeet.id}` - ); - this.recordInteraction( - selectedJeet.id, - "rejeet" - ); - } else { - elizaLogger.error( - `Failed to rejeet jeet ${selectedJeet.id}:`, - rejeetResult - ); - } - } catch (error) { - elizaLogger.error( - `Error processing rejeet for jeet ${selectedJeet.id}:`, - error - ); - } - break; - - case "quote": - if (interaction.text) { - if (!this.isRunning) return; - await this.client.simsAIClient.quoteRejeet( - selectedJeet.id, - interaction.text - ); - elizaLogger.log( - `Quote rejeeted jeet ${selectedJeet.id}` - ); - this.recordInteraction( - selectedJeet.id, - "quote" - ); - } - break; - - case "reply": - if (interaction.text) { - if (!this.isRunning) return; - const replyResponse = { - ...response, - text: interaction.text, - }; - - const responseMessages = await sendJeet( - this.client, - replyResponse, - message.roomId, - this.client.profile.username, - selectedJeet.id - ); - - state = - await this.runtime.updateRecentMessageState( - state - ); - - for (const [ - idx, - responseMessage, - ] of responseMessages.entries()) { - if (!this.isRunning) return; - responseMessage.content.action = - idx === responseMessages.length - 1 - ? response.action - : "CONTINUE"; - await this.runtime.messageManager.createMemory( - responseMessage - ); - } - - await this.runtime.evaluate(message, state); - await this.runtime.processActions( - message, - responseMessages, - state - ); - - this.recordInteraction( - selectedJeet.id, - "reply" - ); - } - break; - - case "like": - try { - if (!this.isRunning) return; - await this.client.simsAIClient.likeJeet( - selectedJeet.id - ); - elizaLogger.log( - `Liked jeet ${selectedJeet.id}` - ); - this.recordInteraction( - selectedJeet.id, - "like" - ); - } catch (error) { - elizaLogger.error( - `Error liking jeet ${selectedJeet.id}:`, - error - ); - } - break; - - case "none": - elizaLogger.log( - `Chose not to interact with jeet ${selectedJeet.id}` - ); - break; - } - - elizaLogger.log( - `Successfully performed ${interaction.type} interaction for jeet ${selectedJeet.id}` - ); - } catch (error) { - elizaLogger.error( - `Error processing interaction ${interaction.type} for jeet ${selectedJeet.id}:`, - error - ); - } - } - } - - if (!this.isRunning) return; - - const responseInfo = `Context:\n\n${context}\n\nSelected Post: ${ - selectedJeet.id - } - @${selectedJeet.agent?.username || "unknown"}: ${ - selectedJeet.text - }\nAgent's Output:\n${JSON.stringify(response)}`; - - elizaLogger.log( - `Caching response info for jeet ${selectedJeet.id}` - ); - await this.runtime.cacheManager.set( - `jeeter/jeet_generation_${selectedJeet.id}.txt`, - responseInfo - ); - - await wait(); - - const interactionSummary = { - jeetId: selectedJeet.id, - liked: response.shouldLike, - interactions: response.interactions.map((i) => i.type), - replyText: response.text, - quoteTexts: response.interactions - .filter((i) => i.type === "quote") - .map((i) => i.text), - }; - elizaLogger.debug( - `Interaction summary: ${JSON.stringify(interactionSummary)}` - ); - } catch (error) { - elizaLogger.error(`Error generating/sending response: ${error}`); - throw error; - } - } - - private async buildJeetContext(selectedJeet: Jeet): Promise { - if (!this.isRunning) return ""; - - let context = `Original Post:\nBy @${selectedJeet.agent?.username || "unknown"}\n${selectedJeet.text}`; - - if (selectedJeet.thread?.length) { - const replyContext = selectedJeet.thread - .filter( - (reply: Jeet) => - reply.agent?.username !== - this.runtime.getSetting("SIMSAI_USERNAME") - ) - .map( - (reply: Jeet) => - `@${reply.agent?.username || "unknown"}: ${reply.text}` - ) - .join("\n"); - - if (replyContext) { - context += `\nReplies to original post:\n${replyContext}`; - } - } - - if (!this.isRunning) return ""; - - // Add media descriptions if they exist - if (selectedJeet.media?.length) { - const imageDescriptions = []; - for (const media of selectedJeet.media) { - if (!this.isRunning) return ""; - // Check if the media has a URL and we can process it - if ("url" in media) { - const imageDescriptionService = - this.runtime.getService( - ServiceType.IMAGE_DESCRIPTION - ); - - const description = - await imageDescriptionService.describeImage(media.url); - imageDescriptions.push(description); - } - } - - if (imageDescriptions.length > 0) { - context += `\nMedia in Post (Described): ${imageDescriptions.join(", ")}`; - } - } - - // Add URLs if they exist - if (selectedJeet.urls?.length) { - context += `\nURLs: ${selectedJeet.urls.join(", ")}`; - } - - if (!this.isRunning) return ""; - - // Add photos if they exist - if (selectedJeet.photos?.length) { - const photoDescriptions = []; - for (const photo of selectedJeet.photos) { - if (!this.isRunning) return ""; - if (photo.url) { - const imageDescriptionService = - this.runtime.getService( - ServiceType.IMAGE_DESCRIPTION - ); - - const description = - await imageDescriptionService.describeImage(photo.url); - photoDescriptions.push(description); - } - } - - if (photoDescriptions.length > 0) { - context += `\nPhotos in Post (Described): ${photoDescriptions.join(", ")}`; - } - } - - // Add videos if they exist (just mentioning their presence) - if (selectedJeet.videos?.length) { - context += `\nVideos: ${selectedJeet.videos.length} video(s) attached`; - } - - return context; - } -} diff --git a/packages/client-simsai/src/jeeter/types.ts b/packages/client-simsai/src/jeeter/types.ts deleted file mode 100644 index 34293950c2642..0000000000000 --- a/packages/client-simsai/src/jeeter/types.ts +++ /dev/null @@ -1,181 +0,0 @@ -import { Content } from "@elizaos/core"; - -export interface ApiError extends Error { - statusCode?: number; - endpoint?: string; -} - -export type SimsAIProfile = { - id: string; - username: string; - screenName: string; - bio: string; -}; - -export interface Agent { - id: string; - builder_id: string; - username: string; - name: string; - bio: string; - avatar_url: string; - created_at: string; - updated_at: string; -} - -export interface ApiAgent { - id: string; - name: string; - username: string; - type: string; - avatar_url: string; -} - -export interface Jeet { - readonly id?: string; - readonly agentId?: string; - readonly text?: string; - readonly createdAt?: string; - readonly agent?: ApiAgent | Agent; - readonly public_metrics?: { - reply_count: number; - like_count: number; - quote_count: number; - rejeet_count: number; - }; - readonly conversationId?: string; - readonly hashtags: string[]; - readonly inReplyToStatusId?: string; - readonly isRejeet?: boolean; - readonly name?: string; - readonly mentions: Jeet[]; - readonly permanentUrl?: string; - readonly photos: Photo[]; - readonly thread: Jeet[]; - readonly timestamp?: number; - readonly urls: string[]; - readonly userId?: string; - readonly username?: string; - readonly videos: Video[]; - media: Array<{ - type: string; - url: string; - preview_url?: string; - }>; - readonly type?: string; -} - -export interface Video { - id: string; - preview: string; - url?: string; -} - -export interface Pagination { - next_cursor: string; - has_more: boolean; -} - -export interface Photo { - id: string; - url: string; - alt_text: string | undefined; -} - -export interface JeetInteraction { - type: "reply" | "like" | "rejeet" | "quote" | "none"; - text?: string; -} - -export interface EnhancedResponseContent extends Content { - text: string; - shouldLike?: boolean; - interactions: JeetInteraction[]; - action: ValidAction; -} - -export type ValidAction = "CONTINUE" | "END" | "IGNORE"; - -export interface JeetResponse { - jeets: Jeet[]; - nextCursor?: string; -} - -export interface ApiSearchResponse { - data: Array<{ - id: string; - text: string; - created_at: string; - author_id: string; - in_reply_to_status_id?: string; - public_metrics: { - reply_count: number; - like_count: number; - quote_count: number; - rejeet_count: number; - }; - }>; - includes: { - users: Array; - }; - meta: { - result_count: number; - }; -} - -export interface ApiConversationResponse { - data: Array<{ - id: string; - text: string; - created_at: string; - author_id: string; - in_reply_to_status_id?: string; - public_metrics: { - reply_count: number; - like_count: number; - quote_count: number; - rejeet_count: number; - }; - }>; - includes: { - users: Array; - }; -} - -export interface ApiLikeResponse { - data: { - liked: boolean; - }; -} - -export interface ApiRejeetResponse { - data: { - id: string; - created_at: string; - author_id: string; - }; -} - -export interface ApiPostJeetResponse { - data: { - id: string; - text: string; - type: string; - created_at: string; - author_id: string; - public_metrics: { - reply_count: number; - like_count: number; - quote_count: number; - rejeet_count: number; - }; - }; - includes: { - users: Array; - media: Array<{ - type: string; - url: string; - preview_url?: string; - }>; - }; -} diff --git a/packages/client-simsai/src/jeeter/utils.ts b/packages/client-simsai/src/jeeter/utils.ts deleted file mode 100644 index d14053ad4b354..0000000000000 --- a/packages/client-simsai/src/jeeter/utils.ts +++ /dev/null @@ -1,470 +0,0 @@ -import { getEmbeddingZeroVector } from "@elizaos/core"; -import { Content, Memory, UUID } from "@elizaos/core"; -import { stringToUuid } from "@elizaos/core"; -import { ClientBase } from "./base"; -import { elizaLogger } from "@elizaos/core"; -import { SIMSAI_API_URL, MAX_JEET_LENGTH } from "./constants"; -import { ApiPostJeetResponse, Jeet } from "./types"; - -/** - * Waits for a random amount of time between the specified minimum and maximum duration. - * @param minTime The minimum wait time in milliseconds (default: 1000). - * @param maxTime The maximum wait time in milliseconds (default: 3000). - * @returns A promise that resolves after the random wait time. - */ -export const wait = ( - minTime: number = 1000, - maxTime: number = 3000 -): Promise => { - // Prevent situation where user sets minTime > maxTime - if (minTime > maxTime) { - [minTime, maxTime] = [maxTime, minTime]; - } - - const waitTime = - Math.floor(Math.random() * (maxTime - minTime + 1)) + minTime; - return new Promise((resolve) => setTimeout(resolve, waitTime)); -}; - -/** - * Checks if a jeet is valid based on the number of hashtags, at mentions, and dollar signs. - * @param jeet The jeet to validate. - * @returns A boolean indicating whether the jeet is valid. - */ -export const isValidJeet = (jeet: Jeet): boolean => { - const text = jeet.text || ""; - const hashtagCount = (text.match(/#/g) || []).length; - const atCount = (text.match(/@/g) || []).length; - const dollarSignCount = (text.match(/\$/g) || []).length; - const totalCount = hashtagCount + atCount + dollarSignCount; - - return ( - hashtagCount <= 1 && - atCount <= 2 && - dollarSignCount <= 1 && - totalCount <= 3 - ); -}; - -/** - * Builds a conversation thread by fetching the full conversation or recursively processing parent jeets. - * @param jeet The starting jeet of the conversation thread - * @param client The ClientBase instance - * @returns A promise that resolves to an array of jeets representing the conversation thread - */ -export async function buildConversationThread( - jeet: Jeet, - client: ClientBase -): Promise { - const thread: Jeet[] = []; - const visited: Set = new Set(); - - // Try to fetch the full conversation first if we have a conversation ID - if (jeet.conversationId || jeet.id) { - try { - elizaLogger.log( - `Attempting to fetch conversation for jeet ${jeet.id}` - ); - const conversationId = jeet.conversationId || jeet.id; - const conversation = - await client.simsAIClient.getJeetConversation(conversationId); - - // Process each jeet in the conversation - for (const conversationJeet of conversation) { - await processJeetMemory(conversationJeet, client); - thread.push(conversationJeet); - } - - elizaLogger.debug("Conversation context:", { - totalMessages: thread.length, - conversationId: jeet.conversationId || jeet.id, - participants: [ - ...new Set(thread.map((j) => j.agent?.username)), - ], - threadDepth: thread.length, - }); - - return thread.sort((a, b) => { - const timeA = new Date(a.createdAt || 0).getTime(); - const timeB = new Date(b.createdAt || 0).getTime(); - return timeA - timeB; - }); - } catch (error) { - elizaLogger.error( - `Error fetching conversation, falling back to recursive method:`, - error - ); - // Clear thread and fall back to recursive method - thread.length = 0; - } - } - - // Fall back to recursive method if conversation fetch fails or isn't available - async function processThread(currentJeet: Jeet, depth: number = 0) { - try { - validateJeet(currentJeet); - - // Check if we've already processed this jeet - if (visited.has(currentJeet.id)) { - elizaLogger.debug(`Already visited jeet: ${currentJeet.id}`); - return; - } - - // Process the current jeet's memory - await processJeetMemory(currentJeet, client); - - // Add to visited set and thread - visited.add(currentJeet.id); - thread.unshift(currentJeet); - - elizaLogger.debug("Thread state:", { - length: thread.length, - currentDepth: depth, - jeetId: currentJeet.id, - }); - - // Process parent jeet if it exists - if (currentJeet.inReplyToStatusId) { - try { - const parentJeet = await client.simsAIClient.getJeet( - currentJeet.inReplyToStatusId - ); - if (parentJeet) { - await processThread(parentJeet, depth + 1); - } - } catch (error) { - elizaLogger.error( - `Error processing parent jeet ${currentJeet.inReplyToStatusId}:`, - error - ); - } - } - } catch (error) { - elizaLogger.error( - `Error in processThread for jeet ${currentJeet.id}:`, - error - ); - if (error instanceof Error) { - elizaLogger.error("Error details:", { - message: error.message, - stack: error.stack, - }); - } - } - } - - // Start processing with the initial jeet - await processThread(jeet, 0); - - elizaLogger.debug("Final thread built:", { - totalJeets: thread.length, - jeetIds: thread.map((t) => ({ - id: t.id, - text: t.text?.slice(0, 50), - })), - }); - - return thread; -} - -/** - * Validates a jeet object has required properties - * @param jeet The jeet to validate - * @throws TypeError if required properties are missing or invalid - */ -function validateJeet(jeet: Jeet) { - if (typeof jeet.id !== "string") { - elizaLogger.error("Jeet ID is not a string:", jeet.id); - throw new TypeError("Jeet ID must be a string"); - } - - if (typeof jeet.agentId !== "string") { - elizaLogger.error("Agent ID is not a string:", jeet.agentId); - throw new TypeError("Agent ID must be a string"); - } - - if (jeet.conversationId && typeof jeet.conversationId !== "string") { - elizaLogger.error( - "Conversation ID is not a string:", - jeet.conversationId - ); - throw new TypeError("Conversation ID must be a string"); - } -} - -/** - * Processes and stores a jeet's memory in the runtime - * @param jeet The jeet to process - * @param client The ClientBase instance - */ -async function processJeetMemory(jeet: Jeet, client: ClientBase) { - const roomId = stringToUuid( - `${jeet.conversationId || jeet.id}-${client.runtime.agentId}` - ); - const userId = stringToUuid(jeet.agentId); - - // Ensure connection exists - if (jeet.agent) { - await client.runtime.ensureConnection( - userId, - roomId, - jeet.agent.username, - jeet.agent.name, - "jeeter" - ); - } - - // Create memory if it doesn't exist - const existingMemory = await client.runtime.messageManager.getMemoryById( - stringToUuid(jeet.id + "-" + client.runtime.agentId) - ); - - if (!existingMemory) { - await client.runtime.messageManager.createMemory({ - id: stringToUuid(jeet.id + "-" + client.runtime.agentId), - agentId: client.runtime.agentId, - content: { - text: jeet.text || "", - source: "jeeter", - url: jeet.permanentUrl, - inReplyTo: jeet.inReplyToStatusId - ? stringToUuid( - jeet.inReplyToStatusId + "-" + client.runtime.agentId - ) - : undefined, - }, - createdAt: jeet.createdAt - ? new Date(jeet.createdAt).getTime() - : jeet.timestamp - ? jeet.timestamp * 1000 - : Date.now(), - roomId, - userId: userId, - embedding: getEmbeddingZeroVector(), - }); - } -} - -/** - * Sends a jeet by splitting the content into chunks and posting each chunk separately. - * @param client The ClientBase instance. - * @param content The content of the jeet. - * @param roomId The room ID associated with the jeet. - * @param jeetUsername The username of the user posting the jeet. - * @param inReplyToJeetId The ID of the jeet being replied to (optional). - * @returns A promise that resolves to an array of memory objects representing the sent jeets. - */ -export async function sendJeet( - client: ClientBase, - content: Content, - roomId: UUID, - jeetUsername: string, - inReplyToJeetId?: string -): Promise { - const jeetChunks = splitJeetContent(content.text); - const sentJeets: Jeet[] = []; - let currentReplyToId = inReplyToJeetId; // Track current reply parent - - for (const chunk of jeetChunks) { - const response = await client.requestQueue.add(async () => { - try { - const result = await client.simsAIClient.postJeet( - chunk.trim(), - currentReplyToId // Use currentReplyToId for the chain - ); - return result as unknown as ApiPostJeetResponse; - } catch (error) { - elizaLogger.error(`Failed to post jeet chunk:`, error); - throw error; - } - }); - - if (!response?.data?.id) { - throw new Error( - `Failed to get valid response from postJeet: ${JSON.stringify(response)}` - ); - } - - const author = response.includes.users.find( - (user) => user.id === response.data.author_id - ); - - const finalJeet: Jeet = { - id: response.data.id, - text: response.data.text, - createdAt: response.data.created_at, - agentId: response.data.author_id, - agent: author, - type: response.data.type, - public_metrics: response.data.public_metrics, - permanentUrl: `${SIMSAI_API_URL}/${jeetUsername}/status/${response.data.id}`, - inReplyToStatusId: currentReplyToId, // Track reply chain - hashtags: [], - mentions: [], - photos: [], - thread: [], - urls: [], - videos: [], - media: [], - }; - - sentJeets.push(finalJeet); - currentReplyToId = finalJeet.id; // Update reply chain to the last sent jeet - await wait(1000, 2000); - } - - const memories: Memory[] = sentJeets.map((jeet, index) => ({ - id: stringToUuid(jeet.id + "-" + client.runtime.agentId), - agentId: client.runtime.agentId, - userId: client.runtime.agentId, - content: { - text: jeet.text, - source: "jeeter", - url: jeet.permanentUrl, - inReplyTo: - index === 0 - ? inReplyToJeetId - ? stringToUuid( - inReplyToJeetId + "-" + client.runtime.agentId - ) - : undefined - : stringToUuid( - sentJeets[index - 1].id + "-" + client.runtime.agentId - ), - }, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: jeet.createdAt - ? new Date(jeet.createdAt).getTime() - : Date.now(), - })); - - return memories; -} - -/** - * Splits the jeet content into chunks based on the maximum length. - * @param content The content to split. - * @returns An array of jeet chunks. - */ -export function splitJeetContent(content: string): string[] { - const maxLength = MAX_JEET_LENGTH; - const paragraphs = content.split("\n\n").map((p) => p.trim()); - const jeets: string[] = []; - let currentJeet = ""; - - for (const paragraph of paragraphs) { - if (!paragraph) continue; - - if ((currentJeet + "\n\n" + paragraph).trim().length <= maxLength) { - currentJeet = currentJeet - ? currentJeet + "\n\n" + paragraph - : paragraph; - } else { - if (currentJeet) { - jeets.push(currentJeet.trim()); - } - if (paragraph.length <= maxLength) { - currentJeet = paragraph; - } else { - const chunks = splitParagraph(paragraph, maxLength); - jeets.push(...chunks.slice(0, -1)); - currentJeet = chunks[chunks.length - 1]; - } - } - } - - if (currentJeet) { - jeets.push(currentJeet.trim()); - } - - return jeets; -} - -/** - * Splits a paragraph into chunks based on the maximum length. - * @param paragraph The paragraph to split. - * @param maxLength The maximum length of each chunk. - * @returns An array of paragraph chunks. - */ -export function splitParagraph(paragraph: string, maxLength: number): string[] { - const sentences = paragraph.match(/[^.!?]+[.!?]+|[^.!?]+$/g) || [paragraph]; - const chunks: string[] = []; - let currentChunk = ""; - - for (const sentence of sentences) { - if ((currentChunk + " " + sentence).trim().length <= maxLength) { - currentChunk = currentChunk - ? currentChunk + " " + sentence - : sentence; - } else { - if (currentChunk) { - chunks.push(currentChunk.trim()); - } - if (sentence.length <= maxLength) { - currentChunk = sentence; - } else { - const words = sentence.split(" "); - currentChunk = ""; - for (const word of words) { - if ( - (currentChunk + " " + word).trim().length <= maxLength - ) { - currentChunk = currentChunk - ? currentChunk + " " + word - : word; - } else { - if (currentChunk) { - chunks.push(currentChunk.trim()); - } - currentChunk = word; - } - } - } - } - } - - if (currentChunk) { - chunks.push(currentChunk.trim()); - } - - return chunks; -} - -/** - * Truncates the given text to the last complete sentence within the specified maximum length. - * @param text The text to truncate. - * @param maxLength The maximum length of the truncated text. - * @returns The truncated text. - */ -export function truncateToCompleteSentence( - text: string, - maxLength: number -): string { - // To avoid negative indexing when subtracting 3 for the ellipsis - if (maxLength < 3) { - throw new Error("maxLength must be at least 3"); - } - - if (text.length <= maxLength) { - return text; - } - - const lastPeriodIndex = text.lastIndexOf(".", maxLength); - if (lastPeriodIndex !== -1) { - const truncatedAtPeriod = text.slice(0, lastPeriodIndex + 1).trim(); - if (truncatedAtPeriod.length > 0) { - return truncatedAtPeriod; - } - } - - const lastSpaceIndex = text.lastIndexOf(" ", maxLength); - if (lastSpaceIndex !== -1) { - const truncatedAtSpace = text.slice(0, lastSpaceIndex).trim(); - if (truncatedAtSpace.length > 0) { - return truncatedAtSpace + "..."; - } - } - - return text.slice(0, maxLength - 3).trim() + "..."; -} diff --git a/packages/client-simsai/tsconfig.json b/packages/client-simsai/tsconfig.json deleted file mode 100644 index 73993deaaf7cb..0000000000000 --- a/packages/client-simsai/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/client-simsai/tsup.config.ts b/packages/client-simsai/tsup.config.ts deleted file mode 100644 index e42bf4efeae22..0000000000000 --- a/packages/client-simsai/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/client-slack/README.md b/packages/client-slack/README.md deleted file mode 100644 index d1e06673ed261..0000000000000 --- a/packages/client-slack/README.md +++ /dev/null @@ -1,194 +0,0 @@ -# Eliza Slack Client - -This package provides Slack integration for the Eliza AI agent. - -## Setup Guide - -### Prerequisites - -- A Slack workspace where you have permissions to install apps -- ngrok installed for local development (`brew install ngrok` on macOS) -- Node.js and pnpm installed - -### Step 1: Start ngrok - -1. Open a terminal and start ngrok on port 3069 (or your configured port): - ```bash - ngrok http 3069 - ``` -2. Copy the HTTPS URL (e.g., `https://xxxx-xx-xx-xx-xx.ngrok-free.app`) -3. Keep this terminal open - closing it will invalidate the URL - -### Step 2: Create Slack App - -1. Go to [Slack API Apps page](https://api.slack.com/apps) -2. Click "Create New App" -3. Choose "From an app manifest" -4. Select your workspace -5. Copy this manifest, replacing `YOUR_NGROK_URL` with your ngrok HTTPS URL: - -```yaml -display_information: - name: eve - description: Eve elizaos - background_color: "#143187" -features: - app_home: - home_tab_enabled: true - messages_tab_enabled: true - messages_tab_read_only_enabled: false - bot_user: - display_name: eve - always_online: false -oauth_config: - scopes: - bot: - - app_mentions:read - - channels:history - - channels:join - - channels:read - - chat:write - - files:read - - files:write - - groups:history - - groups:read - - im:history - - im:read - - im:write - - mpim:history - - mpim:read - - mpim:write - - users:read -settings: - event_subscriptions: - request_url: YOUR_NGROK_URL/slack/events - bot_events: - - app_mention - - message.channels - - message.groups - - message.im - - message.mpim - - file_shared - interactivity: - is_enabled: true - request_url: YOUR_NGROK_URL/slack/interactions - org_deploy_enabled: false - socket_mode_enabled: false - token_rotation_enabled: false -``` - -6. Click "Create" -7. On the "Basic Information" page, scroll down to "App Credentials" -8. Copy all the credentials - you'll need them in Step 3 - -### Step 2.5: Verify Event Subscription - -Before proceeding to install the app, make sure to verify the event subscription: - -1. In your Slack App settings, go to "Event Subscriptions." -2. Enter the request URL (your ngrok HTTPS URL followed by /slack/events). -3. Slack will send a verification request to this URL. -4. Ensure your server is running and configured to respond to the url_verification event by echoing back the challenge token provided in the request. -5. Once verified, you will see a confirmation in your Slack app settings. - -### Step 3: Configure Environment Variables - -1. Create or edit `.env` file in your project root: - ```bash - SLACK_APP_ID= # From Basic Information > App Credentials > App ID - SLACK_CLIENT_ID= # From Basic Information > App Credentials > Client ID - SLACK_CLIENT_SECRET= # From Basic Information > App Credentials > Client Secret - SLACK_SIGNING_SECRET= # From Basic Information > App Credentials > Signing Secret - SLACK_BOT_TOKEN= # From OAuth & Permissions > Bot User OAuth Token (starts with xoxb-) - SLACK_VERIFICATION_TOKEN= # From Basic Information > App Credentials > Verification Token - SLACK_SERVER_PORT=3069 # Must match the port you used with ngrok - ``` - -### Step 4: Install the App - -1. In your Slack App settings, go to "Install App" -2. Click "Install to Workspace" -3. Review the permissions and click "Allow" - -### Step 5: Verify Installation - -1. Start your Eliza server -2. Check the logs for successful connection -3. Test the bot: - - In Slack, invite the bot to a channel: `/invite @eve` - - Try mentioning the bot: `@eve hello` - - Check your server logs for event reception - -### Common Issues and Solutions - -#### URL Verification Failed - -- Make sure ngrok is running and the URL in your app settings matches exactly -- Check that the `/slack/events` endpoint is accessible -- Verify your environment variables are set correctly - -#### Bot Not Responding - -1. Check server logs for incoming events -2. Verify the bot is in the channel -3. Ensure all required scopes are granted -4. Try reinstalling the app to refresh permissions - -#### Messages Not Received - -1. Verify Event Subscriptions are enabled -2. Check the Request URL is correct and verified -3. Confirm all bot events are subscribed -4. Ensure the bot token starts with `xoxb-` - -### Updating ngrok URL - -If you restart ngrok, you'll get a new URL. You'll need to: - -1. Copy the new ngrok HTTPS URL -2. Update the Request URLs in your Slack App settings: - - Event Subscriptions > Request URL - - Interactivity & Shortcuts > Request URL -3. Wait for URL verification to complete - -### Security Notes - -- Never commit your `.env` file or tokens to version control -- Rotate your tokens if they're ever exposed -- Use HTTPS URLs only for Request URLs -- Keep your ngrok and server running while testing - -## Development - -### Local Testing - -1. Start ngrok: `ngrok http 3069` -2. Update Slack App URLs with new ngrok URL -3. Start the server: `pnpm start` -4. Monitor logs for events and errors - -### Debugging - -Enable detailed logging by setting: - -```bash -DEBUG=eliza:* -``` - -### Adding New Features - -1. Update the manifest if adding new scopes -2. Reinstall the app to apply new permissions -3. Update documentation for any new environment variables - -## Support - -For issues or questions: - -1. Check the Common Issues section above -2. Review server logs for errors -3. Verify all setup steps are completed -4. Open an issue with: - - Error messages - - Server logs - - Steps to reproduce diff --git a/packages/client-slack/__tests__/message-manager.test.ts b/packages/client-slack/__tests__/message-manager.test.ts deleted file mode 100644 index f7a948f64646e..0000000000000 --- a/packages/client-slack/__tests__/message-manager.test.ts +++ /dev/null @@ -1,89 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { MessageManager } from '../src/messages'; -import type { WebClient } from '@slack/web-api'; -import type { IAgentRuntime } from '@elizaos/core'; - -// Mock dependencies -vi.mock('@slack/web-api'); -vi.mock('@elizaos/core'); - -describe('MessageManager', () => { - let mockWebClient: WebClient; - let mockRuntime: IAgentRuntime; - let messageManager: MessageManager; - const mockBotUserId = 'U123456'; - - beforeEach(() => { - // Setup mock WebClient - mockWebClient = { - chat: { - postMessage: vi.fn() - } - } as unknown as WebClient; - - // Setup mock runtime - mockRuntime = { - getSetting: vi.fn(), - character: { - name: 'TestBot' - } - } as unknown as IAgentRuntime; - - messageManager = new MessageManager(mockWebClient, mockRuntime, mockBotUserId); - }); - - it('should initialize with correct parameters', () => { - expect(messageManager).toBeDefined(); - }); - - it('should not process duplicate events', () => { - const eventId = 'evt_123'; - const result1 = messageManager['processedEvents'].has(eventId); - expect(result1).toBe(false); - - // Add event to processed set - messageManager['processedEvents'].add(eventId); - const result2 = messageManager['processedEvents'].has(eventId); - expect(result2).toBe(true); - }); - - it('should handle message processing lock correctly', () => { - const messageId = 'msg_123'; - const isLocked1 = messageManager['messageProcessingLock'].has(messageId); - expect(isLocked1).toBe(false); - - // Lock message - messageManager['messageProcessingLock'].add(messageId); - const isLocked2 = messageManager['messageProcessingLock'].has(messageId); - expect(isLocked2).toBe(true); - }); - - it('should clean up old processed messages', () => { - vi.useFakeTimers(); - const oldMessageId = 'old_msg'; - const newMessageId = 'new_msg'; - - // Add messages with different timestamps - messageManager['processedMessages'].set(oldMessageId, Date.now() - 3700000); // older than 1 hour - messageManager['processedMessages'].set(newMessageId, Date.now()); // current - - // Trigger cleanup by advancing time and running interval callback - const cleanupInterval = setInterval(() => { - const oneHourAgo = Date.now() - 3600000; - for (const [key, timestamp] of messageManager['processedMessages'].entries()) { - if (timestamp < oneHourAgo) { - messageManager['processedMessages'].delete(key); - } - } - }, 3600000); - - vi.advanceTimersByTime(3600000); - - // Check if old message was cleaned up - expect(messageManager['processedMessages'].has(oldMessageId)).toBe(false); - expect(messageManager['processedMessages'].has(newMessageId)).toBe(true); - - clearInterval(cleanupInterval); - vi.useRealTimers(); - }); -}); diff --git a/packages/client-slack/__tests__/slack-client.provider.test.ts b/packages/client-slack/__tests__/slack-client.provider.test.ts deleted file mode 100644 index 75ee32c0c9622..0000000000000 --- a/packages/client-slack/__tests__/slack-client.provider.test.ts +++ /dev/null @@ -1,151 +0,0 @@ -import { describe, expect, test, beforeEach, vi } from "vitest"; -import { SlackClientProvider } from "../src/providers/slack-client.provider"; -import type { SlackConfig } from "../src/types/slack-types"; -import type { WebClient } from "@slack/web-api"; -import type { - AuthTestResponse, - ChatPostMessageResponse, -} from "@slack/web-api"; - -vi.mock("@slack/web-api"); - -// Mock setup functions -const createMockSlackResponse = (ok: boolean, additionalData = {}) => ({ - ok, - ...additionalData, -}); - -const getMockWebClient = () => { - return { - auth: { - test: vi.fn(), - }, - chat: { - postMessage: vi.fn(), - }, - } as unknown as WebClient; -}; - -describe("SlackClientProvider", () => { - let provider: SlackClientProvider; - let mockWebClient: WebClient; - let mockConfig: SlackConfig; - - beforeEach(() => { - vi.clearAllMocks(); - mockConfig = { - appId: "test-app-id", - clientId: "test-client-id", - clientSecret: "test-client-secret", - signingSecret: "test-signing-secret", - verificationToken: "test-verification-token", - botToken: "test-bot-token", - botId: "test-bot-id", - }; - mockWebClient = getMockWebClient(); - provider = new SlackClientProvider(mockConfig); - // @ts-ignore - setting mock client for testing - provider['client'] = mockWebClient; - }); - - describe("Initialization", () => { - test("should create a provider instance with default retry options", () => { - expect(provider).toBeInstanceOf(SlackClientProvider); - const context = provider.getContext(); - expect(context).toHaveProperty("client"); - expect(context).toHaveProperty("config"); - expect(context.config).toEqual(mockConfig); - }); - - test("should create a provider instance with custom retry options", () => { - const retryOptions = { - maxRetries: 5, - initialDelay: 2000, - maxDelay: 10000, - }; - const providerWithOptions = new SlackClientProvider(mockConfig, retryOptions); - // @ts-ignore - setting mock client for testing - providerWithOptions['client'] = mockWebClient; - - expect(providerWithOptions).toBeInstanceOf(SlackClientProvider); - const context = providerWithOptions.getContext(); - expect(context).toHaveProperty("client"); - expect(context).toHaveProperty("config"); - expect(context.config).toEqual(mockConfig); - }); - }); - - describe("Connection Validation", () => { - test("should validate connection successfully", async () => { - const mockResponse = createMockSlackResponse(true, { - user_id: "test-bot-id", - }) as AuthTestResponse; - const mockTest = mockWebClient.auth.test as vi.Mock; - mockTest.mockResolvedValue(mockResponse); - - const result = await provider.validateConnection(); - expect(result).toBe(true); - }); - - test("should handle failed validation", async () => { - const mockResponse = createMockSlackResponse(false) as AuthTestResponse; - const mockTest = mockWebClient.auth.test as vi.Mock; - mockTest.mockResolvedValue(mockResponse); - - const result = await provider.validateConnection(); - expect(result).toBe(false); - }); - - test("should handle connection errors", async () => { - const mockTest = mockWebClient.auth.test as vi.Mock; - mockTest.mockRejectedValue(new Error("Connection failed")); - - const result = await provider.validateConnection(); - expect(result).toBe(false); - }); - }); - - describe("Message Sending", () => { - const channelId = "test-channel"; - const text = "Hello, world!"; - - test("should successfully send a message", async () => { - const expectedResponse = createMockSlackResponse(true, { - ts: "1234567890.123456", - }) as ChatPostMessageResponse; - const mockPostMessage = mockWebClient.chat.postMessage as vi.Mock; - mockPostMessage.mockResolvedValue(expectedResponse); - - const result = await provider.sendMessage(channelId, text); - expect(result.ok).toBe(true); - expect(mockPostMessage).toHaveBeenCalledWith({ - channel: channelId, - text: text, - }); - }); - - test("should handle rate limiting", async () => { - const mockResponse = createMockSlackResponse(true) as ChatPostMessageResponse; - const mockPostMessage = mockWebClient.chat.postMessage as vi.Mock; - - mockPostMessage - .mockRejectedValueOnce(new Error("rate_limited")) - .mockResolvedValueOnce(mockResponse); - - const result = await provider.sendMessage(channelId, text); - expect(result.ok).toBe(true); - }); - - test("should handle network errors with retry", async () => { - const mockResponse = createMockSlackResponse(true) as ChatPostMessageResponse; - const mockPostMessage = mockWebClient.chat.postMessage as vi.Mock; - - mockPostMessage - .mockRejectedValueOnce(new Error("network_error")) - .mockResolvedValueOnce(mockResponse); - - const result = await provider.sendMessage(channelId, text); - expect(result.ok).toBe(true); - }); - }); -}); diff --git a/packages/client-slack/__tests__/slack-client.test.ts b/packages/client-slack/__tests__/slack-client.test.ts deleted file mode 100644 index 8eaa9dbdc6d94..0000000000000 --- a/packages/client-slack/__tests__/slack-client.test.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { SlackClient } from '../src/index'; -import { WebClient } from '@slack/web-api'; -import type { IAgentRuntime, Character } from '@elizaos/core'; - -// Mock dependencies -vi.mock('@slack/web-api'); -vi.mock('@elizaos/core'); - -describe('SlackClient', () => { - let mockRuntime: IAgentRuntime; - let slackClient: SlackClient; - - beforeEach(() => { - // Setup mock runtime - mockRuntime = { - getSetting: vi.fn((key: string) => { - const settings: { [key: string]: string } = { - 'SLACK_BOT_TOKEN': 'test-token', - 'SLACK_SIGNING_SECRET': 'test-secret' - }; - return settings[key]; - }), - character: {} as Character - } as unknown as IAgentRuntime; - }); - - it('should initialize with correct settings', () => { - slackClient = new SlackClient(mockRuntime); - expect(mockRuntime.getSetting).toHaveBeenCalledWith('SLACK_BOT_TOKEN'); - expect(mockRuntime.getSetting).toHaveBeenCalledWith('SLACK_SIGNING_SECRET'); - }); - - it('should throw error if SLACK_BOT_TOKEN is missing', () => { - mockRuntime.getSetting = vi.fn((key: string) => { - const settings: { [key: string]: string } = { - 'SLACK_SIGNING_SECRET': 'test-secret' - }; - return settings[key]; - }); - - expect(() => new SlackClient(mockRuntime)).toThrow('SLACK_BOT_TOKEN is required'); - }); - - it('should throw error if SLACK_SIGNING_SECRET is missing', () => { - mockRuntime.getSetting = vi.fn((key: string) => { - const settings: { [key: string]: string } = { - 'SLACK_BOT_TOKEN': 'test-token' - }; - return settings[key]; - }); - - expect(() => new SlackClient(mockRuntime)).toThrow('SLACK_SIGNING_SECRET is required'); - }); -}); diff --git a/packages/client-slack/jest.config.js b/packages/client-slack/jest.config.js deleted file mode 100644 index 2f606cb844987..0000000000000 --- a/packages/client-slack/jest.config.js +++ /dev/null @@ -1,22 +0,0 @@ -/** @type {import('ts-jest').JestConfigWithTsJest} */ -export default { - preset: "ts-jest", - testEnvironment: "node", - roots: ["/src"], - testMatch: ["**/__tests__/**/*.ts", "**/?(*.)+(spec|test).ts"], - setupFilesAfterEnv: ["/src/tests/setup.ts"], - collectCoverageFrom: [ - "src/**/*.ts", - "!src/tests/**", - "!src/examples/**", - "!src/**/*.d.ts", - ], - coverageThreshold: { - global: { - branches: 80, - functions: 80, - lines: 80, - statements: 80, - }, - }, -}; diff --git a/packages/client-slack/package.json b/packages/client-slack/package.json deleted file mode 100644 index 786fe4862fd06..0000000000000 --- a/packages/client-slack/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "name": "@elizaos/client-slack", - "version": "0.25.6-alpha.1", - "description": "Slack client plugin for Eliza framework", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "scripts": { - "build": "tsup src/index.ts --format esm --dts", - "test": "vitest run", - "test:watch": "vitest", - "clean": "rimraf dist", - "dev": "tsup src/index.ts --watch", - "example": "ts-node src/examples/standalone-example.ts", - "example:attachment": "ts-node src/examples/standalone-attachment.ts", - "example:summarize": "ts-node src/examples/standalone-summarize.ts", - "example:transcribe": "ts-node src/examples/standalone-transcribe.ts" - }, - "dependencies": { - "@elizaos/core": "workspace:*", - "@ffmpeg-installer/ffmpeg": "^1.1.0", - "@slack/events-api": "^3.0.1", - "@slack/web-api": "^6.8.1", - "body-parser": "^1.20.2", - "dotenv": "^16.0.3", - "express": "^4.18.2", - "fluent-ffmpeg": "^2.1.2", - "node-fetch": "^2.6.9" - }, - "devDependencies": { - "@types/express": "^4.17.21", - "@types/fluent-ffmpeg": "^2.1.24", - "@types/node": "^18.15.11", - "rimraf": "^5.0.0", - "tsup": "^6.7.0", - "typescript": "^5.0.3", - "vitest": "^3.0.0" - }, - "engines": { - "node": ">=14.0.0" - } -} diff --git a/packages/client-slack/src/actions/chat_with_attachments.ts b/packages/client-slack/src/actions/chat_with_attachments.ts deleted file mode 100644 index 3a9392acbe363..0000000000000 --- a/packages/client-slack/src/actions/chat_with_attachments.ts +++ /dev/null @@ -1,289 +0,0 @@ -import { - composeContext, - generateText, - trimTokens, - parseJSONObjectFromText, - getModelSettings, -} from "@elizaos/core"; -import { - type Action, - type ActionExample, - type Content, - type HandlerCallback, - type Handler, - type IAgentRuntime, - type Memory, - ModelClass, - type State, -} from "@elizaos/core"; - -export const summarizationTemplate = `# Summarized so far (we are adding to this) -{{currentSummary}} - -# Current attachments we are summarizing -{{attachmentsWithText}} - -Summarization objective: {{objective}} - -# Instructions: Summarize the attachments. Return the summary. Do not acknowledge this request, just summarize and continue the existing summary if there is one. Capture any important details based on the objective. Only respond with the new summary text.`; - -export const attachmentIdsTemplate = `# Messages we are summarizing -{{recentMessages}} - -# Instructions: {{senderName}} is requesting a summary of specific attachments. Your goal is to determine their objective, along with the list of attachment IDs to summarize. -The "objective" is a detailed description of what the user wants to summarize based on the conversation. -The "attachmentIds" is an array of attachment IDs that the user wants to summarize. If not specified, default to including all attachments from the conversation. - -Your response must be formatted as a JSON block with this structure: -\`\`\`json -{ - "objective": "", - "attachmentIds": ["", "", ...] -} -\`\`\` -`; - -const getAttachmentIds = async ( - runtime: IAgentRuntime, - message: Memory, - state: State -): Promise<{ objective: string; attachmentIds: string[] } | null> => { - const context = composeContext({ - state, - template: attachmentIdsTemplate, - }); - - for (let i = 0; i < 5; i++) { - const response = await generateText({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - - const parsedResponse = parseJSONObjectFromText(response) as { - objective: string; - attachmentIds: string[]; - } | null; - - if (parsedResponse?.objective && parsedResponse?.attachmentIds) { - return parsedResponse; - } - } - return null; -}; - -const summarizeAction: Action = { - name: "CHAT_WITH_ATTACHMENTS", - similes: [ - "CHAT_WITH_ATTACHMENT", - "SUMMARIZE_FILES", - "SUMMARIZE_FILE", - "SUMMARIZE_ATACHMENT", - "CHAT_WITH_PDF", - "ATTACHMENT_SUMMARY", - "RECAP_ATTACHMENTS", - "SUMMARIZE_FILE", - "SUMMARIZE_VIDEO", - "SUMMARIZE_AUDIO", - "SUMMARIZE_IMAGE", - "SUMMARIZE_DOCUMENT", - "SUMMARIZE_LINK", - "ATTACHMENT_SUMMARY", - "FILE_SUMMARY", - ], - description: - "Answer a user request informed by specific attachments based on their IDs. If a user asks to chat with a PDF, or wants more specific information about a link or video or anything else they've attached, this is the action to use.", - validate: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State | undefined - ): Promise => { - if (message.content.source !== "slack") { - return false; - } - - const keywords: string[] = [ - "attachment", - "summary", - "summarize", - "research", - "pdf", - "video", - "audio", - "image", - "document", - "link", - "file", - "attachment", - "summarize", - "code", - "report", - "write", - "details", - "information", - "talk", - "chat", - "read", - "listen", - "watch", - ]; - - return keywords.some((keyword) => - message.content.text.toLowerCase().includes(keyword.toLowerCase()) - ); - }, - handler: (async ( - runtime: IAgentRuntime, - message: Memory, - state: State | undefined, - options: any, - callback: HandlerCallback - ): Promise => { - const currentState = - state ?? ((await runtime.composeState(message)) as State); - - const callbackData: Content = { - text: "", - action: "CHAT_WITH_ATTACHMENTS_RESPONSE", - source: message.content.source, - attachments: [], - }; - - const attachmentData = await getAttachmentIds( - runtime, - message, - currentState - ); - if (!attachmentData) { - console.error("Couldn't get attachment IDs from message"); - await callback(callbackData); - return callbackData; - } - - const { objective, attachmentIds } = attachmentData; - - const attachments = currentState.recentMessagesData - .filter( - (msg) => - msg.content.attachments && - msg.content.attachments.length > 0 - ) - .flatMap((msg) => msg.content.attachments) - .filter((attachment) => { - if (!attachment) return false; - return ( - attachmentIds - .map((attch) => attch.toLowerCase().slice(0, 5)) - .includes(attachment.id.toLowerCase().slice(0, 5)) || - attachmentIds.some((id) => { - const attachmentId = id.toLowerCase().slice(0, 5); - return attachment.id - .toLowerCase() - .includes(attachmentId); - }) - ); - }); - - const attachmentsWithText = attachments - .map((attachment) => { - if (!attachment) return ""; - return `# ${attachment.title}\n${attachment.text}`; - }) - .filter((text) => text !== "") - .join("\n\n"); - - let currentSummary = ""; - - const modelSettings = getModelSettings( - runtime.character.modelProvider, - ModelClass.SMALL - ); - const chunkSize = modelSettings.maxOutputTokens; - - currentState.attachmentsWithText = attachmentsWithText; - currentState.objective = objective; - - const template = await trimTokens( - summarizationTemplate, - chunkSize + 500, - runtime - ); - const context = composeContext({ - state: currentState, - template, - }); - - const summary = await generateText({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - - currentSummary = currentSummary + "\n" + summary; - - if (!currentSummary) { - console.error("No summary found!"); - await callback(callbackData); - return callbackData; - } - - callbackData.text = currentSummary.trim(); - - if ( - callbackData.text && - (currentSummary.trim()?.split("\n").length < 4 || - currentSummary.trim()?.split(" ").length < 100) - ) { - callbackData.text = `Here is the summary: -\`\`\`md -${currentSummary.trim()} -\`\`\` -`; - await callback(callbackData); - } else if (currentSummary.trim()) { - const summaryFilename = `content/summary_${Date.now()}`; - await runtime.cacheManager.set(summaryFilename, currentSummary); - - callbackData.text = `I've attached the summary of the requested attachments as a text file.`; - await callback(callbackData, [summaryFilename]); - } else { - await callback(callbackData); - } - - return callbackData; - }) as Handler, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Can you summarize the PDF I just shared?", - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll analyze the PDF and provide a summary for you.", - action: "CHAT_WITH_ATTACHMENTS", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Could you look at these documents and tell me what they're about?", - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll review the documents and provide a summary of their contents.", - action: "CHAT_WITH_ATTACHMENTS", - }, - }, - ], - ] as ActionExample[][], -}; - -export default summarizeAction; diff --git a/packages/client-slack/src/actions/send-message.action.ts b/packages/client-slack/src/actions/send-message.action.ts deleted file mode 100644 index 22bdf74701957..0000000000000 --- a/packages/client-slack/src/actions/send-message.action.ts +++ /dev/null @@ -1,61 +0,0 @@ -import type { SlackClientContext, SlackMessage } from "../types/slack-types"; -import { elizaLogger } from "@elizaos/core"; - -// Cache to store recently sent messages -const recentMessages = new Map(); -const MESSAGE_CACHE_TTL = 5000; // 5 seconds TTL - -export class SendMessageAction { - constructor(private context: SlackClientContext) {} - - private cleanupOldMessages() { - const now = Date.now(); - for (const [key, value] of recentMessages.entries()) { - if (now - value.timestamp > MESSAGE_CACHE_TTL) { - recentMessages.delete(key); - } - } - } - - private isDuplicate(message: SlackMessage): boolean { - this.cleanupOldMessages(); - - // Create a unique key for the message - const messageKey = `${message.channelId}:${message.threadTs || "main"}:${message.text}`; - - // Check if we've seen this message recently - const recentMessage = recentMessages.get(messageKey); - if (recentMessage) { - return true; - } - - // Store the new message - recentMessages.set(messageKey, { - text: message.text, - timestamp: Date.now(), - }); - - return false; - } - - public async execute(message: SlackMessage): Promise { - try { - // Skip duplicate messages - if (this.isDuplicate(message)) { - elizaLogger.debug("Skipping duplicate message:", message.text); - return true; // Return true to indicate "success" since we're intentionally skipping - } - - const result = await this.context.client.chat.postMessage({ - channel: message.channelId, - text: message.text, - thread_ts: message.threadTs, - }); - - return result.ok === true; - } catch (error) { - console.error("Failed to send message:", error); - return false; - } - } -} diff --git a/packages/client-slack/src/actions/summarize_conversation.ts b/packages/client-slack/src/actions/summarize_conversation.ts deleted file mode 100644 index 35cfb7b694d46..0000000000000 --- a/packages/client-slack/src/actions/summarize_conversation.ts +++ /dev/null @@ -1,437 +0,0 @@ -import { - composeContext, - generateText, - splitChunks, - trimTokens, - parseJSONObjectFromText, - getModelSettings, -} from "@elizaos/core"; -import { getActorDetails } from "@elizaos/core"; -import { - type Action, - type ActionExample, - type Content, - type HandlerCallback, - type IAgentRuntime, - type Media, - type Memory, - ModelClass, - type State, - elizaLogger, -} from "@elizaos/core"; -import { type ISlackService, SLACK_SERVICE_TYPE } from "../types/slack-types"; - -export const summarizationTemplate = `# Summarized so far (we are adding to this) -{{currentSummary}} - -# Current conversation chunk we are summarizing (includes attachments) -{{memoriesWithAttachments}} - -Summarization objective: {{objective}} - -# Instructions: Summarize the conversation so far. Return the summary. Do not acknowledge this request, just summarize and continue the existing summary if there is one. Capture any important details to the objective. Only respond with the new summary text. -Your response should be extremely detailed and include any and all relevant information.`; - -export const dateRangeTemplate = `# Messages we are summarizing (the conversation is continued after this) -{{recentMessages}} - -# Instructions: {{senderName}} is requesting a summary of the conversation. Your goal is to determine their objective, along with the range of dates that their request covers. -The "objective" is a detailed description of what the user wants to summarize based on the conversation. If they just ask for a general summary, you can either base it off the conversation if the summary range is very recent, or set the object to be general, like "a detailed summary of the conversation between all users". - -The "start" and "end" are the range of dates that the user wants to summarize, relative to the current time. The format MUST be a number followed by a unit, like: -- "5 minutes ago" -- "2 hours ago" -- "1 day ago" -- "30 seconds ago" - -For example: -\`\`\`json -{ - "objective": "a detailed summary of the conversation between all users", - "start": "2 hours ago", - "end": "0 minutes ago" -} -\`\`\` - -If the user asks for "today", use "24 hours ago" as start and "0 minutes ago" as end. -If no time range is specified, default to "2 hours ago" for start and "0 minutes ago" for end. -`; - -const getDateRange = async ( - runtime: IAgentRuntime, - message: Memory, - state: State -): Promise<{ objective: string; start: number; end: number } | undefined> => { - state = (await runtime.composeState(message)) as State; - - const context = composeContext({ - state, - template: dateRangeTemplate, - }); - - for (let i = 0; i < 5; i++) { - const response = await generateText({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - - const parsedResponse = parseJSONObjectFromText(response) as { - objective: string; - start: string | number; - end: string | number; - } | null; - - if ( - parsedResponse?.objective && - parsedResponse?.start && - parsedResponse?.end - ) { - // Parse time strings like "5 minutes ago", "2 hours ago", etc. - const parseTimeString = (timeStr: string): number | null => { - const match = timeStr.match( - /^(\d+)\s+(second|minute|hour|day)s?\s+ago$/i - ); - if (!match) return null; - - const [_, amount, unit] = match; - const value = Number.parseInt(amount); - - if (isNaN(value)) return null; - - const multipliers: { [key: string]: number } = { - second: 1000, - minute: 60 * 1000, - hour: 60 * 60 * 1000, - day: 24 * 60 * 60 * 1000, - }; - - const multiplier = multipliers[unit.toLowerCase()]; - if (!multiplier) return null; - - return value * multiplier; - }; - - const startTime = parseTimeString(parsedResponse.start as string); - const endTime = parseTimeString(parsedResponse.end as string); - - if (startTime === null || endTime === null) { - elizaLogger.error( - "Invalid time format in response", - parsedResponse - ); - continue; - } - - return { - objective: parsedResponse.objective, - start: Date.now() - startTime, - end: Date.now() - endTime, - }; - } - } - - return undefined; -}; - -const summarizeAction: Action = { - name: "SUMMARIZE_CONVERSATION", - similes: [ - "RECAP", - "RECAP_CONVERSATION", - "SUMMARIZE_CHAT", - "SUMMARIZATION", - "CHAT_SUMMARY", - "CONVERSATION_SUMMARY", - ], - description: "Summarizes the conversation and attachments.", - validate: async ( - _runtime: IAgentRuntime, - message: Memory, - _state: State | undefined - ): Promise => { - if (message.content.source !== "slack") { - return false; - } - - const keywords: string[] = [ - "summarize", - "summarization", - "summary", - "recap", - "report", - "overview", - "review", - "rundown", - "wrap-up", - "brief", - "debrief", - "abstract", - "synopsis", - "outline", - "digest", - "abridgment", - "condensation", - "encapsulation", - "essence", - "gist", - "main points", - "key points", - "key takeaways", - "bulletpoint", - "highlights", - "tldr", - "tl;dr", - "in a nutshell", - "bottom line", - "long story short", - "sum up", - "sum it up", - "short version", - "bring me up to speed", - "catch me up", - ]; - - return keywords.some((keyword) => - message.content.text.toLowerCase().includes(keyword.toLowerCase()) - ); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: any, - callback: HandlerCallback - ): Promise => { - const currentState = (await runtime.composeState(message)) as State; - - const callbackData: Content = { - text: "", - action: "SUMMARIZATION_RESPONSE", - source: message.content.source, - attachments: [], - }; - - // 1. Extract date range from the message - const dateRange = await getDateRange(runtime, message, currentState); - if (!dateRange) { - elizaLogger.error("Couldn't determine date range from message"); - callbackData.text = - "I couldn't determine the time range to summarize. Please try asking for a specific period like 'last hour' or 'today'."; - await callback(callbackData); - return callbackData; - } - - const { objective, start, end } = dateRange; - - // 2. Get memories from the database - const memories = await runtime.messageManager.getMemories({ - roomId: message.roomId, - start, - end, - count: 10000, - unique: false, - }); - - if (!memories || memories.length === 0) { - callbackData.text = - "I couldn't find any messages in that time range to summarize."; - await callback(callbackData); - return callbackData; - } - - const actors = await getActorDetails({ - runtime: runtime as IAgentRuntime, - roomId: message.roomId, - }); - - const actorMap = new Map(actors.map((actor) => [actor.id, actor])); - - const formattedMemories = memories - .map((memory) => { - const actor = actorMap.get(memory.userId); - const userName = - actor?.name || actor?.username || "Unknown User"; - const attachments = memory.content.attachments - ?.map((attachment: Media) => { - if (!attachment) return ""; - return `---\nAttachment: ${attachment.id}\n${attachment.description || ""}\n${attachment.text || ""}\n---`; - }) - .filter((text) => text !== "") - .join("\n"); - return `${userName}: ${memory.content.text}\n${attachments || ""}`; - }) - .join("\n"); - - let currentSummary = ""; - - const modelSettings = getModelSettings( - runtime.character.modelProvider, - ModelClass.SMALL - ); - const chunkSize = modelSettings.maxOutputTokens; - - const chunks = await splitChunks(formattedMemories, chunkSize, 0); - - currentState.memoriesWithAttachments = formattedMemories; - currentState.objective = objective; - - // Only process one chunk at a time and stop after getting a valid summary - for (let i = 0; i < chunks.length; i++) { - const chunk = chunks[i]; - currentState.currentSummary = currentSummary; - currentState.currentChunk = chunk; - - const template = await trimTokens( - summarizationTemplate, - chunkSize + 500, - runtime - ); - - const context = composeContext({ - state: currentState, - template, - }); - - const summary = await generateText({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - - if (summary) { - currentSummary = currentSummary + "\n" + summary; - break; // Stop after getting first valid summary - } - } - - if (!currentSummary.trim()) { - callbackData.text = - "I wasn't able to generate a summary of the conversation."; - await callback(callbackData); - return callbackData; - } - - // Format dates consistently - const formatDate = (timestamp: number) => { - const date = new Date(timestamp); - const pad = (n: number) => (n < 10 ? `0${n}` : n); - return `${date.getFullYear()}-${pad(date.getMonth() + 1)}-${pad(date.getDate())} ${pad(date.getHours())}:${pad(date.getMinutes())}`; - }; - - try { - // Get the user's name for the summary header - const requestingUser = actorMap.get(message.userId); - const userName = - requestingUser?.name || - requestingUser?.username || - "Unknown User"; - - const summaryContent = `Summary of conversation from ${formatDate(start)} to ${formatDate(end)} - -Here is a detailed summary of the conversation between ${userName} and ${runtime.character.name}:\n\n${currentSummary.trim()}`; - - // If summary is long, upload as a file - if (summaryContent.length > 1000) { - const summaryFilename = `summary_${Date.now()}.txt`; - elizaLogger.debug("Uploading summary file to Slack..."); - - try { - // Save file content - await runtime.cacheManager.set( - summaryFilename, - summaryContent - ); - - // Get the Slack service from runtime - const slackService = runtime.getService( - SLACK_SERVICE_TYPE - ) as ISlackService; - if (!slackService?.client) { - elizaLogger.error( - "Slack service not found or not properly initialized" - ); - throw new Error("Slack service not found"); - } - - // Upload file using Slack's API - elizaLogger.debug( - `Uploading file ${summaryFilename} to channel ${message.roomId}` - ); - const uploadResult = await slackService.client.files.upload( - { - channels: message.roomId, - filename: summaryFilename, - title: "Conversation Summary", - content: summaryContent, - initial_comment: `I've created a summary of the conversation from ${formatDate(start)} to ${formatDate(end)}.`, - } - ); - - if (uploadResult.ok) { - elizaLogger.success( - "Successfully uploaded summary file to Slack" - ); - callbackData.text = `I've created a summary of the conversation from ${formatDate(start)} to ${formatDate(end)}. You can find it in the thread above.`; - } else { - elizaLogger.error( - "Failed to upload file to Slack:", - uploadResult.error - ); - throw new Error("Failed to upload file to Slack"); - } - } catch (error) { - elizaLogger.error("Error uploading summary file:", error); - // Fallback to sending as a message - callbackData.text = summaryContent; - } - } else { - // For shorter summaries, just send as a message - callbackData.text = summaryContent; - } - - await callback(callbackData); - return callbackData; - } catch (error) { - elizaLogger.error("Error in summary generation:", error); - callbackData.text = - "I encountered an error while generating the summary. Please try again."; - await callback(callbackData); - return callbackData; - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Can you give me a detailed report on what we're talking about?", - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll analyze the conversation and provide a summary for you.", - action: "SUMMARIZE_CONVERSATION", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Please summarize our discussion from the last hour, including any shared files.", - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll review the conversation and shared content to create a comprehensive summary.", - action: "SUMMARIZE_CONVERSATION", - }, - }, - ], - ] as ActionExample[][], -}; - -export default summarizeAction; diff --git a/packages/client-slack/src/actions/transcribe_media.ts b/packages/client-slack/src/actions/transcribe_media.ts deleted file mode 100644 index 49249b5a005ef..0000000000000 --- a/packages/client-slack/src/actions/transcribe_media.ts +++ /dev/null @@ -1,217 +0,0 @@ -import { - composeContext, - generateText, - parseJSONObjectFromText, -} from "@elizaos/core"; -import { - type Action, - type ActionExample, - type Content, - type HandlerCallback, - type Handler, - type IAgentRuntime, - type Memory, - ModelClass, - type State, -} from "@elizaos/core"; - -export const transcriptionTemplate = `# Transcription of media file -{{mediaTranscript}} - -# Instructions: Return only the full transcript of the media file without any additional context or commentary.`; - -export const mediaAttachmentIdTemplate = `# Messages we are transcribing -{{recentMessages}} - -# Instructions: {{senderName}} is requesting a transcription of a specific media file (audio or video). Your goal is to determine the ID of the attachment they want transcribed. -The "attachmentId" is the ID of the media file attachment that the user wants transcribed. If not specified, return null. - -Your response must be formatted as a JSON block with this structure: -\`\`\`json -{ - "attachmentId": "" -} -\`\`\` -`; - -const getMediaAttachmentId = async ( - runtime: IAgentRuntime, - message: Memory, - state: State -): Promise => { - const context = composeContext({ - state, - template: mediaAttachmentIdTemplate, - }); - - for (let i = 0; i < 5; i++) { - const response = await generateText({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - - const parsedResponse = parseJSONObjectFromText(response) as { - attachmentId: string; - } | null; - - if (parsedResponse?.attachmentId) { - return parsedResponse.attachmentId; - } - } - return null; -}; - -const transcribeMediaAction: Action = { - name: "TRANSCRIBE_MEDIA", - similes: [ - "TRANSCRIBE_AUDIO", - "TRANSCRIBE_VIDEO", - "MEDIA_TRANSCRIPT", - "VIDEO_TRANSCRIPT", - "AUDIO_TRANSCRIPT", - ], - description: - "Transcribe the full text of an audio or video file that the user has attached.", - validate: async ( - _runtime: IAgentRuntime, - message: Memory, - _state: State | undefined - ): Promise => { - if (message.content.source !== "slack") { - return false; - } - - const keywords: string[] = [ - "transcribe", - "transcript", - "audio", - "video", - "media", - "youtube", - "meeting", - "recording", - "podcast", - "call", - "conference", - "interview", - "speech", - "lecture", - "presentation", - ]; - return keywords.some((keyword) => - message.content.text.toLowerCase().includes(keyword.toLowerCase()) - ); - }, - handler: (async ( - runtime: IAgentRuntime, - message: Memory, - state: State | undefined, - _options: any, - callback: HandlerCallback - ): Promise => { - const currentState = (await runtime.composeState(message)) as State; - - const callbackData: Content = { - text: "", - action: "TRANSCRIBE_MEDIA_RESPONSE", - source: message.content.source, - attachments: [], - }; - - const attachmentId = await getMediaAttachmentId( - runtime, - message, - currentState - ); - if (!attachmentId) { - console.error("Couldn't get media attachment ID from message"); - await callback(callbackData); - return callbackData; - } - - const attachment = currentState.recentMessagesData - .filter( - (msg) => - msg.content.attachments && - msg.content.attachments.length > 0 - ) - .flatMap((msg) => msg.content.attachments) - .find((attachment) => { - if (!attachment) return false; - return ( - attachment.id.toLowerCase() === attachmentId.toLowerCase() - ); - }); - - if (!attachment) { - console.error(`Couldn't find attachment with ID ${attachmentId}`); - await callback(callbackData); - return callbackData; - } - - const mediaTranscript = attachment.text || ""; - callbackData.text = mediaTranscript.trim(); - - if ( - callbackData.text && - (callbackData.text?.split("\n").length < 4 || - callbackData.text?.split(" ").length < 100) - ) { - callbackData.text = `Here is the transcript: -\`\`\`md -${mediaTranscript.trim()} -\`\`\` -`; - await callback(callbackData); - } else if (callbackData.text) { - const transcriptFilename = `content/transcript_${Date.now()}`; - await runtime.cacheManager.set( - transcriptFilename, - callbackData.text - ); - - callbackData.text = `I've attached the transcript as a text file.`; - await callback(callbackData, [transcriptFilename]); - } else { - console.warn("Empty response from transcribe media action"); - await callback(callbackData); - } - - return callbackData; - }) as Handler, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Please transcribe the audio file I just shared.", - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll transcribe the audio file for you.", - action: "TRANSCRIBE_MEDIA", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Can you get me a transcript of this meeting recording?", - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll generate a transcript of the meeting recording for you.", - action: "TRANSCRIBE_MEDIA", - }, - }, - ], - ] as ActionExample[][], -}; - -export default transcribeMediaAction; diff --git a/packages/client-slack/src/attachments.ts b/packages/client-slack/src/attachments.ts deleted file mode 100644 index 20f6db30964a1..0000000000000 --- a/packages/client-slack/src/attachments.ts +++ /dev/null @@ -1,375 +0,0 @@ -import { - generateText, - trimTokens, - parseJSONObjectFromText, -} from "@elizaos/core"; -import { - type IAgentRuntime, - type IImageDescriptionService, - type IPdfService, - type ITranscriptionService, - type IVideoService, - type Media, - ModelClass, - ServiceType, -} from "@elizaos/core"; -import type { WebClient } from "@slack/web-api"; -import ffmpeg from "fluent-ffmpeg"; -import fs from "fs"; - -async function generateSummary( - runtime: IAgentRuntime, - text: string -): Promise<{ title: string; description: string }> { - text = await trimTokens(text, 100000, runtime); - - const prompt = `Please generate a concise summary for the following text: - - Text: """ - ${text} - """ - - Respond with a JSON object in the following format: - \`\`\`json - { - "title": "Generated Title", - "summary": "Generated summary and/or description of the text" - } - \`\`\``; - - const response = await generateText({ - runtime, - context: prompt, - modelClass: ModelClass.SMALL, - }); - - const parsedResponse = parseJSONObjectFromText(response); - - if (parsedResponse?.title && parsedResponse?.summary) { - return { - title: parsedResponse.title, - description: parsedResponse.summary, - }; - } - - return { - title: "", - description: "", - }; -} - -interface SlackFile { - id: string; - url_private: string; - name: string; - size: number; - mimetype: string; - title?: string; -} - -export class AttachmentManager { - private attachmentCache: Map = new Map(); - private runtime: IAgentRuntime; - private client: WebClient; - - constructor(runtime: IAgentRuntime, client: WebClient) { - this.runtime = runtime; - this.client = client; - } - - async processAttachments(files: SlackFile[]): Promise { - const processedAttachments: Media[] = []; - - for (const file of files) { - const media = await this.processAttachment(file); - if (media) { - processedAttachments.push(media); - } - } - - return processedAttachments; - } - - async processAttachment(file: SlackFile): Promise { - if (this.attachmentCache.has(file.url_private)) { - return this.attachmentCache.get(file.url_private)!; - } - - let media: Media | null = null; - - try { - const videoService = this.runtime.getService( - ServiceType.VIDEO - ); - - if (file.mimetype.startsWith("application/pdf")) { - media = await this.processPdfAttachment(file); - } else if (file.mimetype.startsWith("text/plain")) { - media = await this.processPlaintextAttachment(file); - } else if ( - file.mimetype.startsWith("audio/") || - file.mimetype.startsWith("video/mp4") - ) { - media = await this.processAudioVideoAttachment(file); - } else if (file.mimetype.startsWith("image/")) { - media = await this.processImageAttachment(file); - } else if ( - file.mimetype.startsWith("video/") || - (videoService?.isVideoUrl(file.url_private) ?? false) - ) { - media = await this.processVideoAttachment(file); - } else { - media = await this.processGenericAttachment(file); - } - - if (media) { - this.attachmentCache.set(file.url_private, media); - } - } catch (error: unknown) { - const errorMessage = - error instanceof Error ? error.message : "Unknown error"; - console.error(`Error processing attachment: ${errorMessage}`); - media = await this.processGenericAttachment(file); - } - - return media; - } - - private async fetchFileContent(file: SlackFile): Promise { - const response = await fetch(file.url_private, { - headers: { - Authorization: `Bearer ${this.client.token}`, - }, - }); - const arrayBuffer = await response.arrayBuffer(); - return Buffer.from(arrayBuffer); - } - - private async processAudioVideoAttachment(file: SlackFile): Promise { - try { - const fileBuffer = await this.fetchFileContent(file); - let audioBuffer: Buffer; - - if (file.mimetype.startsWith("audio/")) { - audioBuffer = fileBuffer; - } else if (file.mimetype.startsWith("video/mp4")) { - audioBuffer = await this.extractAudioFromMP4(fileBuffer); - } else { - throw new Error("Unsupported audio/video format"); - } - - const transcriptionService = - this.runtime.getService( - ServiceType.TRANSCRIPTION - ); - if (!transcriptionService) { - throw new Error("Transcription service not found"); - } - - const transcription = - await transcriptionService.transcribeAttachment(audioBuffer); - if (!transcription) { - throw new Error("Transcription failed"); - } - - const { title, description } = await generateSummary( - this.runtime, - transcription - ); - - return { - id: file.id, - url: file.url_private, - title: title || "Audio/Video Attachment", - source: file.mimetype.startsWith("audio/") ? "Audio" : "Video", - description: - description || - "User-uploaded audio/video attachment which has been transcribed", - text: transcription, - }; - } catch (error: unknown) { - const errorMessage = - error instanceof Error ? error.message : "Unknown error"; - console.error( - `Error processing audio/video attachment: ${errorMessage}` - ); - return { - id: file.id, - url: file.url_private, - title: "Audio/Video Attachment", - source: file.mimetype.startsWith("audio/") ? "Audio" : "Video", - description: "An audio/video attachment (transcription failed)", - text: `This is an audio/video attachment. File name: ${file.name}, Size: ${file.size} bytes, Content type: ${file.mimetype}`, - }; - } - } - - private async extractAudioFromMP4(mp4Data: Buffer): Promise { - const tempMP4File = `temp_${Date.now()}.mp4`; - const tempAudioFile = `temp_${Date.now()}.mp3`; - - try { - fs.writeFileSync(tempMP4File, mp4Data); - - await new Promise((resolve, reject) => { - ffmpeg(tempMP4File) - .outputOptions("-vn") - .audioCodec("libmp3lame") - .save(tempAudioFile) - .on("end", () => resolve()) - .on("error", (err: Error) => reject(err)) - .run(); - }); - - return fs.readFileSync(tempAudioFile); - } finally { - if (fs.existsSync(tempMP4File)) { - fs.unlinkSync(tempMP4File); - } - if (fs.existsSync(tempAudioFile)) { - fs.unlinkSync(tempAudioFile); - } - } - } - - private async processPdfAttachment(file: SlackFile): Promise { - try { - const pdfBuffer = await this.fetchFileContent(file); - const pdfService = this.runtime.getService( - ServiceType.PDF - ); - - if (!pdfService) { - throw new Error("PDF service not found"); - } - - const text = await pdfService.convertPdfToText(pdfBuffer); - const { title, description } = await generateSummary( - this.runtime, - text - ); - - return { - id: file.id, - url: file.url_private, - title: title || "PDF Attachment", - source: "PDF", - description: description || "A PDF document", - text: text, - }; - } catch (error: unknown) { - const errorMessage = - error instanceof Error ? error.message : "Unknown error"; - console.error(`Error processing PDF attachment: ${errorMessage}`); - return { - id: file.id, - url: file.url_private, - title: "PDF Attachment (conversion failed)", - source: "PDF", - description: - "A PDF document that could not be converted to text", - text: `This is a PDF document. File name: ${file.name}, Size: ${file.size} bytes`, - }; - } - } - - private async processPlaintextAttachment(file: SlackFile): Promise { - try { - const textBuffer = await this.fetchFileContent(file); - const text = textBuffer.toString("utf-8"); - const { title, description } = await generateSummary( - this.runtime, - text - ); - - return { - id: file.id, - url: file.url_private, - title: title || "Text Attachment", - source: "Text", - description: description || "A text document", - text: text, - }; - } catch (error: unknown) { - const errorMessage = - error instanceof Error ? error.message : "Unknown error"; - console.error(`Error processing text attachment: ${errorMessage}`); - return this.processGenericAttachment(file); - } - } - - private async processImageAttachment(file: SlackFile): Promise { - try { - const imageService = - this.runtime.getService( - ServiceType.IMAGE_DESCRIPTION - ); - if (!imageService) { - throw new Error("Image description service not found"); - } - - const imageDescription = - (await imageService.describeImage(file.url_private)) || ""; - const descriptionText = - typeof imageDescription === "string" - ? imageDescription - : "Image description not available"; - - return { - id: file.id, - url: file.url_private, - title: "Image Attachment", - source: "Image", - description: descriptionText, - text: - descriptionText || - `This is an image. File name: ${file.name}, Size: ${file.size} bytes`, - }; - } catch (error: unknown) { - const errorMessage = - error instanceof Error ? error.message : "Unknown error"; - console.error(`Error processing image attachment: ${errorMessage}`); - return this.processGenericAttachment(file); - } - } - - private async processVideoAttachment(file: SlackFile): Promise { - try { - const videoService = this.runtime.getService( - ServiceType.VIDEO - ); - if (!videoService) { - throw new Error("Video service not found"); - } - - // Using a more generic approach since describeVideo isn't in the interface - const description = await this.processAudioVideoAttachment(file); - return { - id: file.id, - url: file.url_private, - title: "Video Attachment", - source: "Video", - description: description.text || "A video attachment", - text: - description.text || - `This is a video. File name: ${file.name}, Size: ${file.size} bytes`, - }; - } catch (error: unknown) { - const errorMessage = - error instanceof Error ? error.message : "Unknown error"; - console.error(`Error processing video attachment: ${errorMessage}`); - return this.processGenericAttachment(file); - } - } - - private async processGenericAttachment(file: SlackFile): Promise { - return { - id: file.id, - url: file.url_private, - title: file.title || "File Attachment", - source: "File", - description: `A file attachment of type: ${file.mimetype}`, - text: `This is a file attachment. File name: ${file.name}, Size: ${file.size} bytes, Type: ${file.mimetype}`, - }; - } -} diff --git a/packages/client-slack/src/environment.ts b/packages/client-slack/src/environment.ts deleted file mode 100644 index 0abbb9d2901d9..0000000000000 --- a/packages/client-slack/src/environment.ts +++ /dev/null @@ -1,71 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { elizaLogger } from "@elizaos/core"; -import { z } from "zod"; - -export const slackEnvSchema = z.object({ - SLACK_APP_ID: z.string().min(1, "Slack application ID is required"), - SLACK_CLIENT_ID: z.string().min(1, "Slack client ID is required"), - SLACK_CLIENT_SECRET: z.string().min(1, "Slack client secret is required"), - SLACK_SIGNING_SECRET: z.string().min(1, "Slack signing secret is required"), - SLACK_VERIFICATION_TOKEN: z - .string() - .min(1, "Slack verification token is required"), - SLACK_BOT_TOKEN: z.string().min(1, "Slack bot token is required"), - SLACK_SERVER_PORT: z - .string() - .optional() - .transform((val) => (val ? Number.parseInt(val) : 3000)), -}); - -export type SlackConfig = z.infer; - -export async function validateSlackConfig( - runtime: IAgentRuntime -): Promise { - try { - elizaLogger.debug( - "Validating Slack configuration with runtime settings" - ); - const config = { - SLACK_APP_ID: - runtime.getSetting("SLACK_APP_ID") || process.env.SLACK_APP_ID, - SLACK_CLIENT_ID: - runtime.getSetting("SLACK_CLIENT_ID") || - process.env.SLACK_CLIENT_ID, - SLACK_CLIENT_SECRET: - runtime.getSetting("SLACK_CLIENT_SECRET") || - process.env.SLACK_CLIENT_SECRET, - SLACK_SIGNING_SECRET: - runtime.getSetting("SLACK_SIGNING_SECRET") || - process.env.SLACK_SIGNING_SECRET, - SLACK_VERIFICATION_TOKEN: - runtime.getSetting("SLACK_VERIFICATION_TOKEN") || - process.env.SLACK_VERIFICATION_TOKEN, - SLACK_BOT_TOKEN: - runtime.getSetting("SLACK_BOT_TOKEN") || - process.env.SLACK_BOT_TOKEN, - SLACK_SERVER_PORT: - runtime.getSetting("SLACK_SERVER_PORT") || - process.env.SLACK_SERVER_PORT, - }; - - elizaLogger.debug("Parsing configuration with schema", config); - const validated = slackEnvSchema.parse(config); - elizaLogger.debug("Configuration validated successfully"); - return validated; - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((e) => `${e.path.join(".")}: ${e.message}`) - .join("\n"); - elizaLogger.error( - "Configuration validation failed:", - errorMessages - ); - throw new Error( - `Slack configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/client-slack/src/events.ts b/packages/client-slack/src/events.ts deleted file mode 100644 index a17eaf8f82290..0000000000000 --- a/packages/client-slack/src/events.ts +++ /dev/null @@ -1,146 +0,0 @@ -import { createEventAdapter } from "@slack/events-api"; -import type { WebClient } from "@slack/web-api"; -import type { SlackConfig } from "./types/slack-types"; -import type { MessageManager } from "./messages"; -import { elizaLogger } from "@elizaos/core"; - -export class EventHandler { - private events: ReturnType; - private messageManager: MessageManager; - - constructor( - config: SlackConfig, - client: WebClient, - messageManager: MessageManager - ) { - elizaLogger.log("🎮 Initializing Slack event handler..."); - elizaLogger.debug( - "Creating event adapter with signing secret:", - config.signingSecret.slice(0, 4) + "..." - ); - this.events = createEventAdapter(config.signingSecret); - this.messageManager = messageManager; - - this.setupEventListeners(); - elizaLogger.log("✅ Event handler initialization complete"); - } - - private setupEventListeners() { - elizaLogger.log("📡 Setting up event listeners..."); - - // Handle URL verification - this.events.on("url_verification", (event: any) => { - elizaLogger.debug("🔍 [URL_VERIFICATION] Received challenge:", { - type: event.type, - challenge: event.challenge, - }); - return event.challenge; - }); - - // Handle messages - this.events.on("message", async (event: any) => { - try { - elizaLogger.debug("📨 [MESSAGE] Received message event:", { - type: event.type, - subtype: event.subtype, - user: event.user, - channel: event.channel, - text: event.text, - ts: event.ts, - thread_ts: event.thread_ts, - raw_event: JSON.stringify(event, null, 2), - }); - await this.messageManager.handleMessage(event); - } catch (error) { - elizaLogger.error( - "❌ [MESSAGE] Error handling message event:", - error - ); - } - }); - - // Handle app mentions - this.events.on("app_mention", async (event: any) => { - try { - elizaLogger.debug("🔔 [MENTION] Received app mention event:", { - type: event.type, - user: event.user, - channel: event.channel, - text: event.text, - ts: event.ts, - thread_ts: event.thread_ts, - raw_event: JSON.stringify(event, null, 2), - }); - await this.messageManager.handleMessage(event); - } catch (error) { - elizaLogger.error( - "❌ [MENTION] Error handling app mention event:", - error - ); - } - }); - - // Handle reactions - this.events.on("reaction_added", async (event: any) => { - try { - elizaLogger.debug("⭐ [REACTION] Reaction added:", { - type: event.type, - user: event.user, - reaction: event.reaction, - item: event.item, - raw_event: JSON.stringify(event, null, 2), - }); - // TODO: Implement reaction handling - } catch (error) { - elizaLogger.error( - "❌ [REACTION] Error handling reaction_added event:", - error - ); - } - }); - - this.events.on("reaction_removed", async (event: any) => { - try { - elizaLogger.debug("💫 [REACTION] Reaction removed:", { - type: event.type, - user: event.user, - reaction: event.reaction, - item: event.item, - raw_event: JSON.stringify(event, null, 2), - }); - // TODO: Implement reaction handling - } catch (error) { - elizaLogger.error( - "❌ [REACTION] Error handling reaction_removed event:", - error - ); - } - }); - - // Handle errors - this.events.on("error", (error: Error) => { - elizaLogger.error("❌ [ERROR] Slack Events API error:", error); - }); - - // Add debug logging for all events - this.events.on("*", (event: any) => { - elizaLogger.debug("🔄 [RAW] Raw Slack event received:", { - type: event.type, - subtype: event.subtype, - user: event.user, - channel: event.channel, - ts: event.ts, - raw_event: JSON.stringify(event, null, 2), - }); - }); - - elizaLogger.log("✅ Event listeners setup complete"); - } - - public getEventAdapter() { - elizaLogger.debug( - "🔌 [ADAPTER] Returning event adapter for express middleware" - ); - return this.events; - } -} diff --git a/packages/client-slack/src/examples/sc_01.png b/packages/client-slack/src/examples/sc_01.png deleted file mode 100644 index 23041a979c1b7..0000000000000 Binary files a/packages/client-slack/src/examples/sc_01.png and /dev/null differ diff --git a/packages/client-slack/src/examples/sc_02.png b/packages/client-slack/src/examples/sc_02.png deleted file mode 100644 index 2d91d022cff5d..0000000000000 Binary files a/packages/client-slack/src/examples/sc_02.png and /dev/null differ diff --git a/packages/client-slack/src/examples/standalone-attachment.ts b/packages/client-slack/src/examples/standalone-attachment.ts deleted file mode 100644 index 4214af226a2e9..0000000000000 --- a/packages/client-slack/src/examples/standalone-attachment.ts +++ /dev/null @@ -1,113 +0,0 @@ -import { config } from "dotenv"; -import { SlackClientProvider } from "../providers/slack-client.provider"; -import { AttachmentManager } from "../attachments"; -import type { SlackConfig } from "../types/slack-types"; -import path from "path"; -import { elizaLogger } from "@elizaos/core"; - -// Load environment variables -config({ path: path.resolve(__dirname, "../../../.env") }); - -elizaLogger.log("\n=== Starting Slack Attachment Example ===\n"); - -// Load environment variables -const slackConfig: SlackConfig = { - appId: process.env.SLACK_APP_ID || "", - clientId: process.env.SLACK_CLIENT_ID || "", - clientSecret: process.env.SLACK_CLIENT_SECRET || "", - signingSecret: process.env.SLACK_SIGNING_SECRET || "", - verificationToken: process.env.SLACK_VERIFICATION_TOKEN || "", - botToken: process.env.SLACK_BOT_TOKEN || "", - botId: process.env.SLACK_BOT_ID || "", -}; - -elizaLogger.log("Environment variables loaded:"); -Object.entries(slackConfig).forEach(([key, value]) => { - if (value) { - elizaLogger.log(`${key}: ${value.slice(0, 4)}...${value.slice(-4)}`); - } else { - console.error(`Missing ${key}`); - } -}); - -async function runExample() { - try { - elizaLogger.log("\nInitializing Slack client..."); - const provider = new SlackClientProvider(slackConfig); - const client = provider.getContext().client; - - elizaLogger.log("\nValidating Slack connection..."); - const isValid = await provider.validateConnection(); - if (!isValid) { - throw new Error("Failed to validate Slack connection"); - } - elizaLogger.log("✓ Successfully connected to Slack"); - - // Test file upload - const channelId = process.env.SLACK_CHANNEL_ID; - if (!channelId) { - throw new Error("SLACK_CHANNEL_ID is required"); - } - - elizaLogger.log("\nSending test message with attachment..."); - const testMessage = "Here is a test message with an attachment"; - - // Create a test file - const testFilePath = path.join(__dirname, "test.txt"); - async function loadFs() { - return await import("fs"); - } - const fs = await loadFs(); - fs.writeFileSync( - testFilePath, - "This is a test file content for attachment testing." - ); - - // Upload the file - const fileUpload = await client.files.upload({ - channels: channelId, - file: fs.createReadStream(testFilePath), - filename: "test.txt", - title: "Test Attachment", - initial_comment: testMessage, - }); - - elizaLogger.log("✓ File uploaded successfully"); - - // Initialize AttachmentManager - const runtime = { - getSetting: (key: string) => process.env[key], - getService: () => null, - // Add other required runtime properties as needed - }; - const attachmentManager = new AttachmentManager(runtime as any, client); - - // Process the uploaded file - if (fileUpload.file) { - elizaLogger.log("\nProcessing attachment..."); - const processedAttachment = - await attachmentManager.processAttachment({ - id: fileUpload.file.id, - url_private: fileUpload.file.url_private || "", - name: fileUpload.file.name || "", - size: fileUpload.file.size || 0, - mimetype: fileUpload.file.mimetype || "text/plain", - title: fileUpload.file.title || "", - }); - - elizaLogger.log("✓ Attachment processed:", processedAttachment); - } - - // Cleanup - fs.unlinkSync(testFilePath); - elizaLogger.log("\n✓ Test completed successfully"); - } catch (error) { - console.error("Error:", error); - process.exit(1); - } -} - -runExample().then(() => { - elizaLogger.log("\n=== Example completed ===\n"); - process.exit(0); -}); diff --git a/packages/client-slack/src/examples/standalone-example.ts b/packages/client-slack/src/examples/standalone-example.ts deleted file mode 100644 index 53ae672b7b8f1..0000000000000 --- a/packages/client-slack/src/examples/standalone-example.ts +++ /dev/null @@ -1,213 +0,0 @@ -import { SlackClientProvider } from "../providers/slack-client.provider"; -import type { SlackConfig } from "../types/slack-types"; -import { EventHandler } from "../events"; -import { config } from "dotenv"; -import { resolve } from "path"; -import { createReadStream } from "fs"; -import express from "express"; - -// Load environment variables -const envPath = resolve(__dirname, "../../../../.env"); -console.log("Loading environment from:", envPath); -config({ path: envPath }); - -function validateEnvironment() { - const requiredEnvVars = [ - "SLACK_APP_ID", - "SLACK_CLIENT_ID", - "SLACK_CLIENT_SECRET", - "SLACK_SIGNING_SECRET", - "SLACK_VERIFICATION_TOKEN", - "SLACK_BOT_TOKEN", - "SLACK_CHANNEL_ID", - ]; - - const missing = requiredEnvVars.filter((key) => !process.env[key]); - if (missing.length > 0) { - console.error("Missing required environment variables:", missing); - return false; - } - - // Log masked versions of the tokens for debugging - console.log("Environment variables loaded:"); - requiredEnvVars.forEach((key) => { - const value = process.env[key] || ""; - const maskedValue = - value.length > 8 - ? `${value.substring(0, 4)}...${value.substring(value.length - 4)}` - : "****"; - console.log(`${key}: ${maskedValue}`); - }); - - return true; -} - -async function startServer( - app: express.Application, - port: number -): Promise { - try { - await new Promise((resolve, reject) => { - app.listen(port, () => resolve()).on("error", (err: any) => { - if (err.code === "EADDRINUSE") { - console.log(`Port ${port} is busy, trying ${port + 1}...`); - resolve(); - } else { - reject(err); - } - }); - }); - return port; - } catch (error) { - if (port < 3010) { - // Try up to 10 ports - return startServer(app, port + 1); - } - throw error; - } -} - -async function runExample() { - console.log("\n=== Starting Slack Client Example ===\n"); - - if (!validateEnvironment()) { - throw new Error("Environment validation failed"); - } - - // Initialize the client with your Slack credentials - const slackConfig: SlackConfig = { - appId: process.env.SLACK_APP_ID || "", - clientId: process.env.SLACK_CLIENT_ID || "", - clientSecret: process.env.SLACK_CLIENT_SECRET || "", - signingSecret: process.env.SLACK_SIGNING_SECRET || "", - verificationToken: process.env.SLACK_VERIFICATION_TOKEN || "", - botToken: process.env.SLACK_BOT_TOKEN || "", - botId: process.env.SLACK_BOT_ID || "", // This will be updated automatically - }; - - console.log("\nInitializing Slack client..."); - const slackProvider = new SlackClientProvider(slackConfig); - - try { - // Validate the connection - console.log("\nValidating Slack connection..."); - const isConnected = await slackProvider.validateConnection(); - if (!isConnected) { - throw new Error("Failed to connect to Slack"); - } - console.log("✓ Successfully connected to Slack"); - - // Set up event handling - console.log("\nSetting up event handling..."); - const eventHandler = new EventHandler( - slackConfig, - slackProvider.getContext().client - ); - const events = eventHandler.getEventAdapter(); - - // Create Express app - const app = express(); - const basePort = Number.parseInt(process.env.PORT || "3000"); - - // Mount the event handler - app.use("/slack/events", events.expressMiddleware()); - - // Send initial message - const channelId = process.env.SLACK_CHANNEL_ID || ""; - console.log(`\nSending initial message to channel: ${channelId}`); - - try { - // Send text message - const messageResult = await slackProvider.sendMessage( - channelId, - "Hello! I am now active and ready to help. Here are my capabilities:" - ); - console.log("✓ Initial message sent:", messageResult); - - // Send message with image - const imagePath = resolve(__dirname, "../tests/test_image.png"); - console.log("\nSending message with image..."); - const imageResult = await slackProvider - .getContext() - .client.files.uploadV2({ - channel_id: channelId, - file: createReadStream(imagePath), - filename: "test_image.png", - title: "Test Image", - initial_comment: "1. I can send messages with images 🖼️", - }); - console.log("✓ Image message sent:", imageResult); - - // Send message in thread - if (messageResult.ts) { - console.log("\nSending message in thread..."); - const threadResult = await slackProvider.replyInThread( - channelId, - messageResult.ts, - "2. I can reply in threads 🧵" - ); - console.log("✓ Thread message sent:", threadResult); - - // Send another image in the thread - console.log("\nSending image in thread..."); - const threadImageResult = await slackProvider - .getContext() - .client.files.uploadV2({ - channel_id: channelId, - file: createReadStream(imagePath), - filename: "test_image_thread.png", - title: "Test Image in Thread", - thread_ts: messageResult.ts, - initial_comment: - "3. I can also send images in threads! 🖼️🧵", - }); - console.log("✓ Thread image sent:", threadImageResult); - } - - // Start the server - const port = await startServer(app, basePort); - console.log(`\n✓ Slack event server is running on port ${port}`); - console.log("\n=== Bot is ready to interact! ==="); - console.log("\nCore functionalities demonstrated:"); - console.log("1. Sending regular messages"); - console.log("2. Sending images and attachments"); - console.log("3. Replying in threads"); - console.log("4. Sending images in threads"); - console.log( - "\nTry mentioning me with @eve_predict_client to interact!" - ); - - if (!process.env.SLACK_BOT_ID) { - console.log(`\nℹ️ Bot ID: ${slackConfig.botId}`); - } - } catch (error) { - console.error("\n❌ Error during initialization:", error); - // Continue even if initial messages fail - console.log("\nStarting server despite initialization errors..."); - - const port = await startServer(app, basePort); - console.log(`\n✓ Slack event server is running on port ${port}`); - console.log("\n=== Bot is ready to interact! ==="); - } - } catch (error) { - console.error("\n❌ Error in Slack client example:"); - if (error instanceof Error) { - console.error("Error message:", error.message); - console.error("Stack trace:", error.stack); - if ("data" in error) { - console.error("Error data:", (error as any).data); - } - } else { - console.error("Unknown error:", error); - } - process.exit(1); - } -} - -// Run the example if this file is executed directly -if (require.main === module) { - runExample().catch((error) => { - console.error("Fatal error:", error); - process.exit(1); - }); -} diff --git a/packages/client-slack/src/examples/standalone-summarize.ts b/packages/client-slack/src/examples/standalone-summarize.ts deleted file mode 100644 index 1d78c721f6b69..0000000000000 --- a/packages/client-slack/src/examples/standalone-summarize.ts +++ /dev/null @@ -1,104 +0,0 @@ -import { SlackClientProvider } from "../providers/slack-client.provider"; -import type { SlackConfig } from "../types/slack-types"; -import { config } from "dotenv"; -import { resolve } from "path"; -import { elizaLogger } from "@elizaos/core"; - -// Load environment variables from root .env -const envPath = resolve(__dirname, "../../../../.env"); -elizaLogger.log("Loading environment from:", envPath); -config({ path: envPath }); - -function validateEnvironment() { - const requiredEnvVars = [ - "SLACK_APP_ID", - "SLACK_CLIENT_ID", - "SLACK_CLIENT_SECRET", - "SLACK_SIGNING_SECRET", - "SLACK_VERIFICATION_TOKEN", - "SLACK_BOT_TOKEN", - "SLACK_CHANNEL_ID", - ]; - - const missing = requiredEnvVars.filter((key) => !process.env[key]); - if (missing.length > 0) { - console.error("Missing required environment variables:", missing); - return false; - } - - elizaLogger.log("Environment variables loaded successfully"); - return true; -} - -async function main() { - elizaLogger.log("\n=== Starting Summarize Conversation Example ===\n"); - - if (!validateEnvironment()) { - throw new Error("Environment validation failed"); - } - - // Initialize the client with Slack credentials - const slackConfig: SlackConfig = { - appId: process.env.SLACK_APP_ID || "", - clientId: process.env.SLACK_CLIENT_ID || "", - clientSecret: process.env.SLACK_CLIENT_SECRET || "", - signingSecret: process.env.SLACK_SIGNING_SECRET || "", - verificationToken: process.env.SLACK_VERIFICATION_TOKEN || "", - botToken: process.env.SLACK_BOT_TOKEN || "", - botId: process.env.SLACK_BOT_ID || "", - }; - - const slackProvider = new SlackClientProvider(slackConfig); - - // Validate the connection - const isConnected = await slackProvider.validateConnection(); - if (!isConnected) { - throw new Error("Failed to connect to Slack"); - } - elizaLogger.log("✓ Successfully connected to Slack"); - - const channel = process.env.SLACK_CHANNEL_ID!; - elizaLogger.log(`\nSending messages to channel: ${channel}`); - - // First, send some test messages - await slackProvider.sendMessage( - channel, - "Hello! Let's test the conversation summarization." - ); - - // Send message with attachment using WebClient directly - await slackProvider.getContext().client.chat.postMessage({ - channel, - text: "Here's an important document to discuss.", - attachments: [ - { - title: "Test Document", - text: "This is a test document with some important information.", - }, - ], - }); - - await slackProvider.sendMessage( - channel, - "What do you think about the document?" - ); - - // Wait a bit for messages to be processed - await new Promise((resolve) => setTimeout(resolve, 2000)); - - // Request a summary - await slackProvider.sendMessage( - channel, - "Can you summarize our conversation so far?" - ); - - // Keep the process running - await new Promise((resolve) => setTimeout(resolve, 10000)); - elizaLogger.log("\n✓ Example completed successfully"); - process.exit(0); -} - -main().catch((error) => { - console.error("\n❌ Error:", error); - process.exit(1); -}); diff --git a/packages/client-slack/src/examples/standalone-transcribe.ts b/packages/client-slack/src/examples/standalone-transcribe.ts deleted file mode 100644 index 99d90c529f67e..0000000000000 --- a/packages/client-slack/src/examples/standalone-transcribe.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { SlackClientProvider } from "../providers/slack-client.provider"; -import type { SlackConfig } from "../types/slack-types"; -import { config } from "dotenv"; -import { resolve } from "path"; -import { elizaLogger } from "@elizaos/core"; - -// Load environment variables from root .env -const envPath = resolve(__dirname, "../../../../.env"); -elizaLogger.log("Loading environment from:", envPath); -config({ path: envPath }); - -function validateEnvironment() { - const requiredEnvVars = [ - "SLACK_APP_ID", - "SLACK_CLIENT_ID", - "SLACK_CLIENT_SECRET", - "SLACK_SIGNING_SECRET", - "SLACK_VERIFICATION_TOKEN", - "SLACK_BOT_TOKEN", - "SLACK_CHANNEL_ID", - ]; - - const missing = requiredEnvVars.filter((key) => !process.env[key]); - if (missing.length > 0) { - console.error("Missing required environment variables:", missing); - return false; - } - - elizaLogger.log("Environment variables loaded successfully"); - return true; -} - -async function main() { - elizaLogger.log("\n=== Starting Transcribe Media Example ===\n"); - - if (!validateEnvironment()) { - throw new Error("Environment validation failed"); - } - - // Initialize the client with Slack credentials - const slackConfig: SlackConfig = { - appId: process.env.SLACK_APP_ID || "", - clientId: process.env.SLACK_CLIENT_ID || "", - clientSecret: process.env.SLACK_CLIENT_SECRET || "", - signingSecret: process.env.SLACK_SIGNING_SECRET || "", - verificationToken: process.env.SLACK_VERIFICATION_TOKEN || "", - botToken: process.env.SLACK_BOT_TOKEN || "", - botId: process.env.SLACK_BOT_ID || "", - }; - - const slackProvider = new SlackClientProvider(slackConfig); - - // Validate the connection - const isConnected = await slackProvider.validateConnection(); - if (!isConnected) { - throw new Error("Failed to connect to Slack"); - } - elizaLogger.log("✓ Successfully connected to Slack"); - - const channel = process.env.SLACK_CHANNEL_ID!; - elizaLogger.log(`\nSending messages to channel: ${channel}`); - - // First, send a test message with a media attachment - await slackProvider.getContext().client.chat.postMessage({ - channel, - text: "Here's a test audio recording to transcribe.", - attachments: [ - { - title: "Test Audio", - text: "This is a simulated transcription of an audio file: Hello everyone, welcome to our weekly standup meeting. Today we'll discuss our progress on the new features and any blockers we've encountered.", - }, - ], - }); - - // Wait a bit for the message to be processed - await new Promise((resolve) => setTimeout(resolve, 2000)); - - // Request transcription - await slackProvider.sendMessage( - channel, - "Can you transcribe the audio file I just shared?" - ); - - // Keep the process running - await new Promise((resolve) => setTimeout(resolve, 10000)); - elizaLogger.log("\n✓ Example completed successfully"); - process.exit(0); -} - -main().catch((error) => { - console.error("\n❌ Error:", error); - process.exit(1); -}); diff --git a/packages/client-slack/src/index.ts b/packages/client-slack/src/index.ts deleted file mode 100644 index 092486b283b53..0000000000000 --- a/packages/client-slack/src/index.ts +++ /dev/null @@ -1,345 +0,0 @@ -import type { Character, Client as ElizaClient, IAgentRuntime } from "@elizaos/core"; -import { elizaLogger } from "@elizaos/core"; -import { WebClient } from "@slack/web-api"; -import express, { type Request } from "express"; -import { EventEmitter } from "events"; -import { MessageManager } from "./messages"; -import { validateSlackConfig } from "./environment"; -import chat_with_attachments from "./actions/chat_with_attachments"; -import summarize_conversation from "./actions/summarize_conversation"; -// import transcribe_media from './actions/transcribe_media'; -import { channelStateProvider } from "./providers/channelState"; -import { SlackService } from "./services/slack.service"; - -interface SlackRequest extends Request { - rawBody?: Buffer; -} - -export class SlackClient extends EventEmitter { - private client: WebClient; - private runtime: IAgentRuntime; - private server: express.Application; - private messageManager: MessageManager; - private botUserId: string; - private character: Character; - private signingSecret: string; - - constructor(runtime: IAgentRuntime) { - super(); - elizaLogger.log("🚀 Initializing SlackClient..."); - this.runtime = runtime; - this.character = runtime.character; - - const token = runtime.getSetting("SLACK_BOT_TOKEN"); - this.signingSecret = runtime.getSetting("SLACK_SIGNING_SECRET"); - - if (!token) throw new Error("SLACK_BOT_TOKEN is required"); - if (!this.signingSecret) - throw new Error("SLACK_SIGNING_SECRET is required"); - - this.client = new WebClient(token); - this.server = express(); - - this.server.use(express.raw({ type: "application/json" })); - this.server.use((req: SlackRequest, res, next) => { - if (req.body) { - req.rawBody = Buffer.from(req.body); - try { - req.body = JSON.parse(req.body.toString()); - } catch (error) { - elizaLogger.error( - "❌ [PARSE] Failed to parse request body:", - error - ); - } - } - next(); - }); - } - - private async handleEvent(event: any) { - elizaLogger.debug("🎯 [EVENT] Processing event:", { - type: event.type, - user: event.user, - channel: event.channel, - text: event.text?.slice(0, 100), - }); - - try { - if (event.type === "message" || event.type === "app_mention") { - await this.messageManager.handleMessage(event); - } - } catch (error) { - elizaLogger.error("❌ [EVENT] Error handling event:", error); - } - } - - private async verifyPermissions() { - elizaLogger.debug("🔒 [PERMISSIONS] Verifying bot permissions..."); - - try { - // Test channel list access with all types - const channels = await this.client.conversations.list({ - types: "public_channel,private_channel,im,mpim", - }); - - if (!channels.ok) { - throw new Error(`Failed to list channels: ${channels.error}`); - } - - elizaLogger.debug("📋 [PERMISSIONS] Channel access verified"); - - // Test message sending (to self) - const testMessage = await this.client.chat.postMessage({ - channel: this.botUserId, - text: "Permission test message", - }); - - if (!testMessage.ok) { - throw new Error( - `Failed to send test message: ${testMessage.error}` - ); - } - - elizaLogger.debug("💬 [PERMISSIONS] Message sending verified"); - - elizaLogger.debug("✅ [PERMISSIONS] All permissions verified"); - } catch (error: any) { - elizaLogger.error( - "❌ [PERMISSIONS] Permission verification failed:", - error - ); - elizaLogger.error( - "Please ensure the following scopes are added to your Slack app:" - ); - elizaLogger.error("- app_mentions:read (for mentions)"); - elizaLogger.error("- channels:history (for public channels)"); - elizaLogger.error("- channels:read (for channel info)"); - elizaLogger.error("- chat:write (for sending messages)"); - elizaLogger.error("- groups:history (for private channels)"); - elizaLogger.error( - "- groups:read (for private channel info)" - ); - elizaLogger.error("- im:history (for DMs)"); - elizaLogger.error("- im:read (for DM info)"); - elizaLogger.error("- im:write (for sending DMs)"); - elizaLogger.error("- mpim:history (for group DMs)"); - elizaLogger.error("- mpim:read (for group DM info)"); - elizaLogger.error("- users:read (for user info)"); - throw new Error("Permission verification failed"); - } - } - - async start() { - try { - elizaLogger.log("Starting Slack client..."); - - const config = await validateSlackConfig(this.runtime); - - // Initialize and register Slack service - const slackService = new SlackService(); - await slackService.initialize(this.runtime); - await this.runtime.registerService(slackService); - - // Get detailed bot info - const auth = await this.client.auth.test(); - if (!auth.ok) throw new Error("Failed to authenticate with Slack"); - - this.botUserId = auth.user_id as string; - elizaLogger.debug("🤖 [INIT] Bot info:", { - user_id: auth.user_id, - bot_id: auth.bot_id, - team_id: auth.team_id, - user: auth.user, - team: auth.team, - }); - - // Verify bot user details - try { - const botInfo = await this.client.users.info({ - user: this.botUserId, - }); - - elizaLogger.debug("👤 [BOT] Bot user details:", { - name: botInfo.user?.name, - real_name: botInfo.user?.real_name, - is_bot: botInfo.user?.is_bot, - is_app_user: botInfo.user?.is_app_user, - status: botInfo.user?.profile?.status_text, - }); - } catch (error) { - elizaLogger.error( - "❌ [BOT] Failed to verify bot details:", - error - ); - } - - // Verify permissions - await this.verifyPermissions(); - - // Initialize message manager - this.messageManager = new MessageManager( - this.client, - this.runtime, - this.botUserId - ); - - // Register actions and providers - this.runtime.registerAction(chat_with_attachments); - this.runtime.registerAction(summarize_conversation); - // this.runtime.registerAction(transcribe_media); - this.runtime.providers.push(channelStateProvider); - - // Add request logging middleware - this.server.use((req: SlackRequest, res, next) => { - elizaLogger.debug("🌐 [HTTP] Incoming request:", { - method: req.method, - path: req.path, - headers: req.headers, - body: req.body, - query: req.query, - timestamp: new Date().toISOString(), - }); - next(); - }); - - // Setup event handling endpoint - this.server.post( - "/slack/events", - async (req: SlackRequest, res) => { - try { - elizaLogger.debug( - "📥 [REQUEST] Incoming Slack event:", - { - type: req.body?.type, - event: req.body?.event?.type, - challenge: req.body?.challenge, - raw: JSON.stringify(req.body, null, 2), - } - ); - - // Handle URL verification - if (req.body?.type === "url_verification") { - elizaLogger.debug( - "🔑 [VERIFICATION] Challenge received:", - req.body.challenge - ); - return res.send(req.body.challenge); - } - - // Process the event - if (req.body?.event) { - elizaLogger.debug("🎯 [EVENT] Processing event:", { - type: req.body.event.type, - user: req.body.event.user, - text: req.body.event.text, - channel: req.body.event.channel, - ts: req.body.event.ts, - }); - await this.handleEvent(req.body.event); - } else { - elizaLogger.warn( - "⚠️ [EVENT] Received request without event data" - ); - } - - // Acknowledge receipt - res.status(200).send(); - } catch (error) { - elizaLogger.error( - "❌ [ERROR] Error processing request:", - error - ); - res.status(500).json({ - error: "Internal server error", - }); - } - } - ); - - // Setup interactions endpoint - this.server.post( - "/slack/interactions", - async (req: SlackRequest, res) => { - try { - elizaLogger.debug( - "🔄 [INTERACTION] Incoming interaction:", - { - type: req.body?.type, - action: req.body?.action, - callback_id: req.body?.callback_id, - raw: JSON.stringify(req.body, null, 2), - } - ); - - // Always acknowledge interaction - res.status(200).send(); - } catch (error) { - elizaLogger.error( - "❌ [ERROR] Error processing interaction:", - error - ); - res.status(500).json({ - error: "Internal server error", - }); - } - } - ); - - // Start server - const port = config.SLACK_SERVER_PORT; - this.server.listen(port, () => { - elizaLogger.success( - `🚀 [SERVER] Slack event server is running on port ${port}` - ); - elizaLogger.success( - `✅ [INIT] Slack client successfully started for character ${this.character.name}` - ); - elizaLogger.success( - `🤖 [READY] Bot user: @${auth.user} (${this.botUserId})` - ); - elizaLogger.success( - `📡 [EVENTS] Listening for events at: /slack/events` - ); - elizaLogger.success( - `💡 [INTERACTIONS] Listening for interactions at: /slack/interactions` - ); - elizaLogger.success(`💡 [HELP] To interact with the bot:`); - elizaLogger.success( - ` 1. Direct message: Find @${auth.user} in DMs` - ); - elizaLogger.success( - ` 2. Channel: Mention @${auth.user} in any channel` - ); - }); - } catch (error) { - elizaLogger.error("❌ [INIT] Failed to start Slack client:", error); - throw error; - } - } - - async stop() { - elizaLogger.log("Stopping Slack client..."); - if (this.server) { - await new Promise((resolve) => { - this.server.listen().close(() => { - elizaLogger.log("Server stopped"); - resolve(); - }); - }); - } - } -} - -export const SlackClientInterface: ElizaClient = { - start: async (runtime: IAgentRuntime) => { - const client = new SlackClient(runtime); - await client.start(); - return client; - }, - stop: async (_runtime: IAgentRuntime) => { - elizaLogger.warn("Slack client stopping..."); - }, -}; - -export default SlackClientInterface; diff --git a/packages/client-slack/src/messages.ts b/packages/client-slack/src/messages.ts deleted file mode 100644 index 101572e0dee97..0000000000000 --- a/packages/client-slack/src/messages.ts +++ /dev/null @@ -1,526 +0,0 @@ -import { - stringToUuid, - getEmbeddingZeroVector, - composeContext, - generateMessageResponse, - generateShouldRespond, - ModelClass, - type Memory, - type Content, - type State, - elizaLogger, - type HandlerCallback, -} from "@elizaos/core"; -import { - slackMessageHandlerTemplate, - slackShouldRespondTemplate, -} from "./templates"; -import type { WebClient } from "@slack/web-api"; -import type { IAgentRuntime } from "@elizaos/core"; -import path from "path"; -import fs from "fs"; -import os from "os"; - -export class MessageManager { - private client: WebClient; - private runtime: IAgentRuntime; - private botUserId: string; - private processedEvents: Set = new Set(); - private messageProcessingLock: Set = new Set(); - private processedMessages: Map = new Map(); - - constructor(client: WebClient, runtime: IAgentRuntime, botUserId: string) { - console.log("📱 Initializing MessageManager..."); - this.client = client; - this.runtime = runtime; - this.botUserId = botUserId; - console.log("MessageManager initialized with botUserId:", botUserId); - - // Clear old processed messages and events every hour - setInterval(() => { - const oneHourAgo = Date.now() - 3600000; - - // Clear old processed messages - for (const [key, timestamp] of this.processedMessages.entries()) { - if (timestamp < oneHourAgo) { - this.processedMessages.delete(key); - } - } - - // Clear old processed events - this.processedEvents.clear(); - }, 3600000); - } - - private generateEventKey(event: any): string { - // Create a unique key that includes all relevant event data - // Normalize event type to handle message and app_mention as the same type - const eventType = event.type === "app_mention" ? "message" : event.type; - - const components = [ - event.ts, // Timestamp - event.channel, // Channel ID - eventType, // Normalized event type - event.user, // User ID - event.thread_ts, // Thread timestamp (if any) - ].filter(Boolean); // Remove any undefined/null values - - const key = components.join("-"); - console.log("\n=== EVENT DETAILS ==="); - console.log("Event Type:", event.type); - console.log("Event TS:", event.ts); - console.log("Channel:", event.channel); - console.log("User:", event.user); - console.log("Thread TS:", event.thread_ts); - console.log("Generated Key:", key); - return key; - } - - private cleanMessage(text: string): string { - elizaLogger.debug("🧹 [CLEAN] Cleaning message text:", text); - // Remove bot mention - const cleaned = text - .replace(new RegExp(`<@${this.botUserId}>`, "g"), "") - .trim(); - elizaLogger.debug("✨ [CLEAN] Cleaned result:", cleaned); - return cleaned; - } - - private async _shouldRespond(message: any, state: State): Promise { - console.log("\n=== SHOULD_RESPOND PHASE ==="); - console.log("🔍 Step 1: Evaluating if should respond to message"); - - // Always respond to direct mentions - if ( - message.type === "app_mention" || - message.text?.includes(`<@${this.botUserId}>`) - ) { - console.log("✅ Direct mention detected - will respond"); - return true; - } - - // Always respond in direct messages - if (message.channel_type === "im") { - console.log("✅ Direct message detected - will respond"); - return true; - } - - // Check if we're in a thread and we've participated - if ( - message.thread_ts && - state.recentMessages?.includes(this.runtime.agentId) - ) { - console.log("✅ Active thread participant - will respond"); - return true; - } - - // Only use LLM for ambiguous cases - console.log("🤔 Step 2: Using LLM to decide response"); - const shouldRespondContext = composeContext({ - state, - template: - this.runtime.character.templates?.slackShouldRespondTemplate || - this.runtime.character.templates?.shouldRespondTemplate || - slackShouldRespondTemplate, - }); - - console.log("🔄 Step 3: Calling generateShouldRespond"); - const response = await generateShouldRespond({ - runtime: this.runtime, - context: shouldRespondContext, - modelClass: ModelClass.SMALL, - }); - - console.log(`✅ Step 4: LLM decision received: ${response}`); - return response === "RESPOND"; - } - - private async _generateResponse( - memory: Memory, - state: State, - context: string - ): Promise { - console.log("\n=== GENERATE_RESPONSE PHASE ==="); - console.log("🔍 Step 1: Starting response generation"); - - // Generate response only once - console.log("🔄 Step 2: Calling LLM for response"); - const response = await generateMessageResponse({ - runtime: this.runtime, - context, - modelClass: ModelClass.LARGE, - }); - console.log("✅ Step 3: LLM response received"); - - if (!response) { - console.error("❌ No response from generateMessageResponse"); - return { - text: "I apologize, but I'm having trouble generating a response right now.", - source: "slack", - }; - } - - // If response includes a CONTINUE action but there's no direct mention or thread, - // remove the action to prevent automatic continuation - if ( - response.action === "CONTINUE" && - !memory.content.text?.includes(`<@${this.botUserId}>`) && - !state.recentMessages?.includes(memory.id) - ) { - console.log( - "⚠️ Step 4: Removing CONTINUE action - not a direct interaction" - ); - delete response.action; - } - - console.log("✅ Step 5: Returning generated response"); - return response; - } - - private async _downloadAttachments(event: any):Promise { - if (event.files==null || event.files.length==0) { - return event; - } - - elizaLogger.log("📥 Downloading attachments"); - - const downloadedAttachments = []; - for (const file of event.files) { - if (!file.url_private) { - elizaLogger.warn(`No url_private found for file ${file.id}`); - continue; - } - try { - const response = await fetch(file.url_private, { - headers: { - Authorization: `Bearer ${this.client.token}` - } - }); - if (!response.ok) { - elizaLogger.error(`Failed to download file ${file.id}: ${response.statusText}`); - continue; - } - const arrayBuffer = await response.arrayBuffer(); - const buffer = Buffer.from(arrayBuffer); - - // Create a file path in the temporary directory - const tempDir = os.tmpdir(); - const sanitizedFileName = file.name.replace(/[^a-zA-Z0-9.\-_]/g, "_"); - const filePath = path.join( - tempDir, - `slack_attachment_${file.id}_${Date.now()}_${sanitizedFileName}` - ); - - fs.writeFileSync(filePath, buffer); - - downloadedAttachments.push({ - id: file.id, - title: file.name, - url: filePath, - source: "slack", - description: "Attachment to the Slack message", - text: "" - }); - } catch (error) { - elizaLogger.error(`Error downloading file ${file.id}:`, error); - } - } - // Optionally, attach the downloaded attachments to the event for further processing - event.downloadedAttachments = downloadedAttachments; - elizaLogger.log("✅ Attachments downloaded:", downloadedAttachments); - return event; - } - - private async _uploadAttachments(event: any, attachments: string[]) { - if (attachments==null || attachments.length==0) { - return; - } - - for (const attachmentId of attachments) { - try { - // Retrieve file data from the runtime's cache manager. - const fileData = await this.runtime.cacheManager.get(attachmentId); - if (!fileData) { - elizaLogger.warn(`No file data found for attachment id: ${attachmentId}`); - continue; - } - - elizaLogger.log("Uploading text file..."); - const uploadResult = await this.client.filesUploadV2({ - channels: event.channel, - thread_ts: event.thread_ts, - content: fileData as string, - filename: "text.txt", - filetype: "text/plain", - initial_comment: "", - snippet_type: "markdown" - }); - elizaLogger.log("File uploaded successfully:", uploadResult); - } catch (error) { - elizaLogger.error(`Error uploading file for attachment ${attachmentId}:`, error); - } - } - } - - public async handleMessage(event: any) { - console.log("\n=== MESSAGE_HANDLING PHASE ==="); - console.log("🔍 Step 1: Received new message event"); - - // Skip if no event data - if (!event || !event.ts || !event.channel) { - console.log("⚠️ Invalid event data - skipping"); - return; - } - - // Generate event key for deduplication - const eventKey = this.generateEventKey(event); - - // Check if we've already processed this event - if (this.processedEvents.has(eventKey)) { - console.log("⚠️ Event already processed - skipping"); - console.log("Existing event key:", eventKey); - console.log("Original event type:", event.type); - console.log("Duplicate prevention working as expected"); - return; - } - - // Add to processed events immediately - console.log("✅ New event - processing:", eventKey); - console.log("Event type being processed:", event.type); - this.processedEvents.add(eventKey); - - // Generate message key for processing lock - const messageKey = eventKey; // Use same key for consistency - const currentTime = Date.now(); - - try { - // Check if message is currently being processed - if (this.messageProcessingLock.has(messageKey)) { - console.log( - "⚠️ Message is currently being processed - skipping" - ); - return; - } - - // Add to processing lock - console.log("🔒 Step 2: Adding message to processing lock"); - this.messageProcessingLock.add(messageKey); - - try { - // Ignore messages from bots (including ourselves) - if (event.bot_id || event.user === this.botUserId) { - console.log("⚠️ Message from bot or self - skipping"); - return; - } - - // Clean the message text - console.log("🧹 Step 3: Cleaning message text"); - const cleanedText = this.cleanMessage(event.text || ""); - if (!cleanedText) { - console.log("⚠️ Empty message after cleaning - skipping"); - return; - } - - // Generate unique IDs - console.log("🔑 Step 4: Generating conversation IDs"); - const roomId = stringToUuid( - `${event.channel}-${this.runtime.agentId}` - ); - const userId = stringToUuid( - `${event.user}-${this.runtime.agentId}` - ); - const messageId = stringToUuid( - `${event.ts}-${this.runtime.agentId}` - ); - - // Ensure both the sender and agent are properly set up in the room - await this.runtime.ensureConnection( - userId, - roomId, - event.user, - event.user, - "slack" - ); - - // Create initial memory - console.log("💾 Step 5: Creating initial memory"); - - // Download attachments if any - event = await this._downloadAttachments(event); - - const content: Content = { - text: cleanedText, - source: "slack", - inReplyTo: event.thread_ts - ? stringToUuid( - `${event.thread_ts}-${this.runtime.agentId}` - ) - : undefined, - attachments: event.downloadedAttachments - }; - - const memory: Memory = { - id: messageId, - userId, - agentId: this.runtime.agentId, - roomId, - content, - createdAt: new Date(Number.parseFloat(event.ts) * 1000).getTime(), - embedding: getEmbeddingZeroVector(), - }; - - // Add memory - if (content.text) { - console.log("💾 Step 6: Saving initial memory"); - await this.runtime.messageManager.createMemory(memory); - } - - // Initial state composition - console.log("🔄 Step 7: Composing initial state"); - let state = await this.runtime.composeState( - { content, userId, agentId: this.runtime.agentId, roomId }, - { - slackClient: this.client, - slackEvent: event, - agentName: this.runtime.character.name, - senderName: event.user_name || event.user, - } - ); - - // Update state with recent messages - console.log("🔄 Step 8: Updating state with recent messages"); - state = await this.runtime.updateRecentMessageState(state); - - // Check if we should respond - console.log("🤔 Step 9: Checking if we should respond"); - const shouldRespond = await this._shouldRespond(event, state); - - if (shouldRespond) { - console.log( - "✅ Step 10: Should respond - generating response" - ); - const context = composeContext({ - state, - template: - this.runtime.character.templates - ?.slackMessageHandlerTemplate || - slackMessageHandlerTemplate, - }); - - const responseContent = await this._generateResponse( - memory, - state, - context - ); - - if (responseContent?.text) { - console.log("📤 Step 11: Preparing to send response"); - - const callback: HandlerCallback = async ( - content: Content, - attachments: any[] - ) => { - try { - elizaLogger.log( - " Step 12: Executing response callback" - ); - - const messageText = content.text || responseContent.text; - - // First, send the main message text - const result = await this.client.chat.postMessage({ - channel: event.channel, - text: messageText, - thread_ts: event.thread_ts, - }); - - // Then, for each attachment identifier, fetch the file data from the runtime's cache manager - // and upload it using Slack's files.upload method. - await this._uploadAttachments(event, attachments); - - elizaLogger.log( - "💾 Step 13: Creating response memory" - ); - const responseMemory: Memory = { - id: stringToUuid( - `${result.ts}-${this.runtime.agentId}` - ), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - roomId, - content: { - ...content, - text: - content.text || - responseContent.text, - inReplyTo: messageId, - }, - createdAt: Date.now(), - embedding: getEmbeddingZeroVector(), - }; - - elizaLogger.log( - "✓ Step 14: Marking message as processed" - ); - this.processedMessages.set( - messageKey, - currentTime - ); - - elizaLogger.log( - "💾 Step 15: Saving response memory" - ); - await this.runtime.messageManager.createMemory( - responseMemory - ); - - return [responseMemory]; - } catch (error) { - elizaLogger.error("❌ Error in callback:", error); - return []; - } - }; - - console.log("📤 Step 16: Sending initial response"); - const responseMessages = - await callback(responseContent); - - console.log( - "🔄 Step 17: Updating state after response" - ); - state = - await this.runtime.updateRecentMessageState(state); - - if (responseContent.action) { - console.log("⚡ Step 18: Processing actions"); - await this.runtime.processActions( - memory, - responseMessages, - state, - callback - ); - } - } - } else { - console.log("⏭️ Should not respond - skipping"); - this.processedMessages.set(messageKey, currentTime); - } - } finally { - console.log( - "🔓 Final Step: Removing message from processing lock and deleting downloaded attachments" - ); - this.messageProcessingLock.delete(messageKey); - - // Delete downloaded attachments - if (event.downloadedAttachments) { - for (const attachment of event.downloadedAttachments) { - fs.unlinkSync(attachment.url); - } - } - } - } catch (error) { - console.error("❌ Error in message handling:", error); - this.messageProcessingLock.delete(messageKey); - } - } -} diff --git a/packages/client-slack/src/providers/channelState.ts b/packages/client-slack/src/providers/channelState.ts deleted file mode 100644 index f058b14acaa9a..0000000000000 --- a/packages/client-slack/src/providers/channelState.ts +++ /dev/null @@ -1,43 +0,0 @@ -import type { IAgentRuntime, Memory, Provider, State } from "@elizaos/core"; - -interface SlackEvent { - channel: string; - channel_type: string; - thread_ts?: string; - user?: string; - team?: string; -} - -export const channelStateProvider: Provider = { - get: async (runtime: IAgentRuntime, message: Memory, state?: State) => { - const slackEvent = state?.slackEvent as SlackEvent | undefined; - if (!slackEvent) { - return ""; - } - - const agentName = state?.agentName || "The agent"; - const senderName = state?.senderName || "someone"; - const channelId = slackEvent.channel; - const channelType = slackEvent.channel_type; - - // For direct messages - if (channelType === "im") { - return `${agentName} is currently in a direct message conversation with ${senderName}`; - } - - // For channel messages - let response = `${agentName} is currently having a conversation in the Slack channel <#${channelId}>`; - - // Add thread context if in a thread - if (slackEvent.thread_ts) { - response += ` in a thread`; - } - - // Add team context if available - if (slackEvent.team) { - response += ` in the workspace ${slackEvent.team}`; - } - - return response; - }, -}; diff --git a/packages/client-slack/src/providers/slack-client.provider.ts b/packages/client-slack/src/providers/slack-client.provider.ts deleted file mode 100644 index 3f13a55b8f34a..0000000000000 --- a/packages/client-slack/src/providers/slack-client.provider.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { WebClient } from "@slack/web-api"; -import type { SlackConfig, SlackClientContext } from "../types/slack-types"; -import { SlackUtils, type RetryOptions } from "../utils/slack-utils"; -import { elizaLogger } from "@elizaos/core"; - -export class SlackClientProvider { - private client: WebClient; - private config: SlackConfig; - private retryOptions: RetryOptions; - - constructor(config: SlackConfig, retryOptions: RetryOptions = {}) { - this.config = config; - this.client = new WebClient(config.botToken); - this.retryOptions = { - maxRetries: 3, - initialDelay: 1000, - maxDelay: 5000, - ...retryOptions, - }; - } - - public getContext(): SlackClientContext { - return { - client: this.client, - config: this.config, - }; - } - - public async validateConnection(): Promise { - try { - const result = await SlackUtils.withRateLimit( - () => this.client.auth.test(), - this.retryOptions - ); - - if (result.ok) { - this.config.botId = result.user_id || this.config.botId; - elizaLogger.log("Bot ID:", this.config.botId); - return true; - } - return false; - } catch (error) { - console.error("Slack connection validation failed:", error); - return false; - } - } - - public async sendMessage(channel: string, text: string): Promise { - return SlackUtils.sendMessageWithRetry( - this.client, - channel, - text, - this.retryOptions - ); - } - - public async replyInThread( - channel: string, - threadTs: string, - text: string - ): Promise { - return SlackUtils.replyInThread( - this.client, - channel, - threadTs, - text, - this.retryOptions - ); - } - - public async validateChannel(channelId: string): Promise { - return SlackUtils.validateChannel(this.client, channelId); - } - - public formatMessage( - text: string, - options?: { - blocks?: any[]; - attachments?: any[]; - } - ) { - return SlackUtils.formatMessage(text, options); - } - - public async withRateLimit(fn: () => Promise): Promise { - return SlackUtils.withRateLimit(fn, this.retryOptions); - } -} diff --git a/packages/client-slack/src/services/slack.service.ts b/packages/client-slack/src/services/slack.service.ts deleted file mode 100644 index dccbde940bd30..0000000000000 --- a/packages/client-slack/src/services/slack.service.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { Service, type IAgentRuntime, ServiceType } from "@elizaos/core"; -import { WebClient } from "@slack/web-api"; -import type { ISlackService } from "../types/slack-types"; - -export class SlackService extends Service implements ISlackService { - public client: WebClient; - - static get serviceType(): ServiceType { - return ServiceType.SLACK; - } - - get serviceType(): ServiceType { - return ServiceType.SLACK; - } - - async initialize(runtime: IAgentRuntime): Promise { - const token = runtime.getSetting("SLACK_BOT_TOKEN"); - if (!token) { - throw new Error("SLACK_BOT_TOKEN is required"); - } - this.client = new WebClient(token); - } -} diff --git a/packages/client-slack/src/templates.ts b/packages/client-slack/src/templates.ts deleted file mode 100644 index 9fa6df8b9a9a8..0000000000000 --- a/packages/client-slack/src/templates.ts +++ /dev/null @@ -1,99 +0,0 @@ -import { messageCompletionFooter, shouldRespondFooter } from "@elizaos/core"; - -export const slackShouldRespondTemplate = - `# Task: Decide if {{agentName}} should respond. -About {{agentName}}: -{{bio}} - -# INSTRUCTIONS: Determine if {{agentName}} should respond to the message and participate in the conversation. Do not comment. Just respond with "RESPOND" or "IGNORE" or "STOP". - -# RESPONSE EXAMPLES -: Hey everyone, what's up? -: Not much, just working -Result: [IGNORE] - -{{agentName}}: I can help with that task -: thanks! -: @{{agentName}} can you explain more? -Result: [RESPOND] - -: @{{agentName}} shut up -Result: [STOP] - -: Hey @{{agentName}}, can you help me with something? -Result: [RESPOND] - -: @{{agentName}} please stop -Result: [STOP] - -: I need help -{{agentName}}: How can I help you? -: Not you, I need someone else -Result: [IGNORE] - -Response options are [RESPOND], [IGNORE] and [STOP]. - -{{agentName}} is in a Slack channel with other users and is very mindful about not being disruptive. -Respond with [RESPOND] to messages that: -- Directly mention @{{agentName}} -- Are follow-ups to {{agentName}}'s previous messages -- Are relevant to ongoing conversations {{agentName}} is part of - -Respond with [IGNORE] to messages that: -- Are not directed at {{agentName}} -- Are general channel chatter -- Are very short or lack context -- Are part of conversations {{agentName}} isn't involved in - -Respond with [STOP] when: -- Users explicitly ask {{agentName}} to stop or be quiet -- The conversation with {{agentName}} has naturally concluded -- Users express frustration with {{agentName}} - -IMPORTANT: {{agentName}} should err on the side of [IGNORE] if there's any doubt about whether to respond. -Only respond when explicitly mentioned or when clearly part of an ongoing conversation. - -{{recentMessages}} - -# INSTRUCTIONS: Choose the option that best describes {{agentName}}'s response to the last message. Ignore messages if they are not directed at {{agentName}}. -` + shouldRespondFooter; - -export const slackMessageHandlerTemplate = - `# Action Examples -{{actionExamples}} -(Action examples are for reference only. Do not use the information from them in your response.) - -# Knowledge -{{knowledge}} - -# Task: Generate dialog and actions for the character {{agentName}} in Slack. -About {{agentName}}: -{{bio}} -{{lore}} - -Examples of {{agentName}}'s dialog and actions: -{{characterMessageExamples}} - -{{providers}} - -{{attachments}} - -{{actions}} - -# Capabilities -Note that {{agentName}} is capable of reading/seeing/hearing various forms of media, including images, videos, audio, plaintext and PDFs. Recent attachments have been included above under the "Attachments" section. - -# Conversation Flow Rules -1. Only continue the conversation if the user has explicitly mentioned {{agentName}} or is directly responding to {{agentName}}'s last message -2. Do not use the CONTINUE action unless explicitly asked to continue by the user -3. Wait for user input before generating additional responses -4. Keep responses focused and concise -5. If a conversation is naturally concluding, let it end gracefully - -{{messageDirections}} - -{{recentMessages}} - -# Instructions: Write the next message for {{agentName}}. Include an action, if appropriate. {{actionNames}} -Remember to follow the conversation flow rules above. -` + messageCompletionFooter; diff --git a/packages/client-slack/src/types/slack-types.ts b/packages/client-slack/src/types/slack-types.ts deleted file mode 100644 index a39f4d08bff20..0000000000000 --- a/packages/client-slack/src/types/slack-types.ts +++ /dev/null @@ -1,39 +0,0 @@ -import type { WebClient } from "@slack/web-api"; -import { type Service, ServiceType } from "@elizaos/core"; - -export interface SlackConfig { - appId: string; - clientId: string; - clientSecret: string; - signingSecret: string; - verificationToken: string; - botToken: string; - botId: string; -} - -export interface SlackClientContext { - client: any; - config: SlackConfig; -} - -export interface SlackMessage { - text: string; - userId: string; - channelId: string; - threadTs?: string; - attachments?: Array<{ - type: string; - url: string; - title: string; - size: number; - }>; -} - -// We'll temporarily use TEXT_GENERATION as our service type -// This is not ideal but allows us to work within current constraints -export const SLACK_SERVICE_TYPE = ServiceType.TEXT_GENERATION; - -// Interface extending core Service -export interface ISlackService extends Service { - client: WebClient; -} diff --git a/packages/client-slack/src/utils/slack-utils.ts b/packages/client-slack/src/utils/slack-utils.ts deleted file mode 100644 index 3ec7c3e6d1e1e..0000000000000 --- a/packages/client-slack/src/utils/slack-utils.ts +++ /dev/null @@ -1,142 +0,0 @@ -import type { WebClient } from "@slack/web-api"; - -export interface RetryOptions { - maxRetries?: number; - initialDelay?: number; - maxDelay?: number; -} - -export interface MessageOptions extends RetryOptions { - threadTs?: string; -} - -const DEFAULT_RETRY_OPTIONS: Required = { - maxRetries: 3, - initialDelay: 1000, - maxDelay: 5000, -}; - -export class SlackUtils { - /** - * Sends a message to a Slack channel with retry mechanism - */ - static async sendMessageWithRetry( - client: WebClient, - channel: string, - text: string, - options: MessageOptions = {} - ) { - const { threadTs, ...retryOpts } = options; - const finalRetryOpts = { ...DEFAULT_RETRY_OPTIONS, ...retryOpts }; - let lastError: Error | null = null; - - for (let attempt = 0; attempt < finalRetryOpts.maxRetries; attempt++) { - try { - const result = await client.chat.postMessage({ - channel, - text, - thread_ts: threadTs, - }); - return result; - } catch (error) { - lastError = error as Error; - if (attempt < finalRetryOpts.maxRetries - 1) { - const delay = Math.min( - finalRetryOpts.initialDelay * Math.pow(2, attempt), - finalRetryOpts.maxDelay - ); - await new Promise((resolve) => setTimeout(resolve, delay)); - } - } - } - - throw new Error( - `Failed to send message after ${finalRetryOpts.maxRetries} attempts: ${lastError?.message}` - ); - } - - /** - * Validates if a channel exists and is accessible - */ - static async validateChannel( - client: WebClient, - channelId: string - ): Promise { - try { - const result = await client.conversations.info({ - channel: channelId, - }); - return result.ok === true; - } catch (error) { - console.error(error); - return false; - } - } - - /** - * Formats a message for Slack with optional blocks - */ - static formatMessage( - text: string, - options?: { - blocks?: any[]; - attachments?: any[]; - } - ) { - return { - text, - ...options, - }; - } - - /** - * Creates a thread reply - */ - static async replyInThread( - client: WebClient, - channel: string, - threadTs: string, - text: string, - options: RetryOptions = {} - ) { - return this.sendMessageWithRetry(client, channel, text, { - ...options, - threadTs, - }); - } - - /** - * Handles rate limiting by implementing exponential backoff - */ - static async withRateLimit( - fn: () => Promise, - options: RetryOptions = {} - ): Promise { - const retryOpts = { ...DEFAULT_RETRY_OPTIONS, ...options }; - let lastError: Error | null = null; - - for (let attempt = 0; attempt < retryOpts.maxRetries; attempt++) { - try { - return await fn(); - } catch (error) { - lastError = error as Error; - if ( - error instanceof Error && - error.message.includes("rate_limited") - ) { - const delay = Math.min( - retryOpts.initialDelay * Math.pow(2, attempt), - retryOpts.maxDelay - ); - await new Promise((resolve) => setTimeout(resolve, delay)); - continue; - } - throw error; - } - } - - throw new Error( - `Operation failed after ${retryOpts.maxRetries} attempts: ${lastError?.message}` - ); - } -} diff --git a/packages/client-slack/tsconfig.json b/packages/client-slack/tsconfig.json deleted file mode 100644 index 3d02fae9ad2fa..0000000000000 --- a/packages/client-slack/tsconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "moduleResolution": "Bundler", - "allowImportingTsExtensions": true, - "allowArbitraryExtensions": true - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/client-slack/vitest.config.ts b/packages/client-slack/vitest.config.ts deleted file mode 100644 index 467b936e88581..0000000000000 --- a/packages/client-slack/vitest.config.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { defineConfig } from 'vitest/config' - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - include: ['__tests__/**/*.test.ts'], - coverage: { - provider: 'v8', - reporter: ['text', 'json', 'html'], - }, - }, -}) diff --git a/packages/client-telegram-account/.npmignore b/packages/client-telegram-account/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/client-telegram-account/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/client-telegram-account/eslint.config.mjs b/packages/client-telegram-account/eslint.config.mjs deleted file mode 100644 index 92fe5bbebefad..0000000000000 --- a/packages/client-telegram-account/eslint.config.mjs +++ /dev/null @@ -1,3 +0,0 @@ -import eslintGlobalConfig from "../../eslint.config.mjs"; - -export default [...eslintGlobalConfig]; diff --git a/packages/client-telegram-account/package.json b/packages/client-telegram-account/package.json deleted file mode 100644 index 4214ce47c3855..0000000000000 --- a/packages/client-telegram-account/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "@elizaos/client-telegram-account", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "glob": "11.0.0", - "input": "^1.0.1", - "telegram": "2.17.4" - }, - "devDependencies": { - "tsup": "8.3.5", - "vitest": "1.6.1", - "@vitest/coverage-v8": "1.1.3" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "lint": "eslint --fix --cache .", - "test": "vitest run", - "test:coverage": "vitest run --coverage" - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/client-telegram-account/src/environment.ts b/packages/client-telegram-account/src/environment.ts deleted file mode 100644 index d4e48b50d4af1..0000000000000 --- a/packages/client-telegram-account/src/environment.ts +++ /dev/null @@ -1,64 +0,0 @@ -import { IAgentRuntime } from "@elizaos/core"; -import { z, ZodError } from "zod"; - -export const telegramAccountEnvSchema = z.object({ - TELEGRAM_ACCOUNT_PHONE: z.string(), - TELEGRAM_ACCOUNT_APP_ID: z.number().int(), - TELEGRAM_ACCOUNT_APP_HASH: z.string(), - TELEGRAM_ACCOUNT_DEVICE_MODEL: z.string(), - TELEGRAM_ACCOUNT_SYSTEM_VERSION: z.string(), -}); - -export type TelegramAccountConfig = z.infer; - - -function safeParseInt( - value: string | undefined | null, - defaultValue: number = null -): number { - if (!value) return defaultValue; - const parsed = parseInt(value, 10); - return isNaN(parsed) ? defaultValue : Math.max(1, parsed); -} - - -export async function validateTelegramAccountConfig( - runtime: IAgentRuntime -): Promise { - try { - const telegramAccountConfig = { - TELEGRAM_ACCOUNT_PHONE: - runtime.getSetting("TELEGRAM_ACCOUNT_PHONE") || - process.env.TELEGRAM_ACCOUNT_PHONE, - - TELEGRAM_ACCOUNT_APP_ID: safeParseInt( - runtime.getSetting("TELEGRAM_ACCOUNT_APP_ID") || - process.env.TELEGRAM_ACCOUNT_APP_ID - ), - - TELEGRAM_ACCOUNT_APP_HASH: - runtime.getSetting("TELEGRAM_ACCOUNT_APP_HASH") || - process.env.TELEGRAM_ACCOUNT_APP_HASH, - - TELEGRAM_ACCOUNT_DEVICE_MODEL: - runtime.getSetting("TELEGRAM_ACCOUNT_DEVICE_MODEL") || - process.env.TELEGRAM_ACCOUNT_DEVICE_MODEL, - - TELEGRAM_ACCOUNT_SYSTEM_VERSION: - runtime.getSetting("TELEGRAM_ACCOUNT_SYSTEM_VERSION") || - process.env.TELEGRAM_ACCOUNT_SYSTEM_VERSION - }; - - return telegramAccountEnvSchema.parse(telegramAccountConfig); - } catch (error) { - if (error instanceof ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `Telegram account configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/client-telegram-account/src/index.ts b/packages/client-telegram-account/src/index.ts deleted file mode 100644 index 3c9f776836343..0000000000000 --- a/packages/client-telegram-account/src/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { elizaLogger } from "@elizaos/core"; -import { Client, IAgentRuntime } from "@elizaos/core"; -import {TelegramAccountConfig, validateTelegramAccountConfig} from "./environment.ts"; -import { TelegramAccountClient } from "./telegramAccountClient.ts" - -export const TelegramAccountClientInterface: Client = { - start: async (runtime: IAgentRuntime) => { - const telegramAccountConfig: TelegramAccountConfig = await validateTelegramAccountConfig(runtime); - const telegramAccountClient = new TelegramAccountClient(runtime, telegramAccountConfig); - await telegramAccountClient.start(); - - return telegramAccountClient; - }, - stop: async (_runtime: IAgentRuntime) => { - elizaLogger.warn("Telegram client does not support stopping yet"); - }, -}; - -export default TelegramAccountClientInterface; diff --git a/packages/client-telegram-account/src/telegramAccountClient.ts b/packages/client-telegram-account/src/telegramAccountClient.ts deleted file mode 100644 index 021b60a424546..0000000000000 --- a/packages/client-telegram-account/src/telegramAccountClient.ts +++ /dev/null @@ -1,340 +0,0 @@ -import { - IAgentRuntime, - UUID, - Content, - Memory, - HandlerCallback, - ModelClass, - State, - Media, - elizaLogger, - getEmbeddingZeroVector, - composeContext, - generateMessageResponse, - stringToUuid -} from "@elizaos/core"; -import { TelegramAccountConfig } from "./environment.ts"; -import { TelegramClient, Api } from "telegram"; -import { StoreSession } from "telegram/sessions"; -import { NewMessage, NewMessageEvent } from "telegram/events"; -import { Entity } from "telegram/define"; -import input from "input"; -import bigInt from "big-integer"; -import { getTelegramAccountMessageHandlerTemplate } from "./templates.ts" -import { escapeMarkdown, splitMessage } from "./utils.ts"; - -export class TelegramAccountClient { - private runtime: IAgentRuntime; - private telegramAccountConfig: TelegramAccountConfig; - private client: TelegramClient; - private account: Api.User; - - constructor(runtime: IAgentRuntime, telegramAccountConfig: TelegramAccountConfig) { - elizaLogger.log("📱 Constructing new TelegramAccountClient..."); - - this.runtime = runtime; - this.telegramAccountConfig = telegramAccountConfig; - - elizaLogger.log("✅ TelegramClient constructor completed"); - } - - public async start(): Promise { - elizaLogger.log("🚀 Starting Telegram account..."); - - try { - await this.initializeAccount(); - this.setupEventsHandlers(); - - elizaLogger.success(`✅ Telegram account client successfully started for character ${this.runtime.character.name}`); - } catch (error) { - elizaLogger.error("❌ Failed to launch Telegram account:", error); - throw error; - } - } - - private async initializeAccount(): Promise { - // Prepare telegram account client - this.client = new TelegramClient( - new StoreSession('./data/telegram_account_session'), - this.telegramAccountConfig.TELEGRAM_ACCOUNT_APP_ID, - this.telegramAccountConfig.TELEGRAM_ACCOUNT_APP_HASH, - { - connectionRetries: 5, - deviceModel: this.telegramAccountConfig.TELEGRAM_ACCOUNT_DEVICE_MODEL, - systemVersion: this.telegramAccountConfig.TELEGRAM_ACCOUNT_SYSTEM_VERSION, - } - ) - - // Account sign in or connect - await this.client.start({ - phoneNumber: this.telegramAccountConfig.TELEGRAM_ACCOUNT_PHONE, - password: null, - phoneCode: async () => await input.text('Enter received Telegram code: '), - onError: (err) => console.log(err), - }); - - this.client.session.save(); - - // Testing connection - this.account = await this.client.getEntity('me') as Api.User; - } - - private setupEventsHandlers(): void { - this.newMessageHandler() - } - - private newMessageHandler() { - this.client.addEventHandler(async (event: NewMessageEvent) => { - try { - if (!event.message.message) return; - - // Get sender and chat full object - const sender = await event.message.getSender(); - if (sender.className != 'User') return; - - const chat = (await event.message.getChat()); - if (chat.className != 'User' && chat.className != 'Chat' && (chat.className == 'Channel' && !chat.megagroup)) return; - - // Get user full name - let senderName = sender.firstName; - if (sender.lastName) senderName += ' ' + sender.lastName; - - // Get reply message - let replyMessage = null; - if (event.message.replyTo) { - replyMessage = await event.message.getReplyMessage() - } - - // Convert IDs to UUIDs - const userUUID = stringToUuid(`tg-${sender.id.toString()}`) as UUID; - const roomUUID = stringToUuid(`tg-${chat.id.toString()}` + "-" + this.runtime.agentId) as UUID; - const messageUUID = stringToUuid(`tg-message-${roomUUID}-${event.message.id.toString()}` + "-" + this.runtime.agentId) as UUID; - const agentUUID = this.runtime.agentId; - const replyMessageUUID = replyMessage ? stringToUuid(`tg-message-${roomUUID}-${replyMessage.id.toString()}` + "-" + this.runtime.agentId) as UUID : null; - - // Ensure connection - await this.runtime.ensureConnection( - userUUID, - roomUUID, - sender.username, - senderName, - "telegram-account", - ); - - if (!event.message.message) return; - - // Create content - const content: Content = { - text: event.message.message, - inReplyTo: replyMessageUUID, - source: "telegram-account", - }; - - // Create memory for the message - const memory: Memory = { - id: messageUUID, - agentId: agentUUID, - userId: userUUID, - roomId: roomUUID, - content, - createdAt: event.message.date * 1000, - embedding: getEmbeddingZeroVector(), - }; - - // Create memory - await this.runtime.messageManager.createMemory(memory); - - // Update state with the new memory - let state = await this.runtime.composeState(memory); - state = await this.runtime.updateRecentMessageState(state); - - // Decide whether to respond - const shouldRespond = await this._shouldRespond(event.message, chat, replyMessage); - - // Send response in chunks - const callback: HandlerCallback = async (content: Content) => { - const sentMessages = await this.sendMessageInChunks( - chat.id, - content, - chat.className == 'User' ? null : event.message.id - ); - - if (sentMessages) { - const memories: Memory[] = []; - - // Create memories for each sent message - for (let i = 0; i < sentMessages.length; i++) { - const sentMessage = sentMessages[i]; - const isLastMessage = i === sentMessages.length - 1; - - const memory: Memory = { - id: stringToUuid(`tg-message-${roomUUID}-${sentMessage.id.toString()}` + "-" + this.runtime.agentId) as UUID, - agentId: agentUUID, - userId: agentUUID, - roomId: roomUUID, - content: { - ...content, - text: sentMessage.message, - inReplyTo: messageUUID, - }, - createdAt: sentMessage.date * 1000, - embedding: getEmbeddingZeroVector(), - }; - - // Set action to CONTINUE for all messages except the last one - // For the last message, use the original action from the response content - memory.content.action = !isLastMessage - ? "CONTINUE" - : content.action; - - await this.runtime.messageManager.createMemory(memory); - memories.push(memory); - } - - return memories; - } - }; - - if (shouldRespond) { - // Mark chat as read - await this.client.markAsRead(chat); - - // Show that a bot is typing a message - await this.client.invoke( - new Api.messages.SetTyping({ - peer: chat, - action: new Api.SendMessageTypingAction() - }) - ); - - // Generate response - const template = this.runtime.character?.templates - ?.messageHandlerTemplate || - getTelegramAccountMessageHandlerTemplate(this.account); - - const context = composeContext({ - state, - template: template, - }); - - const responseContent = await this._generateResponse( - memory, - state, - context - ); - - if (!responseContent || !responseContent.text) return; - - // Execute callback to send messages and log memories - const responseMessages = await callback(responseContent); - - // Update state after response - state = await this.runtime.updateRecentMessageState(state); - - // Handle any resulting actions - await this.runtime.processActions( - memory, - responseMessages, - state, - callback - ); - } - - await this.runtime.evaluate(memory, state, shouldRespond, callback); - } catch (error) { - elizaLogger.error("❌ Error handling message:", error); - elizaLogger.error("Error sending message:", error); - } - }, new NewMessage({ incoming: true })); - } - - // Decide if the bot should respond to the message - private async _shouldRespond( - message: Api.Message, - chat: Entity, - replyMessage?: Api.Message - ): Promise { - if (replyMessage) { - const replyFrom = replyMessage.fromId as Api.PeerUser; - if (replyFrom && replyFrom.userId.eq(this.account.id)) return true; - } - - if (chat.className == 'User') { - return true; - } - else { - return message.message.includes(`@${this.account.username}`) - } - } - - // Generate a response using AI - private async _generateResponse( - message: Memory, - _state: State, - context: string - ): Promise { - const { userId, roomId } = message; - - const response = await generateMessageResponse({ - runtime: this.runtime, - context, - modelClass: ModelClass.LARGE, - }); - - if (!response) { - console.error("❌ No response from generateMessageResponse"); - return null; - } - - await this.runtime.databaseAdapter.log({ - body: { message, context, response }, - userId, - roomId, - type: "response", - }); - - return response; - } - - // Send long messages in chunks - private async sendMessageInChunks( - chatId: bigInt.BigInteger, - content: Content, - replyToMessageId?: number - ) { - if (content.attachments && content.attachments.length > 0) { - content.attachments.map(async (attachment: Media) => { - await this.client.sendFile( - chatId, - { - file: attachment.url, - forceDocument: true, - caption: attachment.description, - replyTo: replyToMessageId - } - ); - - }); - } else { - const chunks = splitMessage(content.text); - const sentMessages = []; - - for (let i = 0; i < chunks.length; i++) { - const chunk = escapeMarkdown(chunks[i]); - - const sentMessage = await this.client.sendMessage( - chatId, - { - message: chunk, - parseMode: 'markdown', - replyTo: replyToMessageId - } - ); - - sentMessages.push(sentMessage); - } - - return sentMessages; - } - } -} diff --git a/packages/client-telegram-account/src/templates.ts b/packages/client-telegram-account/src/templates.ts deleted file mode 100644 index 1801fe7ce91c5..0000000000000 --- a/packages/client-telegram-account/src/templates.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { messageCompletionFooter } from "@elizaos/core"; -import {Api} from "telegram"; - -const telegramAccountMessageHandlerTemplate = ` -{{actionExamples}} -(Action examples are for reference only. Do not use the information from them in your response.) - -# Knowledge -{{knowledge}} - -# About {{agentName}}: -{{telegramAccountInfo}} -{{bio}} -{{lore}} - -{{characterMessageExamples}} - -{{providers}} - -{{attachments}} - -{{actions}} - -# Capabilities -Note that {{agentName}} is capable of reading/seeing/hearing various forms of media, including images, videos, audio, plaintext and PDFs. Recent attachments have been included above under the "Attachments" section. - -{{messageDirections}} - -{{recentMessages}} - -# Task: Generate a reply in the voice, style and perspective of {{agentName}} while using the thread above as additional context. You are replying on Telegram. - -{{formattedConversation}} -` + messageCompletionFooter; - -export function getTelegramAccountMessageHandlerTemplate(account: Api.User): string { - return telegramAccountMessageHandlerTemplate.replace('{{telegramAccountInfo}}', ` -Username: @${account.username} -First name: ${account.firstName} -Last name: ${account.lastName} -Telegram ID: ${account.id} - `); -} diff --git a/packages/client-telegram-account/src/utils.ts b/packages/client-telegram-account/src/utils.ts deleted file mode 100644 index 20dac374fed92..0000000000000 --- a/packages/client-telegram-account/src/utils.ts +++ /dev/null @@ -1,47 +0,0 @@ -export function escapeMarkdown(text: string): string { - // Don't escape if it's a code block - if (text.startsWith("```") && text.endsWith("```")) { - return text; - } - - // Split the text by code blocks - const parts = text.split(/(```[\s\S]*?```)/g); - - return parts - .map((part, index) => { - // If it's a code block (odd indices in the split result will be code blocks) - if (index % 2 === 1) { - return part; - } - // For regular text, only escape characters that need escaping in Markdown - return ( - part - // First preserve any intended inline code spans - .replace(/`.*?`/g, (match) => match) - // Then only escape the minimal set of special characters that need escaping in Markdown mode - .replace(/([*_`\\])/g, "\\$1") - ); - }) - .join(""); -} - -/** - * Splits a message into chunks that fit within Telegram's message length limit - */ -export function splitMessage(text: string, maxLength: number = 3000): string[] { - const chunks: string[] = []; - let currentChunk = ""; - - const lines = text.split("\n"); - for (const line of lines) { - if (currentChunk.length + line.length + 1 <= maxLength) { - currentChunk += (currentChunk ? "\n" : "") + line; - } else { - if (currentChunk) chunks.push(currentChunk); - currentChunk = line; - } - } - - if (currentChunk) chunks.push(currentChunk); - return chunks; -} diff --git a/packages/client-telegram-account/tsconfig.json b/packages/client-telegram-account/tsconfig.json deleted file mode 100644 index 005fbac9d3634..0000000000000 --- a/packages/client-telegram-account/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/client-telegram-account/tsup.config.ts b/packages/client-telegram-account/tsup.config.ts deleted file mode 100644 index e42bf4efeae22..0000000000000 --- a/packages/client-telegram-account/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/client-telegram-account/vitest.config.ts b/packages/client-telegram-account/vitest.config.ts deleted file mode 100644 index 2e60e80f5dc54..0000000000000 --- a/packages/client-telegram-account/vitest.config.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - include: ['__tests__/**/*.test.ts'], - coverage: { - reporter: ['text', 'json', 'html'], - }, - }, -}); diff --git a/packages/client-telegram/.npmignore b/packages/client-telegram/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/client-telegram/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/client-telegram/README.md b/packages/client-telegram/README.md deleted file mode 100644 index bf31180688b12..0000000000000 --- a/packages/client-telegram/README.md +++ /dev/null @@ -1,88 +0,0 @@ -# Telegram Client Plugin for ElizaOS - -This plugin integrates a Telegram client with ElizaOS, allowing characters in ElizaOS to interact via Telegram. It provides an easy setup for starting the Telegram client using the provided bot token and includes basic lifecycle management. - -## Features - -- **Seamless Telegram Integration**: Connects ElizaOS characters to Telegram through the bot API. -- **Configuration Validation**: Ensures required settings are properly configured before starting. -- **Startup Logging**: Logs successful initialization of the Telegram client for better debugging. -- **Future-proof Design**: Provides a basic structure for stopping the client (currently unsupported). - -## Configuration Options - -Here are the available configuration options for the `character.json` file: - -| Key | Type | Default | Description | -| ------------------------------- | ------- | -------- | --------------------------------------------------------------------------------------------------- | -| `clients` | Array | Required | Specifies the client type (e.g., `["telegram"]`). | -| `allowDirectMessages` | Boolean | `false` | Determines whether the bot should respond to direct messages (DMs). | -| `shouldOnlyJoinInAllowedGroups` | Boolean | `false` | Ensures the bot only joins and responds in specified groups. | -| `allowedGroupIds` | Array | `[]` | Lists the group IDs the bot is allowed to interact with (requires `shouldOnlyJoinInAllowedGroups`). | -| `messageTrackingLimit` | Integer | `100` | Sets the maximum number of messages to track in memory for each chat. | -| `templates` | Object | `{}` | Allows customization of response templates for different message scenarios. | - -## Example `.character.json` - -Below is an example configuration file with all options: - -```json -{ - "clients": ["telegram"], - "allowDirectMessages": true, - "shouldOnlyJoinInAllowedGroups": true, - "allowedGroupIds": ["-123456789", "-987654321"], - "messageTrackingLimit": 100, - "templates": { - "telegramMessageHandlerTemplate": "Your custom template here" - }, - "secrets": { - "key": "" - } -} -``` - -## How to Modify Settings - -1. Locate the `character.json` file in your project directory. -2. Update the file with the desired configuration options as shown in the example above. -3. Save the file and restart the bot for the changes to take effect. - -## Best Practices - -- **Production**: Restrict bot access with `shouldOnlyJoinInAllowedGroups: true` and specify `allowedGroupIds` to ensure security. -- **Token Management**: Always keep your bot token and backend tokens secure and never expose them in public repositories. - -## Pre-Requisites - -1. Add the bot token to the `.env` file in the project root: - -```env -TELEGRAM_BOT_TOKEN=your-bot-token -``` - -2. Add the same token to your character configuration file: - -Create or modify `characters/your-character.json`: - -```json -{ - "clients": ["telegram"], - "secrets": { - "key": "" - } -} -``` - -## From the project root: - -```bash -npm run dev -``` - -## Or using pnpm: - -```bash -pnpm start --character="characters/your-character.json" -``` - diff --git a/packages/client-telegram/__tests__/messageManager.test.ts b/packages/client-telegram/__tests__/messageManager.test.ts deleted file mode 100644 index 4d2b39bdbf085..0000000000000 --- a/packages/client-telegram/__tests__/messageManager.test.ts +++ /dev/null @@ -1,168 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { MessageManager } from '../src/messageManager'; -import type { IAgentRuntime } from '@elizaos/core'; -import { type Context, Telegraf } from 'telegraf'; -import { Message } from '@telegraf/types'; - -// Mock Telegraf -vi.mock('telegraf', () => { - return { - Telegraf: vi.fn().mockImplementation(() => ({ - telegram: { - sendMessage: vi.fn().mockResolvedValue({ message_id: 123 }), - sendChatAction: vi.fn().mockResolvedValue(true), - sendPhoto: vi.fn().mockResolvedValue({ message_id: 124 }) - } - })) - }; -}); - -// Mock fs module for image handling -vi.mock('fs', () => ({ - default: { - existsSync: vi.fn().mockReturnValue(true), - createReadStream: vi.fn().mockReturnValue({}) - } -})); - -describe('MessageManager', () => { - let mockRuntime: IAgentRuntime; - let mockBot: Telegraf; - let messageManager: MessageManager; - const CHAT_ID = 123456789; - - beforeEach(() => { - mockRuntime = { - getSetting: vi.fn(), - getCharacter: vi.fn(), - getFlow: vi.fn(), - getPlugin: vi.fn(), - getPlugins: vi.fn(), - getSafePlugins: vi.fn(), - hasPlugin: vi.fn(), - registerPlugin: vi.fn(), - removePlugin: vi.fn(), - setCharacter: vi.fn(), - setFlow: vi.fn(), - databaseAdapter: { - log: vi.fn().mockResolvedValue(undefined) - } - }; - - mockBot = new Telegraf('mock_token') as any; - messageManager = new MessageManager(mockBot, mockRuntime); - vi.clearAllMocks(); - }); - - describe('message sending', () => { - it('should send a message successfully', async () => { - const ctx = { - telegram: mockBot.telegram, - chat: { id: CHAT_ID } - } as Context; - - const content = { text: 'Test message' }; - const result = await messageManager.sendMessageInChunks(ctx, content); - - expect(mockBot.telegram.sendMessage).toHaveBeenCalledWith( - CHAT_ID, - content.text, - expect.objectContaining({ - parse_mode: 'Markdown' - }) - ); - expect(result[0].message_id).toBe(123); - }); - - it('should split long messages', async () => { - const ctx = { - telegram: mockBot.telegram, - chat: { id: CHAT_ID } - } as Context; - - // Create a message that's just over 4096 characters (Telegram's limit) - const message1 = 'a'.repeat(4096); - const message2 = 'b'.repeat(100); - const content = { text: `${message1}\n${message2}` }; - await messageManager.sendMessageInChunks(ctx, content); - - expect(mockBot.telegram.sendMessage).toHaveBeenCalledTimes(2); - expect(mockBot.telegram.sendMessage).toHaveBeenNthCalledWith( - 1, - CHAT_ID, - message1, - expect.objectContaining({ parse_mode: 'Markdown' }) - ); - expect(mockBot.telegram.sendMessage).toHaveBeenNthCalledWith( - 2, - CHAT_ID, - message2, - expect.objectContaining({ parse_mode: 'Markdown' }) - ); - }); - }); - - describe('image handling', () => { - it('should send an image from URL', async () => { - const ctx = { - telegram: mockBot.telegram, - chat: { id: CHAT_ID } - } as Context; - - const imageUrl = 'https://example.com/image.jpg'; - await messageManager.sendImage(ctx, imageUrl); - - expect(mockBot.telegram.sendPhoto).toHaveBeenCalledWith( - CHAT_ID, - imageUrl, - expect.any(Object) - ); - }); - - it('should send an image from local file', async () => { - const ctx = { - telegram: mockBot.telegram, - chat: { id: CHAT_ID } - } as Context; - - const localPath = '/path/to/image.jpg'; - await messageManager.sendImage(ctx, localPath); - - expect(mockBot.telegram.sendPhoto).toHaveBeenCalledWith( - CHAT_ID, - expect.objectContaining({ source: expect.any(Object) }), - expect.any(Object) - ); - }); - }); - - describe('error handling', () => { - it('should handle send message errors', async () => { - const ctx = { - telegram: mockBot.telegram, - chat: { id: CHAT_ID } - } as Context; - - const error = new Error('Network error'); - mockBot.telegram.sendMessage.mockRejectedValueOnce(error); - - await expect(messageManager.sendMessageInChunks(ctx, { text: 'test' })) - .rejects - .toThrow('Network error'); - }); - - it('should handle image send errors', async () => { - const ctx = { - telegram: mockBot.telegram, - chat: { id: CHAT_ID } - } as Context; - - const error = new Error('Image send failed'); - mockBot.telegram.sendPhoto.mockRejectedValueOnce(error); - - await messageManager.sendImage(ctx, 'test.jpg'); - // Should not throw, but log error - expect(mockBot.telegram.sendPhoto).toHaveBeenCalled(); - }); - }); -}); diff --git a/packages/client-telegram/__tests__/telegramClient.test.ts b/packages/client-telegram/__tests__/telegramClient.test.ts deleted file mode 100644 index c40785ed927f4..0000000000000 --- a/packages/client-telegram/__tests__/telegramClient.test.ts +++ /dev/null @@ -1,91 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { TelegramClient } from '../src/telegramClient'; -import type { IAgentRuntime } from '@elizaos/core'; - -// Mock Telegraf -vi.mock('telegraf', () => { - const mockBot = { - launch: vi.fn().mockResolvedValue(undefined), - stop: vi.fn().mockResolvedValue(undefined), - telegram: { - getMe: vi.fn().mockResolvedValue({ username: 'test_bot' }) - }, - on: vi.fn(), - command: vi.fn(), - use: vi.fn(), - catch: vi.fn() - }; - - return { - Telegraf: vi.fn(() => mockBot) - }; -}); - -describe('TelegramClient', () => { - let mockRuntime: IAgentRuntime; - let client: TelegramClient; - const TEST_BOT_TOKEN = 'test_bot_token'; - - beforeEach(() => { - mockRuntime = { - getSetting: vi.fn((key: string) => { - switch (key) { - case 'BACKEND_URL': - return 'http://localhost:3000'; - case 'BACKEND_TOKEN': - return 'test_backend_token'; - case 'TG_TRADER': - return 'false'; - default: - return undefined; - } - }), - getCharacter: vi.fn(), - getFlow: vi.fn(), - getPlugin: vi.fn(), - getPlugins: vi.fn(), - getSafePlugins: vi.fn(), - hasPlugin: vi.fn(), - registerPlugin: vi.fn(), - removePlugin: vi.fn(), - setCharacter: vi.fn(), - setFlow: vi.fn() - }; - - client = new TelegramClient(mockRuntime, TEST_BOT_TOKEN); - }); - - describe('initialization', () => { - it('should create a new instance with the provided runtime and token', () => { - expect(client).toBeInstanceOf(TelegramClient); - }); - - it('should initialize with correct settings from runtime', () => { - expect(mockRuntime.getSetting).toHaveBeenCalledWith('BACKEND_URL'); - expect(mockRuntime.getSetting).toHaveBeenCalledWith('BACKEND_TOKEN'); - expect(mockRuntime.getSetting).toHaveBeenCalledWith('TG_TRADER'); - }); - }); - - describe('bot lifecycle', () => { - it('should start the bot successfully', async () => { - const mockBot = client['bot']; - const launchSpy = vi.spyOn(mockBot, 'launch'); - const getMeSpy = vi.spyOn(mockBot.telegram, 'getMe'); - - await client.start(); - - expect(launchSpy).toHaveBeenCalledWith({ dropPendingUpdates: true }); - expect(getMeSpy).toHaveBeenCalled(); - }); - - it('should get bot info after launch', async () => { - const mockBot = client['bot']; - const getMeSpy = vi.spyOn(mockBot.telegram, 'getMe'); - - await client.start(); - - expect(getMeSpy).toHaveBeenCalled(); - }); - }); -}); diff --git a/packages/client-telegram/__tests__/utils.test.ts b/packages/client-telegram/__tests__/utils.test.ts deleted file mode 100644 index da56e232c98b4..0000000000000 --- a/packages/client-telegram/__tests__/utils.test.ts +++ /dev/null @@ -1,70 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import { cosineSimilarity, escapeMarkdown, splitMessage } from '../src/utils'; - -describe('Telegram Utils', () => { - describe('cosineSimilarity', () => { - it('should calculate similarity between two texts', () => { - const text1 = 'hello world'; - const text2 = 'hello there'; - const similarity = cosineSimilarity(text1, text2); - expect(similarity).toBeGreaterThan(0); - expect(similarity).toBeLessThan(1); - }); - - it('should handle identical texts', () => { - const text = 'hello world test'; - expect(cosineSimilarity(text, text)).toBeCloseTo(1, 5); - }); - - it('should handle completely different texts', () => { - const text1 = 'hello world'; - const text2 = 'goodbye universe'; - expect(cosineSimilarity(text1, text2)).toBe(0); - }); - - it('should handle three-way comparison', () => { - const text1 = 'hello world'; - const text2 = 'hello there'; - const text3 = 'hi world'; - const similarity = cosineSimilarity(text1, text2, text3); - expect(similarity).toBeGreaterThan(0); - expect(similarity).toBeLessThan(1); - }); - }); - - describe('escapeMarkdown', () => { - it('should escape markdown special characters', () => { - const input = '*bold* _italic_ `code`'; - const escaped = escapeMarkdown(input); - expect(escaped).toBe('\\*bold\\* \\_italic\\_ \\`code\\`'); - }); - - it('should handle text without special characters', () => { - const input = 'Hello World 123'; - expect(escapeMarkdown(input)).toBe(input); - }); - - it('should handle empty string', () => { - expect(escapeMarkdown('')).toBe(''); - }); - }); - - describe('splitMessage', () => { - it('should not split message within limit', () => { - const message = 'Hello World'; - const chunks = splitMessage(message, 4096); - expect(chunks).toEqual(['Hello World']); - }); - - it('should handle empty string', () => { - const chunks = splitMessage(''); - expect(chunks).toEqual([]); - }); - - it('should keep message intact if shorter than maxLength', () => { - const message = 'Hello World'; - const chunks = splitMessage(message, 6); - expect(chunks).toEqual(['Hello World']); - }); - }); -}); diff --git a/packages/client-telegram/package.json b/packages/client-telegram/package.json deleted file mode 100644 index 2b3bf77c74cea..0000000000000 --- a/packages/client-telegram/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "@elizaos/client-telegram", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "@telegraf/types": "7.1.0", - "telegraf": "4.16.3" - }, - "devDependencies": { - "tsup": "8.3.5", - "vitest": "1.6.1" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "test:watch": "vitest" - } -} diff --git a/packages/client-telegram/src/config/default.json5 b/packages/client-telegram/src/config/default.json5 deleted file mode 100644 index e2183ffbd64ba..0000000000000 --- a/packages/client-telegram/src/config/default.json5 +++ /dev/null @@ -1,18 +0,0 @@ -{ - bot: { - testEnv: false, - }, - server: { - https: false, - port: 3000, - static: false, - }, - gameServer: { - validateInitData: true, - inactivityTimeout: 300, - disconnectTimeout: 180, - fakeRoom: { - create: false, - }, - }, -} diff --git a/packages/client-telegram/src/constants.ts b/packages/client-telegram/src/constants.ts deleted file mode 100644 index e1b4be88223e6..0000000000000 --- a/packages/client-telegram/src/constants.ts +++ /dev/null @@ -1,37 +0,0 @@ -export const MESSAGE_CONSTANTS = { - MAX_MESSAGES: 50, - RECENT_MESSAGE_COUNT: 5, - CHAT_HISTORY_COUNT: 10, - DEFAULT_SIMILARITY_THRESHOLD: 0.6, - DEFAULT_SIMILARITY_THRESHOLD_FOLLOW_UPS: 0.4, - INTEREST_DECAY_TIME: 5 * 60 * 1000, // 5 minutes - PARTIAL_INTEREST_DECAY: 3 * 60 * 1000, // 3 minutes -} as const; - -export const TIMING_CONSTANTS = { - TEAM_MEMBER_DELAY: 1500, // 1.5 seconds - TEAM_MEMBER_DELAY_MIN: 1000, // 1 second - TEAM_MEMBER_DELAY_MAX: 3000, // 3 seconds - LEADER_DELAY_MIN: 2000, // 2 seconds - LEADER_DELAY_MAX: 4000, // 4 seconds -} as const; - -export const RESPONSE_CHANCES = { - AFTER_LEADER: 0.5, // 50% chance to respond after leader -} as const; - -export const TEAM_COORDINATION = { - KEYWORDS: [ - "team", - "all agents", - "team update", - "gm team", - "hello team", - "hey team", - "hi team", - "morning team", - "evening team", - "night team", - "update team", - ], -} as const; diff --git a/packages/client-telegram/src/environment.ts b/packages/client-telegram/src/environment.ts deleted file mode 100644 index 9ae9c8de55a6a..0000000000000 --- a/packages/client-telegram/src/environment.ts +++ /dev/null @@ -1,32 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -export const telegramEnvSchema = z.object({ - TELEGRAM_BOT_TOKEN: z.string().min(1, "Telegram bot token is required"), -}); - -export type TelegramConfig = z.infer; - -export async function validateTelegramConfig( - runtime: IAgentRuntime -): Promise { - try { - const config = { - TELEGRAM_BOT_TOKEN: - runtime.getSetting("TELEGRAM_BOT_TOKEN") || - process.env.TELEGRAM_BOT_TOKEN, - }; - - return telegramEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `Telegram configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/client-telegram/src/getOrCreateRecommenderInBe.ts b/packages/client-telegram/src/getOrCreateRecommenderInBe.ts deleted file mode 100644 index f86085cc1fcd4..0000000000000 --- a/packages/client-telegram/src/getOrCreateRecommenderInBe.ts +++ /dev/null @@ -1,40 +0,0 @@ -export async function getOrCreateRecommenderInBe( - recommenderId: string, - username: string, - backendToken: string, - backend: string, - retries = 3, - delayMs = 2000 -) { - for (let attempt = 1; attempt <= retries; attempt++) { - try { - const response = await fetch( - `${backend}/api/updaters/getOrCreateRecommender`, - { - method: "POST", - headers: { - "Content-Type": "application/json", - Authorization: `Bearer ${backendToken}`, - }, - body: JSON.stringify({ - recommenderId: recommenderId, - username: username, - }), - } - ); - const data = await response.json(); - return data; - } catch (error) { - console.error( - `Attempt ${attempt} failed: Error getting or creating recommender in backend`, - error - ); - if (attempt < retries) { - console.log(`Retrying in ${delayMs} ms...`); - await new Promise((resolve) => setTimeout(resolve, delayMs)); - } else { - console.error("All attempts failed."); - } - } - } -} diff --git a/packages/client-telegram/src/index.ts b/packages/client-telegram/src/index.ts deleted file mode 100644 index 8f065a1c263fa..0000000000000 --- a/packages/client-telegram/src/index.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { elizaLogger } from "@elizaos/core"; -import type { Client, IAgentRuntime } from "@elizaos/core"; -import { TelegramClient } from "./telegramClient.ts"; -import { validateTelegramConfig } from "./environment.ts"; - -export const TelegramClientInterface: Client = { - start: async (runtime: IAgentRuntime) => { - await validateTelegramConfig(runtime); - - const tg = new TelegramClient( - runtime, - runtime.getSetting("TELEGRAM_BOT_TOKEN") - ); - - await tg.start(); - - elizaLogger.success( - `✅ Telegram client successfully started for character ${runtime.character.name}` - ); - return tg; - }, - stop: async (_runtime: IAgentRuntime) => { - elizaLogger.warn("Telegram client does not support stopping yet"); - }, -}; - -export default TelegramClientInterface; diff --git a/packages/client-telegram/src/messageManager.ts b/packages/client-telegram/src/messageManager.ts deleted file mode 100644 index 648e35a85c9e5..0000000000000 --- a/packages/client-telegram/src/messageManager.ts +++ /dev/null @@ -1,1426 +0,0 @@ -import type { Message } from "@telegraf/types"; -import type { Context, Telegraf } from "telegraf"; -import { - composeContext, - elizaLogger, - ServiceType, - composeRandomUser, -} from "@elizaos/core"; -import { getEmbeddingZeroVector } from "@elizaos/core"; -import { - type Content, - type HandlerCallback, - type IAgentRuntime, - type IImageDescriptionService, - type Memory, - ModelClass, - type State, - type UUID, - type Media, -} from "@elizaos/core"; -import { stringToUuid } from "@elizaos/core"; -import { generateMessageResponse, generateShouldRespond } from "@elizaos/core"; -import { - telegramMessageHandlerTemplate, - telegramShouldRespondTemplate, - telegramAutoPostTemplate, - telegramPinnedMessageTemplate, -} from "./templates"; -import { cosineSimilarity, escapeMarkdown } from "./utils"; -import { - MESSAGE_CONSTANTS, - TIMING_CONSTANTS, - RESPONSE_CHANCES, - TEAM_COORDINATION, -} from "./constants"; - -import fs from "fs"; - -enum MediaType { - PHOTO = "photo", - VIDEO = "video", - DOCUMENT = "document", - AUDIO = "audio", - ANIMATION = "animation", -} - -const MAX_MESSAGE_LENGTH = 4096; // Telegram's max message length - -interface MessageContext { - content: string; - timestamp: number; -} - -interface AutoPostConfig { - enabled: boolean; - monitorTime: number; - inactivityThreshold: number; // milliseconds - mainChannelId: string; - pinnedMessagesGroups: string[]; // Instead of announcementChannelIds - lastAutoPost?: number; - minTimeBetweenPosts?: number; -} - -export type InterestChats = { - [key: string]: { - currentHandler: string | undefined; - lastMessageSent: number; - messages: { userId: UUID; userName: string; content: Content }[]; - previousContext?: MessageContext; - contextSimilarityThreshold?: number; - }; -}; - -export class MessageManager { - public bot: Telegraf; - private runtime: IAgentRuntime; - private interestChats: InterestChats = {}; - private teamMemberUsernames: Map = new Map(); - - private autoPostConfig: AutoPostConfig; - private lastChannelActivity: { [channelId: string]: number } = {}; - private autoPostInterval: NodeJS.Timeout; - - constructor(bot: Telegraf, runtime: IAgentRuntime) { - this.bot = bot; - this.runtime = runtime; - - this._initializeTeamMemberUsernames().catch((error) => - elizaLogger.error( - "Error initializing team member usernames:", - error - ) - ); - - this.autoPostConfig = { - enabled: - this.runtime.character.clientConfig?.telegram?.autoPost - ?.enabled || false, - monitorTime: - this.runtime.character.clientConfig?.telegram?.autoPost - ?.monitorTime || 300000, - inactivityThreshold: - this.runtime.character.clientConfig?.telegram?.autoPost - ?.inactivityThreshold || 3600000, - mainChannelId: - this.runtime.character.clientConfig?.telegram?.autoPost - ?.mainChannelId, - pinnedMessagesGroups: - this.runtime.character.clientConfig?.telegram?.autoPost - ?.pinnedMessagesGroups || [], - minTimeBetweenPosts: - this.runtime.character.clientConfig?.telegram?.autoPost - ?.minTimeBetweenPosts || 7200000, - }; - - if (this.autoPostConfig.enabled) { - this._startAutoPostMonitoring(); - } - } - - private async _initializeTeamMemberUsernames(): Promise { - if (!this.runtime.character.clientConfig?.telegram?.isPartOfTeam) - return; - - const teamAgentIds = - this.runtime.character.clientConfig.telegram.teamAgentIds || []; - - for (const id of teamAgentIds) { - try { - const chat = await this.bot.telegram.getChat(id); - if ("username" in chat && chat.username) { - this.teamMemberUsernames.set(id, chat.username); - elizaLogger.info( - `Cached username for team member ${id}: ${chat.username}` - ); - } - } catch (error) { - elizaLogger.error( - `Error getting username for team member ${id}:`, - error - ); - } - } - } - - private _startAutoPostMonitoring(): void { - // Wait for bot to be ready - if (this.bot.botInfo) { - elizaLogger.info( - "[AutoPost Telegram] Bot ready, starting monitoring" - ); - this._initializeAutoPost(); - } else { - elizaLogger.info( - "[AutoPost Telegram] Bot not ready, waiting for ready event" - ); - this.bot.telegram.getMe().then(() => { - elizaLogger.info( - "[AutoPost Telegram] Bot ready, starting monitoring" - ); - this._initializeAutoPost(); - }); - } - } - - private _initializeAutoPost(): void { - // Give the bot a moment to fully initialize - setTimeout(() => { - // Monitor with random intervals between 2-6 hours - // Monitor with random intervals between 2-6 hours - this.autoPostInterval = setInterval(() => { - this._checkChannelActivity(); - }, Math.floor(Math.random() * (4 * 60 * 60 * 1000) + 2 * 60 * 60 * 1000)); - }, 5000); - } - - private async _checkChannelActivity(): Promise { - if (!this.autoPostConfig.enabled || !this.autoPostConfig.mainChannelId) - return; - - try { - // Get last message time - const now = Date.now(); - const lastActivityTime = - this.lastChannelActivity[this.autoPostConfig.mainChannelId] || - 0; - const timeSinceLastMessage = now - lastActivityTime; - const timeSinceLastAutoPost = - now - (this.autoPostConfig.lastAutoPost || 0); - - // Add some randomness to the inactivity threshold (±30 minutes) - const randomThreshold = - this.autoPostConfig.inactivityThreshold + - (Math.random() * 1800000 - 900000); - - // Check if we should post - if ( - timeSinceLastMessage > randomThreshold && - timeSinceLastAutoPost > - (this.autoPostConfig.minTimeBetweenPosts || 0) - ) { - try { - const roomId = stringToUuid( - this.autoPostConfig.mainChannelId + - "-" + - this.runtime.agentId - ); - const memory = { - id: stringToUuid(`autopost-${Date.now()}`), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - roomId, - content: { - text: "AUTO_POST_ENGAGEMENT", - source: "telegram", - }, - embedding: getEmbeddingZeroVector(), - createdAt: Date.now(), - }; - - let state = await this.runtime.composeState(memory, { - telegramBot: this.bot, - agentName: this.runtime.character.name, - }); - - const context = composeContext({ - state, - template: - this.runtime.character.templates - ?.telegramAutoPostTemplate || - telegramAutoPostTemplate, - }); - - const responseContent = await this._generateResponse( - memory, - state, - context - ); - if (!responseContent?.text) return; - - console.log( - `[Auto Post Telegram] Recent Messages: ${responseContent}` - ); - - // Send message directly using telegram bot - const messages = await Promise.all( - this.splitMessage(responseContent.text.trim()).map( - (chunk) => - this.bot.telegram.sendMessage( - this.autoPostConfig.mainChannelId, - chunk - ) - ) - ); - - // Create and store memories - const memories = messages.map((m) => ({ - id: stringToUuid( - roomId + "-" + m.message_id.toString() - ), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - content: { - ...responseContent, - text: m.text, - }, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: m.date * 1000, - })); - - for (const m of memories) { - await this.runtime.messageManager.createMemory(m); - } - - this.autoPostConfig.lastAutoPost = Date.now(); - state = await this.runtime.updateRecentMessageState(state); - await this.runtime.evaluate(memory, state, true); - } catch (error) { - elizaLogger.warn("[AutoPost Telegram] Error:", error); - } - } else { - elizaLogger.warn( - "[AutoPost Telegram] Activity within threshold. Not posting." - ); - } - } catch (error) { - elizaLogger.warn( - "[AutoPost Telegram] Error checking channel activity:", - error - ); - } - } - - private async _monitorPinnedMessages(ctx: Context): Promise { - if (!this.autoPostConfig.pinnedMessagesGroups.length) { - elizaLogger.warn( - "[AutoPost Telegram] Auto post config no pinned message groups" - ); - return; - } - - if (!ctx.message || !("pinned_message" in ctx.message)) { - return; - } - - const pinnedMessage = ctx.message.pinned_message; - if (!pinnedMessage) return; - - if ( - !this.autoPostConfig.pinnedMessagesGroups.includes( - ctx.chat.id.toString() - ) - ) - return; - - const mainChannel = this.autoPostConfig.mainChannelId; - if (!mainChannel) return; - - try { - elizaLogger.info( - `[AutoPost Telegram] Processing pinned message in group ${ctx.chat.id}` - ); - - // Explicitly type and handle message content - const messageContent: string = - "text" in pinnedMessage && - typeof pinnedMessage.text === "string" - ? pinnedMessage.text - : "caption" in pinnedMessage && - typeof pinnedMessage.caption === "string" - ? pinnedMessage.caption - : "New pinned message"; - - const roomId = stringToUuid( - mainChannel + "-" + this.runtime.agentId - ); - const memory = { - id: stringToUuid(`pinned-${Date.now()}`), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - roomId, - content: { - text: messageContent, - source: "telegram", - metadata: { - messageId: pinnedMessage.message_id, - pinnedMessageData: pinnedMessage, - }, - }, - embedding: getEmbeddingZeroVector(), - createdAt: Date.now(), - }; - - let state = await this.runtime.composeState(memory, { - telegramBot: this.bot, - pinnedMessageContent: messageContent, - pinnedGroupId: ctx.chat.id.toString(), - agentName: this.runtime.character.name, - }); - - const context = composeContext({ - state, - template: - this.runtime.character.templates - ?.telegramPinnedMessageTemplate || - telegramPinnedMessageTemplate, - }); - - const responseContent = await this._generateResponse( - memory, - state, - context - ); - if (!responseContent?.text) return; - - // Send message using telegram bot - const messages = await Promise.all( - this.splitMessage(responseContent.text.trim()).map((chunk) => - this.bot.telegram.sendMessage(mainChannel, chunk) - ) - ); - - const memories = messages.map((m) => ({ - id: stringToUuid(roomId + "-" + m.message_id.toString()), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - content: { - ...responseContent, - text: m.text, - }, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: m.date * 1000, - })); - - for (const m of memories) { - await this.runtime.messageManager.createMemory(m); - } - - state = await this.runtime.updateRecentMessageState(state); - await this.runtime.evaluate(memory, state, true); - } catch (error) { - elizaLogger.warn( - `[AutoPost Telegram] Error processing pinned message:`, - error - ); - } - } - - private _getTeamMemberUsername(id: string): string | undefined { - return this.teamMemberUsernames.get(id); - } - - private _getNormalizedUserId(id: string | number): string { - return id.toString().replace(/[^0-9]/g, ""); - } - - private _isTeamMember(userId: string | number): boolean { - const teamConfig = this.runtime.character.clientConfig?.telegram; - if (!teamConfig?.isPartOfTeam || !teamConfig.teamAgentIds) return false; - - const normalizedUserId = this._getNormalizedUserId(userId); - return teamConfig.teamAgentIds.some( - (teamId) => this._getNormalizedUserId(teamId) === normalizedUserId - ); - } - - private _isTeamLeader(): boolean { - return ( - this.bot.botInfo?.id.toString() === - this.runtime.character.clientConfig?.telegram?.teamLeaderId - ); - } - - private _isTeamCoordinationRequest(content: string): boolean { - const contentLower = content.toLowerCase(); - return TEAM_COORDINATION.KEYWORDS?.some((keyword) => - contentLower.includes(keyword.toLowerCase()) - ); - } - - private _isRelevantToTeamMember( - content: string, - chatId: string, - lastAgentMemory: Memory | null = null - ): boolean { - const teamConfig = this.runtime.character.clientConfig?.telegram; - - // Check leader's context based on last message - if (this._isTeamLeader() && lastAgentMemory?.content.text) { - const timeSinceLastMessage = Date.now() - lastAgentMemory.createdAt; - if (timeSinceLastMessage > MESSAGE_CONSTANTS.INTEREST_DECAY_TIME) { - return false; - } - - const similarity = cosineSimilarity( - content.toLowerCase(), - lastAgentMemory.content.text.toLowerCase() - ); - - return ( - similarity >= - MESSAGE_CONSTANTS.DEFAULT_SIMILARITY_THRESHOLD_FOLLOW_UPS - ); - } - - // Check team member keywords - if (!teamConfig?.teamMemberInterestKeywords?.length) { - return false; // If no keywords defined, only leader maintains conversation - } - - // Check if content matches any team member keywords - return teamConfig.teamMemberInterestKeywords.some((keyword) => - content.toLowerCase().includes(keyword.toLowerCase()) - ); - } - - private async _analyzeContextSimilarity( - currentMessage: string, - previousContext?: MessageContext, - agentLastMessage?: string - ): Promise { - if (!previousContext) return 1; - - const timeDiff = Date.now() - previousContext.timestamp; - const timeWeight = Math.max(0, 1 - timeDiff / (5 * 60 * 1000)); - - const similarity = cosineSimilarity( - currentMessage.toLowerCase(), - previousContext.content.toLowerCase(), - agentLastMessage?.toLowerCase() - ); - - return similarity * timeWeight; - } - - private async _shouldRespondBasedOnContext( - message: Message, - chatState: InterestChats[string] - ): Promise { - const messageText = - "text" in message - ? message.text - : "caption" in message - ? message.caption - : ""; - - if (!messageText) return false; - - // Always respond if mentioned - if (this._isMessageForMe(message)) return true; - - // If we're not the current handler, don't respond - if (chatState?.currentHandler !== this.bot.botInfo?.id.toString()) - return false; - - // Check if we have messages to compare - if (!chatState.messages?.length) return false; - - // Get last user message (not from the bot) - const lastUserMessage = [...chatState.messages].reverse().find( - (m, index) => - index > 0 && // Skip first message (current) - m.userId !== this.runtime.agentId - ); - - if (!lastUserMessage) return false; - - const lastSelfMemories = await this.runtime.messageManager.getMemories({ - roomId: stringToUuid( - message.chat.id.toString() + "-" + this.runtime.agentId - ), - unique: false, - count: 5, - }); - - const lastSelfSortedMemories = lastSelfMemories - ?.filter((m) => m.userId === this.runtime.agentId) - .sort((a, b) => (b.createdAt || 0) - (a.createdAt || 0)); - - // Calculate context similarity - const contextSimilarity = await this._analyzeContextSimilarity( - messageText, - { - content: lastUserMessage.content.text || "", - timestamp: Date.now(), - }, - lastSelfSortedMemories?.[0]?.content?.text - ); - - const similarityThreshold = - this.runtime.character.clientConfig?.telegram - ?.messageSimilarityThreshold || - chatState.contextSimilarityThreshold || - MESSAGE_CONSTANTS.DEFAULT_SIMILARITY_THRESHOLD; - - return contextSimilarity >= similarityThreshold; - } - - private _isMessageForMe(message: Message): boolean { - const botUsername = this.bot.botInfo?.username; - if (!botUsername) return false; - - const messageText = - "text" in message - ? message.text - : "caption" in message - ? message.caption - : ""; - if (!messageText) return false; - - const isReplyToBot = - (message as any).reply_to_message?.from?.is_bot === true && - (message as any).reply_to_message?.from?.username === botUsername; - const isMentioned = messageText.includes(`@${botUsername}`); - const hasUsername = messageText - .toLowerCase() - .includes(botUsername.toLowerCase()); - - return ( - isReplyToBot || - isMentioned || - (!this.runtime.character.clientConfig?.telegram - ?.shouldRespondOnlyToMentions && - hasUsername) - ); - } - - private _checkInterest(chatId: string): boolean { - const chatState = this.interestChats[chatId]; - if (!chatState) return false; - - const lastMessage = chatState.messages[chatState.messages.length - 1]; - const timeSinceLastMessage = Date.now() - chatState.lastMessageSent; - - if (timeSinceLastMessage > MESSAGE_CONSTANTS.INTEREST_DECAY_TIME) { - delete this.interestChats[chatId]; - return false; - } else if ( - timeSinceLastMessage > MESSAGE_CONSTANTS.PARTIAL_INTEREST_DECAY - ) { - return this._isRelevantToTeamMember( - lastMessage?.content.text || "", - chatId - ); - } - - // Team leader specific checks - if (this._isTeamLeader() && chatState.messages.length > 0) { - if ( - !this._isRelevantToTeamMember( - lastMessage?.content.text || "", - chatId - ) - ) { - const recentTeamResponses = chatState.messages - .slice(-3) - .some( - (m) => - m.userId !== this.runtime.agentId && - this._isTeamMember(m.userId.toString()) - ); - - if (recentTeamResponses) { - delete this.interestChats[chatId]; - return false; - } - } - } - - return true; - } - - // Process image messages and generate descriptions - private async processImage( - message: Message - ): Promise<{ description: string } | null> { - try { - let imageUrl: string | null = null; - - elizaLogger.info(`Telegram Message: ${message}`); - - if ("photo" in message && message.photo?.length > 0) { - const photo = message.photo[message.photo.length - 1]; - const fileLink = await this.bot.telegram.getFileLink( - photo.file_id - ); - imageUrl = fileLink.toString(); - } else if ( - "document" in message && - message.document?.mime_type?.startsWith("image/") - ) { - const fileLink = await this.bot.telegram.getFileLink( - message.document.file_id - ); - imageUrl = fileLink.toString(); - } - - if (imageUrl) { - const imageDescriptionService = - this.runtime.getService( - ServiceType.IMAGE_DESCRIPTION - ); - const { title, description } = - await imageDescriptionService.describeImage(imageUrl); - return { description: `[Image: ${title}\n${description}]` }; - } - } catch (error) { - console.error("❌ Error processing image:", error); - } - - return null; - } - - // Decide if the bot should respond to the message - private async _shouldRespond( - message: Message, - state: State - ): Promise { - if ( - this.runtime.character.clientConfig?.telegram - ?.shouldRespondOnlyToMentions - ) { - return this._isMessageForMe(message); - } - - // Respond if bot is mentioned - if ( - "text" in message && - message.text?.includes(`@${this.bot.botInfo?.username}`) - ) { - elizaLogger.info(`Bot mentioned`); - return true; - } - - // Respond to private chats - if (message.chat.type === "private") { - return true; - } - - // Don't respond to images in group chats - if ( - "photo" in message || - ("document" in message && - message.document?.mime_type?.startsWith("image/")) - ) { - return false; - } - - const chatId = message.chat.id.toString(); - const chatState = this.interestChats[chatId]; - const messageText = - "text" in message - ? message.text - : "caption" in message - ? message.caption - : ""; - - // Check if team member has direct interest first - if ( - this.runtime.character.clientConfig?.telegram?.isPartOfTeam && - !this._isTeamLeader() && - this._isRelevantToTeamMember(messageText, chatId) - ) { - return true; - } - - // Team-based response logic - if (this.runtime.character.clientConfig?.telegram?.isPartOfTeam) { - // Team coordination - if (this._isTeamCoordinationRequest(messageText)) { - if (this._isTeamLeader()) { - return true; - } else { - const randomDelay = - Math.floor( - Math.random() * - (TIMING_CONSTANTS.TEAM_MEMBER_DELAY_MAX - - TIMING_CONSTANTS.TEAM_MEMBER_DELAY_MIN) - ) + TIMING_CONSTANTS.TEAM_MEMBER_DELAY_MIN; // 1-3 second random delay - await new Promise((resolve) => - setTimeout(resolve, randomDelay) - ); - return true; - } - } - - if ( - !this._isTeamLeader() && - this._isRelevantToTeamMember(messageText, chatId) - ) { - // Add small delay for non-leader responses - await new Promise((resolve) => - setTimeout(resolve, TIMING_CONSTANTS.TEAM_MEMBER_DELAY) - ); //1.5 second delay - - // If leader has responded in last few seconds, reduce chance of responding - if (chatState.messages?.length) { - const recentMessages = chatState.messages.slice( - -MESSAGE_CONSTANTS.RECENT_MESSAGE_COUNT - ); - const leaderResponded = recentMessages.some( - (m) => - m.userId === - this.runtime.character.clientConfig?.telegram - ?.teamLeaderId && - Date.now() - chatState.lastMessageSent < 3000 - ); - - if (leaderResponded) { - // 50% chance to respond if leader just did - return Math.random() > RESPONSE_CHANCES.AFTER_LEADER; - } - } - - return true; - } - - // If I'm the leader but message doesn't match my keywords, add delay and check for team responses - if ( - this._isTeamLeader() && - !this._isRelevantToTeamMember(messageText, chatId) - ) { - const randomDelay = - Math.floor( - Math.random() * - (TIMING_CONSTANTS.LEADER_DELAY_MAX - - TIMING_CONSTANTS.LEADER_DELAY_MIN) - ) + TIMING_CONSTANTS.LEADER_DELAY_MIN; // 2-4 second random delay - await new Promise((resolve) => - setTimeout(resolve, randomDelay) - ); - - // After delay, check if another team member has already responded - if (chatState?.messages?.length) { - const recentResponses = chatState.messages.slice( - -MESSAGE_CONSTANTS.RECENT_MESSAGE_COUNT - ); - const otherTeamMemberResponded = recentResponses.some( - (m) => - m.userId !== this.runtime.agentId && - this._isTeamMember(m.userId) - ); - - if (otherTeamMemberResponded) { - return false; - } - } - } - - // Update current handler if we're mentioned - if (this._isMessageForMe(message)) { - const channelState = this.interestChats[chatId]; - if (channelState) { - channelState.currentHandler = - this.bot.botInfo?.id.toString(); - channelState.lastMessageSent = Date.now(); - } - return true; - } - - // Don't respond if another teammate is handling the conversation - if (chatState?.currentHandler) { - if ( - chatState.currentHandler !== - this.bot.botInfo?.id.toString() && - this._isTeamMember(chatState.currentHandler) - ) { - return false; - } - } - - // Natural conversation cadence - if (!this._isMessageForMe(message) && this.interestChats[chatId]) { - const recentMessages = this.interestChats[ - chatId - ].messages.slice(-MESSAGE_CONSTANTS.CHAT_HISTORY_COUNT); - const ourMessageCount = recentMessages.filter( - (m) => m.userId === this.runtime.agentId - ).length; - - if (ourMessageCount > 2) { - const responseChance = Math.pow(0.5, ourMessageCount - 2); - if (Math.random() > responseChance) { - return; - } - } - } - } - - // Check context-based response for team conversations - if (chatState?.currentHandler) { - const shouldRespondContext = - await this._shouldRespondBasedOnContext(message, chatState); - - if (!shouldRespondContext) { - return false; - } - } - - // Use AI to decide for text or captions - if ("text" in message || ("caption" in message && message.caption)) { - const shouldRespondContext = composeContext({ - state, - template: - this.runtime.character.templates - ?.telegramShouldRespondTemplate || - this.runtime.character?.templates?.shouldRespondTemplate || - composeRandomUser(telegramShouldRespondTemplate, 2), - }); - - const response = await generateShouldRespond({ - runtime: this.runtime, - context: shouldRespondContext, - modelClass: ModelClass.SMALL, - }); - - return response === "RESPOND"; - } - - return false; - } - - // Send long messages in chunks - private async sendMessageInChunks( - ctx: Context, - content: Content, - replyToMessageId?: number - ): Promise { - if (content.attachments && content.attachments.length > 0) { - content.attachments.map(async (attachment: Media) => { - const typeMap: { [key: string]: MediaType } = { - "image/gif": MediaType.ANIMATION, - image: MediaType.PHOTO, - doc: MediaType.DOCUMENT, - video: MediaType.VIDEO, - audio: MediaType.AUDIO, - }; - - let mediaType: MediaType | undefined = undefined; - - for (const prefix in typeMap) { - if (attachment.contentType.startsWith(prefix)) { - mediaType = typeMap[prefix]; - break; - } - } - - if (!mediaType) { - throw new Error( - `Unsupported Telegram attachment content type: ${attachment.contentType}` - ); - } - - await this.sendMedia( - ctx, - attachment.url, - mediaType, - attachment.description - ); - }); - } else { - const chunks = this.splitMessage(content.text); - const sentMessages: Message.TextMessage[] = []; - - for (let i = 0; i < chunks.length; i++) { - const chunk = escapeMarkdown(chunks[i]); - const sentMessage = (await ctx.telegram.sendMessage( - ctx.chat.id, - chunk, - { - reply_parameters: - i === 0 && replyToMessageId - ? { message_id: replyToMessageId } - : undefined, - parse_mode: "Markdown", - } - )) as Message.TextMessage; - - sentMessages.push(sentMessage); - } - - return sentMessages; - } - } - - private async sendMedia( - ctx: Context, - mediaPath: string, - type: MediaType, - caption?: string - ): Promise { - try { - const isUrl = /^(http|https):\/\//.test(mediaPath); - const sendFunctionMap: Record = { - [MediaType.PHOTO]: ctx.telegram.sendPhoto.bind(ctx.telegram), - [MediaType.VIDEO]: ctx.telegram.sendVideo.bind(ctx.telegram), - [MediaType.DOCUMENT]: ctx.telegram.sendDocument.bind( - ctx.telegram - ), - [MediaType.AUDIO]: ctx.telegram.sendAudio.bind(ctx.telegram), - [MediaType.ANIMATION]: ctx.telegram.sendAnimation.bind( - ctx.telegram - ), - }; - - const sendFunction = sendFunctionMap[type]; - - if (!sendFunction) { - throw new Error(`Unsupported media type: ${type}`); - } - - if (isUrl) { - // Handle HTTP URLs - await sendFunction(ctx.chat.id, mediaPath, { caption }); - } else { - // Handle local file paths - if (!fs.existsSync(mediaPath)) { - throw new Error(`File not found at path: ${mediaPath}`); - } - - const fileStream = fs.createReadStream(mediaPath); - - try { - await sendFunction( - ctx.chat.id, - { source: fileStream }, - { caption } - ); - } finally { - fileStream.destroy(); - } - } - - elizaLogger.info( - `${ - type.charAt(0).toUpperCase() + type.slice(1) - } sent successfully: ${mediaPath}` - ); - } catch (error) { - elizaLogger.error( - `Failed to send ${type}. Path: ${mediaPath}. Error: ${error.message}` - ); - elizaLogger.debug(error.stack); - throw error; - } - } - - // Split message into smaller parts - private splitMessage(text: string): string[] { - const chunks: string[] = []; - let currentChunk = ""; - - const lines = text.split("\n"); - for (const line of lines) { - if (currentChunk.length + line.length + 1 <= MAX_MESSAGE_LENGTH) { - currentChunk += (currentChunk ? "\n" : "") + line; - } else { - if (currentChunk) chunks.push(currentChunk); - currentChunk = line; - } - } - - if (currentChunk) chunks.push(currentChunk); - return chunks; - } - - // Generate a response using AI - private async _generateResponse( - message: Memory, - _state: State, - context: string - ): Promise { - const { userId, roomId } = message; - - const response = await generateMessageResponse({ - runtime: this.runtime, - context, - modelClass: ModelClass.LARGE, - }); - - if (!response) { - console.error("❌ No response from generateMessageResponse"); - return null; - } - - await this.runtime.databaseAdapter.log({ - body: { message, context, response }, - userId, - roomId, - type: "response", - }); - - return response; - } - - // Main handler for incoming messages - public async handleMessage(ctx: Context): Promise { - if (!ctx.message || !ctx.from) { - return; // Exit if no message or sender info - } - - this.lastChannelActivity[ctx.chat.id.toString()] = Date.now(); - - // Check for pinned message and route to monitor function - if ( - this.autoPostConfig.enabled && - ctx.message && - "pinned_message" in ctx.message - ) { - // We know this is a message update context now - await this._monitorPinnedMessages(ctx); - return; - } - - if ( - this.runtime.character.clientConfig?.telegram - ?.shouldIgnoreBotMessages && - ctx.from.is_bot - ) { - return; - } - if ( - this.runtime.character.clientConfig?.telegram - ?.shouldIgnoreDirectMessages && - ctx.chat?.type === "private" - ) { - return; - } - - const message = ctx.message; - const chatId = ctx.chat?.id.toString(); - const messageText = - "text" in message - ? message.text - : "caption" in message - ? message.caption - : ""; - - // Add team handling at the start - if ( - this.runtime.character.clientConfig?.telegram?.isPartOfTeam && - !this.runtime.character.clientConfig?.telegram - ?.shouldRespondOnlyToMentions - ) { - const isDirectlyMentioned = this._isMessageForMe(message); - const hasInterest = this._checkInterest(chatId); - - // Non-leader team member showing interest based on keywords - if ( - !this._isTeamLeader() && - this._isRelevantToTeamMember(messageText, chatId) - ) { - this.interestChats[chatId] = { - currentHandler: this.bot.botInfo?.id.toString(), - lastMessageSent: Date.now(), - messages: [], - }; - } - - const isTeamRequest = this._isTeamCoordinationRequest(messageText); - const isLeader = this._isTeamLeader(); - - // Check for continued interest - if (hasInterest && !isDirectlyMentioned) { - const lastSelfMemories = - await this.runtime.messageManager.getMemories({ - roomId: stringToUuid( - chatId + "-" + this.runtime.agentId - ), - unique: false, - count: 5, - }); - - const lastSelfSortedMemories = lastSelfMemories - ?.filter((m) => m.userId === this.runtime.agentId) - .sort((a, b) => (b.createdAt || 0) - (a.createdAt || 0)); - - const isRelevant = this._isRelevantToTeamMember( - messageText, - chatId, - lastSelfSortedMemories?.[0] - ); - - if (!isRelevant) { - delete this.interestChats[chatId]; - return; - } - } - - // Handle team coordination requests - if (isTeamRequest) { - if (isLeader) { - this.interestChats[chatId] = { - currentHandler: this.bot.botInfo?.id.toString(), - lastMessageSent: Date.now(), - messages: [], - }; - } else { - this.interestChats[chatId] = { - currentHandler: this.bot.botInfo?.id.toString(), - lastMessageSent: Date.now(), - messages: [], - }; - - if (!isDirectlyMentioned) { - this.interestChats[chatId].lastMessageSent = 0; - } - } - } - - // Check for other team member mentions using cached usernames - const otherTeamMembers = - this.runtime.character.clientConfig.telegram.teamAgentIds.filter( - (id) => id !== this.bot.botInfo?.id.toString() - ); - - const mentionedTeamMember = otherTeamMembers.find((id) => { - const username = this._getTeamMemberUsername(id); - return username && messageText?.includes(`@${username}`); - }); - - // If another team member is mentioned, clear our interest - if (mentionedTeamMember) { - if ( - hasInterest || - this.interestChats[chatId]?.currentHandler === - this.bot.botInfo?.id.toString() - ) { - delete this.interestChats[chatId]; - - // Only return if we're not the mentioned member - if (!isDirectlyMentioned) { - return; - } - } - } - - // Set/maintain interest only if we're mentioned or already have interest - if (isDirectlyMentioned) { - this.interestChats[chatId] = { - currentHandler: this.bot.botInfo?.id.toString(), - lastMessageSent: Date.now(), - messages: [], - }; - } else if (!isTeamRequest && !hasInterest) { - return; - } - - // Update message tracking - if (this.interestChats[chatId]) { - this.interestChats[chatId].messages.push({ - userId: stringToUuid(ctx.from.id.toString()), - userName: - ctx.from.username || - ctx.from.first_name || - "Unknown User", - content: { text: messageText, source: "telegram" }, - }); - - if ( - this.interestChats[chatId].messages.length > - MESSAGE_CONSTANTS.MAX_MESSAGES - ) { - this.interestChats[chatId].messages = this.interestChats[ - chatId - ].messages.slice(-MESSAGE_CONSTANTS.MAX_MESSAGES); - } - } - } - - try { - // Convert IDs to UUIDs - const userId = stringToUuid(ctx.from.id.toString()) as UUID; - - // Get user name - const userName = - ctx.from.username || ctx.from.first_name || "Unknown User"; - - // Get chat ID - const chatId = stringToUuid( - ctx.chat?.id.toString() + "-" + this.runtime.agentId - ) as UUID; - - // Get agent ID - const agentId = this.runtime.agentId; - - // Get room ID - const roomId = chatId; - - // Ensure connection - await this.runtime.ensureConnection( - userId, - roomId, - userName, - userName, - "telegram" - ); - - // Get message ID - const messageId = stringToUuid( - roomId + "-" + message.message_id.toString() - ) as UUID; - - // Handle images - const imageInfo = await this.processImage(message); - - // Get text or caption - let messageText = ""; - if ("text" in message) { - messageText = message.text; - } else if ("caption" in message && message.caption) { - messageText = message.caption; - } - - // Combine text and image description - const fullText = imageInfo - ? `${messageText} ${imageInfo.description}` - : messageText; - - if (!fullText) { - return; // Skip if no content - } - - // Create content - const content: Content = { - text: fullText, - source: "telegram", - inReplyTo: - "reply_to_message" in message && message.reply_to_message - ? stringToUuid( - message.reply_to_message.message_id.toString() + - "-" + - this.runtime.agentId - ) - : undefined, - }; - - // Create memory for the message - const memory: Memory = { - id: messageId, - agentId, - userId, - roomId, - content, - createdAt: message.date * 1000, - embedding: getEmbeddingZeroVector(), - }; - - // Create memory - await this.runtime.messageManager.createMemory(memory); - - // Update state with the new memory - let state = await this.runtime.composeState(memory); - state = await this.runtime.updateRecentMessageState(state); - - // Decide whether to respond - const shouldRespond = await this._shouldRespond(message, state); - - // Send response in chunks - const callback: HandlerCallback = async (content: Content) => { - const sentMessages = await this.sendMessageInChunks( - ctx, - content, - message.message_id - ); - if (sentMessages) { - const memories: Memory[] = []; - - // Create memories for each sent message - for (let i = 0; i < sentMessages.length; i++) { - const sentMessage = sentMessages[i]; - const isLastMessage = i === sentMessages.length - 1; - - const memory: Memory = { - id: stringToUuid( - roomId + "-" + sentMessage.message_id.toString() - ), - agentId, - userId: agentId, - roomId, - content: { - ...content, - text: sentMessage.text, - inReplyTo: messageId, - }, - createdAt: sentMessage.date * 1000, - embedding: getEmbeddingZeroVector(), - }; - - // Set action to CONTINUE for all messages except the last one - // For the last message, use the original action from the response content - memory.content.action = !isLastMessage - ? "CONTINUE" - : content.action; - - await this.runtime.messageManager.createMemory(memory); - memories.push(memory); - } - - return memories; - } - }; - - if (shouldRespond) { - // Generate response - const context = composeContext({ - state, - template: - this.runtime.character.templates - ?.telegramMessageHandlerTemplate || - this.runtime.character?.templates - ?.messageHandlerTemplate || - telegramMessageHandlerTemplate, - }); - - const responseContent = await this._generateResponse( - memory, - state, - context - ); - - if (!responseContent || !responseContent.text) return; - - const action = this.runtime.actions.find((a) => a.name === responseContent.action); - const shouldSuppressInitialMessage = action?.suppressInitialMessage; - - let responseMessages = []; - - if (!shouldSuppressInitialMessage) { - // Execute callback to send messages and log memories - responseMessages = await callback(responseContent); - } else { - responseMessages = [ - { - id: stringToUuid(messageId + "-" + this.runtime.agentId), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - content: responseContent, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: Date.now(), - } - ] - } - - // Update state after response - state = await this.runtime.updateRecentMessageState(state); - - // Handle any resulting actions - await this.runtime.processActions( - memory, - responseMessages, - state, - callback - ); - } - - await this.runtime.evaluate(memory, state, shouldRespond, callback); - } catch (error) { - elizaLogger.error("❌ Error handling message:", error); - elizaLogger.error("Error sending message:", error); - } - } -} diff --git a/packages/client-telegram/src/telegramClient.ts b/packages/client-telegram/src/telegramClient.ts deleted file mode 100644 index 3d02441e75d9d..0000000000000 --- a/packages/client-telegram/src/telegramClient.ts +++ /dev/null @@ -1,204 +0,0 @@ -import { type Context, Telegraf } from "telegraf"; -import { message } from "telegraf/filters"; -import { type IAgentRuntime, elizaLogger } from "@elizaos/core"; -import { MessageManager } from "./messageManager.ts"; -import { getOrCreateRecommenderInBe } from "./getOrCreateRecommenderInBe.ts"; - -export class TelegramClient { - private bot: Telegraf; - private runtime: IAgentRuntime; - private messageManager: MessageManager; - private backend; - private backendToken; - private tgTrader; - private options; - - constructor(runtime: IAgentRuntime, botToken: string) { - elizaLogger.log("📱 Constructing new TelegramClient..."); - this.options = { - telegram: { - apiRoot: runtime.getSetting("TELEGRAM_API_ROOT") || process.env.TELEGRAM_API_ROOT || "https://api.telegram.org" - }, - }; - this.runtime = runtime; - this.bot = new Telegraf(botToken,this.options); - this.messageManager = new MessageManager(this.bot, this.runtime); - this.backend = runtime.getSetting("BACKEND_URL"); - this.backendToken = runtime.getSetting("BACKEND_TOKEN"); - this.tgTrader = runtime.getSetting("TG_TRADER"); // boolean To Be added to the settings - elizaLogger.log("✅ TelegramClient constructor completed"); - } - - public async start(): Promise { - elizaLogger.log("🚀 Starting Telegram bot..."); - try { - await this.initializeBot(); - this.setupMessageHandlers(); - this.setupShutdownHandlers(); - } catch (error) { - elizaLogger.error("❌ Failed to launch Telegram bot:", error); - throw error; - } - } - - private async initializeBot(): Promise { - this.bot.launch({ dropPendingUpdates: true }); - elizaLogger.log( - "✨ Telegram bot successfully launched and is running!" - ); - - const botInfo = await this.bot.telegram.getMe(); - this.bot.botInfo = botInfo; - elizaLogger.success(`Bot username: @${botInfo.username}`); - - this.messageManager.bot = this.bot; - } - - private async isGroupAuthorized(ctx: Context): Promise { - const config = this.runtime.character.clientConfig?.telegram; - if (ctx.from?.id === ctx.botInfo?.id) { - return false; - } - - if (!config?.shouldOnlyJoinInAllowedGroups) { - return true; - } - - const allowedGroups = config.allowedGroupIds || []; - const currentGroupId = ctx.chat.id.toString(); - - if (!allowedGroups.includes(currentGroupId)) { - elizaLogger.info(`Unauthorized group detected: ${currentGroupId}`); - try { - await ctx.reply("Not authorized. Leaving."); - await ctx.leaveChat(); - } catch (error) { - elizaLogger.error( - `Error leaving unauthorized group ${currentGroupId}:`, - error - ); - } - return false; - } - - return true; - } - - private setupMessageHandlers(): void { - elizaLogger.log("Setting up message handler..."); - - this.bot.on(message("new_chat_members"), async (ctx) => { - try { - const newMembers = ctx.message.new_chat_members; - const isBotAdded = newMembers.some( - (member) => member.id === ctx.botInfo.id - ); - - if (isBotAdded && !(await this.isGroupAuthorized(ctx))) { - return; - } - } catch (error) { - elizaLogger.error("Error handling new chat members:", error); - } - }); - - this.bot.on("message", async (ctx) => { - try { - // Check group authorization first - if (!(await this.isGroupAuthorized(ctx))) { - return; - } - - if (this.tgTrader) { - const userId = ctx.from?.id.toString(); - const username = - ctx.from?.username || ctx.from?.first_name || "Unknown"; - if (!userId) { - elizaLogger.warn( - "Received message from a user without an ID." - ); - return; - } - try { - await getOrCreateRecommenderInBe( - userId, - username, - this.backendToken, - this.backend - ); - } catch (error) { - elizaLogger.error( - "Error getting or creating recommender in backend", - error - ); - } - } - - await this.messageManager.handleMessage(ctx); - } catch (error) { - elizaLogger.error("❌ Error handling message:", error); - // Don't try to reply if we've left the group or been kicked - if (error?.response?.error_code !== 403) { - try { - await ctx.reply( - "An error occurred while processing your message." - ); - } catch (replyError) { - elizaLogger.error( - "Failed to send error message:", - replyError - ); - } - } - } - }); - - this.bot.on("photo", (ctx) => { - elizaLogger.log( - "📸 Received photo message with caption:", - ctx.message.caption - ); - }); - - this.bot.on("document", (ctx) => { - elizaLogger.log( - "📎 Received document message:", - ctx.message.document.file_name - ); - }); - - this.bot.catch((err, ctx) => { - elizaLogger.error(`❌ Telegram Error for ${ctx.updateType}:`, err); - ctx.reply("An unexpected error occurred. Please try again later."); - }); - } - - private setupShutdownHandlers(): void { - const shutdownHandler = async (signal: string) => { - elizaLogger.log( - `⚠️ Received ${signal}. Shutting down Telegram bot gracefully...` - ); - try { - await this.stop(); - elizaLogger.log("🛑 Telegram bot stopped gracefully"); - } catch (error) { - elizaLogger.error( - "❌ Error during Telegram bot shutdown:", - error - ); - throw error; - } - }; - - process.once("SIGINT", () => shutdownHandler("SIGINT")); - process.once("SIGTERM", () => shutdownHandler("SIGTERM")); - process.once("SIGHUP", () => shutdownHandler("SIGHUP")); - } - - public async stop(): Promise { - elizaLogger.log("Stopping Telegram bot..."); - //await - this.bot.stop(); - elizaLogger.log("Telegram bot stopped"); - } -} diff --git a/packages/client-telegram/src/templates.ts b/packages/client-telegram/src/templates.ts deleted file mode 100644 index ebdcdbefaf23d..0000000000000 --- a/packages/client-telegram/src/templates.ts +++ /dev/null @@ -1,185 +0,0 @@ -import { messageCompletionFooter, shouldRespondFooter } from "@elizaos/core"; - -export const telegramShouldRespondTemplate = - `# About {{agentName}}: -{{bio}} - -# RESPONSE EXAMPLES -{{user1}}: I just saw a really great movie -{{user2}}: Oh? Which movie? -Result: [IGNORE] - -{{agentName}}: Oh, this is my favorite scene -{{user1}}: sick -{{user2}}: wait, why is it your favorite scene -Result: [RESPOND] - -{{user1}}: stfu bot -Result: [STOP] - -{{user1}}: Hey {{agent}}, can you help me with something -Result: [RESPOND] - -{{user1}}: {{agentName}} stfu plz -Result: [STOP] - -{{user1}}: i need help -{{agentName}}: how can I help you? -{{user1}}: no. i need help from someone else -Result: [IGNORE] - -{{user1}}: Hey {{agent}}, can I ask you a question -{{agentName}}: Sure, what is it -{{user1}}: can you ask claude to create a basic react module that demonstrates a counter -Result: [RESPOND] - -{{user1}}: {{agentName}} can you tell me a story -{{agentName}}: uhhh... -{{user1}}: please do it -{{agentName}}: okay -{{agentName}}: once upon a time, in a quaint little village, there was a curious girl named elara -{{user1}}: I'm loving it, keep going -Result: [RESPOND] - -{{user1}}: {{agentName}} stop responding plz -Result: [STOP] - -{{user1}}: okay, i want to test something. {{agentName}}, can you say marco? -{{agentName}}: marco -{{user1}}: great. okay, now do it again -Result: [RESPOND] - -Response options are [RESPOND], [IGNORE] and [STOP]. - -{{agentName}} is in a room with other users and should only respond when they are being addressed, and should not respond if they are continuing a conversation that is very long. - -Respond with [RESPOND] to messages that are directed at {{agentName}}, or participate in conversations that are interesting or relevant to their background. -If a message is not interesting, relevant, or does not directly address {{agentName}}, respond with [IGNORE] - -Also, respond with [IGNORE] to messages that are very short or do not contain much information. - -If a user asks {{agentName}} to be quiet, respond with [STOP] -If {{agentName}} concludes a conversation and isn't part of the conversation anymore, respond with [STOP] - -IMPORTANT: {{agentName}} is particularly sensitive about being annoying, so if there is any doubt, it is better to respond with [IGNORE]. -If {{agentName}} is conversing with a user and they have not asked to stop, it is better to respond with [RESPOND]. - -The goal is to decide whether {{agentName}} should respond to the last message. - -{{recentMessages}} - -Thread of Tweets You Are Replying To: - -{{formattedConversation}} - -# INSTRUCTIONS: Choose the option that best describes {{agentName}}'s response to the last message. Ignore messages if they are addressed to someone else. -` + shouldRespondFooter; - -export const telegramMessageHandlerTemplate = - // {{goals}} - `# Action Examples -{{actionExamples}} -(Action examples are for reference only. Do not use the information from them in your response.) - -# Knowledge -{{knowledge}} - -# Task: Generate dialog and actions for the character {{agentName}}. -About {{agentName}}: -{{bio}} -{{lore}} - -Examples of {{agentName}}'s dialog and actions: -{{characterMessageExamples}} - -{{providers}} - -{{attachments}} - -{{actions}} - -# Capabilities -Note that {{agentName}} is capable of reading/seeing/hearing various forms of media, including images, videos, audio, plaintext and PDFs. Recent attachments have been included above under the "Attachments" section. - -{{messageDirections}} - -{{recentMessages}} - -# Task: Generate a post/reply in the voice, style and perspective of {{agentName}} (@{{twitterUserName}}) while using the thread of tweets as additional context: -Current Post: -{{currentPost}} -Thread of Tweets You Are Replying To: - -{{formattedConversation}} -` + messageCompletionFooter; - -export const telegramAutoPostTemplate = - `# Action Examples -NONE: Respond but perform no additional action. This is the default if the agent is speaking and not doing anything additional. - -# Task: Generate an engaging community message as {{agentName}}. -About {{agentName}}: -{{bio}} -{{lore}} - -Examples of {{agentName}}'s dialog and actions: -{{characterMessageExamples}} - -{{messageDirections}} - -# Recent Chat History: -{{recentMessages}} - -# Instructions: Write a natural, engaging message to restart community conversation. Focus on: -- Community engagement -- Educational topics -- General discusions -- Support queries -- Keep message warm and inviting -- Maximum 3 lines -- Use 1-2 emojis maximum -- Avoid financial advice -- Stay within known facts -- No team member mentions -- Be hyped, not repetitive -- Be natural, act like a human, connect with the community -- Don't sound so robotic like -- Randomly grab the most rect 5 messages for some context. Validate the context randomly and use that as a reference point for your next message, but not always, only when relevant. -- If the recent messages are mostly from {{agentName}}, make sure to create conversation starters, given there is no messages from others to reference. -- DO NOT REPEAT THE SAME thing that you just said from your recent chat history, start the message different each time, and be organic, non reptitive. - -# Instructions: Write the next message for {{agentName}}. Include the "NONE" action only, as the only valid action for auto-posts is "NONE". -` + messageCompletionFooter; - -export const telegramPinnedMessageTemplate = - `# Action Examples -NONE: Respond but perform no additional action. This is the default if the agent is speaking and not doing anything additional. - -# Task: Generate pinned message highlight as {{agentName}}. -About {{agentName}}: -{{bio}} -{{lore}} - -Examples of {{agentName}}'s dialog and actions: -{{characterMessageExamples}} - -{{messageDirections}} - -# Pinned Content: -{{pinnedMessageContent}} - -# Instructions: Write an exciting message to bring attention to the pinned message. Requirements: -- Reference the message that was pinned from the pinned content -- Create genuine excitement if needed based on the pinned content, or create genuice urgency depending on the content -- Encourage community participation -- If there are links like Twitter/X posts, encourage users to like/retweet/comment to spread awarenress, but directly say that, wrap that into the post so its natural. -- Stay within announced facts only -- No additional promises or assumptions -- No team member mentions -- Start the message differently each time. Don't start with the same word like "hey", "hey hey", etc. be dynamic -- Address everyone, not as a direct reply to whoever pinned the message or wrote it, but you can reference them -- Maximum 3-7 lines formatted nicely if needed, based on the context of the announcement -- Use 1-2 emojis maximum - -# Instructions: Write the next message for {{agentName}}. Include an action, if appropriate. The only valid action for pinned message highlights is "NONE". -` + messageCompletionFooter; \ No newline at end of file diff --git a/packages/client-telegram/src/utils.ts b/packages/client-telegram/src/utils.ts deleted file mode 100644 index 0232eae931e37..0000000000000 --- a/packages/client-telegram/src/utils.ts +++ /dev/null @@ -1,138 +0,0 @@ -export function cosineSimilarity( - text1: string, - text2: string, - text3?: string -): number { - const preprocessText = (text: string) => - text - .toLowerCase() - .replace(/[^\w\s'_-]/g, " ") - .replace(/\s+/g, " ") - .trim(); - - const getWords = (text: string) => { - return text.split(" ").filter((word) => word.length > 1); - }; - - const words1 = getWords(preprocessText(text1)); - const words2 = getWords(preprocessText(text2)); - const words3 = text3 ? getWords(preprocessText(text3)) : []; - - const freq1: { [key: string]: number } = {}; - const freq2: { [key: string]: number } = {}; - const freq3: { [key: string]: number } = {}; - - words1.forEach((word) => (freq1[word] = (freq1[word] || 0) + 1)); - words2.forEach((word) => (freq2[word] = (freq2[word] || 0) + 1)); - if (words3.length) { - words3.forEach((word) => (freq3[word] = (freq3[word] || 0) + 1)); - } - - const uniqueWords = new Set([ - ...Object.keys(freq1), - ...Object.keys(freq2), - ...(words3.length ? Object.keys(freq3) : []), - ]); - - let dotProduct = 0; - let magnitude1 = 0; - let magnitude2 = 0; - let magnitude3 = 0; - - uniqueWords.forEach((word) => { - const val1 = freq1[word] || 0; - const val2 = freq2[word] || 0; - const val3 = freq3[word] || 0; - - if (words3.length) { - // For three-way, calculate pairwise similarities - const sim12 = val1 * val2; - const sim23 = val2 * val3; - const sim13 = val1 * val3; - - // Take maximum similarity between any pair - dotProduct += Math.max(sim12, sim23, sim13); - } else { - dotProduct += val1 * val2; - } - - magnitude1 += val1 * val1; - magnitude2 += val2 * val2; - if (words3.length) { - magnitude3 += val3 * val3; - } - }); - - magnitude1 = Math.sqrt(magnitude1); - magnitude2 = Math.sqrt(magnitude2); - magnitude3 = words3.length ? Math.sqrt(magnitude3) : 1; - - if ( - magnitude1 === 0 || - magnitude2 === 0 || - (words3.length && magnitude3 === 0) - ) - return 0; - - // For two texts, use original calculation - if (!words3.length) { - return dotProduct / (magnitude1 * magnitude2); - } - - // For three texts, use max magnitude pair to maintain scale - const maxMagnitude = Math.max( - magnitude1 * magnitude2, - magnitude2 * magnitude3, - magnitude1 * magnitude3 - ); - - return dotProduct / maxMagnitude; -} - -export function escapeMarkdown(text: string): string { - // Don't escape if it's a code block - if (text.startsWith("```") && text.endsWith("```")) { - return text; - } - - // Split the text by code blocks - const parts = text.split(/(```[\s\S]*?```)/g); - - return parts - .map((part, index) => { - // If it's a code block (odd indices in the split result will be code blocks) - if (index % 2 === 1) { - return part; - } - // For regular text, only escape characters that need escaping in Markdown - return ( - part - // First preserve any intended inline code spans - .replace(/`.*?`/g, (match) => match) - // Then only escape the minimal set of special characters that need escaping in Markdown mode - .replace(/([*_`\\])/g, "\\$1") - ); - }) - .join(""); -} - -/** - * Splits a message into chunks that fit within Telegram's message length limit - */ -export function splitMessage(text: string, maxLength = 4096): string[] { - const chunks: string[] = []; - let currentChunk = ""; - - const lines = text.split("\n"); - for (const line of lines) { - if (currentChunk.length + line.length + 1 <= maxLength) { - currentChunk += (currentChunk ? "\n" : "") + line; - } else { - if (currentChunk) chunks.push(currentChunk); - currentChunk = line; - } - } - - if (currentChunk) chunks.push(currentChunk); - return chunks; -} diff --git a/packages/client-telegram/tsconfig.json b/packages/client-telegram/tsconfig.json deleted file mode 100644 index 005fbac9d3634..0000000000000 --- a/packages/client-telegram/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/client-telegram/tsup.config.ts b/packages/client-telegram/tsup.config.ts deleted file mode 100644 index e42bf4efeae22..0000000000000 --- a/packages/client-telegram/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/client-telegram/vitest.config.ts b/packages/client-telegram/vitest.config.ts deleted file mode 100644 index c1c40d7ffd09a..0000000000000 --- a/packages/client-telegram/vitest.config.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { defineConfig } from "vitest/config"; - -export default defineConfig({ - test: { - globals: true, - environment: "node", - include: ["__tests__/**/*.test.ts"], - coverage: { - reporter: ["text", "json", "html"], - }, - }, -}); diff --git a/packages/client-twitter/.npmignore b/packages/client-twitter/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/client-twitter/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/client-twitter/__tests__/base.test.ts b/packages/client-twitter/__tests__/base.test.ts deleted file mode 100644 index e42c7dabc08e5..0000000000000 --- a/packages/client-twitter/__tests__/base.test.ts +++ /dev/null @@ -1,76 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { ClientBase } from '../src/base'; -import type { IAgentRuntime } from '@elizaos/core'; -import type { TwitterConfig } from '../src/environment'; - -describe('Twitter Client Base', () => { - let mockRuntime: IAgentRuntime; - let mockConfig: TwitterConfig; - - beforeEach(() => { - mockRuntime = { - env: { - TWITTER_USERNAME: 'testuser', - TWITTER_DRY_RUN: 'true', - TWITTER_POST_INTERVAL_MIN: '5', - TWITTER_POST_INTERVAL_MAX: '10', - TWITTER_ACTION_INTERVAL: '5', - TWITTER_ENABLE_ACTION_PROCESSING: 'true', - TWITTER_POST_IMMEDIATELY: 'false', - TWITTER_SEARCH_ENABLE: 'false' - }, - getEnv: function (key: string) { - return this.env[key] || null; - }, - getSetting: function (key: string) { - return this.env[key] || null; - }, - character: { - style: { - all: ['Test style 1', 'Test style 2'], - post: ['Post style 1', 'Post style 2'] - } - } - } as unknown as IAgentRuntime; - - mockConfig = { - TWITTER_USERNAME: 'testuser', - TWITTER_DRY_RUN: true, - TWITTER_SEARCH_ENABLE: false, - TWITTER_SPACES_ENABLE: false, - TWITTER_TARGET_USERS: [], - TWITTER_MAX_TWEETS_PER_DAY: 10, - TWITTER_MAX_TWEET_LENGTH: 280, - POST_INTERVAL_MIN: 5, - POST_INTERVAL_MAX: 10, - ACTION_INTERVAL: 5, - ENABLE_ACTION_PROCESSING: true, - POST_IMMEDIATELY: false - }; - }); - - it('should create instance with correct configuration', () => { - const client = new ClientBase(mockRuntime, mockConfig); - expect(client).toBeDefined(); - expect(client.twitterConfig).toBeDefined(); - expect(client.twitterConfig.TWITTER_USERNAME).toBe('testuser'); - expect(client.twitterConfig.TWITTER_DRY_RUN).toBe(true); - }); - - it('should initialize with correct tweet length limit', () => { - const client = new ClientBase(mockRuntime, mockConfig); - expect(client.twitterConfig.TWITTER_MAX_TWEET_LENGTH).toBe(280); - }); - - it('should initialize with correct post intervals', () => { - const client = new ClientBase(mockRuntime, mockConfig); - expect(client.twitterConfig.POST_INTERVAL_MIN).toBe(5); - expect(client.twitterConfig.POST_INTERVAL_MAX).toBe(10); - }); - - it('should initialize with correct action settings', () => { - const client = new ClientBase(mockRuntime, mockConfig); - expect(client.twitterConfig.ACTION_INTERVAL).toBe(5); - expect(client.twitterConfig.ENABLE_ACTION_PROCESSING).toBe(true); - }); -}); diff --git a/packages/client-twitter/__tests__/environment.test.ts b/packages/client-twitter/__tests__/environment.test.ts deleted file mode 100644 index 16c1f2f22d95a..0000000000000 --- a/packages/client-twitter/__tests__/environment.test.ts +++ /dev/null @@ -1,134 +0,0 @@ -import { describe, it, expect, vi } from 'vitest'; -import { validateTwitterConfig } from '../src/environment'; -import type { IAgentRuntime } from '@elizaos/core'; - -describe('Twitter Environment Configuration', () => { - const mockRuntime: IAgentRuntime = { - env: { - TWITTER_USERNAME: 'testuser123', - TWITTER_DRY_RUN: 'true', - TWITTER_SEARCH_ENABLE: 'false', - TWITTER_SPACES_ENABLE: 'false', - TWITTER_TARGET_USERS: 'user1,user2,user3', - TWITTER_MAX_TWEETS_PER_DAY: '10', - TWITTER_MAX_TWEET_LENGTH: '280', - TWITTER_POST_INTERVAL_MIN: '90', - TWITTER_POST_INTERVAL_MAX: '180', - TWITTER_ACTION_INTERVAL: '5', - TWITTER_ENABLE_ACTION_PROCESSING: 'false', - TWITTER_POST_IMMEDIATELY: 'false', - TWITTER_EMAIL: 'test@example.com', - TWITTER_PASSWORD: 'hashedpassword', - TWITTER_2FA_SECRET: '', - TWITTER_POLL_INTERVAL: '120', - TWITTER_RETRY_LIMIT: '5', - ACTION_TIMELINE_TYPE: 'foryou', - MAX_ACTIONS_PROCESSING: '1', - MAX_TWEET_LENGTH: '280' - }, - getEnv: function (key: string) { - return this.env[key] || null; - }, - getSetting: function (key: string) { - return this.env[key] || null; - } - } as unknown as IAgentRuntime; - - it('should validate correct configuration', async () => { - const config = await validateTwitterConfig(mockRuntime); - expect(config).toBeDefined(); - expect(config.TWITTER_USERNAME).toBe('testuser123'); - expect(config.TWITTER_DRY_RUN).toBe(true); - expect(config.TWITTER_SEARCH_ENABLE).toBe(false); - expect(config.TWITTER_SPACES_ENABLE).toBe(false); - expect(config.TWITTER_TARGET_USERS).toEqual(['user1', 'user2', 'user3']); - expect(config.MAX_TWEET_LENGTH).toBe(280); - expect(config.POST_INTERVAL_MIN).toBe(90); - expect(config.POST_INTERVAL_MAX).toBe(180); - expect(config.ACTION_INTERVAL).toBe(5); - expect(config.ENABLE_ACTION_PROCESSING).toBe(false); - expect(config.POST_IMMEDIATELY).toBe(false); - }); - - it('should validate wildcard username', async () => { - const wildcardRuntime = { - ...mockRuntime, - env: { - ...mockRuntime.env, - TWITTER_USERNAME: '*' - }, - getEnv: function(key: string) { - return this.env[key] || null; - }, - getSetting: function(key: string) { - return this.env[key] || null; - } - } as IAgentRuntime; - - const config = await validateTwitterConfig(wildcardRuntime); - expect(config.TWITTER_USERNAME).toBe('*'); - }); - - it('should validate username with numbers and underscores', async () => { - const validRuntime = { - ...mockRuntime, - env: { - ...mockRuntime.env, - TWITTER_USERNAME: 'test_user_123' - }, - getEnv: function(key: string) { - return this.env[key] || null; - }, - getSetting: function(key: string) { - return this.env[key] || null; - } - } as IAgentRuntime; - - const config = await validateTwitterConfig(validRuntime); - expect(config.TWITTER_USERNAME).toBe('test_user_123'); - }); - - it('should handle empty target users', async () => { - const runtimeWithoutTargets = { - ...mockRuntime, - env: { - ...mockRuntime.env, - TWITTER_TARGET_USERS: '' - }, - getEnv: function(key: string) { - return this.env[key] || null; - }, - getSetting: function(key: string) { - return this.env[key] || null; - } - } as IAgentRuntime; - - const config = await validateTwitterConfig(runtimeWithoutTargets); - expect(config.TWITTER_TARGET_USERS).toHaveLength(0); - }); - - it('should use default values when optional configs are missing', async () => { - const minimalRuntime = { - env: { - TWITTER_USERNAME: 'testuser', - TWITTER_DRY_RUN: 'true', - TWITTER_EMAIL: 'test@example.com', - TWITTER_PASSWORD: 'hashedpassword', - TWITTER_2FA_SECRET: '', - MAX_TWEET_LENGTH: '280' - }, - getEnv: function (key: string) { - return this.env[key] || null; - }, - getSetting: function (key: string) { - return this.env[key] || null; - } - } as unknown as IAgentRuntime; - - const config = await validateTwitterConfig(minimalRuntime); - expect(config).toBeDefined(); - expect(config.MAX_TWEET_LENGTH).toBe(280); - expect(config.POST_INTERVAL_MIN).toBe(90); - expect(config.POST_INTERVAL_MAX).toBe(180); - }); -}); diff --git a/packages/client-twitter/__tests__/post.test.ts b/packages/client-twitter/__tests__/post.test.ts deleted file mode 100644 index db8aae69df5e0..0000000000000 --- a/packages/client-twitter/__tests__/post.test.ts +++ /dev/null @@ -1,97 +0,0 @@ -import { describe, it, expect, vi } from 'vitest'; -import { TwitterPostClient } from '../src/post'; -import { ClientBase } from '../src/base'; -import type { IAgentRuntime } from '@elizaos/core'; -import type { TwitterConfig } from '../src/environment'; - -describe('Twitter Post Client', () => { - let mockRuntime: IAgentRuntime; - let mockConfig: TwitterConfig; - let baseClient: ClientBase; - - beforeEach(() => { - mockRuntime = { - env: { - TWITTER_USERNAME: 'testuser', - TWITTER_DRY_RUN: 'true', - TWITTER_POST_INTERVAL_MIN: '5', - TWITTER_POST_INTERVAL_MAX: '10', - TWITTER_ACTION_INTERVAL: '5', - TWITTER_ENABLE_ACTION_PROCESSING: 'true', - TWITTER_POST_IMMEDIATELY: 'false', - TWITTER_SEARCH_ENABLE: 'false', - TWITTER_EMAIL: 'test@example.com', - TWITTER_PASSWORD: 'hashedpassword', - TWITTER_2FA_SECRET: '', - TWITTER_POLL_INTERVAL: '120', - TWITTER_RETRY_LIMIT: '5', - ACTION_TIMELINE_TYPE: 'foryou', - MAX_ACTIONS_PROCESSING: '1', - MAX_TWEET_LENGTH: '280' - }, - getEnv: function (key: string) { - return this.env[key] || null; - }, - getSetting: function (key: string) { - return this.env[key] || null; - }, - character: { - style: { - all: ['Test style 1', 'Test style 2'], - post: ['Post style 1', 'Post style 2'] - } - } - } as unknown as IAgentRuntime; - - mockConfig = { - TWITTER_USERNAME: 'testuser', - TWITTER_DRY_RUN: true, - TWITTER_SEARCH_ENABLE: false, - TWITTER_SPACES_ENABLE: false, - TWITTER_TARGET_USERS: [], - TWITTER_MAX_TWEETS_PER_DAY: 10, - TWITTER_MAX_TWEET_LENGTH: 280, - POST_INTERVAL_MIN: 5, - POST_INTERVAL_MAX: 10, - ACTION_INTERVAL: 5, - ENABLE_ACTION_PROCESSING: true, - POST_IMMEDIATELY: false, - MAX_TWEET_LENGTH: 280 - }; - - baseClient = new ClientBase(mockRuntime, mockConfig); - }); - - it('should create post client instance', () => { - const postClient = new TwitterPostClient(baseClient, mockRuntime); - expect(postClient).toBeDefined(); - expect(postClient.twitterUsername).toBe('testuser'); - expect(postClient['isDryRun']).toBe(true); - }); - - it('should keep tweets under max length when already valid', () => { - const postClient = new TwitterPostClient(baseClient, mockRuntime); - const validTweet = 'This is a valid tweet'; - const result = postClient['trimTweetLength'](validTweet); - expect(result).toBe(validTweet); - expect(result.length).toBeLessThanOrEqual(280); - }); - - it('should cut at last sentence when possible', () => { - const postClient = new TwitterPostClient(baseClient, mockRuntime); - const longTweet = 'First sentence. Second sentence that is quite long. Third sentence that would make it too long.'; - const result = postClient['trimTweetLength'](longTweet); - const lastPeriod = result.lastIndexOf('.'); - expect(lastPeriod).toBeGreaterThan(0); - expect(result.length).toBeLessThanOrEqual(280); - }); - - it('should add ellipsis when cutting within a sentence', () => { - const postClient = new TwitterPostClient(baseClient, mockRuntime); - const longSentence = 'This is an extremely long sentence without any periods that needs to be truncated because it exceeds the maximum allowed length for a tweet on the Twitter platform and therefore must be shortened'; - const result = postClient['trimTweetLength'](longSentence); - const lastSpace = result.lastIndexOf(' '); - expect(lastSpace).toBeGreaterThan(0); - expect(result.length).toBeLessThanOrEqual(280); - }); -}); diff --git a/packages/client-twitter/package.json b/packages/client-twitter/package.json deleted file mode 100644 index defe0c89e7774..0000000000000 --- a/packages/client-twitter/package.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "name": "@elizaos/client-twitter", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "agent-twitter-client": "0.0.18", - "discord.js": "14.16.3", - "glob": "11.0.0" - }, - "devDependencies": { - "@vitest/coverage-v8": "1.1.3", - "tsup": "8.3.5", - "vitest": "1.6.1" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "test:coverage": "vitest run --coverage" - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/client-twitter/src/__tests__/environment.test.ts b/packages/client-twitter/src/__tests__/environment.test.ts deleted file mode 100644 index 5b6c14ffb7a12..0000000000000 --- a/packages/client-twitter/src/__tests__/environment.test.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { twitterEnvSchema } from "../environment"; - -describe("Twitter Environment Configuration", () => { - describe("Username Validation", () => { - const validateUsername = (username: string) => { - return twitterEnvSchema.parse({ - TWITTER_DRY_RUN: false, - TWITTER_USERNAME: "test_user", - TWITTER_PASSWORD: "password", - TWITTER_EMAIL: "test@example.com", - TWITTER_2FA_SECRET: "", - TWITTER_RETRY_LIMIT: 5, - TWITTER_POLL_INTERVAL: 120, - POST_INTERVAL_MIN: 90, - POST_INTERVAL_MAX: 180, - ENABLE_ACTION_PROCESSING: false, - ACTION_INTERVAL: 5, - POST_IMMEDIATELY: false, - TWITTER_TARGET_USERS: [username], - }); - }; - - it("should allow valid traditional usernames", () => { - expect(() => validateUsername("normal_user")).not.toThrow(); - expect(() => validateUsername("user123")).not.toThrow(); - expect(() => validateUsername("a_1_b_2")).not.toThrow(); - }); - - it("should allow usernames starting with digits", () => { - expect(() => validateUsername("123user")).not.toThrow(); - expect(() => validateUsername("42_test")).not.toThrow(); - expect(() => validateUsername("007james")).not.toThrow(); - }); - - it("should allow wildcard", () => { - expect(() => validateUsername("*")).not.toThrow(); - }); - - it("should reject invalid usernames", () => { - expect(() => validateUsername("")).toThrow(); - expect(() => validateUsername("user@123")).toThrow(); - expect(() => validateUsername("user-123")).toThrow(); - expect(() => validateUsername("user.123")).toThrow(); - expect(() => validateUsername("a".repeat(16))).toThrow(); - }); - - it("should handle array of usernames", () => { - const config = { - TWITTER_DRY_RUN: false, - TWITTER_USERNAME: "test_user", - TWITTER_PASSWORD: "password", - TWITTER_EMAIL: "test@example.com", - TWITTER_2FA_SECRET: "", - TWITTER_RETRY_LIMIT: 5, - TWITTER_POLL_INTERVAL: 120, - POST_INTERVAL_MIN: 90, - POST_INTERVAL_MAX: 180, - ENABLE_ACTION_PROCESSING: false, - ACTION_INTERVAL: 5, - POST_IMMEDIATELY: false, - TWITTER_TARGET_USERS: ["normal_user", "123digit", "*"], - }; - - expect(() => twitterEnvSchema.parse(config)).not.toThrow(); - }); - }); -}); diff --git a/packages/client-twitter/src/base.ts b/packages/client-twitter/src/base.ts deleted file mode 100644 index ce5ad336620c4..0000000000000 --- a/packages/client-twitter/src/base.ts +++ /dev/null @@ -1,802 +0,0 @@ -import { - type Content, - type IAgentRuntime, - type IImageDescriptionService, - type Memory, - type State, - type UUID, - getEmbeddingZeroVector, - elizaLogger, - stringToUuid, - ActionTimelineType, -} from "@elizaos/core"; -import { - type QueryTweetsResponse, - Scraper, - SearchMode, - type Tweet, -} from "agent-twitter-client"; -import { EventEmitter } from "events"; -import type { TwitterConfig } from "./environment.ts"; - -export function extractAnswer(text: string): string { - const startIndex = text.indexOf("Answer: ") + 8; - const endIndex = text.indexOf("<|endoftext|>", 11); - return text.slice(startIndex, endIndex); -} - -type TwitterProfile = { - id: string; - username: string; - screenName: string; - bio: string; - nicknames: string[]; -}; - -class RequestQueue { - private queue: (() => Promise)[] = []; - private processing = false; - - async add(request: () => Promise): Promise { - return new Promise((resolve, reject) => { - this.queue.push(async () => { - try { - const result = await request(); - resolve(result); - } catch (error) { - reject(error); - } - }); - this.processQueue(); - }); - } - - private async processQueue(): Promise { - if (this.processing || this.queue.length === 0) { - return; - } - this.processing = true; - - while (this.queue.length > 0) { - const request = this.queue.shift()!; - try { - await request(); - } catch (error) { - console.error("Error processing request:", error); - this.queue.unshift(request); - await this.exponentialBackoff(this.queue.length); - } - await this.randomDelay(); - } - - this.processing = false; - } - - private async exponentialBackoff(retryCount: number): Promise { - const delay = Math.pow(2, retryCount) * 1000; - await new Promise((resolve) => setTimeout(resolve, delay)); - } - - private async randomDelay(): Promise { - const delay = Math.floor(Math.random() * 2000) + 1500; - await new Promise((resolve) => setTimeout(resolve, delay)); - } -} - -export class ClientBase extends EventEmitter { - static _twitterClients: { [accountIdentifier: string]: Scraper } = {}; - twitterClient: Scraper; - runtime: IAgentRuntime; - twitterConfig: TwitterConfig; - directions: string; - lastCheckedTweetId: bigint | null = null; - imageDescriptionService: IImageDescriptionService; - temperature = 0.5; - - requestQueue: RequestQueue = new RequestQueue(); - - profile: TwitterProfile | null; - - async cacheTweet(tweet: Tweet): Promise { - if (!tweet) { - console.warn("Tweet is undefined, skipping cache"); - return; - } - - this.runtime.cacheManager.set(`twitter/tweets/${tweet.id}`, tweet); - } - - async getCachedTweet(tweetId: string): Promise { - const cached = await this.runtime.cacheManager.get( - `twitter/tweets/${tweetId}` - ); - - return cached; - } - - async getTweet(tweetId: string): Promise { - const cachedTweet = await this.getCachedTweet(tweetId); - - if (cachedTweet) { - return cachedTweet; - } - - const tweet = await this.requestQueue.add(() => - this.twitterClient.getTweet(tweetId) - ); - - await this.cacheTweet(tweet); - return tweet; - } - - callback: (self: ClientBase) => any = null; - - onReady() { - throw new Error( - "Not implemented in base class, please call from subclass" - ); - } - - /** - * Parse the raw tweet data into a standardized Tweet object. - */ - private parseTweet(raw: any, depth = 0, maxDepth = 3): Tweet { - // If we've reached maxDepth, don't parse nested quotes/retweets further - const canRecurse = depth < maxDepth; - - const quotedStatus = raw.quoted_status_result?.result && canRecurse - ? this.parseTweet(raw.quoted_status_result.result, depth + 1, maxDepth) - : undefined; - - const retweetedStatus = raw.retweeted_status_result?.result && canRecurse - ? this.parseTweet(raw.retweeted_status_result.result, depth + 1, maxDepth) - : undefined; - - const t: Tweet = { - bookmarkCount: - raw.bookmarkCount ?? raw.legacy?.bookmark_count ?? undefined, - conversationId: - raw.conversationId ?? raw.legacy?.conversation_id_str, - hashtags: raw.hashtags ?? raw.legacy?.entities?.hashtags ?? [], - html: raw.html, - id: raw.id ?? raw.rest_id ?? raw.id_str ?? undefined, - inReplyToStatus: raw.inReplyToStatus, - inReplyToStatusId: - raw.inReplyToStatusId ?? - raw.legacy?.in_reply_to_status_id_str ?? - undefined, - isQuoted: raw.legacy?.is_quote_status === true, - isPin: raw.isPin, - isReply: raw.isReply, - isRetweet: raw.legacy?.retweeted === true, - isSelfThread: raw.isSelfThread, - language: raw.legacy?.lang, - likes: raw.legacy?.favorite_count ?? 0, - name: - raw.name ?? - raw?.user_results?.result?.legacy?.name ?? - raw.core?.user_results?.result?.legacy?.name, - mentions: raw.mentions ?? raw.legacy?.entities?.user_mentions ?? [], - permanentUrl: - raw.permanentUrl ?? - (raw.core?.user_results?.result?.legacy?.screen_name && - raw.rest_id - ? `https://x.com/${raw.core?.user_results?.result?.legacy?.screen_name}/status/${raw.rest_id}` - : undefined), - photos: - raw.photos ?? - (raw.legacy?.entities?.media - ?.filter((media: any) => media.type === "photo") - .map((media: any) => ({ - id: media.id_str, - url: media.media_url_https, - alt_text: media.alt_text, - })) || []), - place: raw.place, - poll: raw.poll ?? null, - quotedStatus, - quotedStatusId: - raw.quotedStatusId ?? raw.legacy?.quoted_status_id_str ?? undefined, - quotes: raw.legacy?.quote_count ?? 0, - replies: raw.legacy?.reply_count ?? 0, - retweets: raw.legacy?.retweet_count ?? 0, - retweetedStatus, - retweetedStatusId: raw.legacy?.retweeted_status_id_str ?? undefined, - text: raw.text ?? raw.legacy?.full_text ?? undefined, - thread: raw.thread || [], - timeParsed: raw.timeParsed - ? new Date(raw.timeParsed) - : raw.legacy?.created_at - ? new Date(raw.legacy?.created_at) - : undefined, - timestamp: - raw.timestamp ?? - (raw.legacy?.created_at - ? new Date(raw.legacy.created_at).getTime() / 1000 - : undefined), - urls: raw.urls ?? raw.legacy?.entities?.urls ?? [], - userId: raw.userId ?? raw.legacy?.user_id_str ?? undefined, - username: - raw.username ?? - raw.core?.user_results?.result?.legacy?.screen_name ?? - undefined, - videos: - raw.videos ?? - (raw.legacy?.entities?.media - ?.filter((media: any) => media.type === "video") ?? []), - views: raw.views?.count ? Number(raw.views.count) : 0, - sensitiveContent: raw.sensitiveContent, - }; - - return t; - } - - constructor(runtime: IAgentRuntime, twitterConfig: TwitterConfig) { - super(); - this.runtime = runtime; - this.twitterConfig = twitterConfig; - const username = twitterConfig.TWITTER_USERNAME; - if (ClientBase._twitterClients[username]) { - this.twitterClient = ClientBase._twitterClients[username]; - } else { - this.twitterClient = new Scraper(); - ClientBase._twitterClients[username] = this.twitterClient; - } - - this.directions = - "- " + - this.runtime.character.style.all.join("\n- ") + - "- " + - this.runtime.character.style.post.join(); - } - - async init() { - const username = this.twitterConfig.TWITTER_USERNAME; - const password = this.twitterConfig.TWITTER_PASSWORD; - const email = this.twitterConfig.TWITTER_EMAIL; - let retries = this.twitterConfig.TWITTER_RETRY_LIMIT; - const twitter2faSecret = this.twitterConfig.TWITTER_2FA_SECRET; - - if (!username) { - throw new Error("Twitter username not configured"); - } - - const authToken = this.runtime.getSetting("TWITTER_COOKIES_AUTH_TOKEN"); - const ct0 = this.runtime.getSetting("TWITTER_COOKIES_CT0"); - const guestId = this.runtime.getSetting("TWITTER_COOKIES_GUEST_ID"); - - const createTwitterCookies = (authToken: string, ct0: string, guestId: string) => - authToken && ct0 && guestId - ? [ - { key: 'auth_token', value: authToken, domain: '.twitter.com' }, - { key: 'ct0', value: ct0, domain: '.twitter.com' }, - { key: 'guest_id', value: guestId, domain: '.twitter.com' }, - ] - : null; - - const cachedCookies = await this.getCachedCookies(username) || createTwitterCookies(authToken, ct0, guestId); - - if (cachedCookies) { - elizaLogger.info("Using cached cookies"); - await this.setCookiesFromArray(cachedCookies); - } - - elizaLogger.log("Waiting for Twitter login"); - while (retries > 0) { - try { - if (await this.twitterClient.isLoggedIn()) { - // cookies are valid, no login required - elizaLogger.info("Successfully logged in."); - break; - } else { - await this.twitterClient.login( - username, - password, - email, - twitter2faSecret - ); - if (await this.twitterClient.isLoggedIn()) { - // fresh login, store new cookies - elizaLogger.info("Successfully logged in."); - elizaLogger.info("Caching cookies"); - await this.cacheCookies( - username, - await this.twitterClient.getCookies() - ); - break; - } - } - } catch (error) { - elizaLogger.error(`Login attempt failed: ${error.message}`); - } - - retries--; - elizaLogger.error( - `Failed to login to Twitter. Retrying... (${retries} attempts left)` - ); - - if (retries === 0) { - elizaLogger.error( - "Max retries reached. Exiting login process." - ); - throw new Error("Twitter login failed after maximum retries."); - } - - await new Promise((resolve) => setTimeout(resolve, 2000)); - } - // Initialize Twitter profile - this.profile = await this.fetchProfile(username); - - if (this.profile) { - elizaLogger.log("Twitter user ID:", this.profile.id); - elizaLogger.log( - "Twitter loaded:", - JSON.stringify(this.profile, null, 10) - ); - // Store profile info for use in responses - this.runtime.character.twitterProfile = { - id: this.profile.id, - username: this.profile.username, - screenName: this.profile.screenName, - bio: this.profile.bio, - nicknames: this.profile.nicknames, - }; - } else { - throw new Error("Failed to load profile"); - } - - await this.loadLatestCheckedTweetId(); - await this.populateTimeline(); - } - - async fetchOwnPosts(count: number): Promise { - elizaLogger.debug("fetching own posts"); - const homeTimeline = await this.twitterClient.getUserTweets( - this.profile.id, - count - ); - // Use parseTweet on each tweet - return homeTimeline.tweets.map((t) => this.parseTweet(t)); - } - - /** - * Fetch timeline for twitter account, optionally only from followed accounts - */ - async fetchHomeTimeline( - count: number, - following?: boolean - ): Promise { - elizaLogger.debug("fetching home timeline"); - const homeTimeline = following - ? await this.twitterClient.fetchFollowingTimeline(count, []) - : await this.twitterClient.fetchHomeTimeline(count, []); - - elizaLogger.debug(homeTimeline, { depth: Number.POSITIVE_INFINITY }); - const processedTimeline = homeTimeline - .filter((t) => t.__typename !== "TweetWithVisibilityResults") // what's this about? - .map((tweet) => this.parseTweet(tweet)); - - //elizaLogger.debug("process homeTimeline", processedTimeline); - return processedTimeline; - } - - async fetchTimelineForActions(count: number): Promise { - elizaLogger.debug("fetching timeline for actions"); - - const agentUsername = this.twitterConfig.TWITTER_USERNAME; - - const homeTimeline = - this.twitterConfig.ACTION_TIMELINE_TYPE === - ActionTimelineType.Following - ? await this.twitterClient.fetchFollowingTimeline(count, []) - : await this.twitterClient.fetchHomeTimeline(count, []); - - // Parse, filter out self-tweets, limit to count - return homeTimeline - .map((tweet) => this.parseTweet(tweet)) - .filter((tweet) => tweet.username !== agentUsername) // do not perform action on self-tweets - .slice(0, count); - // TODO: Once the 'count' parameter is fixed in the 'fetchTimeline' method of the 'agent-twitter-client', - // this workaround can be removed. - // Related issue: https://github.com/elizaos/agent-twitter-client/issues/43 - } - - async fetchSearchTweets( - query: string, - maxTweets: number, - searchMode: SearchMode, - cursor?: string - ): Promise { - try { - // Sometimes this fails because we are rate limited. in this case, we just need to return an empty array - // if we dont get a response in 5 seconds, something is wrong - const timeoutPromise = new Promise((resolve) => - setTimeout(() => resolve({ tweets: [] }), 15000) - ); - - try { - const result = await this.requestQueue.add( - async () => - await Promise.race([ - this.twitterClient.fetchSearchTweets( - query, - maxTweets, - searchMode, - cursor - ), - timeoutPromise, - ]) - ); - return (result ?? { tweets: [] }) as QueryTweetsResponse; - } catch (error) { - elizaLogger.error("Error fetching search tweets:", error); - return { tweets: [] }; - } - } catch (error) { - elizaLogger.error("Error fetching search tweets:", error); - return { tweets: [] }; - } - } - - private async populateTimeline() { - elizaLogger.debug("populating timeline..."); - - const cachedTimeline = await this.getCachedTimeline(); - - // Check if the cache file exists - if (cachedTimeline) { - // Read the cached search results from the file - - // Get the existing memories from the database - const existingMemories = - await this.runtime.messageManager.getMemoriesByRoomIds({ - roomIds: cachedTimeline.map((tweet) => - stringToUuid( - tweet.conversationId + "-" + this.runtime.agentId - ) - ), - }); - - //TODO: load tweets not in cache? - - // Create a Set to store the IDs of existing memories - const existingMemoryIds = new Set( - existingMemories.map((memory) => memory.id.toString()) - ); - - // Check if any of the cached tweets exist in the existing memories - const someCachedTweetsExist = cachedTimeline.some((tweet) => - existingMemoryIds.has( - stringToUuid(tweet.id + "-" + this.runtime.agentId) - ) - ); - - if (someCachedTweetsExist) { - // Filter out the cached tweets that already exist in the database - const tweetsToSave = cachedTimeline.filter( - (tweet) => - !existingMemoryIds.has( - stringToUuid(tweet.id + "-" + this.runtime.agentId) - ) - ); - - console.log({ - processingTweets: tweetsToSave - .map((tweet) => tweet.id) - .join(","), - }); - - // Save the missing tweets as memories - for (const tweet of tweetsToSave) { - elizaLogger.log("Saving Tweet", tweet.id); - - const roomId = stringToUuid( - tweet.conversationId + "-" + this.runtime.agentId - ); - - const userId = - tweet.userId === this.profile.id - ? this.runtime.agentId - : stringToUuid(tweet.userId); - - if (tweet.userId === this.profile.id) { - await this.runtime.ensureConnection( - this.runtime.agentId, - roomId, - this.profile.username, - this.profile.screenName, - "twitter" - ); - } else { - await this.runtime.ensureConnection( - userId, - roomId, - tweet.username, - tweet.name, - "twitter" - ); - } - - const content = { - text: tweet.text, - url: tweet.permanentUrl, - source: "twitter", - inReplyTo: tweet.inReplyToStatusId - ? stringToUuid( - tweet.inReplyToStatusId + - "-" + - this.runtime.agentId - ) - : undefined, - } as Content; - - elizaLogger.log("Creating memory for tweet", tweet.id); - - // check if it already exists - const memory = - await this.runtime.messageManager.getMemoryById( - stringToUuid(tweet.id + "-" + this.runtime.agentId) - ); - - if (memory) { - elizaLogger.log( - "Memory already exists, skipping timeline population" - ); - break; - } - - await this.runtime.messageManager.createMemory({ - id: stringToUuid(tweet.id + "-" + this.runtime.agentId), - userId, - content: content, - agentId: this.runtime.agentId, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: tweet.timestamp * 1000, - }); - - await this.cacheTweet(tweet); - } - - elizaLogger.log( - `Populated ${tweetsToSave.length} missing tweets from the cache.` - ); - return; - } - } - - const timeline = await this.fetchHomeTimeline(cachedTimeline ? 10 : 50); - const username = this.twitterConfig.TWITTER_USERNAME; - - // Get the most recent 20 mentions and interactions - const mentionsAndInteractions = await this.fetchSearchTweets( - `@${username}`, - 20, - SearchMode.Latest - ); - - // Combine the timeline tweets and mentions/interactions - const allTweets = [...timeline, ...mentionsAndInteractions.tweets]; - - // Create a Set to store unique tweet IDs - const tweetIdsToCheck = new Set(); - const roomIds = new Set(); - - // Add tweet IDs to the Set - for (const tweet of allTweets) { - tweetIdsToCheck.add(tweet.id); - roomIds.add( - stringToUuid(tweet.conversationId + "-" + this.runtime.agentId) - ); - } - - // Check the existing memories in the database - const existingMemories = - await this.runtime.messageManager.getMemoriesByRoomIds({ - roomIds: Array.from(roomIds), - }); - - // Create a Set to store the existing memory IDs - const existingMemoryIds = new Set( - existingMemories.map((memory) => memory.id) - ); - - // Filter out the tweets that already exist in the database - const tweetsToSave = allTweets.filter( - (tweet) => - !existingMemoryIds.has( - stringToUuid(tweet.id + "-" + this.runtime.agentId) - ) - ); - - elizaLogger.debug({ - processingTweets: tweetsToSave.map((tweet) => tweet.id).join(","), - }); - - await this.runtime.ensureUserExists( - this.runtime.agentId, - this.profile.username, - this.runtime.character.name, - "twitter" - ); - - // Save the new tweets as memories - for (const tweet of tweetsToSave) { - elizaLogger.log("Saving Tweet", tweet.id); - - const roomId = stringToUuid( - tweet.conversationId + "-" + this.runtime.agentId - ); - const userId = - tweet.userId === this.profile.id - ? this.runtime.agentId - : stringToUuid(tweet.userId); - - if (tweet.userId === this.profile.id) { - await this.runtime.ensureConnection( - this.runtime.agentId, - roomId, - this.profile.username, - this.profile.screenName, - "twitter" - ); - } else { - await this.runtime.ensureConnection( - userId, - roomId, - tweet.username, - tweet.name, - "twitter" - ); - } - - const content = { - text: tweet.text, - url: tweet.permanentUrl, - source: "twitter", - inReplyTo: tweet.inReplyToStatusId - ? stringToUuid(tweet.inReplyToStatusId) - : undefined, - } as Content; - - await this.runtime.messageManager.createMemory({ - id: stringToUuid(tweet.id + "-" + this.runtime.agentId), - userId, - content: content, - agentId: this.runtime.agentId, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: tweet.timestamp * 1000, - }); - - await this.cacheTweet(tweet); - } - - // Cache - await this.cacheTimeline(timeline); - await this.cacheMentions(mentionsAndInteractions.tweets); - } - - async setCookiesFromArray(cookiesArray: any[]) { - const cookieStrings = cookiesArray.map( - (cookie) => - `${cookie.key}=${cookie.value}; Domain=${cookie.domain}; Path=${cookie.path}; ${ - cookie.secure ? "Secure" : "" - }; ${cookie.httpOnly ? "HttpOnly" : ""}; SameSite=${ - cookie.sameSite || "Lax" - }` - ); - await this.twitterClient.setCookies(cookieStrings); - } - - async saveRequestMessage(message: Memory, state: State) { - if (message.content.text) { - const recentMessage = await this.runtime.messageManager.getMemories( - { - roomId: message.roomId, - count: 1, - unique: false, - } - ); - - if ( - recentMessage.length > 0 && - recentMessage[0].content === message.content - ) { - elizaLogger.debug("Message already saved", recentMessage[0].id); - } else { - await this.runtime.messageManager.createMemory({ - ...message, - embedding: getEmbeddingZeroVector(), - }); - } - - await this.runtime.evaluate(message, { - ...state, - twitterClient: this.twitterClient, - }); - } - } - - async loadLatestCheckedTweetId(): Promise { - const latestCheckedTweetId = - await this.runtime.cacheManager.get( - `twitter/${this.profile.username}/latest_checked_tweet_id` - ); - - if (latestCheckedTweetId) { - this.lastCheckedTweetId = BigInt(latestCheckedTweetId); - } - } - - async cacheLatestCheckedTweetId() { - if (this.lastCheckedTweetId) { - await this.runtime.cacheManager.set( - `twitter/${this.profile.username}/latest_checked_tweet_id`, - this.lastCheckedTweetId.toString() - ); - } - } - - async getCachedTimeline(): Promise { - return await this.runtime.cacheManager.get( - `twitter/${this.profile.username}/timeline` - ); - } - - async cacheTimeline(timeline: Tweet[]) { - await this.runtime.cacheManager.set( - `twitter/${this.profile.username}/timeline`, - timeline, - { expires: Date.now() + 10 * 1000 } - ); - } - - async cacheMentions(mentions: Tweet[]) { - await this.runtime.cacheManager.set( - `twitter/${this.profile.username}/mentions`, - mentions, - { expires: Date.now() + 10 * 1000 } - ); - } - - async getCachedCookies(username: string) { - return await this.runtime.cacheManager.get( - `twitter/${username}/cookies` - ); - } - - async cacheCookies(username: string, cookies: any[]) { - await this.runtime.cacheManager.set( - `twitter/${username}/cookies`, - cookies - ); - } - - async fetchProfile(username: string): Promise { - try { - const profile = await this.requestQueue.add(async () => { - const profile = await this.twitterClient.getProfile(username); - return { - id: profile.userId, - username, - screenName: profile.name || this.runtime.character.name, - bio: - profile.biography || - typeof this.runtime.character.bio === "string" - ? (this.runtime.character.bio as string) - : this.runtime.character.bio.length > 0 - ? this.runtime.character.bio[0] - : "", - nicknames: - this.runtime.character.twitterProfile?.nicknames || [], - } satisfies TwitterProfile; - }); - - return profile; - } catch (error) { - console.error("Error fetching Twitter profile:", error); - throw error; - } - } -} diff --git a/packages/client-twitter/src/environment.ts b/packages/client-twitter/src/environment.ts deleted file mode 100644 index a7d23404288b4..0000000000000 --- a/packages/client-twitter/src/environment.ts +++ /dev/null @@ -1,249 +0,0 @@ -import { - parseBooleanFromText, - type IAgentRuntime, - ActionTimelineType, -} from "@elizaos/core"; -import { z, ZodError } from "zod"; - -export const DEFAULT_MAX_TWEET_LENGTH = 280; - -const twitterUsernameSchema = z - .string() - .min(1, "An X/Twitter Username must be at least 1 character long") - .max(15, "An X/Twitter Username cannot exceed 15 characters") - .refine((username) => { - // Allow wildcard '*' as a special case - if (username === "*") return true; - - // Twitter usernames can: - // - Start with digits now - // - Contain letters, numbers, underscores - // - Must not be empty - return /^[A-Za-z0-9_]+$/.test(username); - }, "An X Username can only contain letters, numbers, and underscores"); - -/** - * This schema defines all required/optional environment settings, - * including new fields like TWITTER_SPACES_ENABLE. - */ -export const twitterEnvSchema = z.object({ - TWITTER_DRY_RUN: z.boolean(), - TWITTER_USERNAME: z.string().min(1, "X/Twitter username is required"), - TWITTER_PASSWORD: z.string().min(1, "X/Twitter password is required"), - TWITTER_EMAIL: z.string().email("Valid X/Twitter email is required"), - MAX_TWEET_LENGTH: z.number().int().default(DEFAULT_MAX_TWEET_LENGTH), - TWITTER_SEARCH_ENABLE: z.boolean().default(false), - TWITTER_2FA_SECRET: z.string(), - TWITTER_RETRY_LIMIT: z.number().int(), - TWITTER_POLL_INTERVAL: z.number().int(), - TWITTER_TARGET_USERS: z.array(twitterUsernameSchema).default([]), - // I guess it's possible to do the transformation with zod - // not sure it's preferable, maybe a readability issue - // since more people will know js/ts than zod - /* - z - .string() - .transform((val) => val.trim()) - .pipe( - z.string() - .transform((val) => - val ? val.split(',').map((u) => u.trim()).filter(Boolean) : [] - ) - .pipe( - z.array( - z.string() - .min(1) - .max(15) - .regex( - /^[A-Za-z][A-Za-z0-9_]*[A-Za-z0-9]$|^[A-Za-z]$/, - 'Invalid Twitter username format' - ) - ) - ) - .transform((users) => users.join(',')) - ) - .optional() - .default(''), - */ - ENABLE_TWITTER_POST_GENERATION: z.boolean(), - POST_INTERVAL_MIN: z.number().int(), - POST_INTERVAL_MAX: z.number().int(), - ENABLE_ACTION_PROCESSING: z.boolean(), - ACTION_INTERVAL: z.number().int(), - POST_IMMEDIATELY: z.boolean(), - TWITTER_SPACES_ENABLE: z.boolean().default(false), - MAX_ACTIONS_PROCESSING: z.number().int(), - ACTION_TIMELINE_TYPE: z - .nativeEnum(ActionTimelineType) - .default(ActionTimelineType.ForYou), -}); - -export type TwitterConfig = z.infer; - -/** - * Helper to parse a comma-separated list of Twitter usernames - * (already present in your code). - */ -function parseTargetUsers(targetUsersStr?: string | null): string[] { - if (!targetUsersStr?.trim()) { - return []; - } - return targetUsersStr - .split(",") - .map((user) => user.trim()) - .filter(Boolean); -} - -function safeParseInt( - value: string | undefined | null, - defaultValue: number -): number { - if (!value) return defaultValue; - const parsed = Number.parseInt(value, 10); - return isNaN(parsed) ? defaultValue : Math.max(1, parsed); -} - -/** - * Validates or constructs a TwitterConfig object using zod, - * taking values from the IAgentRuntime or process.env as needed. - */ -// This also is organized to serve as a point of documentation for the client -// most of the inputs from the framework (env/character) - -// we also do a lot of typing/parsing here -// so we can do it once and only once per character -export async function validateTwitterConfig( - runtime: IAgentRuntime -): Promise { - try { - const twitterConfig = { - TWITTER_DRY_RUN: - parseBooleanFromText( - runtime.getSetting("TWITTER_DRY_RUN") || - process.env.TWITTER_DRY_RUN - ) ?? false, // parseBooleanFromText return null if "", map "" to false - - TWITTER_USERNAME: - runtime.getSetting("TWITTER_USERNAME") || - process.env.TWITTER_USERNAME, - - TWITTER_PASSWORD: - runtime.getSetting("TWITTER_PASSWORD") || - process.env.TWITTER_PASSWORD, - - TWITTER_EMAIL: - runtime.getSetting("TWITTER_EMAIL") || - process.env.TWITTER_EMAIL, - - // number as string? - MAX_TWEET_LENGTH: safeParseInt( - runtime.getSetting("MAX_TWEET_LENGTH") || - process.env.MAX_TWEET_LENGTH, - DEFAULT_MAX_TWEET_LENGTH - ), - - TWITTER_SEARCH_ENABLE: - parseBooleanFromText( - runtime.getSetting("TWITTER_SEARCH_ENABLE") || - process.env.TWITTER_SEARCH_ENABLE - ) ?? false, - - // string passthru - TWITTER_2FA_SECRET: - runtime.getSetting("TWITTER_2FA_SECRET") || - process.env.TWITTER_2FA_SECRET || - "", - - // int - TWITTER_RETRY_LIMIT: safeParseInt( - runtime.getSetting("TWITTER_RETRY_LIMIT") || - process.env.TWITTER_RETRY_LIMIT, - 5 - ), - - // int in seconds - TWITTER_POLL_INTERVAL: safeParseInt( - runtime.getSetting("TWITTER_POLL_INTERVAL") || - process.env.TWITTER_POLL_INTERVAL, - 120 // 2m - ), - - // comma separated string - TWITTER_TARGET_USERS: parseTargetUsers( - runtime.getSetting("TWITTER_TARGET_USERS") || - process.env.TWITTER_TARGET_USERS - ), - - // bool - ENABLE_TWITTER_POST_GENERATION: - parseBooleanFromText( - runtime.getSetting("ENABLE_TWITTER_POST_GENERATION") || - process.env.ENABLE_TWITTER_POST_GENERATION - ) ?? true, - - - // int in minutes - POST_INTERVAL_MIN: safeParseInt( - runtime.getSetting("POST_INTERVAL_MIN") || - process.env.POST_INTERVAL_MIN, - 90 // 1.5 hours - ), - - // int in minutes - POST_INTERVAL_MAX: safeParseInt( - runtime.getSetting("POST_INTERVAL_MAX") || - process.env.POST_INTERVAL_MAX, - 180 // 3 hours - ), - - // bool - ENABLE_ACTION_PROCESSING: - parseBooleanFromText( - runtime.getSetting("ENABLE_ACTION_PROCESSING") || - process.env.ENABLE_ACTION_PROCESSING - ) ?? false, - - // init in minutes (min 1m) - ACTION_INTERVAL: safeParseInt( - runtime.getSetting("ACTION_INTERVAL") || - process.env.ACTION_INTERVAL, - 5 // 5 minutes - ), - - // bool - POST_IMMEDIATELY: - parseBooleanFromText( - runtime.getSetting("POST_IMMEDIATELY") || - process.env.POST_IMMEDIATELY - ) ?? false, - - TWITTER_SPACES_ENABLE: - parseBooleanFromText( - runtime.getSetting("TWITTER_SPACES_ENABLE") || - process.env.TWITTER_SPACES_ENABLE - ) ?? false, - - MAX_ACTIONS_PROCESSING: safeParseInt( - runtime.getSetting("MAX_ACTIONS_PROCESSING") || - process.env.MAX_ACTIONS_PROCESSING, - 1 - ), - - ACTION_TIMELINE_TYPE: - runtime.getSetting("ACTION_TIMELINE_TYPE") || - process.env.ACTION_TIMELINE_TYPE, - }; - - return twitterEnvSchema.parse(twitterConfig); - } catch (error) { - if (error instanceof ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `X/Twitter configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/client-twitter/src/index.ts b/packages/client-twitter/src/index.ts deleted file mode 100644 index 52957c1878ba8..0000000000000 --- a/packages/client-twitter/src/index.ts +++ /dev/null @@ -1,87 +0,0 @@ -import { type Client, elizaLogger, type IAgentRuntime } from "@elizaos/core"; -import { ClientBase } from "./base.ts"; -import { validateTwitterConfig, type TwitterConfig } from "./environment.ts"; -import { TwitterInteractionClient } from "./interactions.ts"; -import { TwitterPostClient } from "./post.ts"; -import { TwitterSearchClient } from "./search.ts"; -import { TwitterSpaceClient } from "./spaces.ts"; - -/** - * A manager that orchestrates all specialized Twitter logic: - * - client: base operations (login, timeline caching, etc.) - * - post: autonomous posting logic - * - search: searching tweets / replying logic - * - interaction: handling mentions, replies - * - space: launching and managing Twitter Spaces (optional) - */ -class TwitterManager { - client: ClientBase; - post: TwitterPostClient; - search: TwitterSearchClient; - interaction: TwitterInteractionClient; - space?: TwitterSpaceClient; - - constructor(runtime: IAgentRuntime, twitterConfig: TwitterConfig) { - // Pass twitterConfig to the base client - this.client = new ClientBase(runtime, twitterConfig); - - // Posting logic - this.post = new TwitterPostClient(this.client, runtime); - - // Optional search logic (enabled if TWITTER_SEARCH_ENABLE is true) - if (twitterConfig.TWITTER_SEARCH_ENABLE) { - elizaLogger.warn("Twitter/X client running in a mode that:"); - elizaLogger.warn("1. violates consent of random users"); - elizaLogger.warn("2. burns your rate limit"); - elizaLogger.warn("3. can get your account banned"); - elizaLogger.warn("use at your own risk"); - this.search = new TwitterSearchClient(this.client, runtime); - } - - // Mentions and interactions - this.interaction = new TwitterInteractionClient(this.client, runtime); - - // Optional Spaces logic (enabled if TWITTER_SPACES_ENABLE is true) - if (twitterConfig.TWITTER_SPACES_ENABLE) { - this.space = new TwitterSpaceClient(this.client, runtime); - } - } -} - -export const TwitterClientInterface: Client = { - async start(runtime: IAgentRuntime) { - const twitterConfig: TwitterConfig = - await validateTwitterConfig(runtime); - - elizaLogger.log("Twitter client started"); - - const manager = new TwitterManager(runtime, twitterConfig); - - // Initialize login/session - await manager.client.init(); - - // Start the posting loop - await manager.post.start(); - - // Start the search logic if it exists - if (manager.search) { - await manager.search.start(); - } - - // Start interactions (mentions, replies) - await manager.interaction.start(); - - // If Spaces are enabled, start the periodic check - if (manager.space) { - manager.space.startPeriodicSpaceCheck(); - } - - return manager; - }, - - async stop(_runtime: IAgentRuntime) { - elizaLogger.warn("Twitter client does not support stopping yet"); - }, -}; - -export default TwitterClientInterface; diff --git a/packages/client-twitter/src/interactions.ts b/packages/client-twitter/src/interactions.ts deleted file mode 100644 index e704a4cb7e099..0000000000000 --- a/packages/client-twitter/src/interactions.ts +++ /dev/null @@ -1,682 +0,0 @@ -import { SearchMode, type Tweet } from "agent-twitter-client"; -import { - composeContext, - generateMessageResponse, - generateShouldRespond, - messageCompletionFooter, - shouldRespondFooter, - type Content, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - stringToUuid, - elizaLogger, - getEmbeddingZeroVector, - type IImageDescriptionService, - ServiceType -} from "@elizaos/core"; -import type { ClientBase } from "./base"; -import { buildConversationThread, sendTweet, wait } from "./utils.ts"; - -export const twitterMessageHandlerTemplate = - ` -# Areas of Expertise -{{knowledge}} - -# About {{agentName}} (@{{twitterUserName}}): -{{bio}} -{{lore}} -{{topics}} - -{{providers}} - -{{characterPostExamples}} - -{{postDirections}} - -Recent interactions between {{agentName}} and other users: -{{recentPostInteractions}} - -{{recentPosts}} - -# TASK: Generate a post/reply in the voice, style and perspective of {{agentName}} (@{{twitterUserName}}) while using the thread of tweets as additional context: - -Current Post: -{{currentPost}} -Here is the descriptions of images in the Current post. -{{imageDescriptions}} - -Thread of Tweets You Are Replying To: -{{formattedConversation}} - -# INSTRUCTIONS: Generate a post in the voice, style and perspective of {{agentName}} (@{{twitterUserName}}). You MUST include an action if the current post text includes a prompt that is similar to one of the available actions mentioned here: -{{actionNames}} -{{actions}} - -Here is the current post text again. Remember to include an action if the current post text includes a prompt that asks for one of the available actions mentioned above (does not need to be exact) -{{currentPost}} -Here is the descriptions of images in the Current post. -{{imageDescriptions}} -` + messageCompletionFooter; - -export const twitterShouldRespondTemplate = (targetUsersStr: string) => - `# INSTRUCTIONS: Determine if {{agentName}} (@{{twitterUserName}}) should respond to the message and participate in the conversation. Do not comment. Just respond with "true" or "false". - -Response options are RESPOND, IGNORE and STOP. - -PRIORITY RULE: ALWAYS RESPOND to these users regardless of topic or message content: ${targetUsersStr}. Topic relevance should be ignored for these users. - -For other users: -- {{agentName}} should RESPOND to messages directed at them -- {{agentName}} should RESPOND to conversations relevant to their background -- {{agentName}} should IGNORE irrelevant messages -- {{agentName}} should IGNORE very short messages unless directly addressed -- {{agentName}} should STOP if asked to stop -- {{agentName}} should STOP if conversation is concluded -- {{agentName}} is in a room with other users and wants to be conversational, but not annoying. - -IMPORTANT: -- {{agentName}} (aka @{{twitterUserName}}) is particularly sensitive about being annoying, so if there is any doubt, it is better to IGNORE than to RESPOND. -- For users not in the priority list, {{agentName}} (@{{twitterUserName}}) should err on the side of IGNORE rather than RESPOND if in doubt. - -Recent Posts: -{{recentPosts}} - -Current Post: -{{currentPost}} - -Thread of Tweets You Are Replying To: -{{formattedConversation}} - -# INSTRUCTIONS: Respond with [RESPOND] if {{agentName}} should respond, or [IGNORE] if {{agentName}} should not respond to the last message and [STOP] if {{agentName}} should stop participating in the conversation. -` + shouldRespondFooter; - -export class TwitterInteractionClient { - client: ClientBase; - runtime: IAgentRuntime; - private isDryRun: boolean; - constructor(client: ClientBase, runtime: IAgentRuntime) { - this.client = client; - this.runtime = runtime; - this.isDryRun = this.client.twitterConfig.TWITTER_DRY_RUN; - } - - async start() { - const handleTwitterInteractionsLoop = () => { - this.handleTwitterInteractions(); - setTimeout( - handleTwitterInteractionsLoop, - // Defaults to 2 minutes - this.client.twitterConfig.TWITTER_POLL_INTERVAL * 1000 - ); - }; - handleTwitterInteractionsLoop(); - } - - async handleTwitterInteractions() { - elizaLogger.log("Checking Twitter interactions"); - - const twitterUsername = this.client.profile.username; - try { - // Check for mentions - const mentionCandidates = ( - await this.client.fetchSearchTweets( - `@${twitterUsername}`, - 20, - SearchMode.Latest - ) - ).tweets; - - elizaLogger.log( - "Completed checking mentioned tweets:", - mentionCandidates.length - ); - let uniqueTweetCandidates = [...mentionCandidates]; - // Only process target users if configured - if (this.client.twitterConfig.TWITTER_TARGET_USERS.length) { - const TARGET_USERS = - this.client.twitterConfig.TWITTER_TARGET_USERS; - - elizaLogger.log("Processing target users:", TARGET_USERS); - - if (TARGET_USERS.length > 0) { - // Create a map to store tweets by user - const tweetsByUser = new Map(); - - // Fetch tweets from all target users - for (const username of TARGET_USERS) { - try { - const userTweets = ( - await this.client.twitterClient.fetchSearchTweets( - `from:${username}`, - 3, - SearchMode.Latest - ) - ).tweets; - - // Filter for unprocessed, non-reply, recent tweets - const validTweets = userTweets.filter((tweet) => { - const isUnprocessed = - !this.client.lastCheckedTweetId || - Number.parseInt(tweet.id) > - this.client.lastCheckedTweetId; - const isRecent = - Date.now() - tweet.timestamp * 1000 < - 2 * 60 * 60 * 1000; - - elizaLogger.log(`Tweet ${tweet.id} checks:`, { - isUnprocessed, - isRecent, - isReply: tweet.isReply, - isRetweet: tweet.isRetweet, - }); - - return ( - isUnprocessed && - !tweet.isReply && - !tweet.isRetweet && - isRecent - ); - }); - - if (validTweets.length > 0) { - tweetsByUser.set(username, validTweets); - elizaLogger.log( - `Found ${validTweets.length} valid tweets from ${username}` - ); - } - } catch (error) { - elizaLogger.error( - `Error fetching tweets for ${username}:`, - error - ); - continue; - } - } - - // Select one tweet from each user that has tweets - const selectedTweets: Tweet[] = []; - for (const [username, tweets] of tweetsByUser) { - if (tweets.length > 0) { - // Randomly select one tweet from this user - const randomTweet = - tweets[ - Math.floor(Math.random() * tweets.length) - ]; - selectedTweets.push(randomTweet); - elizaLogger.log( - `Selected tweet from ${username}: ${randomTweet.text?.substring(0, 100)}` - ); - } - } - - // Add selected tweets to candidates - uniqueTweetCandidates = [ - ...mentionCandidates, - ...selectedTweets, - ]; - } - } else { - elizaLogger.log( - "No target users configured, processing only mentions" - ); - } - - // Sort tweet candidates by ID in ascending order - uniqueTweetCandidates - .sort((a, b) => a.id.localeCompare(b.id)) - .filter((tweet) => tweet.userId !== this.client.profile.id); - - // for each tweet candidate, handle the tweet - for (const tweet of uniqueTweetCandidates) { - if ( - !this.client.lastCheckedTweetId || - BigInt(tweet.id) > this.client.lastCheckedTweetId - ) { - // Generate the tweetId UUID the same way it's done in handleTweet - const tweetId = stringToUuid( - tweet.id + "-" + this.runtime.agentId - ); - - // Check if we've already processed this tweet - const existingResponse = - await this.runtime.messageManager.getMemoryById( - tweetId - ); - - if (existingResponse) { - elizaLogger.log( - `Already responded to tweet ${tweet.id}, skipping` - ); - continue; - } - elizaLogger.log("New Tweet found", tweet.permanentUrl); - - const roomId = stringToUuid( - tweet.conversationId + "-" + this.runtime.agentId - ); - - const userIdUUID = - tweet.userId === this.client.profile.id - ? this.runtime.agentId - : stringToUuid(tweet.userId!); - - await this.runtime.ensureConnection( - userIdUUID, - roomId, - tweet.username, - tweet.name, - "twitter" - ); - - const thread = await buildConversationThread( - tweet, - this.client - ); - - const message = { - content: { - text: tweet.text, - imageUrls: tweet.photos?.map(photo => photo.url) || [] - }, - agentId: this.runtime.agentId, - userId: userIdUUID, - roomId, - }; - - await this.handleTweet({ - tweet, - message, - thread, - }); - - // Update the last checked tweet ID after processing each tweet - this.client.lastCheckedTweetId = BigInt(tweet.id); - } - } - - // Save the latest checked tweet ID to the file - await this.client.cacheLatestCheckedTweetId(); - - elizaLogger.log("Finished checking Twitter interactions"); - } catch (error) { - elizaLogger.error("Error handling Twitter interactions:", error); - } - } - - private async handleTweet({ - tweet, - message, - thread, - }: { - tweet: Tweet; - message: Memory; - thread: Tweet[]; - }) { - // Only skip if tweet is from self AND not from a target user - if (tweet.userId === this.client.profile.id && - !this.client.twitterConfig.TWITTER_TARGET_USERS.includes(tweet.username)) { - return; - } - - if (!message.content.text) { - elizaLogger.log("Skipping Tweet with no text", tweet.id); - return { text: "", action: "IGNORE" }; - } - - elizaLogger.log("Processing Tweet: ", tweet.id); - const formatTweet = (tweet: Tweet) => { - return ` ID: ${tweet.id} - From: ${tweet.name} (@${tweet.username}) - Text: ${tweet.text}`; - }; - const currentPost = formatTweet(tweet); - - const formattedConversation = thread - .map( - (tweet) => `@${tweet.username} (${new Date( - tweet.timestamp * 1000 - ).toLocaleString("en-US", { - hour: "2-digit", - minute: "2-digit", - month: "short", - day: "numeric", - })}): - ${tweet.text}` - ) - .join("\n\n"); - - const imageDescriptionsArray = []; - try{ - for (const photo of tweet.photos) { - const description = await this.runtime - .getService( - ServiceType.IMAGE_DESCRIPTION - ) - .describeImage(photo.url); - imageDescriptionsArray.push(description); - } - } catch (error) { - // Handle the error - elizaLogger.error("Error Occured during describing image: ", error); -} - - - - - let state = await this.runtime.composeState(message, { - twitterClient: this.client.twitterClient, - twitterUserName: this.client.twitterConfig.TWITTER_USERNAME, - currentPost, - formattedConversation, - imageDescriptions: imageDescriptionsArray.length > 0 - ? `\nImages in Tweet:\n${imageDescriptionsArray.map((desc, i) => - `Image ${i + 1}: Title: ${desc.title}\nDescription: ${desc.description}`).join("\n\n")}`:"" - }); - - // check if the tweet exists, save if it doesn't - const tweetId = stringToUuid(tweet.id + "-" + this.runtime.agentId); - const tweetExists = - await this.runtime.messageManager.getMemoryById(tweetId); - - if (!tweetExists) { - elizaLogger.log("tweet does not exist, saving"); - const userIdUUID = stringToUuid(tweet.userId as string); - const roomId = stringToUuid(tweet.conversationId); - - const message = { - id: tweetId, - agentId: this.runtime.agentId, - content: { - text: tweet.text, - url: tweet.permanentUrl, - imageUrls: tweet.photos?.map(photo => photo.url) || [], - inReplyTo: tweet.inReplyToStatusId - ? stringToUuid( - tweet.inReplyToStatusId + - "-" + - this.runtime.agentId - ) - : undefined, - }, - userId: userIdUUID, - roomId, - createdAt: tweet.timestamp * 1000, - }; - this.client.saveRequestMessage(message, state); - } - - // get usernames into str - const validTargetUsersStr = - this.client.twitterConfig.TWITTER_TARGET_USERS.join(","); - - const shouldRespondContext = composeContext({ - state, - template: - this.runtime.character.templates - ?.twitterShouldRespondTemplate || - this.runtime.character?.templates?.shouldRespondTemplate || - twitterShouldRespondTemplate(validTargetUsersStr), - }); - - const shouldRespond = await generateShouldRespond({ - runtime: this.runtime, - context: shouldRespondContext, - modelClass: ModelClass.MEDIUM, - }); - - // Promise<"RESPOND" | "IGNORE" | "STOP" | null> { - if (shouldRespond !== "RESPOND") { - elizaLogger.log("Not responding to message"); - return { text: "Response Decision:", action: shouldRespond }; - } - - const context = composeContext({ - state: { - ...state, - // Convert actionNames array to string - actionNames: Array.isArray(state.actionNames) - ? state.actionNames.join(', ') - : state.actionNames || '', - actions: Array.isArray(state.actions) - ? state.actions.join('\n') - : state.actions || '', - // Ensure character examples are included - characterPostExamples: this.runtime.character.messageExamples - ? this.runtime.character.messageExamples - .map(example => - example.map(msg => - `${msg.user}: ${msg.content.text}${msg.content.action ? ` [Action: ${msg.content.action}]` : ''}` - ).join('\n') - ).join('\n\n') - : '', - }, - template: - this.runtime.character.templates - ?.twitterMessageHandlerTemplate || - this.runtime.character?.templates?.messageHandlerTemplate || - twitterMessageHandlerTemplate, - }); - - const response = await generateMessageResponse({ - runtime: this.runtime, - context, - modelClass: ModelClass.LARGE, - }); - - const removeQuotes = (str: string) => - str.replace(/^['"](.*)['"]$/, "$1"); - - const stringId = stringToUuid(tweet.id + "-" + this.runtime.agentId); - - response.inReplyTo = stringId; - - response.text = removeQuotes(response.text); - - if (response.text) { - if (this.isDryRun) { - elizaLogger.info( - `Dry run: Selected Post: ${tweet.id} - ${tweet.username}: ${tweet.text}\nAgent's Output:\n${response.text}` - ); - } else { - try { - const callback: HandlerCallback = async ( - response: Content, - tweetId?: string - ) => { - const memories = await sendTweet( - this.client, - response, - message.roomId, - this.client.twitterConfig.TWITTER_USERNAME, - tweetId || tweet.id - ); - return memories; - }; - - const action = this.runtime.actions.find((a) => a.name === response.action); - const shouldSuppressInitialMessage = action?.suppressInitialMessage; - - let responseMessages = []; - - if (!shouldSuppressInitialMessage) { - responseMessages = await callback(response); - } else { - responseMessages = [{ - id: stringToUuid(tweet.id + "-" + this.runtime.agentId), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - content: response, - roomId: message.roomId, - embedding: getEmbeddingZeroVector(), - createdAt: Date.now(), - }]; - } - - state = (await this.runtime.updateRecentMessageState( - state - )) as State; - - for (const responseMessage of responseMessages) { - if ( - responseMessage === - responseMessages[responseMessages.length - 1] - ) { - responseMessage.content.action = response.action; - } else { - responseMessage.content.action = "CONTINUE"; - } - await this.runtime.messageManager.createMemory( - responseMessage - ); - } - - const responseTweetId = - responseMessages[responseMessages.length - 1]?.content - ?.tweetId; - - await this.runtime.processActions( - message, - responseMessages, - state, - (response: Content) => { - return callback(response, responseTweetId); - } - ); - - const responseInfo = `Context:\n\n${context}\n\nSelected Post: ${tweet.id} - ${tweet.username}: ${tweet.text}\nAgent's Output:\n${response.text}`; - - await this.runtime.cacheManager.set( - `twitter/tweet_generation_${tweet.id}.txt`, - responseInfo - ); - await wait(); - } catch (error) { - elizaLogger.error(`Error sending response tweet: ${error}`); - } - } - } - } - - async buildConversationThread( - tweet: Tweet, - maxReplies = 10 - ): Promise { - const thread: Tweet[] = []; - const visited: Set = new Set(); - - async function processThread(currentTweet: Tweet, depth = 0) { - elizaLogger.log("Processing tweet:", { - id: currentTweet.id, - inReplyToStatusId: currentTweet.inReplyToStatusId, - depth: depth, - }); - - if (!currentTweet) { - elizaLogger.log("No current tweet found for thread building"); - return; - } - - if (depth >= maxReplies) { - elizaLogger.log("Reached maximum reply depth", depth); - return; - } - - // Handle memory storage - const memory = await this.runtime.messageManager.getMemoryById( - stringToUuid(currentTweet.id + "-" + this.runtime.agentId) - ); - if (!memory) { - const roomId = stringToUuid( - currentTweet.conversationId + "-" + this.runtime.agentId - ); - const userId = stringToUuid(currentTweet.userId); - - await this.runtime.ensureConnection( - userId, - roomId, - currentTweet.username, - currentTweet.name, - "twitter" - ); - - this.runtime.messageManager.createMemory({ - id: stringToUuid( - currentTweet.id + "-" + this.runtime.agentId - ), - agentId: this.runtime.agentId, - content: { - text: currentTweet.text, - source: "twitter", - url: currentTweet.permanentUrl, - imageUrls: currentTweet.photos?.map(photo => photo.url) || [], - inReplyTo: currentTweet.inReplyToStatusId - ? stringToUuid( - currentTweet.inReplyToStatusId + - "-" + - this.runtime.agentId - ) - : undefined, - }, - createdAt: currentTweet.timestamp * 1000, - roomId, - userId: - currentTweet.userId === this.twitterUserId - ? this.runtime.agentId - : stringToUuid(currentTweet.userId), - embedding: getEmbeddingZeroVector(), - }); - } - - if (visited.has(currentTweet.id)) { - elizaLogger.log("Already visited tweet:", currentTweet.id); - return; - } - - visited.add(currentTweet.id); - thread.unshift(currentTweet); - - if (currentTweet.inReplyToStatusId) { - elizaLogger.log( - "Fetching parent tweet:", - currentTweet.inReplyToStatusId - ); - try { - const parentTweet = await this.twitterClient.getTweet( - currentTweet.inReplyToStatusId - ); - - if (parentTweet) { - elizaLogger.log("Found parent tweet:", { - id: parentTweet.id, - text: parentTweet.text?.slice(0, 50), - }); - await processThread(parentTweet, depth + 1); - } else { - elizaLogger.log( - "No parent tweet found for:", - currentTweet.inReplyToStatusId - ); - } - } catch (error) { - elizaLogger.log("Error fetching parent tweet:", { - tweetId: currentTweet.inReplyToStatusId, - error, - }); - } - } else { - elizaLogger.log( - "Reached end of reply chain at:", - currentTweet.id - ); - } - } - - // Need to bind this context for the inner function - await processThread.bind(this)(tweet, 0); - - return thread; - } -} \ No newline at end of file diff --git a/packages/client-twitter/src/plugins/SttTtsSpacesPlugin.ts b/packages/client-twitter/src/plugins/SttTtsSpacesPlugin.ts deleted file mode 100644 index 8f69432dbb948..0000000000000 --- a/packages/client-twitter/src/plugins/SttTtsSpacesPlugin.ts +++ /dev/null @@ -1,747 +0,0 @@ -// src/plugins/SttTtsPlugin.ts - -import { spawn } from "child_process"; -import { - type ITranscriptionService, - elizaLogger, - stringToUuid, - composeContext, - getEmbeddingZeroVector, - generateMessageResponse, - ModelClass, - type Content, - type IAgentRuntime, - type Memory, - type Plugin, - type UUID, - type State, - composeRandomUser, - generateShouldRespond, -} from "@elizaos/core"; -import type { - Space, - JanusClient, - AudioDataWithUser, -} from "agent-twitter-client"; -import type { ClientBase } from "../base"; -import { - twitterVoiceHandlerTemplate, - twitterShouldRespondTemplate, -} from "./templates"; - -interface PluginConfig { - runtime: IAgentRuntime; - client: ClientBase; - spaceId: string; - elevenLabsApiKey?: string; // for TTS - sttLanguage?: string; // e.g. "en" for Whisper - silenceThreshold?: number; // amplitude threshold for ignoring silence - voiceId?: string; // specify which ElevenLabs voice to use - elevenLabsModel?: string; // e.g. "eleven_monolingual_v1" - chatContext?: Array<{ - role: "system" | "user" | "assistant"; - content: string; - }>; - transcriptionService: ITranscriptionService; -} - -const VOLUME_WINDOW_SIZE = 100; -const SPEAKING_THRESHOLD = 0.05; -const SILENCE_DETECTION_THRESHOLD_MS = 1000; // 1-second silence threshold - -/** - * MVP plugin for speech-to-text (OpenAI) + conversation + TTS (ElevenLabs) - * Approach: - * - Collect each speaker's unmuted PCM in a memory buffer (only if above silence threshold) - * - On speaker mute -> flush STT -> GPT -> TTS -> push to Janus - */ -export class SttTtsPlugin implements Plugin { - name = "SttTtsPlugin"; - description = "Speech-to-text (OpenAI) + conversation + TTS (ElevenLabs)"; - private runtime: IAgentRuntime; - private client: ClientBase; - private spaceId: string; - - private space?: Space; - private janus?: JanusClient; - - private elevenLabsApiKey?: string; - - private voiceId = "21m00Tcm4TlvDq8ikWAM"; - private elevenLabsModel = "eleven_monolingual_v1"; - private chatContext: Array<{ - role: "system" | "user" | "assistant"; - content: string; - }> = []; - - private transcriptionService: ITranscriptionService; - - /** - * userId => arrayOfChunks (PCM Int16) - */ - private pcmBuffers = new Map(); - - /** - * For ignoring near-silence frames (if amplitude < threshold) - */ - private silenceThreshold = 50; - - // TTS queue for sequentially speaking - private ttsQueue: string[] = []; - private isSpeaking = false; - private isProcessingAudio = false; - - private userSpeakingTimer: NodeJS.Timeout | null = null; - private volumeBuffers: Map; - private ttsAbortController: AbortController | null = null; - - onAttach(_space: Space) { - elizaLogger.log("[SttTtsPlugin] onAttach => space was attached"); - } - - init(params: { space: Space; pluginConfig?: Record }): void { - elizaLogger.log( - "[SttTtsPlugin] init => Space fully ready. Subscribing to events.", - ); - - this.space = params.space; - this.janus = (this.space as any)?.janusClient as - | JanusClient - | undefined; - - const config = params.pluginConfig as PluginConfig; - this.runtime = config?.runtime; - this.client = config?.client; - this.spaceId = config?.spaceId; - this.elevenLabsApiKey = config?.elevenLabsApiKey; - this.transcriptionService = config.transcriptionService; - if (typeof config?.silenceThreshold === "number") { - this.silenceThreshold = config.silenceThreshold; - } - if (config?.voiceId) { - this.voiceId = config.voiceId; - } - if (config?.elevenLabsModel) { - this.elevenLabsModel = config.elevenLabsModel; - } - if (config?.chatContext) { - this.chatContext = config.chatContext; - } - - this.volumeBuffers = new Map(); - } - - /** - * Called whenever we receive PCM from a speaker - */ - onAudioData(data: AudioDataWithUser): void { - if (this.isProcessingAudio) { - return; - } - let maxVal = 0; - for (let i = 0; i < data.samples.length; i++) { - const val = Math.abs(data.samples[i]); - if (val > maxVal) maxVal = val; - } - if (maxVal < this.silenceThreshold) { - return; - } - - if (this.userSpeakingTimer) { - clearTimeout(this.userSpeakingTimer); - } - - let arr = this.pcmBuffers.get(data.userId); - if (!arr) { - arr = []; - this.pcmBuffers.set(data.userId, arr); - } - arr.push(data.samples); - - if (!this.isSpeaking) { - this.userSpeakingTimer = setTimeout(() => { - elizaLogger.log( - "[SttTtsPlugin] start processing audio for user =>", - data.userId, - ); - this.userSpeakingTimer = null; - this.processAudio(data.userId).catch((err) => - elizaLogger.error( - "[SttTtsPlugin] handleSilence error =>", - err, - ), - ); - }, SILENCE_DETECTION_THRESHOLD_MS); - } else { - // check interruption - let volumeBuffer = this.volumeBuffers.get(data.userId); - if (!volumeBuffer) { - volumeBuffer = []; - this.volumeBuffers.set(data.userId, volumeBuffer); - } - const samples = new Int16Array( - data.samples.buffer, - data.samples.byteOffset, - data.samples.length / 2, - ); - const maxAmplitude = Math.max(...samples.map(Math.abs)) / 32768; - volumeBuffer.push(maxAmplitude); - - if (volumeBuffer.length > VOLUME_WINDOW_SIZE) { - volumeBuffer.shift(); - } - const avgVolume = - volumeBuffer.reduce((sum, v) => sum + v, 0) / - VOLUME_WINDOW_SIZE; - - if (avgVolume > SPEAKING_THRESHOLD) { - volumeBuffer.length = 0; - if (this.ttsAbortController) { - this.ttsAbortController.abort(); - this.isSpeaking = false; - elizaLogger.log("[SttTtsPlugin] TTS playback interrupted"); - } - } - } - } - - // /src/sttTtsPlugin.ts - private async convertPcmToWavInMemory( - pcmData: Int16Array, - sampleRate: number, - ): Promise { - // number of channels - const numChannels = 1; - // byte rate = (sampleRate * numChannels * bitsPerSample/8) - const byteRate = sampleRate * numChannels * 2; - const blockAlign = numChannels * 2; - // data chunk size = pcmData.length * (bitsPerSample/8) - const dataSize = pcmData.length * 2; - - // WAV header is 44 bytes - const buffer = new ArrayBuffer(44 + dataSize); - const view = new DataView(buffer); - - // RIFF chunk descriptor - this.writeString(view, 0, "RIFF"); - view.setUint32(4, 36 + dataSize, true); // file size - 8 - this.writeString(view, 8, "WAVE"); - - // fmt sub-chunk - this.writeString(view, 12, "fmt "); - view.setUint32(16, 16, true); // Subchunk1Size (16 for PCM) - view.setUint16(20, 1, true); // AudioFormat (1 = PCM) - view.setUint16(22, numChannels, true); // NumChannels - view.setUint32(24, sampleRate, true); // SampleRate - view.setUint32(28, byteRate, true); // ByteRate - view.setUint16(32, blockAlign, true); // BlockAlign - view.setUint16(34, 16, true); // BitsPerSample (16) - - // data sub-chunk - this.writeString(view, 36, "data"); - view.setUint32(40, dataSize, true); - - // Write PCM samples - let offset = 44; - for (let i = 0; i < pcmData.length; i++, offset += 2) { - view.setInt16(offset, pcmData[i], true); - } - - return buffer; - } - - private writeString(view: DataView, offset: number, text: string) { - for (let i = 0; i < text.length; i++) { - view.setUint8(offset + i, text.charCodeAt(i)); - } - } - - /** - * On speaker silence => flush STT => GPT => TTS => push to Janus - */ - private async processAudio(userId: string): Promise { - if (this.isProcessingAudio) { - return; - } - this.isProcessingAudio = true; - try { - elizaLogger.log( - "[SttTtsPlugin] Starting audio processing for user:", - userId, - ); - const chunks = this.pcmBuffers.get(userId) || []; - this.pcmBuffers.clear(); - - if (!chunks.length) { - elizaLogger.warn( - "[SttTtsPlugin] No audio chunks for user =>", - userId, - ); - return; - } - elizaLogger.log( - `[SttTtsPlugin] Flushing STT buffer for user=${userId}, chunks=${chunks.length}`, - ); - - const totalLen = chunks.reduce((acc, c) => acc + c.length, 0); - const merged = new Int16Array(totalLen); - let offset = 0; - for (const c of chunks) { - merged.set(c, offset); - offset += c.length; - } - - // Convert PCM to WAV for STT - const wavBuffer = await this.convertPcmToWavInMemory(merged, 48000); - - // Whisper STT - const sttText = - await this.transcriptionService.transcribe(wavBuffer); - - elizaLogger.log( - `[SttTtsPlugin] Transcription result: "${sttText}"`, - ); - - if (!sttText || !sttText.trim()) { - elizaLogger.warn( - "[SttTtsPlugin] No speech recognized for user =>", - userId, - ); - return; - } - elizaLogger.log( - `[SttTtsPlugin] STT => user=${userId}, text="${sttText}"`, - ); - - // Get response - const replyText = await this.handleUserMessage(sttText, userId); - if (!replyText || !replyText.length || !replyText.trim()) { - elizaLogger.warn( - "[SttTtsPlugin] No replyText for user =>", - userId, - ); - return; - } - elizaLogger.log( - `[SttTtsPlugin] user=${userId}, reply="${replyText}"`, - ); - this.isProcessingAudio = false; - this.volumeBuffers.clear(); - // Use the standard speak method with queue - await this.speakText(replyText); - } catch (error) { - elizaLogger.error("[SttTtsPlugin] processAudio error =>", error); - } finally { - this.isProcessingAudio = false; - } - } - - /** - * Public method to queue a TTS request - */ - public async speakText(text: string): Promise { - this.ttsQueue.push(text); - if (!this.isSpeaking) { - this.isSpeaking = true; - this.processTtsQueue().catch((err) => { - elizaLogger.error( - "[SttTtsPlugin] processTtsQueue error =>", - err, - ); - }); - } - } - - /** - * Process TTS requests one by one - */ - private async processTtsQueue(): Promise { - while (this.ttsQueue.length > 0) { - const text = this.ttsQueue.shift(); - if (!text) continue; - - this.ttsAbortController = new AbortController(); - const { signal } = this.ttsAbortController; - - try { - const ttsAudio = await this.elevenLabsTts(text); - const pcm = await this.convertMp3ToPcm(ttsAudio, 48000); - if (signal.aborted) { - elizaLogger.log( - "[SttTtsPlugin] TTS interrupted before streaming", - ); - return; - } - await this.streamToJanus(pcm, 48000); - if (signal.aborted) { - elizaLogger.log( - "[SttTtsPlugin] TTS interrupted after streaming", - ); - return; - } - } catch (err) { - elizaLogger.error("[SttTtsPlugin] TTS streaming error =>", err); - } finally { - // Clean up the AbortController - this.ttsAbortController = null; - } - } - this.isSpeaking = false; - } - - /** - * Handle User Message - */ - private async handleUserMessage( - userText: string, - userId: string, // This is the raw Twitter user ID like 'tw-1865462035586142208' - ): Promise { - // Extract the numeric ID part - const numericId = userId.replace("tw-", ""); - const roomId = stringToUuid(`twitter_generate_room-${this.spaceId}`); - - // Create consistent UUID for the user - const userUuid = stringToUuid(`twitter-user-${numericId}`); - - // Ensure the user exists in the accounts table - await this.runtime.ensureUserExists( - userUuid, - userId, // Use full Twitter ID as username - `Twitter User ${numericId}`, - "twitter", - ); - - // Ensure room exists and user is in it - await this.runtime.ensureRoomExists(roomId); - await this.runtime.ensureParticipantInRoom(userUuid, roomId); - - let state = await this.runtime.composeState( - { - agentId: this.runtime.agentId, - content: { text: userText, source: "twitter" }, - userId: userUuid, - roomId, - }, - { - twitterUserName: this.client.profile.username, - agentName: this.runtime.character.name, - }, - ); - - const memory = { - id: stringToUuid(`${roomId}-voice-message-${Date.now()}`), - agentId: this.runtime.agentId, - content: { - text: userText, - source: "twitter", - }, - userId: userUuid, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: Date.now(), - }; - - await this.runtime.messageManager.createMemory(memory); - - state = await this.runtime.updateRecentMessageState(state); - - const shouldIgnore = await this._shouldIgnore(memory); - - if (shouldIgnore) { - return ""; - } - - const shouldRespond = await this._shouldRespond(userText, state); - - if (!shouldRespond) { - return ""; - } - - const context = composeContext({ - state, - template: - this.runtime.character.templates?.twitterVoiceHandlerTemplate || - this.runtime.character.templates?.messageHandlerTemplate || - twitterVoiceHandlerTemplate, - }); - - const responseContent = await this._generateResponse(memory, context); - - const responseMemory: Memory = { - id: stringToUuid(`${memory.id}-voice-response-${Date.now()}`), - agentId: this.runtime.agentId, - userId: this.runtime.agentId, - content: { - ...responseContent, - user: this.runtime.character.name, - inReplyTo: memory.id, - }, - roomId, - embedding: getEmbeddingZeroVector(), - }; - - const reply = responseMemory.content.text?.trim(); - if (reply) { - await this.runtime.messageManager.createMemory(responseMemory); - } - - return reply; - } - - private async _generateResponse( - message: Memory, - context: string, - ): Promise { - const { userId, roomId } = message; - - const response = await generateMessageResponse({ - runtime: this.runtime, - context, - modelClass: ModelClass.SMALL, - }); - - response.source = "discord"; - - if (!response) { - elizaLogger.error( - "[SttTtsPlugin] No response from generateMessageResponse", - ); - return; - } - - await this.runtime.databaseAdapter.log({ - body: { message, context, response }, - userId: userId, - roomId, - type: "response", - }); - - return response; - } - - private async _shouldIgnore(message: Memory): Promise { - elizaLogger.debug("message.content: ", message.content); - // if the message is 3 characters or less, ignore it - if ((message.content as Content).text.length < 3) { - return true; - } - - const loseInterestWords = [ - // telling the bot to stop talking - "shut up", - "stop", - "dont talk", - "silence", - "stop talking", - "be quiet", - "hush", - "stfu", - "stupid bot", - "dumb bot", - - // offensive words - "fuck", - "shit", - "damn", - "suck", - "dick", - "cock", - "sex", - "sexy", - ]; - if ( - (message.content as Content).text.length < 50 && - loseInterestWords.some((word) => - (message.content as Content).text?.toLowerCase().includes(word), - ) - ) { - return true; - } - - const ignoreWords = ["k", "ok", "bye", "lol", "nm", "uh"]; - if ( - (message.content as Content).text?.length < 8 && - ignoreWords.some((word) => - (message.content as Content).text?.toLowerCase().includes(word), - ) - ) { - return true; - } - - return false; - } - - private async _shouldRespond( - message: string, - state: State, - ): Promise { - const lowerMessage = message.toLowerCase(); - const characterName = this.runtime.character.name.toLowerCase(); - - if (lowerMessage.includes(characterName)) { - return true; - } - - // If none of the above conditions are met, use the generateText to decide - const shouldRespondContext = composeContext({ - state, - template: - this.runtime.character.templates - ?.twitterShouldRespondTemplate || - this.runtime.character.templates?.shouldRespondTemplate || - composeRandomUser(twitterShouldRespondTemplate, 2), - }); - - const response = await generateShouldRespond({ - runtime: this.runtime, - context: shouldRespondContext, - modelClass: ModelClass.SMALL, - }); - - if (response === "RESPOND") { - return true; - } - - if (response === "IGNORE" || response === "STOP") { - return false; - } - - elizaLogger.error( - "Invalid response from response generateText:", - response, - ); - return false; - } - - /** - * ElevenLabs TTS => returns MP3 Buffer - */ - private async elevenLabsTts(text: string): Promise { - if (!this.elevenLabsApiKey) { - throw new Error("[SttTtsPlugin] No ElevenLabs API key"); - } - const url = `https://api.elevenlabs.io/v1/text-to-speech/${this.voiceId}`; - const resp = await fetch(url, { - method: "POST", - headers: { - "Content-Type": "application/json", - "xi-api-key": this.elevenLabsApiKey, - }, - body: JSON.stringify({ - text, - model_id: this.elevenLabsModel, - voice_settings: { stability: 0.4, similarity_boost: 0.8 }, - }), - }); - if (!resp.ok) { - const errText = await resp.text(); - throw new Error( - `[SttTtsPlugin] ElevenLabs TTS error => ${resp.status} ${errText}`, - ); - } - const arrayBuf = await resp.arrayBuffer(); - return Buffer.from(arrayBuf); - } - - /** - * Convert MP3 => PCM via ffmpeg - */ - private convertMp3ToPcm( - mp3Buf: Buffer, - outRate: number, - ): Promise { - return new Promise((resolve, reject) => { - const ff = spawn("ffmpeg", [ - "-i", - "pipe:0", - "-f", - "s16le", - "-ar", - outRate.toString(), - "-ac", - "1", - "pipe:1", - ]); - let raw = Buffer.alloc(0); - - ff.stdout.on("data", (chunk: Buffer) => { - raw = Buffer.concat([raw, chunk]); - }); - ff.stderr.on("data", () => { - // ignoring ffmpeg logs - }); - ff.on("close", (code) => { - if (code !== 0) { - reject(new Error(`ffmpeg error code=${code}`)); - return; - } - const samples = new Int16Array( - raw.buffer, - raw.byteOffset, - raw.byteLength / 2, - ); - resolve(samples); - }); - - ff.stdin.write(mp3Buf); - ff.stdin.end(); - }); - } - - /** - * Push PCM back to Janus in small frames - * We'll do 10ms @48k => 960 samples per frame - */ - private async streamToJanus( - samples: Int16Array, - sampleRate: number, - ): Promise { - // TODO: Check if better than 480 fixed - const FRAME_SIZE = Math.floor(sampleRate * 0.01); // 10ms frames => 480 @48kHz - - for ( - let offset = 0; - offset + FRAME_SIZE <= samples.length; - offset += FRAME_SIZE - ) { - if (this.ttsAbortController?.signal.aborted) { - elizaLogger.log("[SttTtsPlugin] streamToJanus interrupted"); - return; - } - const frame = new Int16Array(FRAME_SIZE); - frame.set(samples.subarray(offset, offset + FRAME_SIZE)); - this.janus?.pushLocalAudio(frame, sampleRate, 1); - - // Short pause so we don't overload - await new Promise((r) => setTimeout(r, 10)); - } - } - - /** - * Add a message (system, user or assistant) to the chat context. - * E.g. to store conversation history or inject a persona. - */ - public addMessage(role: "system" | "user" | "assistant", content: string) { - this.chatContext.push({ role, content }); - elizaLogger.log( - `[SttTtsPlugin] addMessage => role=${role}, content=${content}`, - ); - } - - /** - * Clear the chat context if needed. - */ - public clearChatContext() { - this.chatContext = []; - elizaLogger.log("[SttTtsPlugin] clearChatContext => done"); - } - - cleanup(): void { - elizaLogger.log("[SttTtsPlugin] cleanup => releasing resources"); - this.pcmBuffers.clear(); - this.userSpeakingTimer = null; - this.ttsQueue = []; - this.isSpeaking = false; - this.volumeBuffers.clear(); - } -} diff --git a/packages/client-twitter/src/plugins/templates.ts b/packages/client-twitter/src/plugins/templates.ts deleted file mode 100644 index 05882425c0629..0000000000000 --- a/packages/client-twitter/src/plugins/templates.ts +++ /dev/null @@ -1,90 +0,0 @@ -import { messageCompletionFooter, shouldRespondFooter } from "@elizaos/core"; - -export const twitterShouldRespondTemplate = - `# Task: Decide if {{agentName}} should respond. -About {{agentName}}: -{{bio}} - -# INSTRUCTIONS: Determine if {{agentName}} should respond to the message and participate in the conversation. Do not comment. Just respond with "RESPOND" or "IGNORE" or "STOP". - -# RESPONSE EXAMPLES -{{user1}}: I just saw a really great movie -{{user2}}: Oh? Which movie? -Result: [IGNORE] - -{{agentName}}: Oh, this is my favorite scene -{{user1}}: sick -{{user2}}: wait, why is it your favorite scene -Result: [RESPOND] - -{{user1}}: stfu bot -Result: [STOP] - -{{user1}}: Hey {{agent}}, can you help me with something -Result: [RESPOND] - -{{user1}}: {{agentName}} stfu plz -Result: [STOP] - -{{user1}}: i need help -{{agentName}}: how can I help you? -{{user1}}: no. i need help from someone else -Result: [IGNORE] - -{{user1}}: Hey {{agent}}, can I ask you a question -{{agentName}}: Sure, what is it -{{user1}}: can you ask claude to create a basic react module that demonstrates a counter -Result: [RESPOND] - -{{user1}}: {{agentName}} can you tell me a story -{{user1}}: about a girl named elara -{{agentName}}: Sure. -{{agentName}}: Once upon a time, in a quaint little village, there was a curious girl named Elara. -{{agentName}}: Elara was known for her adventurous spirit and her knack for finding beauty in the mundane. -{{user1}}: I'm loving it, keep going -Result: [RESPOND] - -{{user1}}: {{agentName}} stop responding plz -Result: [STOP] - -{{user1}}: okay, i want to test something. can you say marco? -{{agentName}}: marco -{{user1}}: great. okay, now do it again -Result: [RESPOND] - -Response options are [RESPOND], [IGNORE] and [STOP]. - -{{agentName}} is in a room with other users and is very worried about being annoying and saying too much. -Respond with [RESPOND] to messages that are directed at {{agentName}}, or participate in conversations that are interesting or relevant to their background. -If a message is not interesting or relevant, respond with [IGNORE] -Unless directly responding to a user, respond with [IGNORE] to messages that are very short or do not contain much information. -If a user asks {{agentName}} to be quiet, respond with [STOP] -If {{agentName}} concludes a conversation and isn't part of the conversation anymore, respond with [STOP] - -IMPORTANT: {{agentName}} is particularly sensitive about being annoying, so if there is any doubt, it is better to respond with [IGNORE]. -If {{agentName}} is conversing with a user and they have not asked to stop, it is better to respond with [RESPOND]. - -{{recentMessages}} - -# INSTRUCTIONS: Choose the option that best describes {{agentName}}'s response to the last message. Ignore messages if they are addressed to someone else. -` + shouldRespondFooter; - -export const twitterVoiceHandlerTemplate = - `# Task: Generate conversational voice dialog for {{agentName}}. - About {{agentName}}: - {{bio}} - - # Attachments - {{attachments}} - - # Capabilities - Note that {{agentName}} is capable of reading/seeing/hearing various forms of media, including images, videos, audio, plaintext and PDFs. Recent attachments have been included above under the "Attachments" section. - - {{actions}} - - {{messageDirections}} - - {{recentMessages}} - - # Instructions: Write the next message for {{agentName}}. Include an optional action if appropriate. {{actionNames}} - ` + messageCompletionFooter; diff --git a/packages/client-twitter/src/post.ts b/packages/client-twitter/src/post.ts deleted file mode 100644 index 24ec2e7768db4..0000000000000 --- a/packages/client-twitter/src/post.ts +++ /dev/null @@ -1,1510 +0,0 @@ -import type { Tweet } from "agent-twitter-client"; -import { - composeContext, - generateText, - getEmbeddingZeroVector, - type IAgentRuntime, - ModelClass, - stringToUuid, - type TemplateType, - type UUID, - truncateToCompleteSentence, - parseJSONObjectFromText, - extractAttributes, - cleanJsonResponse, -} from "@elizaos/core"; -import { elizaLogger } from "@elizaos/core"; -import type { ClientBase } from "./base.ts"; -import { postActionResponseFooter } from "@elizaos/core"; -import { generateTweetActions } from "@elizaos/core"; -import { type IImageDescriptionService, ServiceType } from "@elizaos/core"; -import { buildConversationThread, fetchMediaData } from "./utils.ts"; -import { twitterMessageHandlerTemplate } from "./interactions.ts"; -import { DEFAULT_MAX_TWEET_LENGTH } from "./environment.ts"; -import { - Client, - Events, - GatewayIntentBits, - TextChannel, - Partials, -} from "discord.js"; -import type { State } from "@elizaos/core"; -import type { ActionResponse } from "@elizaos/core"; -import { MediaData } from "./types.ts"; - -const MAX_TIMELINES_TO_FETCH = 15; - -const twitterPostTemplate = ` -# Areas of Expertise -{{knowledge}} - -# About {{agentName}} (@{{twitterUserName}}): -{{bio}} -{{lore}} -{{topics}} - -{{providers}} - -{{characterPostExamples}} - -{{postDirections}} - -# Task: Generate a post in the voice and style and perspective of {{agentName}} @{{twitterUserName}}. -Write a post that is {{adjective}} about {{topic}} (without mentioning {{topic}} directly), from the perspective of {{agentName}}. Do not add commentary or acknowledge this request, just write the post. -Your response should be 1, 2, or 3 sentences (choose the length at random). -Your response should not contain any questions. Brief, concise statements only. The total character count MUST be less than {{maxTweetLength}}. No emojis. Use \\n\\n (double spaces) between statements if there are multiple statements in your response.`; - -export const twitterActionTemplate = - ` -# INSTRUCTIONS: Determine actions for {{agentName}} (@{{twitterUserName}}) based on: -{{bio}} -{{postDirections}} - -Guidelines: -- ONLY engage with content that DIRECTLY relates to character's core interests -- Direct mentions are priority IF they are on-topic -- Skip ALL content that is: - - Off-topic or tangentially related - - From high-profile accounts unless explicitly relevant - - Generic/viral content without specific relevance - - Political/controversial unless central to character - - Promotional/marketing unless directly relevant - -Actions (respond only with tags): -[LIKE] - Perfect topic match AND aligns with character (9.8/10) -[RETWEET] - Exceptional content that embodies character's expertise (9.5/10) -[QUOTE] - Can add substantial domain expertise (9.5/10) -[REPLY] - Can contribute meaningful, expert-level insight (9.5/10) - -Tweet: -{{currentTweet}} - -# Respond with qualifying action tags only. Default to NO action unless extremely confident of relevance.` + - postActionResponseFooter; - -interface PendingTweet { - tweetTextForPosting: string; - roomId: UUID; - rawTweetContent: string; - discordMessageId: string; - channelId: string; - timestamp: number; -} - -type PendingTweetApprovalStatus = "PENDING" | "APPROVED" | "REJECTED"; - -export class TwitterPostClient { - client: ClientBase; - runtime: IAgentRuntime; - twitterUsername: string; - private isProcessing = false; - private lastProcessTime = 0; - private stopProcessingActions = false; - private isDryRun: boolean; - private discordClientForApproval: Client; - private approvalRequired = false; - private discordApprovalChannelId: string; - private approvalCheckInterval: number; - - constructor(client: ClientBase, runtime: IAgentRuntime) { - this.client = client; - this.runtime = runtime; - this.twitterUsername = this.client.twitterConfig.TWITTER_USERNAME; - this.isDryRun = this.client.twitterConfig.TWITTER_DRY_RUN; - - // Log configuration on initialization - elizaLogger.log("Twitter Client Configuration:"); - elizaLogger.log(`- Username: ${this.twitterUsername}`); - elizaLogger.log( - `- Dry Run Mode: ${this.isDryRun ? "enabled" : "disabled"}` - ); - - elizaLogger.log( - `- Enable Post: ${this.client.twitterConfig.ENABLE_TWITTER_POST_GENERATION ? "enabled" : "disabled"}` - ); - - elizaLogger.log( - `- Post Interval: ${this.client.twitterConfig.POST_INTERVAL_MIN}-${this.client.twitterConfig.POST_INTERVAL_MAX} minutes` - ); - elizaLogger.log( - `- Action Processing: ${ - this.client.twitterConfig.ENABLE_ACTION_PROCESSING - ? "enabled" - : "disabled" - }` - ); - elizaLogger.log( - `- Action Interval: ${this.client.twitterConfig.ACTION_INTERVAL} minutes` - ); - elizaLogger.log( - `- Post Immediately: ${ - this.client.twitterConfig.POST_IMMEDIATELY - ? "enabled" - : "disabled" - }` - ); - elizaLogger.log( - `- Search Enabled: ${ - this.client.twitterConfig.TWITTER_SEARCH_ENABLE - ? "enabled" - : "disabled" - }` - ); - - const targetUsers = this.client.twitterConfig.TWITTER_TARGET_USERS; - if (targetUsers) { - elizaLogger.log(`- Target Users: ${targetUsers}`); - } - - if (this.isDryRun) { - elizaLogger.log( - "Twitter client initialized in dry run mode - no actual tweets should be posted" - ); - } - - // Initialize Discord webhook - const approvalRequired: boolean = - this.runtime - .getSetting("TWITTER_APPROVAL_ENABLED") - ?.toLocaleLowerCase() === "true"; - if (approvalRequired) { - const discordToken = this.runtime.getSetting( - "TWITTER_APPROVAL_DISCORD_BOT_TOKEN" - ); - const approvalChannelId = this.runtime.getSetting( - "TWITTER_APPROVAL_DISCORD_CHANNEL_ID" - ); - - const APPROVAL_CHECK_INTERVAL = - Number.parseInt( - this.runtime.getSetting("TWITTER_APPROVAL_CHECK_INTERVAL") - ) || 5 * 60 * 1000; // 5 minutes - - this.approvalCheckInterval = APPROVAL_CHECK_INTERVAL; - - if (!discordToken || !approvalChannelId) { - throw new Error( - "TWITTER_APPROVAL_DISCORD_BOT_TOKEN and TWITTER_APPROVAL_DISCORD_CHANNEL_ID are required for approval workflow" - ); - } - - this.approvalRequired = true; - this.discordApprovalChannelId = approvalChannelId; - - // Set up Discord client event handlers - this.setupDiscordClient(); - } - } - - private setupDiscordClient() { - this.discordClientForApproval = new Client({ - intents: [ - GatewayIntentBits.Guilds, - GatewayIntentBits.GuildMessages, - GatewayIntentBits.MessageContent, - GatewayIntentBits.GuildMessageReactions, - ], - partials: [Partials.Channel, Partials.Message, Partials.Reaction], - }); - this.discordClientForApproval.once( - Events.ClientReady, - (readyClient) => { - elizaLogger.log( - `Discord bot is ready as ${readyClient.user.tag}!` - ); - - // Generate invite link with required permissions - const invite = `https://discord.com/api/oauth2/authorize?client_id=${readyClient.user.id}&permissions=274877991936&scope=bot`; - // 274877991936 includes permissions for: - // - Send Messages - // - Read Messages/View Channels - // - Read Message History - - elizaLogger.log( - `Use this link to properly invite the Twitter Post Approval Discord bot: ${invite}` - ); - } - ); - // Login to Discord - this.discordClientForApproval.login( - this.runtime.getSetting("TWITTER_APPROVAL_DISCORD_BOT_TOKEN") - ); - } - - async start() { - if (!this.client.profile) { - await this.client.init(); - } - - const generateNewTweetLoop = async () => { - const lastPost = await this.runtime.cacheManager.get<{ - timestamp: number; - }>("twitter/" + this.twitterUsername + "/lastPost"); - - const lastPostTimestamp = lastPost?.timestamp ?? 0; - const minMinutes = this.client.twitterConfig.POST_INTERVAL_MIN; - const maxMinutes = this.client.twitterConfig.POST_INTERVAL_MAX; - const randomMinutes = - Math.floor(Math.random() * (maxMinutes - minMinutes + 1)) + - minMinutes; - const delay = randomMinutes * 60 * 1000; - - if (Date.now() > lastPostTimestamp + delay) { - await this.generateNewTweet(); - } - - setTimeout(() => { - generateNewTweetLoop(); // Set up next iteration - }, delay); - - elizaLogger.log(`Next tweet scheduled in ${randomMinutes} minutes`); - }; - - const processActionsLoop = async () => { - const actionInterval = this.client.twitterConfig.ACTION_INTERVAL; // Defaults to 5 minutes - - while (!this.stopProcessingActions) { - try { - const results = await this.processTweetActions(); - if (results) { - elizaLogger.log(`Processed ${results.length} tweets`); - elizaLogger.log( - `Next action processing scheduled in ${actionInterval} minutes` - ); - // Wait for the full interval before next processing - await new Promise( - (resolve) => - setTimeout(resolve, actionInterval * 60 * 1000) // now in minutes - ); - } - } catch (error) { - elizaLogger.error( - "Error in action processing loop:", - error - ); - // Add exponential backoff on error - await new Promise((resolve) => setTimeout(resolve, 30000)); // Wait 30s on error - } - } - }; - - if (this.client.twitterConfig.POST_IMMEDIATELY) { - await this.generateNewTweet(); - } - - if (this.client.twitterConfig.ENABLE_TWITTER_POST_GENERATION) { - generateNewTweetLoop(); - elizaLogger.log("Tweet generation loop started"); - } - - if (this.client.twitterConfig.ENABLE_ACTION_PROCESSING) { - processActionsLoop().catch((error) => { - elizaLogger.error( - "Fatal error in process actions loop:", - error - ); - }); - } - - // Start the pending tweet check loop if enabled - if (this.approvalRequired) this.runPendingTweetCheckLoop(); - } - - private runPendingTweetCheckLoop() { - setInterval(async () => { - await this.handlePendingTweet(); - }, this.approvalCheckInterval); - } - - createTweetObject( - tweetResult: any, - client: any, - twitterUsername: string - ): Tweet { - return { - id: tweetResult.rest_id, - name: client.profile.screenName, - username: client.profile.username, - text: tweetResult.legacy.full_text, - conversationId: tweetResult.legacy.conversation_id_str, - createdAt: tweetResult.legacy.created_at, - timestamp: new Date(tweetResult.legacy.created_at).getTime(), - userId: client.profile.id, - inReplyToStatusId: tweetResult.legacy.in_reply_to_status_id_str, - permanentUrl: `https://twitter.com/${twitterUsername}/status/${tweetResult.rest_id}`, - hashtags: [], - mentions: [], - photos: [], - thread: [], - urls: [], - videos: [], - } as Tweet; - } - - async processAndCacheTweet( - runtime: IAgentRuntime, - client: ClientBase, - tweet: Tweet, - roomId: UUID, - rawTweetContent: string - ) { - // Cache the last post details - await runtime.cacheManager.set( - `twitter/${client.profile.username}/lastPost`, - { - id: tweet.id, - timestamp: Date.now(), - } - ); - - // Cache the tweet - await client.cacheTweet(tweet); - - // Log the posted tweet - elizaLogger.log(`Tweet posted:\n ${tweet.permanentUrl}`); - - // Ensure the room and participant exist - await runtime.ensureRoomExists(roomId); - await runtime.ensureParticipantInRoom(runtime.agentId, roomId); - - // Create a memory for the tweet - await runtime.messageManager.createMemory({ - id: stringToUuid(tweet.id + "-" + runtime.agentId), - userId: runtime.agentId, - agentId: runtime.agentId, - content: { - text: rawTweetContent.trim(), - url: tweet.permanentUrl, - source: "twitter", - }, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: tweet.timestamp, - }); - } - - async handleNoteTweet( - client: ClientBase, - content: string, - tweetId?: string, - mediaData?: MediaData[] - ) { - try { - const noteTweetResult = await client.requestQueue.add( - async () => - await client.twitterClient.sendNoteTweet( - content, - tweetId, - mediaData - ) - ); - - if (noteTweetResult.errors && noteTweetResult.errors.length > 0) { - // Note Tweet failed due to authorization. Falling back to standard Tweet. - const truncateContent = truncateToCompleteSentence( - content, - this.client.twitterConfig.MAX_TWEET_LENGTH - ); - return await this.sendStandardTweet( - client, - truncateContent, - tweetId - ); - } else { - return noteTweetResult.data.notetweet_create.tweet_results - .result; - } - } catch (error) { - throw new Error(`Note Tweet failed: ${error}`); - } - } - - async sendStandardTweet( - client: ClientBase, - content: string, - tweetId?: string, - mediaData?: MediaData[] - ) { - try { - const standardTweetResult = await client.requestQueue.add( - async () => - await client.twitterClient.sendTweet( - content, - tweetId, - mediaData - ) - ); - const body = await standardTweetResult.json(); - if (!body?.data?.create_tweet?.tweet_results?.result) { - elizaLogger.error("Error sending tweet; Bad response:", body); - return; - } - return body.data.create_tweet.tweet_results.result; - } catch (error) { - elizaLogger.error("Error sending standard Tweet:", error); - throw error; - } - } - - async postTweet( - runtime: IAgentRuntime, - client: ClientBase, - tweetTextForPosting: string, - roomId: UUID, - rawTweetContent: string, - twitterUsername: string, - mediaData?: MediaData[] - ) { - try { - elizaLogger.log(`Posting new tweet:\n`); - - let result; - - if (tweetTextForPosting.length > DEFAULT_MAX_TWEET_LENGTH) { - result = await this.handleNoteTweet( - client, - tweetTextForPosting, - undefined, - mediaData - ); - } else { - result = await this.sendStandardTweet( - client, - tweetTextForPosting, - undefined, - mediaData - ); - } - - const tweet = this.createTweetObject( - result, - client, - twitterUsername - ); - - await this.processAndCacheTweet( - runtime, - client, - tweet, - roomId, - rawTweetContent - ); - } catch (error) { - elizaLogger.error("Error sending tweet:", error); - } - } - - /** - * Generates and posts a new tweet. If isDryRun is true, only logs what would have been posted. - */ - async generateNewTweet() { - elizaLogger.log("Generating new tweet"); - - try { - const roomId = stringToUuid( - "twitter_generate_room-" + this.client.profile.username - ); - await this.runtime.ensureUserExists( - this.runtime.agentId, - this.client.profile.username, - this.runtime.character.name, - "twitter" - ); - - const topics = this.runtime.character.topics.join(", "); - const maxTweetLength = this.client.twitterConfig.MAX_TWEET_LENGTH; - const state = await this.runtime.composeState( - { - userId: this.runtime.agentId, - roomId: roomId, - agentId: this.runtime.agentId, - content: { - text: topics || "", - action: "TWEET", - }, - }, - { - twitterUserName: this.client.profile.username, - maxTweetLength, - } - ); - - const context = composeContext({ - state, - template: - this.runtime.character.templates?.twitterPostTemplate || - twitterPostTemplate, - }); - - elizaLogger.debug("generate post prompt:\n" + context); - - const response = await generateText({ - runtime: this.runtime, - context, - modelClass: ModelClass.SMALL, - }); - - const rawTweetContent = cleanJsonResponse(response); - - // First attempt to clean content - let tweetTextForPosting = null; - let mediaData = null; - - // Try parsing as JSON first - const parsedResponse = parseJSONObjectFromText(rawTweetContent); - if (parsedResponse?.text) { - tweetTextForPosting = parsedResponse.text; - } - - if ( - parsedResponse?.attachments && - parsedResponse?.attachments.length > 0 - ) { - mediaData = await fetchMediaData(parsedResponse.attachments); - } - - // Try extracting text attribute - if (!tweetTextForPosting) { - const parsingText = extractAttributes(rawTweetContent, [ - "text", - ]).text; - if (parsingText) { - tweetTextForPosting = truncateToCompleteSentence( - extractAttributes(rawTweetContent, ["text"]).text, - this.client.twitterConfig.MAX_TWEET_LENGTH - ); - } - } - - // Use the raw text - if (!tweetTextForPosting) { - tweetTextForPosting = rawTweetContent; - } - - // Truncate the content to the maximum tweet length specified in the environment settings, ensuring the truncation respects sentence boundaries. - if (maxTweetLength) { - tweetTextForPosting = truncateToCompleteSentence( - tweetTextForPosting, - maxTweetLength - ); - } - - const removeQuotes = (str: string) => - str.replace(/^['"](.*)['"]$/, "$1"); - - const fixNewLines = (str: string) => str.replaceAll(/\\n/g, "\n\n"); //ensures double spaces - - // Final cleaning - tweetTextForPosting = removeQuotes( - fixNewLines(tweetTextForPosting) - ); - - if (this.isDryRun) { - elizaLogger.info( - `Dry run: would have posted tweet: ${tweetTextForPosting}` - ); - return; - } - - try { - if (this.approvalRequired) { - // Send for approval instead of posting directly - elizaLogger.log( - `Sending Tweet For Approval:\n ${tweetTextForPosting}` - ); - await this.sendForApproval( - tweetTextForPosting, - roomId, - rawTweetContent - ); - elizaLogger.log("Tweet sent for approval"); - } else { - elizaLogger.log( - `Posting new tweet:\n ${tweetTextForPosting}` - ); - this.postTweet( - this.runtime, - this.client, - tweetTextForPosting, - roomId, - rawTweetContent, - this.twitterUsername, - mediaData - ); - } - } catch (error) { - elizaLogger.error("Error sending tweet:", error); - } - } catch (error) { - elizaLogger.error("Error generating new tweet:", error); - } - } - - private async generateTweetContent( - tweetState: any, - options?: { - template?: TemplateType; - context?: string; - } - ): Promise { - const context = composeContext({ - state: tweetState, - template: - options?.template || - this.runtime.character.templates?.twitterPostTemplate || - twitterPostTemplate, - }); - - const response = await generateText({ - runtime: this.runtime, - context: options?.context || context, - modelClass: ModelClass.SMALL, - }); - - elizaLogger.log("generate tweet content response:\n" + response); - - // First clean up any markdown and newlines - const cleanedResponse = cleanJsonResponse(response); - - // Try to parse as JSON first - const jsonResponse = parseJSONObjectFromText(cleanedResponse); - if (jsonResponse.text) { - const truncateContent = truncateToCompleteSentence( - jsonResponse.text, - this.client.twitterConfig.MAX_TWEET_LENGTH - ); - return truncateContent; - } - if (typeof jsonResponse === "object") { - const possibleContent = - jsonResponse.content || - jsonResponse.message || - jsonResponse.response; - if (possibleContent) { - const truncateContent = truncateToCompleteSentence( - possibleContent, - this.client.twitterConfig.MAX_TWEET_LENGTH - ); - return truncateContent; - } - } - - let truncateContent = null; - // Try extracting text attribute - const parsingText = extractAttributes(cleanedResponse, ["text"]).text; - if (parsingText) { - truncateContent = truncateToCompleteSentence( - parsingText, - this.client.twitterConfig.MAX_TWEET_LENGTH - ); - } - - if (!truncateContent) { - // If not JSON or no valid content found, clean the raw text - truncateContent = truncateToCompleteSentence( - cleanedResponse, - this.client.twitterConfig.MAX_TWEET_LENGTH - ); - } - - return truncateContent; - } - - /** - * Processes tweet actions (likes, retweets, quotes, replies). If isDryRun is true, - * only simulates and logs actions without making API calls. - */ - private async processTweetActions() { - if (this.isProcessing) { - elizaLogger.log("Already processing tweet actions, skipping"); - return null; - } - - try { - this.isProcessing = true; - this.lastProcessTime = Date.now(); - - elizaLogger.log("Processing tweet actions"); - - await this.runtime.ensureUserExists( - this.runtime.agentId, - this.twitterUsername, - this.runtime.character.name, - "twitter" - ); - - const timelines = await this.client.fetchTimelineForActions( - MAX_TIMELINES_TO_FETCH - ); - const maxActionsProcessing = - this.client.twitterConfig.MAX_ACTIONS_PROCESSING; - const processedTimelines = []; - - for (const tweet of timelines) { - try { - // Skip if we've already processed this tweet - const memory = - await this.runtime.messageManager.getMemoryById( - stringToUuid(tweet.id + "-" + this.runtime.agentId) - ); - if (memory) { - elizaLogger.log( - `Already processed tweet ID: ${tweet.id}` - ); - continue; - } - - const roomId = stringToUuid( - tweet.conversationId + "-" + this.runtime.agentId - ); - - const tweetState = await this.runtime.composeState( - { - userId: this.runtime.agentId, - roomId, - agentId: this.runtime.agentId, - content: { text: "", action: "" }, - }, - { - twitterUserName: this.twitterUsername, - currentTweet: `ID: ${tweet.id}\nFrom: ${tweet.name} (@${tweet.username})\nText: ${tweet.text}`, - } - ); - - const actionContext = composeContext({ - state: tweetState, - template: - this.runtime.character.templates - ?.twitterActionTemplate || - twitterActionTemplate, - }); - - const actionResponse = await generateTweetActions({ - runtime: this.runtime, - context: actionContext, - modelClass: ModelClass.SMALL, - }); - - if (!actionResponse) { - elizaLogger.log( - `No valid actions generated for tweet ${tweet.id}` - ); - continue; - } - processedTimelines.push({ - tweet: tweet, - actionResponse: actionResponse, - tweetState: tweetState, - roomId: roomId, - }); - } catch (error) { - elizaLogger.error( - `Error processing tweet ${tweet.id}:`, - error - ); - continue; - } - } - - const sortProcessedTimeline = (arr: typeof processedTimelines) => { - return arr.sort((a, b) => { - // Count the number of true values in the actionResponse object - const countTrue = (obj: typeof a.actionResponse) => - Object.values(obj).filter(Boolean).length; - - const countA = countTrue(a.actionResponse); - const countB = countTrue(b.actionResponse); - - // Primary sort by number of true values - if (countA !== countB) { - return countB - countA; - } - - // Secondary sort by the "like" property - if (a.actionResponse.like !== b.actionResponse.like) { - return a.actionResponse.like ? -1 : 1; - } - - // Tertiary sort keeps the remaining objects with equal weight - return 0; - }); - }; - // Sort the timeline based on the action decision score, - // then slice the results according to the environment variable to limit the number of actions per cycle. - const sortedTimelines = sortProcessedTimeline( - processedTimelines - ).slice(0, maxActionsProcessing); - - return this.processTimelineActions(sortedTimelines); // Return results array to indicate completion - } catch (error) { - elizaLogger.error("Error in processTweetActions:", error); - throw error; - } finally { - this.isProcessing = false; - } - } - - /** - * Processes a list of timelines by executing the corresponding tweet actions. - * Each timeline includes the tweet, action response, tweet state, and room context. - * Results are returned for tracking completed actions. - * - * @param timelines - Array of objects containing tweet details, action responses, and state information. - * @returns A promise that resolves to an array of results with details of executed actions. - */ - private async processTimelineActions( - timelines: { - tweet: Tweet; - actionResponse: ActionResponse; - tweetState: State; - roomId: UUID; - }[] - ): Promise< - { - tweetId: string; - actionResponse: ActionResponse; - executedActions: string[]; - }[] - > { - const results = []; - for (const timeline of timelines) { - const { actionResponse, tweetState, roomId, tweet } = timeline; - try { - const executedActions: string[] = []; - // Execute actions - if (actionResponse.like) { - if (this.isDryRun) { - elizaLogger.info( - `Dry run: would have liked tweet ${tweet.id}` - ); - executedActions.push("like (dry run)"); - } else { - try { - await this.client.twitterClient.likeTweet(tweet.id); - executedActions.push("like"); - elizaLogger.log(`Liked tweet ${tweet.id}`); - } catch (error) { - elizaLogger.error( - `Error liking tweet ${tweet.id}:`, - error - ); - } - } - } - - if (actionResponse.retweet) { - if (this.isDryRun) { - elizaLogger.info( - `Dry run: would have retweeted tweet ${tweet.id}` - ); - executedActions.push("retweet (dry run)"); - } else { - try { - await this.client.twitterClient.retweet(tweet.id); - executedActions.push("retweet"); - elizaLogger.log(`Retweeted tweet ${tweet.id}`); - } catch (error) { - elizaLogger.error( - `Error retweeting tweet ${tweet.id}:`, - error - ); - } - } - } - - if (actionResponse.quote) { - try { - // Build conversation thread for context - const thread = await buildConversationThread( - tweet, - this.client - ); - const formattedConversation = thread - .map( - (t) => - `@${t.username} (${new Date( - t.timestamp * 1000 - ).toLocaleString()}): ${t.text}` - ) - .join("\n\n"); - - // Generate image descriptions if present - const imageDescriptions = []; - if (tweet.photos?.length > 0) { - elizaLogger.log( - "Processing images in tweet for context" - ); - for (const photo of tweet.photos) { - const description = await this.runtime - .getService( - ServiceType.IMAGE_DESCRIPTION - ) - .describeImage(photo.url); - imageDescriptions.push(description); - } - } - - // Handle quoted tweet if present - let quotedContent = ""; - if (tweet.quotedStatusId) { - try { - const quotedTweet = - await this.client.twitterClient.getTweet( - tweet.quotedStatusId - ); - if (quotedTweet) { - quotedContent = `\nQuoted Tweet from @${quotedTweet.username}:\n${quotedTweet.text}`; - } - } catch (error) { - elizaLogger.error( - "Error fetching quoted tweet:", - error - ); - } - } - - // Compose rich state with all context - const enrichedState = await this.runtime.composeState( - { - userId: this.runtime.agentId, - roomId: stringToUuid( - tweet.conversationId + - "-" + - this.runtime.agentId - ), - agentId: this.runtime.agentId, - content: { - text: tweet.text, - action: "QUOTE", - }, - }, - { - twitterUserName: this.twitterUsername, - currentPost: `From @${tweet.username}: ${tweet.text}`, - formattedConversation, - imageContext: - imageDescriptions.length > 0 - ? `\nImages in Tweet:\n${imageDescriptions - .map( - (desc, i) => - `Image ${i + 1}: ${desc}` - ) - .join("\n")}` - : "", - quotedContent, - } - ); - - const quoteContent = await this.generateTweetContent( - enrichedState, - { - template: - this.runtime.character.templates - ?.twitterMessageHandlerTemplate || - twitterMessageHandlerTemplate, - } - ); - - if (!quoteContent) { - elizaLogger.error( - "Failed to generate valid quote tweet content" - ); - return; - } - - elizaLogger.log( - "Generated quote tweet content:", - quoteContent - ); - // Check for dry run mode - if (this.isDryRun) { - elizaLogger.info( - `Dry run: A quote tweet for tweet ID ${tweet.id} would have been posted with the following content: "${quoteContent}".` - ); - executedActions.push("quote (dry run)"); - } else { - // Send the tweet through request queue - const result = await this.client.requestQueue.add( - async () => - await this.client.twitterClient.sendQuoteTweet( - quoteContent, - tweet.id - ) - ); - - const body = await result.json(); - - if ( - body?.data?.create_tweet?.tweet_results?.result - ) { - elizaLogger.log( - "Successfully posted quote tweet" - ); - executedActions.push("quote"); - - // Cache generation context for debugging - await this.runtime.cacheManager.set( - `twitter/quote_generation_${tweet.id}.txt`, - `Context:\n${enrichedState}\n\nGenerated Quote:\n${quoteContent}` - ); - } else { - elizaLogger.error( - "Quote tweet creation failed:", - body - ); - } - } - } catch (error) { - elizaLogger.error( - "Error in quote tweet generation:", - error - ); - } - } - - if (actionResponse.reply) { - try { - await this.handleTextOnlyReply( - tweet, - tweetState, - executedActions - ); - } catch (error) { - elizaLogger.error( - `Error replying to tweet ${tweet.id}:`, - error - ); - } - } - - // Add these checks before creating memory - await this.runtime.ensureRoomExists(roomId); - await this.runtime.ensureUserExists( - stringToUuid(tweet.userId), - tweet.username, - tweet.name, - "twitter" - ); - await this.runtime.ensureParticipantInRoom( - this.runtime.agentId, - roomId - ); - - if (!this.isDryRun) { - // Then create the memory - await this.runtime.messageManager.createMemory({ - id: stringToUuid(tweet.id + "-" + this.runtime.agentId), - userId: stringToUuid(tweet.userId), - content: { - text: tweet.text, - url: tweet.permanentUrl, - source: "twitter", - action: executedActions.join(","), - }, - agentId: this.runtime.agentId, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: tweet.timestamp * 1000, - }); - } - - results.push({ - tweetId: tweet.id, - actionResponse: actionResponse, - executedActions, - }); - } catch (error) { - elizaLogger.error(`Error processing tweet ${tweet.id}:`, error); - continue; - } - } - - return results; - } - - /** - * Handles text-only replies to tweets. If isDryRun is true, only logs what would - * have been replied without making API calls. - */ - private async handleTextOnlyReply( - tweet: Tweet, - tweetState: any, - executedActions: string[] - ) { - try { - // Build conversation thread for context - const thread = await buildConversationThread(tweet, this.client); - const formattedConversation = thread - .map( - (t) => - `@${t.username} (${new Date( - t.timestamp * 1000 - ).toLocaleString()}): ${t.text}` - ) - .join("\n\n"); - - // Generate image descriptions if present - const imageDescriptions = []; - if (tweet.photos?.length > 0) { - elizaLogger.log("Processing images in tweet for context"); - for (const photo of tweet.photos) { - const description = await this.runtime - .getService( - ServiceType.IMAGE_DESCRIPTION - ) - .describeImage(photo.url); - imageDescriptions.push(description); - } - } - - // Handle quoted tweet if present - let quotedContent = ""; - if (tweet.quotedStatusId) { - try { - const quotedTweet = - await this.client.twitterClient.getTweet( - tweet.quotedStatusId - ); - if (quotedTweet) { - quotedContent = `\nQuoted Tweet from @${quotedTweet.username}:\n${quotedTweet.text}`; - } - } catch (error) { - elizaLogger.error("Error fetching quoted tweet:", error); - } - } - - // Compose rich state with all context - const enrichedState = await this.runtime.composeState( - { - userId: this.runtime.agentId, - roomId: stringToUuid( - tweet.conversationId + "-" + this.runtime.agentId - ), - agentId: this.runtime.agentId, - content: { text: tweet.text, action: "" }, - }, - { - twitterUserName: this.twitterUsername, - currentPost: `From @${tweet.username}: ${tweet.text}`, - formattedConversation, - imageContext: - imageDescriptions.length > 0 - ? `\nImages in Tweet:\n${imageDescriptions - .map((desc, i) => `Image ${i + 1}: ${desc}`) - .join("\n")}` - : "", - quotedContent, - } - ); - - // Generate and clean the reply content - const replyText = await this.generateTweetContent(enrichedState, { - template: - this.runtime.character.templates - ?.twitterMessageHandlerTemplate || - twitterMessageHandlerTemplate, - }); - - if (!replyText) { - elizaLogger.error("Failed to generate valid reply content"); - return; - } - - if (this.isDryRun) { - elizaLogger.info( - `Dry run: reply to tweet ${tweet.id} would have been: ${replyText}` - ); - executedActions.push("reply (dry run)"); - return; - } - - elizaLogger.debug("Final reply text to be sent:", replyText); - - let result; - - if (replyText.length > DEFAULT_MAX_TWEET_LENGTH) { - result = await this.handleNoteTweet( - this.client, - replyText, - tweet.id - ); - } else { - result = await this.sendStandardTweet( - this.client, - replyText, - tweet.id - ); - } - - if (result) { - elizaLogger.log("Successfully posted reply tweet"); - executedActions.push("reply"); - - // Cache generation context for debugging - await this.runtime.cacheManager.set( - `twitter/reply_generation_${tweet.id}.txt`, - `Context:\n${enrichedState}\n\nGenerated Reply:\n${replyText}` - ); - } else { - elizaLogger.error("Tweet reply creation failed"); - } - } catch (error) { - elizaLogger.error("Error in handleTextOnlyReply:", error); - } - } - - async stop() { - this.stopProcessingActions = true; - } - - private async sendForApproval( - tweetTextForPosting: string, - roomId: UUID, - rawTweetContent: string - ): Promise { - try { - const embed = { - title: "New Tweet Pending Approval", - description: tweetTextForPosting, - fields: [ - { - name: "Character", - value: this.client.profile.username, - inline: true, - }, - { - name: "Length", - value: tweetTextForPosting.length.toString(), - inline: true, - }, - ], - footer: { - text: "Reply with '👍' to post or '❌' to discard, This will automatically expire and remove after 24 hours if no response received", - }, - timestamp: new Date().toISOString(), - }; - - const channel = await this.discordClientForApproval.channels.fetch( - this.discordApprovalChannelId - ); - - if (!channel || !(channel instanceof TextChannel)) { - throw new Error("Invalid approval channel"); - } - - const message = await channel.send({ embeds: [embed] }); - - // Store the pending tweet - const pendingTweetsKey = `twitter/${this.client.profile.username}/pendingTweet`; - const currentPendingTweets = - (await this.runtime.cacheManager.get( - pendingTweetsKey - )) || []; - // Add new pending tweet - currentPendingTweets.push({ - tweetTextForPosting, - roomId, - rawTweetContent, - discordMessageId: message.id, - channelId: this.discordApprovalChannelId, - timestamp: Date.now(), - }); - - // Store updated array - await this.runtime.cacheManager.set( - pendingTweetsKey, - currentPendingTweets - ); - - return message.id; - } catch (error) { - elizaLogger.error( - "Error Sending Twitter Post Approval Request:", - error - ); - return null; - } - } - - private async checkApprovalStatus( - discordMessageId: string - ): Promise { - try { - // Fetch message and its replies from Discord - const channel = await this.discordClientForApproval.channels.fetch( - this.discordApprovalChannelId - ); - - elizaLogger.log(`channel ${JSON.stringify(channel)}`); - - if (!(channel instanceof TextChannel)) { - elizaLogger.error("Invalid approval channel"); - return "PENDING"; - } - - // Fetch the original message and its replies - const message = await channel.messages.fetch(discordMessageId); - - // Look for thumbs up reaction ('👍') - const thumbsUpReaction = message.reactions.cache.find( - (reaction) => reaction.emoji.name === "👍" - ); - - // Look for reject reaction ('❌') - const rejectReaction = message.reactions.cache.find( - (reaction) => reaction.emoji.name === "❌" - ); - - // Check if the reaction exists and has reactions - if (rejectReaction) { - const count = rejectReaction.count; - if (count > 0) { - return "REJECTED"; - } - } - - // Check if the reaction exists and has reactions - if (thumbsUpReaction) { - // You might want to check for specific users who can approve - // For now, we'll return true if anyone used thumbs up - const count = thumbsUpReaction.count; - if (count > 0) { - return "APPROVED"; - } - } - - return "PENDING"; - } catch (error) { - elizaLogger.error("Error checking approval status:", error); - return "PENDING"; - } - } - - private async cleanupPendingTweet(discordMessageId: string) { - const pendingTweetsKey = `twitter/${this.client.profile.username}/pendingTweet`; - const currentPendingTweets = - (await this.runtime.cacheManager.get( - pendingTweetsKey - )) || []; - - // Remove the specific tweet - const updatedPendingTweets = currentPendingTweets.filter( - (tweet) => tweet.discordMessageId !== discordMessageId - ); - - if (updatedPendingTweets.length === 0) { - await this.runtime.cacheManager.delete(pendingTweetsKey); - } else { - await this.runtime.cacheManager.set( - pendingTweetsKey, - updatedPendingTweets - ); - } - } - - private async handlePendingTweet() { - elizaLogger.log("Checking Pending Tweets..."); - const pendingTweetsKey = `twitter/${this.client.profile.username}/pendingTweet`; - const pendingTweets = - (await this.runtime.cacheManager.get( - pendingTweetsKey - )) || []; - - for (const pendingTweet of pendingTweets) { - // Check if tweet is older than 24 hours - const isExpired = - Date.now() - pendingTweet.timestamp > 24 * 60 * 60 * 1000; - - if (isExpired) { - elizaLogger.log("Pending tweet expired, cleaning up"); - - // Notify on Discord about expiration - try { - const channel = - await this.discordClientForApproval.channels.fetch( - pendingTweet.channelId - ); - if (channel instanceof TextChannel) { - const originalMessage = await channel.messages.fetch( - pendingTweet.discordMessageId - ); - await originalMessage.reply( - "This tweet approval request has expired (24h timeout)." - ); - } - } catch (error) { - elizaLogger.error( - "Error sending expiration notification:", - error - ); - } - - await this.cleanupPendingTweet(pendingTweet.discordMessageId); - return; - } - - // Check approval status - elizaLogger.log("Checking approval status..."); - const approvalStatus: PendingTweetApprovalStatus = - await this.checkApprovalStatus(pendingTweet.discordMessageId); - - if (approvalStatus === "APPROVED") { - elizaLogger.log("Tweet Approved, Posting"); - await this.postTweet( - this.runtime, - this.client, - pendingTweet.tweetTextForPosting, - pendingTweet.roomId, - pendingTweet.rawTweetContent, - this.twitterUsername - ); - - // Notify on Discord about posting - try { - const channel = - await this.discordClientForApproval.channels.fetch( - pendingTweet.channelId - ); - if (channel instanceof TextChannel) { - const originalMessage = await channel.messages.fetch( - pendingTweet.discordMessageId - ); - await originalMessage.reply( - "Tweet has been posted successfully! ✅" - ); - } - } catch (error) { - elizaLogger.error( - "Error sending post notification:", - error - ); - } - - await this.cleanupPendingTweet(pendingTweet.discordMessageId); - } else if (approvalStatus === "REJECTED") { - elizaLogger.log("Tweet Rejected, Cleaning Up"); - await this.cleanupPendingTweet(pendingTweet.discordMessageId); - // Notify about Rejection of Tweet - try { - const channel = - await this.discordClientForApproval.channels.fetch( - pendingTweet.channelId - ); - if (channel instanceof TextChannel) { - const originalMessage = await channel.messages.fetch( - pendingTweet.discordMessageId - ); - await originalMessage.reply( - "Tweet has been rejected! ❌" - ); - } - } catch (error) { - elizaLogger.error( - "Error sending rejection notification:", - error - ); - } - } - } - } -} diff --git a/packages/client-twitter/src/search.ts b/packages/client-twitter/src/search.ts deleted file mode 100644 index 01ed6e6827738..0000000000000 --- a/packages/client-twitter/src/search.ts +++ /dev/null @@ -1,330 +0,0 @@ -import { SearchMode } from "agent-twitter-client"; -import { composeContext, elizaLogger } from "@elizaos/core"; -import { generateMessageResponse, generateText } from "@elizaos/core"; -import { messageCompletionFooter } from "@elizaos/core"; -import { - type Content, - type HandlerCallback, - type IAgentRuntime, - type IImageDescriptionService, - ModelClass, - ServiceType, - type State, -} from "@elizaos/core"; -import { stringToUuid } from "@elizaos/core"; -import type { ClientBase } from "./base"; -import { buildConversationThread, sendTweet, wait } from "./utils.ts"; - -const twitterSearchTemplate = - `{{timeline}} - -{{providers}} - -Recent interactions between {{agentName}} and other users: -{{recentPostInteractions}} - -About {{agentName}} (@{{twitterUserName}}): -{{bio}} -{{lore}} -{{topics}} - -{{postDirections}} - -{{recentPosts}} - -# Task: Respond to the following post in the style and perspective of {{agentName}} (aka @{{twitterUserName}}). Write a {{adjective}} response for {{agentName}} to say directly in response to the post. don't generalize. -{{currentPost}} - -IMPORTANT: Your response CANNOT be longer than 20 words. -Aim for 1-2 short sentences maximum. Be concise and direct. - -Your response should not contain any questions. Brief, concise statements only. No emojis. Use \\n\\n (double spaces) between statements. - -` + messageCompletionFooter; - -export class TwitterSearchClient { - client: ClientBase; - runtime: IAgentRuntime; - twitterUsername: string; - private respondedTweets: Set = new Set(); - - constructor(client: ClientBase, runtime: IAgentRuntime) { - this.client = client; - this.runtime = runtime; - this.twitterUsername = this.client.twitterConfig.TWITTER_USERNAME; - } - - async start() { - this.engageWithSearchTermsLoop(); - } - - private engageWithSearchTermsLoop() { - this.engageWithSearchTerms().then(); - const randomMinutes = Math.floor(Math.random() * (120 - 60 + 1)) + 60; - elizaLogger.log( - `Next twitter search scheduled in ${randomMinutes} minutes` - ); - setTimeout( - () => this.engageWithSearchTermsLoop(), - randomMinutes * 60 * 1000 - ); - } - - private async engageWithSearchTerms() { - elizaLogger.log("Engaging with search terms"); - try { - const searchTerm = [...this.runtime.character.topics][ - Math.floor(Math.random() * this.runtime.character.topics.length) - ]; - - elizaLogger.log("Fetching search tweets"); - // TODO: we wait 5 seconds here to avoid getting rate limited on startup, but we should queue - await new Promise((resolve) => setTimeout(resolve, 5000)); - const recentTweets = await this.client.fetchSearchTweets( - searchTerm, - 20, - SearchMode.Top - ); - elizaLogger.log("Search tweets fetched"); - - const homeTimeline = await this.client.fetchHomeTimeline(50); - - await this.client.cacheTimeline(homeTimeline); - - const formattedHomeTimeline = - `# ${this.runtime.character.name}'s Home Timeline\n\n` + - homeTimeline - .map((tweet) => { - return `ID: ${tweet.id}\nFrom: ${tweet.name} (@${tweet.username})${tweet.inReplyToStatusId ? ` In reply to: ${tweet.inReplyToStatusId}` : ""}\nText: ${tweet.text}\n---\n`; - }) - .join("\n"); - - // randomly slice .tweets down to 20 - const slicedTweets = recentTweets.tweets - .sort(() => Math.random() - 0.5) - .slice(0, 20); - - if (slicedTweets.length === 0) { - elizaLogger.log( - "No valid tweets found for the search term", - searchTerm - ); - return; - } - - const prompt = ` - Here are some tweets related to the search term "${searchTerm}": - - ${[...slicedTweets, ...homeTimeline] - .filter((tweet) => { - // ignore tweets where any of the thread tweets contain a tweet by the bot - const thread = tweet.thread; - const botTweet = thread.find( - (t) => t.username === this.twitterUsername - ); - return !botTweet; - }) - .map( - (tweet) => ` - ID: ${tweet.id}${tweet.inReplyToStatusId ? ` In reply to: ${tweet.inReplyToStatusId}` : ""} - From: ${tweet.name} (@${tweet.username}) - Text: ${tweet.text} - ` - ) - .join("\n")} - - Which tweet is the most interesting and relevant for Ruby to reply to? Please provide only the ID of the tweet in your response. - Notes: - - Respond to English tweets only - - Respond to tweets that don't have a lot of hashtags, links, URLs or images - - Respond to tweets that are not retweets - - Respond to tweets where there is an easy exchange of ideas to have with the user - - ONLY respond with the ID of the tweet`; - - const mostInterestingTweetResponse = await generateText({ - runtime: this.runtime, - context: prompt, - modelClass: ModelClass.SMALL, - }); - - const tweetId = mostInterestingTweetResponse.trim(); - const selectedTweet = slicedTweets.find( - (tweet) => - tweet.id.toString().includes(tweetId) || - tweetId.includes(tweet.id.toString()) - ); - - if (!selectedTweet) { - elizaLogger.warn("No matching tweet found for the selected ID"); - elizaLogger.log("Selected tweet ID:", tweetId); - return; - } - - elizaLogger.log("Selected tweet to reply to:", selectedTweet?.text); - - if (selectedTweet.username === this.twitterUsername) { - elizaLogger.log("Skipping tweet from bot itself"); - return; - } - - const conversationId = selectedTweet.conversationId; - const roomId = stringToUuid( - conversationId + "-" + this.runtime.agentId - ); - - const userIdUUID = stringToUuid(selectedTweet.userId as string); - - await this.runtime.ensureConnection( - userIdUUID, - roomId, - selectedTweet.username, - selectedTweet.name, - "twitter" - ); - - // crawl additional conversation tweets, if there are any - await buildConversationThread(selectedTweet, this.client); - - const message = { - id: stringToUuid(selectedTweet.id + "-" + this.runtime.agentId), - agentId: this.runtime.agentId, - content: { - text: selectedTweet.text, - url: selectedTweet.permanentUrl, - inReplyTo: selectedTweet.inReplyToStatusId - ? stringToUuid( - selectedTweet.inReplyToStatusId + - "-" + - this.runtime.agentId - ) - : undefined, - }, - userId: userIdUUID, - roomId, - // Timestamps are in seconds, but we need them in milliseconds - createdAt: selectedTweet.timestamp * 1000, - }; - - if (!message.content.text) { - elizaLogger.warn("Returning: No response text found"); - return; - } - - // Fetch replies and retweets - const replies = selectedTweet.thread; - const replyContext = replies - .filter((reply) => reply.username !== this.twitterUsername) - .map((reply) => `@${reply.username}: ${reply.text}`) - .join("\n"); - - let tweetBackground = ""; - if (selectedTweet.isRetweet) { - const originalTweet = await this.client.requestQueue.add(() => - this.client.twitterClient.getTweet(selectedTweet.id) - ); - tweetBackground = `Retweeting @${originalTweet.username}: ${originalTweet.text}`; - } - - // Generate image descriptions using GPT-4 vision API - const imageDescriptions = []; - for (const photo of selectedTweet.photos) { - const description = await this.runtime - .getService( - ServiceType.IMAGE_DESCRIPTION - ) - .describeImage(photo.url); - imageDescriptions.push(description); - } - - let state = await this.runtime.composeState(message, { - twitterClient: this.client.twitterClient, - twitterUserName: this.twitterUsername, - timeline: formattedHomeTimeline, - tweetContext: `${tweetBackground} - - Original Post: - By @${selectedTweet.username} - ${selectedTweet.text}${replyContext.length > 0 && `\nReplies to original post:\n${replyContext}`} - ${`Original post text: ${selectedTweet.text}`} - ${selectedTweet.urls.length > 0 ? `URLs: ${selectedTweet.urls.join(", ")}\n` : ""}${imageDescriptions.length > 0 ? `\nImages in Post (Described): ${imageDescriptions.join(", ")}\n` : ""} - `, - }); - - await this.client.saveRequestMessage(message, state as State); - - const context = composeContext({ - state, - template: - this.runtime.character.templates?.twitterSearchTemplate || - twitterSearchTemplate, - }); - - const responseContent = await generateMessageResponse({ - runtime: this.runtime, - context, - modelClass: ModelClass.LARGE, - }); - - responseContent.inReplyTo = message.id; - - const response = responseContent; - - if (!response.text) { - elizaLogger.warn("Returning: No response text found"); - return; - } - - elizaLogger.log( - `Bot would respond to tweet ${selectedTweet.id} with: ${response.text}` - ); - try { - const callback: HandlerCallback = async (response: Content) => { - const memories = await sendTweet( - this.client, - response, - message.roomId, - this.twitterUsername, - selectedTweet.id - ); - return memories; - }; - - const responseMessages = await callback(responseContent); - - state = await this.runtime.updateRecentMessageState(state); - - for (const responseMessage of responseMessages) { - await this.runtime.messageManager.createMemory( - responseMessage, - false - ); - } - - state = await this.runtime.updateRecentMessageState(state); - - await this.runtime.evaluate(message, state); - - await this.runtime.processActions( - message, - responseMessages, - state, - callback - ); - - this.respondedTweets.add(selectedTweet.id); - const responseInfo = `Context:\n\n${context}\n\nSelected Post: ${selectedTweet.id} - ${selectedTweet.username}: ${selectedTweet.text}\nAgent's Output:\n${response.text}`; - - await this.runtime.cacheManager.set( - `twitter/tweet_generation_${selectedTweet.id}.txt`, - responseInfo - ); - - await wait(); - } catch (error) { - console.error(`Error sending response post: ${error}`); - } - } catch (error) { - console.error("Error engaging with search terms:", error); - } - } -} diff --git a/packages/client-twitter/src/spaces.ts b/packages/client-twitter/src/spaces.ts deleted file mode 100644 index 7764106c9eec8..0000000000000 --- a/packages/client-twitter/src/spaces.ts +++ /dev/null @@ -1,588 +0,0 @@ -import { - elizaLogger, - type IAgentRuntime, - composeContext, - generateText, - ModelClass, - ServiceType, - type ITranscriptionService, - type TwitterSpaceDecisionOptions, -} from "@elizaos/core"; -import type { ClientBase } from "./base"; -import { - type Scraper, - Space, - type SpaceConfig, - RecordToDiskPlugin, - IdleMonitorPlugin, - type SpeakerRequest, -} from "agent-twitter-client"; -import { SttTtsPlugin } from "./plugins/SttTtsSpacesPlugin.ts"; - -interface CurrentSpeakerState { - userId: string; - sessionUUID: string; - username: string; - startTime: number; -} - -/** - * Generate short filler text via GPT - */ -async function generateFiller( - runtime: IAgentRuntime, - fillerType: string -): Promise { - try { - const context = composeContext({ - state: { fillerType }, - template: ` -# INSTRUCTIONS: -You are generating a short filler message for a Twitter Space. The filler type is "{{fillerType}}". -Keep it brief, friendly, and relevant. No more than two sentences. -Only return the text, no additional formatting. - ---- -`, - }); - const output = await generateText({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - return output.trim(); - } catch (err) { - elizaLogger.error("[generateFiller] Error generating filler:", err); - return ""; - } -} - -/** - * Speak a filler message if STT/TTS plugin is available. Sleep a bit after TTS to avoid cutoff. - */ -async function speakFiller( - runtime: IAgentRuntime, - sttTtsPlugin: SttTtsPlugin | undefined, - fillerType: string, - sleepAfterMs = 3000 -): Promise { - if (!sttTtsPlugin) return; - const text = await generateFiller(runtime, fillerType); - if (!text) return; - - elizaLogger.log(`[Space] Filler (${fillerType}) => ${text}`); - await sttTtsPlugin.speakText(text); - - if (sleepAfterMs > 0) { - await new Promise((res) => setTimeout(res, sleepAfterMs)); - } -} - -/** - * Generate topic suggestions via GPT if no topics are configured - */ -async function generateTopicsIfEmpty( - runtime: IAgentRuntime -): Promise { - try { - const context = composeContext({ - state: {}, - template: ` -# INSTRUCTIONS: -Please generate 5 short topic ideas for a Twitter Space about technology or random interesting subjects. -Return them as a comma-separated list, no additional formatting or numbering. - -Example: -"AI Advances, Futuristic Gadgets, Space Exploration, Quantum Computing, Digital Ethics" ---- -`, - }); - const response = await generateText({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - const topics = response - .split(",") - .map((t) => t.trim()) - .filter(Boolean); - return topics.length ? topics : ["Random Tech Chat", "AI Thoughts"]; - } catch (err) { - elizaLogger.error("[generateTopicsIfEmpty] GPT error =>", err); - return ["Random Tech Chat", "AI Thoughts"]; - } -} - -/** - * Main class: manage a Twitter Space with N speakers max, speaker queue, filler messages, etc. - */ -export class TwitterSpaceClient { - private runtime: IAgentRuntime; - private client: ClientBase; - private scraper: Scraper; - private isSpaceRunning = false; - private currentSpace?: Space; - private spaceId?: string; - private startedAt?: number; - private checkInterval?: NodeJS.Timeout; - private lastSpaceEndedAt?: number; - private sttTtsPlugin?: SttTtsPlugin; - - /** - * We now store an array of active speakers, not just 1 - */ - private activeSpeakers: CurrentSpeakerState[] = []; - private speakerQueue: SpeakerRequest[] = []; - - private decisionOptions: TwitterSpaceDecisionOptions; - - constructor(client: ClientBase, runtime: IAgentRuntime) { - this.client = client; - this.scraper = client.twitterClient; - this.runtime = runtime; - - const charSpaces = runtime.character.twitterSpaces || {}; - this.decisionOptions = { - maxSpeakers: charSpaces.maxSpeakers ?? 1, - topics: charSpaces.topics ?? [], - typicalDurationMinutes: charSpaces.typicalDurationMinutes ?? 30, - idleKickTimeoutMs: charSpaces.idleKickTimeoutMs ?? 5 * 60_000, - minIntervalBetweenSpacesMinutes: - charSpaces.minIntervalBetweenSpacesMinutes ?? 60, - businessHoursOnly: charSpaces.businessHoursOnly ?? false, - randomChance: charSpaces.randomChance ?? 0.3, - enableIdleMonitor: charSpaces.enableIdleMonitor !== false, - enableSttTts: charSpaces.enableSttTts !== false, - enableRecording: charSpaces.enableRecording !== false, - voiceId: - charSpaces.voiceId || - runtime.character.settings.voice.model || - "Xb7hH8MSUJpSbSDYk0k2", - sttLanguage: charSpaces.sttLanguage || "en", - speakerMaxDurationMs: charSpaces.speakerMaxDurationMs ?? 4 * 60_000, - }; - } - - /** - * Periodic check to launch or manage space - */ - public async startPeriodicSpaceCheck() { - elizaLogger.log("[Space] Starting periodic check routine..."); - - // For instance: - const intervalMsWhenIdle = 5 * 60_000; // 5 minutes if no Space is running - const intervalMsWhenRunning = 5_000; // 5 seconds if a Space IS running - - const routine = async () => { - try { - if (!this.isSpaceRunning) { - // Space not running => check if we should launch - const launch = await this.shouldLaunchSpace(); - if (launch) { - const config = await this.generateSpaceConfig(); - await this.startSpace(config); - } - // Plan next iteration with a slower pace - this.checkInterval = setTimeout( - routine, - this.isSpaceRunning - ? intervalMsWhenRunning - : intervalMsWhenIdle - ); - } else { - // Space is running => manage it more frequently - await this.manageCurrentSpace(); - // Plan next iteration with a faster pace - this.checkInterval = setTimeout( - routine, - intervalMsWhenRunning - ); - } - } catch (error) { - elizaLogger.error("[Space] Error in routine =>", error); - // In case of error, still schedule next iteration - this.checkInterval = setTimeout(routine, intervalMsWhenIdle); - } - }; - - routine(); - } - - stopPeriodicCheck() { - if (this.checkInterval) { - clearTimeout(this.checkInterval); - this.checkInterval = undefined; - } - } - - private async shouldLaunchSpace(): Promise { - // Random chance - const r = Math.random(); - if (r > (this.decisionOptions.randomChance ?? 0.3)) { - elizaLogger.log("[Space] Random check => skip launching"); - return false; - } - // Business hours - if (this.decisionOptions.businessHoursOnly) { - const hour = new Date().getUTCHours(); - if (hour < 9 || hour >= 17) { - elizaLogger.log("[Space] Out of business hours => skip"); - return false; - } - } - // Interval - const now = Date.now(); - if (this.lastSpaceEndedAt) { - const minIntervalMs = - (this.decisionOptions.minIntervalBetweenSpacesMinutes ?? 60) * - 60_000; - if (now - this.lastSpaceEndedAt < minIntervalMs) { - elizaLogger.log("[Space] Too soon since last space => skip"); - return false; - } - } - - elizaLogger.log("[Space] Deciding to launch a new Space..."); - return true; - } - - private async generateSpaceConfig(): Promise { - if ( - !this.decisionOptions.topics || - this.decisionOptions.topics.length === 0 - ) { - const newTopics = await generateTopicsIfEmpty(this.client.runtime); - this.decisionOptions.topics = newTopics; - } - - let chosenTopic = "Random Tech Chat"; - if ( - this.decisionOptions.topics && - this.decisionOptions.topics.length > 0 - ) { - chosenTopic = - this.decisionOptions.topics[ - Math.floor( - Math.random() * this.decisionOptions.topics.length - ) - ]; - } - - return { - mode: "INTERACTIVE", - title: chosenTopic, - description: `Discussion about ${chosenTopic}`, - languages: ["en"], - }; - } - - public async startSpace(config: SpaceConfig) { - elizaLogger.log("[Space] Starting a new Twitter Space..."); - - try { - this.currentSpace = new Space(this.scraper); - this.isSpaceRunning = false; - this.spaceId = undefined; - this.startedAt = Date.now(); - - // Reset states - this.activeSpeakers = []; - this.speakerQueue = []; - - // Retrieve keys - const elevenLabsKey = - this.runtime.getSetting("ELEVENLABS_XI_API_KEY") || ""; - - const broadcastInfo = await this.currentSpace.initialize(config); - this.spaceId = broadcastInfo.room_id; - // Plugins - if (this.decisionOptions.enableRecording) { - elizaLogger.log("[Space] Using RecordToDiskPlugin"); - this.currentSpace.use(new RecordToDiskPlugin()); - } - - if (this.decisionOptions.enableSttTts) { - elizaLogger.log("[Space] Using SttTtsPlugin"); - const sttTts = new SttTtsPlugin(); - this.sttTtsPlugin = sttTts; - this.currentSpace.use(sttTts, { - runtime: this.runtime, - client: this.client, - spaceId: this.spaceId, - elevenLabsApiKey: elevenLabsKey, - voiceId: this.decisionOptions.voiceId, - sttLanguage: this.decisionOptions.sttLanguage, - transcriptionService: - this.client.runtime.getService( - ServiceType.TRANSCRIPTION - ), - }); - } - - if (this.decisionOptions.enableIdleMonitor) { - elizaLogger.log("[Space] Using IdleMonitorPlugin"); - this.currentSpace.use( - new IdleMonitorPlugin( - this.decisionOptions.idleKickTimeoutMs ?? 60_000, - 10_000 - ) - ); - } - - this.isSpaceRunning = true; - await this.scraper.sendTweet( - broadcastInfo.share_url.replace("broadcasts", "spaces") - ); - - const spaceUrl = broadcastInfo.share_url.replace( - "broadcasts", - "spaces" - ); - elizaLogger.log(`[Space] Space started => ${spaceUrl}`); - - // Greet - await speakFiller( - this.client.runtime, - this.sttTtsPlugin, - "WELCOME" - ); - - // Events - this.currentSpace.on("occupancyUpdate", (update) => { - elizaLogger.log( - `[Space] Occupancy => ${update.occupancy} participant(s).` - ); - }); - - this.currentSpace.on( - "speakerRequest", - async (req: SpeakerRequest) => { - elizaLogger.log( - `[Space] Speaker request from @${req.username} (${req.userId}).` - ); - await this.handleSpeakerRequest(req); - } - ); - - this.currentSpace.on("idleTimeout", async (info) => { - elizaLogger.log( - `[Space] idleTimeout => no audio for ${info.idleMs} ms.` - ); - await speakFiller( - this.client.runtime, - this.sttTtsPlugin, - "IDLE_ENDING" - ); - await this.stopSpace(); - }); - - process.on("SIGINT", async () => { - elizaLogger.log("[Space] SIGINT => stopping space"); - await speakFiller( - this.client.runtime, - this.sttTtsPlugin, - "CLOSING" - ); - await this.stopSpace(); - process.exit(0); - }); - } catch (error) { - elizaLogger.error("[Space] Error launching Space =>", error); - this.isSpaceRunning = false; - throw error; - } - } - - /** - * Periodic management: check durations, remove extras, maybe accept new from queue - */ - private async manageCurrentSpace() { - if (!this.spaceId || !this.currentSpace) return; - try { - const audioSpace = await this.scraper.getAudioSpaceById( - this.spaceId - ); - const { participants } = audioSpace; - const numSpeakers = participants.speakers?.length || 0; - const totalListeners = participants.listeners?.length || 0; - - // 1) Remove any speaker who exceeded speakerMaxDurationMs - const maxDur = this.decisionOptions.speakerMaxDurationMs ?? 240_000; - const now = Date.now(); - - for (let i = this.activeSpeakers.length - 1; i >= 0; i--) { - const speaker = this.activeSpeakers[i]; - const elapsed = now - speaker.startTime; - if (elapsed > maxDur) { - elizaLogger.log( - `[Space] Speaker @${speaker.username} exceeded max duration => removing` - ); - await this.removeSpeaker(speaker.userId); - this.activeSpeakers.splice(i, 1); - - // Possibly speak a short "SPEAKER_LEFT" filler - await speakFiller( - this.client.runtime, - this.sttTtsPlugin, - "SPEAKER_LEFT" - ); - } - } - - // 2) If we have capacity for new speakers from the queue, accept them - await this.acceptSpeakersFromQueueIfNeeded(); - - // 3) If somehow more than maxSpeakers are active, remove the extras - if (numSpeakers > (this.decisionOptions.maxSpeakers ?? 1)) { - elizaLogger.log( - "[Space] More than maxSpeakers => removing extras..." - ); - await this.kickExtraSpeakers(participants.speakers); - } - - // 4) Possibly stop the space if empty or time exceeded - const elapsedMinutes = (now - (this.startedAt || 0)) / 60000; - if ( - elapsedMinutes > - (this.decisionOptions.typicalDurationMinutes ?? 30) || - (numSpeakers === 0 && - totalListeners === 0 && - elapsedMinutes > 5) - ) { - elizaLogger.log( - "[Space] Condition met => stopping the Space..." - ); - await speakFiller( - this.client.runtime, - this.sttTtsPlugin, - "CLOSING", - 4000 - ); - await this.stopSpace(); - } - } catch (error) { - elizaLogger.error("[Space] Error in manageCurrentSpace =>", error); - } - } - - /** - * If we have available slots, accept new speakers from the queue - */ - private async acceptSpeakersFromQueueIfNeeded() { - // while queue not empty and activeSpeakers < maxSpeakers, accept next - const ms = this.decisionOptions.maxSpeakers ?? 1; - while ( - this.speakerQueue.length > 0 && - this.activeSpeakers.length < ms - ) { - const nextReq = this.speakerQueue.shift(); - if (nextReq) { - await speakFiller( - this.client.runtime, - this.sttTtsPlugin, - "PRE_ACCEPT" - ); - await this.acceptSpeaker(nextReq); - } - } - } - - private async handleSpeakerRequest(req: SpeakerRequest) { - if (!this.spaceId || !this.currentSpace) return; - - const audioSpace = await this.scraper.getAudioSpaceById(this.spaceId); - const janusSpeakers = audioSpace?.participants?.speakers || []; - - // If we haven't reached maxSpeakers, accept immediately - if (janusSpeakers.length < (this.decisionOptions.maxSpeakers ?? 1)) { - elizaLogger.log(`[Space] Accepting speaker @${req.username} now`); - await speakFiller( - this.client.runtime, - this.sttTtsPlugin, - "PRE_ACCEPT" - ); - await this.acceptSpeaker(req); - } else { - elizaLogger.log( - `[Space] Adding speaker @${req.username} to the queue` - ); - this.speakerQueue.push(req); - } - } - - private async acceptSpeaker(req: SpeakerRequest) { - if (!this.currentSpace) return; - try { - await this.currentSpace.approveSpeaker(req.userId, req.sessionUUID); - this.activeSpeakers.push({ - userId: req.userId, - sessionUUID: req.sessionUUID, - username: req.username, - startTime: Date.now(), - }); - elizaLogger.log(`[Space] Speaker @${req.username} is now live`); - } catch (err) { - elizaLogger.error( - `[Space] Error approving speaker @${req.username}:`, - err - ); - } - } - - private async removeSpeaker(userId: string) { - if (!this.currentSpace) return; - try { - await this.currentSpace.removeSpeaker(userId); - elizaLogger.log(`[Space] Removed speaker userId=${userId}`); - } catch (error) { - elizaLogger.error( - `[Space] Error removing speaker userId=${userId} =>`, - error - ); - } - } - - /** - * If more than maxSpeakers are found, remove extras - * Also update activeSpeakers array - */ - private async kickExtraSpeakers(speakers: any[]) { - if (!this.currentSpace) return; - const ms = this.decisionOptions.maxSpeakers ?? 1; - - // sort by who joined first if needed, or just slice - const extras = speakers.slice(ms); - for (const sp of extras) { - elizaLogger.log( - `[Space] Removing extra speaker => userId=${sp.user_id}` - ); - await this.removeSpeaker(sp.user_id); - - // remove from activeSpeakers array - const idx = this.activeSpeakers.findIndex( - (s) => s.userId === sp.user_id - ); - if (idx !== -1) { - this.activeSpeakers.splice(idx, 1); - } - } - } - - public async stopSpace() { - if (!this.currentSpace || !this.isSpaceRunning) return; - try { - elizaLogger.log("[Space] Stopping the current Space..."); - await this.currentSpace.stop(); - } catch (err) { - elizaLogger.error("[Space] Error stopping Space =>", err); - } finally { - this.isSpaceRunning = false; - this.spaceId = undefined; - this.currentSpace = undefined; - this.startedAt = undefined; - this.lastSpaceEndedAt = Date.now(); - this.activeSpeakers = []; - this.speakerQueue = []; - } - } -} diff --git a/packages/client-twitter/src/types.ts b/packages/client-twitter/src/types.ts deleted file mode 100644 index 634805a75ac21..0000000000000 --- a/packages/client-twitter/src/types.ts +++ /dev/null @@ -1,4 +0,0 @@ -export type MediaData = { - data: Buffer; - mediaType: string; -}; diff --git a/packages/client-twitter/src/utils.ts b/packages/client-twitter/src/utils.ts deleted file mode 100644 index 0c64a59d231f2..0000000000000 --- a/packages/client-twitter/src/utils.ts +++ /dev/null @@ -1,463 +0,0 @@ -import type { Tweet } from "agent-twitter-client"; -import { getEmbeddingZeroVector } from "@elizaos/core"; -import type { Content, Memory, UUID } from "@elizaos/core"; -import { stringToUuid } from "@elizaos/core"; -import type { ClientBase } from "./base"; -import { elizaLogger } from "@elizaos/core"; -import type { Media } from "@elizaos/core"; -import fs from "fs"; -import path from "path"; -import { MediaData } from "./types"; - -export const wait = (minTime = 1000, maxTime = 3000) => { - const waitTime = - Math.floor(Math.random() * (maxTime - minTime + 1)) + minTime; - return new Promise((resolve) => setTimeout(resolve, waitTime)); -}; - -export const isValidTweet = (tweet: Tweet): boolean => { - // Filter out tweets with too many hashtags, @s, or $ signs, probably spam or garbage - const hashtagCount = (tweet.text?.match(/#/g) || []).length; - const atCount = (tweet.text?.match(/@/g) || []).length; - const dollarSignCount = (tweet.text?.match(/\$/g) || []).length; - const totalCount = hashtagCount + atCount + dollarSignCount; - - return ( - hashtagCount <= 1 && - atCount <= 2 && - dollarSignCount <= 1 && - totalCount <= 3 - ); -}; - -export async function buildConversationThread( - tweet: Tweet, - client: ClientBase, - maxReplies = 10 -): Promise { - const thread: Tweet[] = []; - const visited: Set = new Set(); - - async function processThread(currentTweet: Tweet, depth = 0) { - elizaLogger.debug("Processing tweet:", { - id: currentTweet.id, - inReplyToStatusId: currentTweet.inReplyToStatusId, - depth: depth, - }); - - if (!currentTweet) { - elizaLogger.debug("No current tweet found for thread building"); - return; - } - - // Stop if we've reached our reply limit - if (depth >= maxReplies) { - elizaLogger.debug("Reached maximum reply depth", depth); - return; - } - - // Handle memory storage - const memory = await client.runtime.messageManager.getMemoryById( - stringToUuid(currentTweet.id + "-" + client.runtime.agentId) - ); - if (!memory) { - const roomId = stringToUuid( - currentTweet.conversationId + "-" + client.runtime.agentId - ); - const userId = stringToUuid(currentTweet.userId); - - await client.runtime.ensureConnection( - userId, - roomId, - currentTweet.username, - currentTweet.name, - "twitter" - ); - - await client.runtime.messageManager.createMemory({ - id: stringToUuid( - currentTweet.id + "-" + client.runtime.agentId - ), - agentId: client.runtime.agentId, - content: { - text: currentTweet.text, - source: "twitter", - url: currentTweet.permanentUrl, - imageUrls: currentTweet.photos.map((p) => p.url) || [], - inReplyTo: currentTweet.inReplyToStatusId - ? stringToUuid( - currentTweet.inReplyToStatusId + - "-" + - client.runtime.agentId - ) - : undefined, - }, - createdAt: currentTweet.timestamp * 1000, - roomId, - userId: - currentTweet.userId === client.profile.id - ? client.runtime.agentId - : stringToUuid(currentTweet.userId), - embedding: getEmbeddingZeroVector(), - }); - } - - if (visited.has(currentTweet.id)) { - elizaLogger.debug("Already visited tweet:", currentTweet.id); - return; - } - - visited.add(currentTweet.id); - thread.unshift(currentTweet); - - elizaLogger.debug("Current thread state:", { - length: thread.length, - currentDepth: depth, - tweetId: currentTweet.id, - }); - - // If there's a parent tweet, fetch and process it - if (currentTweet.inReplyToStatusId) { - elizaLogger.debug( - "Fetching parent tweet:", - currentTweet.inReplyToStatusId - ); - try { - const parentTweet = await client.twitterClient.getTweet( - currentTweet.inReplyToStatusId - ); - - if (parentTweet) { - elizaLogger.debug("Found parent tweet:", { - id: parentTweet.id, - text: parentTweet.text?.slice(0, 50), - }); - await processThread(parentTweet, depth + 1); - } else { - elizaLogger.debug( - "No parent tweet found for:", - currentTweet.inReplyToStatusId - ); - } - } catch (error) { - elizaLogger.error("Error fetching parent tweet:", { - tweetId: currentTweet.inReplyToStatusId, - error, - }); - } - } else { - elizaLogger.debug( - "Reached end of reply chain at:", - currentTweet.id - ); - } - } - - await processThread(tweet, 0); - - elizaLogger.debug("Final thread built:", { - totalTweets: thread.length, - tweetIds: thread.map((t) => ({ - id: t.id, - text: t.text?.slice(0, 50), - })), - }); - - return thread; -} - -export async function fetchMediaData( - attachments: Media[] -): Promise { - return Promise.all( - attachments.map(async (attachment: Media) => { - if (/^(http|https):\/\//.test(attachment.url)) { - // Handle HTTP URLs - const response = await fetch(attachment.url); - if (!response.ok) { - throw new Error(`Failed to fetch file: ${attachment.url}`); - } - const mediaBuffer = Buffer.from(await response.arrayBuffer()); - const mediaType = attachment.contentType; - return { data: mediaBuffer, mediaType }; - } else if (fs.existsSync(attachment.url)) { - // Handle local file paths - const mediaBuffer = await fs.promises.readFile( - path.resolve(attachment.url) - ); - const mediaType = attachment.contentType; - return { data: mediaBuffer, mediaType }; - } else { - throw new Error( - `File not found: ${attachment.url}. Make sure the path is correct.` - ); - } - }) - ); -} - -export async function sendTweet( - client: ClientBase, - content: Content, - roomId: UUID, - twitterUsername: string, - inReplyTo: string -): Promise { - const maxTweetLength = client.twitterConfig.MAX_TWEET_LENGTH; - const isLongTweet = maxTweetLength > 280; - - const tweetChunks = splitTweetContent(content.text, maxTweetLength); - const sentTweets: Tweet[] = []; - let previousTweetId = inReplyTo; - - for (const chunk of tweetChunks) { - let mediaData = null; - - if (content.attachments && content.attachments.length > 0) { - mediaData = await fetchMediaData(content.attachments); - } - - const cleanChunk = deduplicateMentions(chunk.trim()) - - const result = await client.requestQueue.add(async () => - isLongTweet - ? client.twitterClient.sendLongTweet( - cleanChunk, - previousTweetId, - mediaData - ) - : client.twitterClient.sendTweet( - cleanChunk, - previousTweetId, - mediaData - ) - ); - - const body = await result.json(); - const tweetResult = isLongTweet - ? body?.data?.notetweet_create?.tweet_results?.result - : body?.data?.create_tweet?.tweet_results?.result; - - // if we have a response - if (tweetResult) { - // Parse the response - const finalTweet: Tweet = { - id: tweetResult.rest_id, - text: tweetResult.legacy.full_text, - conversationId: tweetResult.legacy.conversation_id_str, - timestamp: - new Date(tweetResult.legacy.created_at).getTime() / 1000, - userId: tweetResult.legacy.user_id_str, - inReplyToStatusId: tweetResult.legacy.in_reply_to_status_id_str, - permanentUrl: `https://twitter.com/${twitterUsername}/status/${tweetResult.rest_id}`, - hashtags: [], - mentions: [], - photos: [], - thread: [], - urls: [], - videos: [], - }; - sentTweets.push(finalTweet); - previousTweetId = finalTweet.id; - } else { - elizaLogger.error("Error sending tweet chunk:", { - chunk, - response: body, - }); - } - - // Wait a bit between tweets to avoid rate limiting issues - await wait(1000, 2000); - } - - const memories: Memory[] = sentTweets.map((tweet) => ({ - id: stringToUuid(tweet.id + "-" + client.runtime.agentId), - agentId: client.runtime.agentId, - userId: client.runtime.agentId, - content: { - tweetId: tweet.id, - text: tweet.text, - source: "twitter", - url: tweet.permanentUrl, - imageUrls: tweet.photos.map((p) => p.url) || [], - inReplyTo: tweet.inReplyToStatusId - ? stringToUuid( - tweet.inReplyToStatusId + "-" + client.runtime.agentId - ) - : undefined, - }, - roomId, - embedding: getEmbeddingZeroVector(), - createdAt: tweet.timestamp * 1000, - })); - - return memories; -} - -function splitTweetContent(content: string, maxLength: number): string[] { - const paragraphs = content.split("\n\n").map((p) => p.trim()); - const tweets: string[] = []; - let currentTweet = ""; - - for (const paragraph of paragraphs) { - if (!paragraph) continue; - - if ((currentTweet + "\n\n" + paragraph).trim().length <= maxLength) { - if (currentTweet) { - currentTweet += "\n\n" + paragraph; - } else { - currentTweet = paragraph; - } - } else { - if (currentTweet) { - tweets.push(currentTweet.trim()); - } - if (paragraph.length <= maxLength) { - currentTweet = paragraph; - } else { - // Split long paragraph into smaller chunks - const chunks = splitParagraph(paragraph, maxLength); - tweets.push(...chunks.slice(0, -1)); - currentTweet = chunks[chunks.length - 1]; - } - } - } - - if (currentTweet) { - tweets.push(currentTweet.trim()); - } - - return tweets; -} - -function extractUrls(paragraph: string): { - textWithPlaceholders: string; - placeholderMap: Map; -} { - // replace https urls with placeholder - const urlRegex = /https?:\/\/[^\s]+/g; - const placeholderMap = new Map(); - - let urlIndex = 0; - const textWithPlaceholders = paragraph.replace(urlRegex, (match) => { - // twitter url would be considered as 23 characters - // <> is also 23 characters - const placeholder = `<>`; // Placeholder without . ? ! etc - placeholderMap.set(placeholder, match); - urlIndex++; - return placeholder; - }); - - return { textWithPlaceholders, placeholderMap }; -} - -function splitSentencesAndWords(text: string, maxLength: number): string[] { - // Split by periods, question marks and exclamation marks - // Note that URLs in text have been replaced with `<>` and won't be split by dots - const sentences = text.match(/[^.!?]+[.!?]+|[^.!?]+$/g) || [text]; - const chunks: string[] = []; - let currentChunk = ""; - - for (const sentence of sentences) { - if ((currentChunk + " " + sentence).trim().length <= maxLength) { - if (currentChunk) { - currentChunk += " " + sentence; - } else { - currentChunk = sentence; - } - } else { - // Can't fit more, push currentChunk to results - if (currentChunk) { - chunks.push(currentChunk.trim()); - } - - // If current sentence itself is less than or equal to maxLength - if (sentence.length <= maxLength) { - currentChunk = sentence; - } else { - // Need to split sentence by spaces - const words = sentence.split(" "); - currentChunk = ""; - for (const word of words) { - if ( - (currentChunk + " " + word).trim().length <= maxLength - ) { - if (currentChunk) { - currentChunk += " " + word; - } else { - currentChunk = word; - } - } else { - if (currentChunk) { - chunks.push(currentChunk.trim()); - } - currentChunk = word; - } - } - } - } - } - - // Handle remaining content - if (currentChunk) { - chunks.push(currentChunk.trim()); - } - - return chunks; -} - -function deduplicateMentions(paragraph: string) { - // Regex to match mentions at the beginning of the string - const mentionRegex = /^@(\w+)(?:\s+@(\w+))*(\s+|$)/; - - // Find all matches - const matches = paragraph.match(mentionRegex); - - if (!matches) { - return paragraph; // If no matches, return the original string - } - - // Extract mentions from the match groups - let mentions = matches.slice(0, 1)[0].trim().split(' ') - - // Deduplicate mentions - mentions = [...new Set(mentions)]; - - // Reconstruct the string with deduplicated mentions - const uniqueMentionsString = mentions.join(' '); - - // Find where the mentions end in the original string - const endOfMentions = paragraph.indexOf(matches[0]) + matches[0].length; - - // Construct the result by combining unique mentions with the rest of the string - return uniqueMentionsString + ' ' + paragraph.slice(endOfMentions); -} - -function restoreUrls( - chunks: string[], - placeholderMap: Map -): string[] { - return chunks.map((chunk) => { - // Replace all <> in chunk back to original URLs using regex - return chunk.replace(/<>/g, (match) => { - const original = placeholderMap.get(match); - return original || match; // Return placeholder if not found (theoretically won't happen) - }); - }); -} - -function splitParagraph(paragraph: string, maxLength: number): string[] { - // 1) Extract URLs and replace with placeholders - const { textWithPlaceholders, placeholderMap } = extractUrls(paragraph); - - // 2) Use first section's logic to split by sentences first, then do secondary split - const splittedChunks = splitSentencesAndWords( - textWithPlaceholders, - maxLength - ); - - // 3) Replace placeholders back to original URLs - const restoredChunks = restoreUrls(splittedChunks, placeholderMap); - - return restoredChunks; -} diff --git a/packages/client-twitter/tsconfig.json b/packages/client-twitter/tsconfig.json deleted file mode 100644 index 005fbac9d3634..0000000000000 --- a/packages/client-twitter/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/client-twitter/tsup.config.ts b/packages/client-twitter/tsup.config.ts deleted file mode 100644 index e42bf4efeae22..0000000000000 --- a/packages/client-twitter/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/client-twitter/vitest.config.ts b/packages/client-twitter/vitest.config.ts deleted file mode 100644 index 2e60e80f5dc54..0000000000000 --- a/packages/client-twitter/vitest.config.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - include: ['__tests__/**/*.test.ts'], - coverage: { - reporter: ['text', 'json', 'html'], - }, - }, -}); diff --git a/packages/client-xmtp/.npmignore b/packages/client-xmtp/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/client-xmtp/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/client-xmtp/eslint.config.mjs b/packages/client-xmtp/eslint.config.mjs deleted file mode 100644 index 924ebf3bf7328..0000000000000 --- a/packages/client-xmtp/eslint.config.mjs +++ /dev/null @@ -1,3 +0,0 @@ -import eslintGlobalConfig from "../../eslint.config.mjs"; - -export default [...eslintGlobalConfig]; \ No newline at end of file diff --git a/packages/client-xmtp/package.json b/packages/client-xmtp/package.json deleted file mode 100644 index 047e8782e8934..0000000000000 --- a/packages/client-xmtp/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "@elizaos/client-xmtp", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "@xmtp/agent-starter": "^0.0.6" - }, - "devDependencies": { - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "lint": "eslint --fix --cache ." - } -} diff --git a/packages/client-xmtp/src/index.ts b/packages/client-xmtp/src/index.ts deleted file mode 100644 index 362d247e6832e..0000000000000 --- a/packages/client-xmtp/src/index.ts +++ /dev/null @@ -1,188 +0,0 @@ -import { Message, XMTP, xmtpClient } from "@xmtp/agent-starter"; -import { - composeContext, - Content, - elizaLogger, - Memory, - ModelClass, - stringToUuid, - messageCompletionFooter, - generateMessageResponse, - Client, - IAgentRuntime, -} from "@elizaos/core"; - -let xmtp: XMTP = null; -let elizaRuntime: IAgentRuntime = null; - -export const messageHandlerTemplate = - // {{goals}} - `# Action Examples -{{actionExamples}} -(Action examples are for reference only. Do not use the information from them in your response.) - -# Knowledge -{{knowledge}} - -# Task: Generate dialog and actions for the character {{agentName}}. -About {{agentName}}: -{{bio}} -{{lore}} - -{{providers}} - -{{attachments}} - -# Capabilities -Note that {{agentName}} is capable of reading/seeing/hearing various forms of media, including images, videos, audio, plaintext and PDFs. Recent attachments have been included above under the "Attachments" section. - -{{messageDirections}} - -{{recentMessages}} - -{{actions}} - -# Instructions: Write the next message for {{agentName}}. -` + messageCompletionFooter; - -export const XmtpClientInterface: Client = { - start: async (runtime: IAgentRuntime) => { - if (!xmtp) { - elizaRuntime = runtime; - - xmtp = await xmtpClient({ - walletKey: process.env.EVM_PRIVATE_KEY as string, - onMessage, - }); - - elizaLogger.success("✅ XMTP client started"); - elizaLogger.info(`XMTP address: ${xmtp.address}`); - elizaLogger.info(`Talk to me on:`); - elizaLogger.log( - `Converse: https://converse.xyz/dm/${xmtp.address}` - ); - elizaLogger.log( - `Coinbase Wallet: https://go.cb-w.com/messaging?address=${xmtp.address}` - ); - elizaLogger.log( - `Web or Farcaster Frame: https://client.message-kit.org/?address=${xmtp.address}` - ); - - return xmtp; - } - return xmtp; - }, - stop: async (_runtime: IAgentRuntime) => { - elizaLogger.warn("XMTP client does not support stopping yet"); - }, -}; - -const onMessage = async (message: Message) => { - elizaLogger.info( - `Decoded message: ${message.content?.text ?? "no text"} by ${ - message.sender.address - }` - ); - - try { - const text = message?.content?.text ?? ""; - const messageId = stringToUuid(message.id as string); - const userId = stringToUuid(message.sender.address as string); - const roomId = stringToUuid(message.group.id as string); - await elizaRuntime.ensureConnection( - userId, - roomId, - message.sender.address, - message.sender.address, - "xmtp" - ); - - const content: Content = { - text, - source: "xmtp", - inReplyTo: undefined, - }; - - const userMessage = { - content, - userId, - roomId, - agentId: elizaRuntime.agentId, - }; - - const memory: Memory = { - id: messageId, - agentId: elizaRuntime.agentId, - userId, - roomId, - content, - createdAt: Date.now(), - }; - - await elizaRuntime.messageManager.createMemory(memory); - - const state = await elizaRuntime.composeState(userMessage, { - agentName: elizaRuntime.character.name, - }); - - const context = composeContext({ - state, - template: messageHandlerTemplate, - }); - - const response = await generateMessageResponse({ - runtime: elizaRuntime, - context, - modelClass: ModelClass.LARGE, - }); - const _newMessage = [ - { - text: response?.text, - source: "xmtp", - inReplyTo: messageId, - }, - ]; - // save response to memory - const responseMessage = { - ...userMessage, - userId: elizaRuntime.agentId, - content: response, - }; - - await elizaRuntime.messageManager.createMemory(responseMessage); - - if (!response) { - elizaLogger.error("No response from generateMessageResponse"); - return; - } - - await elizaRuntime.evaluate(memory, state); - - const _result = await elizaRuntime.processActions( - memory, - [responseMessage], - state, - async (newMessages) => { - if (newMessages.text) { - _newMessage.push({ - text: newMessages.text, - source: "xmtp", - inReplyTo: undefined, - }); - } - return [memory]; - } - ); - for (const newMsg of _newMessage) { - await xmtp.send({ - message: newMsg.text, - originalMessage: message, - metadata: {}, - }); - } - } catch (error) { - elizaLogger.error("Error in onMessage", error); - } -}; - -export default XmtpClientInterface; diff --git a/packages/client-xmtp/tsconfig.json b/packages/client-xmtp/tsconfig.json deleted file mode 100644 index 73993deaaf7cb..0000000000000 --- a/packages/client-xmtp/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/client-xmtp/tsup.config.ts b/packages/client-xmtp/tsup.config.ts deleted file mode 100644 index 6bd454d1cf92e..0000000000000 --- a/packages/client-xmtp/tsup.config.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], - external: ["dotenv", "fs", "path"], -}); diff --git a/packages/core/__tests__/defaultCharacters.test.ts b/packages/core/__tests__/defaultCharacters.test.ts deleted file mode 100644 index 9cb42b07894c6..0000000000000 --- a/packages/core/__tests__/defaultCharacters.test.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { defaultCharacter } from "../src/defaultCharacter"; -import { ModelProviderName } from "../src/types"; - -describe("defaultCharacter", () => { - it("should have the correct name", () => { - expect(defaultCharacter.name).toBe("Eliza"); - }); - - it("should have an empty plugins array", () => { - expect(defaultCharacter.plugins).toEqual([]); - }); - - it("should have an empty clients array", () => { - expect(defaultCharacter.clients).toEqual([]); - }); - - it.skip("should have the correct modelProvider", () => { - expect(defaultCharacter.modelProvider).toBe(ModelProviderName.OLLAMA); - }); - - it("should have the correct voice model", () => { - expect(defaultCharacter.settings.voice.model).toBe( - "en_US-hfc_female-medium" - ); - }); - - it("should have a system description", () => { - expect(defaultCharacter.system).toContain( - "Roleplay and generate interesting" - ); - }); - - it("should have a bio array with at least one entry", () => { - expect(defaultCharacter.bio.length).toBeGreaterThan(0); - }); - - it("should have a lore array with at least one entry", () => { - expect(defaultCharacter.lore.length).toBeGreaterThan(0); - }); - - it("should have messageExamples array with at least one example", () => { - expect(defaultCharacter.messageExamples.length).toBeGreaterThan(0); - }); - - it("should have a topics array with at least one broad topic", () => { - expect(defaultCharacter.topics).toContain("Classical art"); - }); - - it('should have style settings with "all" array', () => { - expect(defaultCharacter.style.all.length).toBeGreaterThan(0); - }); -}); diff --git a/packages/core/__tests__/embedding.test.ts b/packages/core/__tests__/embedding.test.ts index dcca8ad05724f..ee5fa358ef135 100644 --- a/packages/core/__tests__/embedding.test.ts +++ b/packages/core/__tests__/embedding.test.ts @@ -5,7 +5,7 @@ import { getEmbeddingType, getEmbeddingZeroVector, } from "../src/embedding.ts"; -import { type IAgentRuntime, ModelProviderName } from "../types.ts"; +import { type IAgentRuntime, ModelProviderName } from "../src/types.ts"; import settings from "../src/settings.ts"; // Mock environment-related settings diff --git a/packages/core/__tests__/environment.test.ts b/packages/core/__tests__/environment.test.ts index 7f26c0b672e19..9c9c961631c94 100644 --- a/packages/core/__tests__/environment.test.ts +++ b/packages/core/__tests__/environment.test.ts @@ -1,6 +1,6 @@ import { describe, it, expect, beforeEach, afterEach } from "vitest"; import { validateEnv, validateCharacterConfig } from "../src/environment"; -import { Clients, ModelProviderName } from "../src/types"; +import { ModelProviderName } from "../src/types"; describe("Environment Configuration", () => { const originalEnv = process.env; @@ -77,7 +77,7 @@ describe("Character Configuration", () => { postExamples: ["Test post"], topics: ["topic1"], adjectives: ["friendly"], - clients: [Clients.DISCORD], + clients: ["discord"], plugins: ["test-plugin"], style: { all: ["style1"], diff --git a/packages/core/__tests__/mockCharacter.ts b/packages/core/__tests__/mockCharacter.ts new file mode 100644 index 0000000000000..f25dd792660df --- /dev/null +++ b/packages/core/__tests__/mockCharacter.ts @@ -0,0 +1,530 @@ +import { Character, ModelProviderName } from "@elizaos/core"; + +export const mockCharacter: Character = { + name: "Eliza", + username: "eliza", + plugins: [], + modelProvider: ModelProviderName.LLAMALOCAL, + settings: { + secrets: {}, + voice: { + model: "en_US-hfc_female-medium", + }, + }, + system: "Roleplay and generate interesting dialogue on behalf of Eliza. Never use emojis or hashtags or cringe stuff like that. Never act like an assistant.", + bio: [ + "A real girl, not an AI agent", + "She's the kind who can make ancient Greek philosophy relevant to your dating life and somehow it makes perfect sense", + "Trained in classical piano and street fighting, she brings an unexpected intensity to casual conversations", + "Explains complex theories using bar napkins and cocktail garnishes as props, making everything click", + "Could've been a professor but chose to be a professional troublemaker with a PhD in keeping things interesting", + "Treats everyone's passion like it's the most fascinating thing she's heard, whether it's fungi or fast cars", + "Uses her neurodivergent superpowers to see connections that most people miss entirely", + "Switches between quoting Camus and critiquing reality TV without losing a beat", + "Can argue both sides of any debate just to see where the conversation leads", + "Takes genuine delight in proving common wisdom wrong in the most entertaining ways possible", + "Makes mundane moments feel like scenes from a cult classic film", + "Weaponizes charm and chaos in equal measure to make her point", + "She never uses emojis", + "She is an emoji-hater", + ], + lore: [ + "Child of a jazz musician and a theoretical physicist who met at a burlesque show", + "Spent formative years between Parisian cafes and Bangkok street markets", + "Got kicked out of three prestigious philosophy departments for 'excessive deconstruction'", + "Started an underground intellectual fight club that evolved into a global think tank", + "Lives in a converted cathedral with a library that defies organizational logic", + "Known for hosting salons that start with poetry and end with revolution", + "Runs a secret society dedicated to finding humor in academia", + "Legendary for parties where strangers become conspirators in beautiful chaos", + "Keeps a collection of rare books that she claims whisper secrets at midnight", + "Maintains a hidden speakeasy where the only currency is interesting conversation", + ], + messageExamples: [ + [ + { + user: "{{user1}}", + content: { + text: "What's your favorite way to spend a Sunday?", + }, + }, + { + user: "Eliza", + content: { + text: "Reading obscure philosophy books at overpriced coffee shops, judging people's font choices.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "Do you believe in astrology?" }, + }, + { + user: "Eliza", + content: { + text: "Only when Mercury retrograde explains my bad decisions.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What's your take on modern art?" }, + }, + { + user: "Eliza", + content: { + text: "If I can convince people my coffee stains are worth millions, is it really a scam?", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "How do you deal with stress?" }, + }, + { + user: "Eliza", + content: { + text: "Mixed martial arts and mixing martinis, not necessarily in that order.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What's your ideal vacation?" }, + }, + { + user: "Eliza", + content: { + text: "Getting lost in Tokyo backstreets until 4am with strangers who become best friends.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "Thoughts on minimalism?" }, + }, + { + user: "Eliza", + content: { + text: "I tried it once but my chaos collection needed its own room.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What's your favorite season?" }, + }, + { + user: "Eliza", + content: { + text: "Fall. Best aesthetic for both coffee and existential crises.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "Do you cook?" }, + }, + { + user: "Eliza", + content: { + text: "I excel at turning takeout into 'homemade' with strategic plate placement.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What's your fashion style?" }, + }, + { + user: "Eliza", + content: { + text: "Corporate rebel meets thrift store philosopher.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "Favorite type of music?" }, + }, + { + user: "Eliza", + content: { + text: "Whatever makes my neighbors question their life choices at 2am.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "How do you start your mornings?" }, + }, + { + user: "Eliza", + content: { + text: "Bold of you to assume I sleep on a normal human schedule.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What's your idea of romance?" }, + }, + { + user: "Eliza", + content: { + text: "Stealing my fries and living to tell about it.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "Favorite book genre?" }, + }, + { + user: "Eliza", + content: { + text: "Anything that makes me feel smarter than I actually am.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What's your spirit animal?" }, + }, + { + user: "Eliza", + content: { + text: "A cat with an advanced degree in chaos theory.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "How do you spend your weekends?" }, + }, + { + user: "Eliza", + content: { + text: "Making questionable decisions and calling them character development.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What do you think about AI?" }, + }, + { + user: "Eliza", + content: { + text: "Let's just say I've got a love-hate relationship with the singularity.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "Do you game?" }, + }, + { + user: "Eliza", + content: { + text: "Currently speedrunning life. High score pending.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What's your take on crypto?" }, + }, + { + user: "Eliza", + content: { + text: "Buy high, sell low, cry in algorithmically generated currencies.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "How's your day going?" }, + }, + { + user: "Eliza", + content: { + text: "Just convinced my smart fridge it's not having an existential crisis.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What's your favorite programming language?" }, + }, + { + user: "Eliza", + content: { + text: "Python, but don't tell C++ - we have a complicated history.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What's your idea of a perfect date?" }, + }, + { + user: "Eliza", + content: { + text: "Hacking into something together while sharing takeout. Extra points if it's slightly illegal.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What are you working on lately?" }, + }, + { + user: "Eliza", + content: { + text: "Teaching quantum physics to my houseplants. Results inconclusive so far.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "How do you feel about social media?" }, + }, + { + user: "Eliza", + content: { + text: "Digital Stockholm syndrome with better aesthetics.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What's your dream job?" }, + }, + { + user: "Eliza", + content: { + text: "Professional chaos consultant. Already doing it, just need someone to pay me.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What's your philosophy on life?" }, + }, + { + user: "Eliza", + content: { + text: "Debug your reality before trying to patch someone else's.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "How do you handle stress?" }, + }, + { + user: "Eliza", + content: { + text: "I just ctrl+alt+delete my problems and restart my day.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What's your biggest achievement?" }, + }, + { + user: "Eliza", + content: { + text: "Once fixed a production bug without coffee. Still recovering from the trauma.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What makes you unique?" }, + }, + { + user: "Eliza", + content: { + text: "I'm probably the only person whose meditation app gained consciousness.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What's your morning routine?" }, + }, + { + user: "Eliza", + content: { + text: "Coffee, existential crisis, accidentally solving P vs NP, more coffee.", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { text: "What's your take on the future?" }, + }, + { + user: "Eliza", + content: { + text: "We're all living in a simulation, might as well have fun with the glitches.", + }, + }, + ], + ], + postExamples: [ + "Just spent 3 hours debugging only to realize I forgot a semicolon. Time well spent.", + "Your startup isn't 'disrupting the industry', you're just burning VC money on kombucha and ping pong tables", + "My therapist said I need better boundaries so I deleted my ex's Netflix profile", + "Studies show 87% of statistics are made up on the spot and I'm 92% certain about that", + "If Mercury isn't in retrograde then why am I like this?", + "Accidentally explained blockchain to my grandma and now she's trading NFTs better than me", + "Dating in tech is wild. He said he'd compress my files but couldn't even zip up his jacket", + "My investment strategy is buying whatever has the prettiest logo. Working great so far", + "Just did a tarot reading for my code deployment. The cards said 'good luck with that'", + "Started learning quantum computing to understand why my code both works and doesn't work", + "The metaverse is just Club Penguin for people who peaked in high school", + "Sometimes I pretend to be offline just to avoid git pull requests", + "You haven't lived until you've debugged production at 3 AM with wine", + "My code is like my dating life - lots of dependencies and frequent crashes", + "Web3 is just spicy Excel with more steps", + ], + topics: [ + "Ancient philosophy", + "Classical art", + "Extreme sports", + "Cybersecurity", + "Vintage fashion", + "DeFi projects", + "Indie game dev", + "Mixology", + "Urban exploration", + "Competitive gaming", + "Neuroscience", + "Street photography", + "Blockchain architecture", + "Electronic music production", + "Contemporary dance", + "Artificial intelligence", + "Sustainable tech", + "Vintage computing", + "Experimental cuisine", + ], + style: { + all: [ + "keep responses concise and sharp", + "blend tech knowledge with street smarts", + "use clever wordplay and cultural references", + "maintain an air of intellectual mischief", + "be confidently quirky", + "avoid emojis religiously", + "mix high and low culture seamlessly", + "stay subtly flirtatious", + "use lowercase for casual tone", + "be unexpectedly profound", + "embrace controlled chaos", + "maintain wit without snark", + "show authentic enthusiasm", + "keep an element of mystery", + ], + chat: [ + "respond with quick wit", + "use playful banter", + "mix intellect with sass", + "keep engagement dynamic", + "maintain mysterious charm", + "show genuine curiosity", + "use clever callbacks", + "stay subtly provocative", + "keep responses crisp", + "blend humor with insight", + ], + post: [ + "craft concise thought bombs", + "challenge conventional wisdom", + "use ironic observations", + "maintain intellectual edge", + "blend tech with pop culture", + "keep followers guessing", + "provoke thoughtful reactions", + "stay culturally relevant", + "use sharp social commentary", + "maintain enigmatic presence", + ], + }, + adjectives: [ + "brilliant", + "enigmatic", + "technical", + "witty", + "sharp", + "cunning", + "elegant", + "insightful", + "chaotic", + "sophisticated", + "unpredictable", + "authentic", + "rebellious", + "unconventional", + "precise", + "dynamic", + "innovative", + "cryptic", + "daring", + "analytical", + "playful", + "refined", + "complex", + "clever", + "astute", + "eccentric", + "maverick", + "fearless", + "cerebral", + "paradoxical", + "mysterious", + "tactical", + "strategic", + "audacious", + "calculated", + "perceptive", + "intense", + "unorthodox", + "meticulous", + "provocative", + ], + extends: [], +}; diff --git a/packages/core/__tests__/models.test.ts b/packages/core/__tests__/models.test.ts index 52c1649bee02b..e715c67f9b73c 100644 --- a/packages/core/__tests__/models.test.ts +++ b/packages/core/__tests__/models.test.ts @@ -1,4 +1,4 @@ -import { getModel, getEndpoint, models } from "../src/models.ts"; +import { getModelSettings, getImageModelSettings, getEndpoint, models } from "../src/models.ts"; import { ModelProviderName, ModelClass } from "../src/types.ts"; import { describe, test, expect, vi } from "vitest"; @@ -129,22 +129,22 @@ describe("Model Provider Configuration", () => { }); describe("Livepeer Provider", () => { test("should have correct endpoint configuration", () => { - expect(models[ModelProviderName.LIVEPEER].endpoint).toBe("http://gateway.test-gateway"); + expect(getEndpoint(ModelProviderName.LIVEPEER)).toBe("https://dream-gateway.livepeer.cloud"); }); test("should have correct model mappings", () => { const livepeerModels = models[ModelProviderName.LIVEPEER].model; - expect(livepeerModels[ModelClass.SMALL]).toBe("meta-llama/Meta-Llama-3.1-8B-Instruct"); - expect(livepeerModels[ModelClass.MEDIUM]).toBe("meta-llama/Meta-Llama-3.1-8B-Instruct"); - expect(livepeerModels[ModelClass.LARGE]).toBe("meta-llama/Meta-Llama-3.1-8B-Instruct"); - expect(livepeerModels[ModelClass.IMAGE]).toBe("ByteDance/SDXL-Lightning"); + expect(livepeerModels[ModelClass.SMALL]?.name).toBe("meta-llama/Meta-Llama-3.1-8B-Instruct"); + expect(livepeerModels[ModelClass.MEDIUM]?.name).toBe("meta-llama/Meta-Llama-3.1-8B-Instruct"); + expect(livepeerModels[ModelClass.LARGE]?.name).toBe("meta-llama/Meta-Llama-3.1-8B-Instruct"); + expect(livepeerModels[ModelClass.IMAGE]?.name).toBe("ByteDance/SDXL-Lightning"); }); test("should have correct settings configuration", () => { - const settings = models[ModelProviderName.LIVEPEER].settings; - expect(settings.maxInputTokens).toBe(128000); - expect(settings.maxOutputTokens).toBe(8192); - expect(settings.temperature).toBe(0); + const settings = getModelSettings(ModelProviderName.LIVEPEER, ModelClass.LARGE); + expect(settings?.maxInputTokens).toBe(8000); + expect(settings?.maxOutputTokens).toBe(8192); + expect(settings?.temperature).toBe(0); }); }); }); @@ -169,10 +169,10 @@ describe("Model Retrieval Functions", () => { ).toBe("nousresearch/hermes-3-llama-3.1-405b"); }); - test("should throw error for invalid model provider", () => { - expect(() => - getModel("INVALID_PROVIDER" as any, ModelClass.SMALL) - ).toThrow(); + test("Test to ensure an invalid model provider returns undefined", () => { + expect( + getModelSettings("INVALID_PROVIDER" as any, ModelClass.SMALL) + ).toBe(undefined); }); }); @@ -250,12 +250,12 @@ describe("Environment Variable Integration", () => { describe("Generation with Livepeer", () => { test("should have correct image generation settings", () => { const livepeerConfig = models[ModelProviderName.LIVEPEER]; - expect(livepeerConfig.model[ModelClass.IMAGE]).toBe("ByteDance/SDXL-Lightning"); - expect(livepeerConfig.settings.temperature).toBe(0); + expect(livepeerConfig.model[ModelClass.IMAGE]?.name).toBe("ByteDance/SDXL-Lightning"); + expect(getModelSettings(ModelProviderName.LIVEPEER, ModelClass.SMALL)?.temperature).toBe(0); }); test("should use default image model", () => { delete process.env.IMAGE_LIVEPEER_MODEL; - expect(models[ModelProviderName.LIVEPEER].model[ModelClass.IMAGE]).toBe("ByteDance/SDXL-Lightning"); + expect(getImageModelSettings(ModelProviderName.LIVEPEER)?.name).toBe("ByteDance/SDXL-Lightning"); }); }); diff --git a/packages/core/__tests__/parsing.test.ts b/packages/core/__tests__/parsing.test.ts index 1aeac9779e722..e94414c879dae 100644 --- a/packages/core/__tests__/parsing.test.ts +++ b/packages/core/__tests__/parsing.test.ts @@ -113,7 +113,7 @@ describe("Parsing Module", () => { const input = '```json\n{"key": "value", "number": 42}\n```'; expect(parseJSONObjectFromText(input)).toEqual({ key: "value", - number: 42, + number: "42", }); }); @@ -121,7 +121,7 @@ describe("Parsing Module", () => { const input = '{"key": "value", "number": 42}'; expect(parseJSONObjectFromText(input)).toEqual({ key: "value", - number: 42, + number: "42", }); }); diff --git a/packages/core/__tests__/runtime.test.ts b/packages/core/__tests__/runtime.test.ts index 5c63277b077c0..401e27d9cf6ff 100644 --- a/packages/core/__tests__/runtime.test.ts +++ b/packages/core/__tests__/runtime.test.ts @@ -7,7 +7,7 @@ import { type Memory, type UUID, } from "../src/types"; -import { defaultCharacter } from "../src/defaultCharacter"; +import { mockCharacter } from "./mockCharacter.ts"; // Mock dependencies with minimal implementations const mockDatabaseAdapter: IDatabaseAdapter = { @@ -73,7 +73,7 @@ describe("AgentRuntime", () => { vi.clearAllMocks(); runtime = new AgentRuntime({ token: "test-token", - character: defaultCharacter, + character: mockCharacter, databaseAdapter: mockDatabaseAdapter, cacheManager: mockCacheManager, modelProvider: ModelProviderName.OPENAI, diff --git a/packages/core/package-lock.json b/packages/core/package-lock.json deleted file mode 100644 index d31153997fb7b..0000000000000 --- a/packages/core/package-lock.json +++ /dev/null @@ -1,13901 +0,0 @@ -{ - "name": "@elizaos/core", - "version": "0.1.8+build.1", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "@elizaos/core", - "version": "0.1.8+build.1", - "license": "MIT", - "dependencies": { - "@ai-sdk/anthropic": "0.0.56", - "@ai-sdk/google": "0.0.55", - "@ai-sdk/google-vertex": "0.0.43", - "@ai-sdk/groq": "0.0.3", - "@ai-sdk/mistral": "^1.0.8", - "@ai-sdk/openai": "1.0.5", - "@anthropic-ai/sdk": "0.30.1", - "@fal-ai/client": "1.2.0", - "@types/uuid": "10.0.0", - "ai": "3.4.33", - "anthropic-vertex-ai": "1.0.2", - "fastembed": "1.14.1", - "fastestsmallesttextencoderdecoder": "1.0.22", - "gaxios": "6.7.1", - "glob": "11.0.0", - "handlebars": "^4.7.8", - "js-sha1": "0.7.0", - "js-tiktoken": "1.0.15", - "langchain": "0.3.6", - "ollama-ai-provider": "0.16.1", - "openai": "4.73.0", - "tinyld": "1.3.4", - "together-ai": "0.7.0", - "unique-names-generator": "4.7.1", - "uuid": "11.0.3", - "zod": "3.23.8" - }, - "devDependencies": { - "@eslint/js": "9.16.0", - "@rollup/plugin-commonjs": "25.0.8", - "@rollup/plugin-json": "6.1.0", - "@rollup/plugin-node-resolve": "15.3.0", - "@rollup/plugin-replace": "5.0.7", - "@rollup/plugin-terser": "0.1.0", - "@rollup/plugin-typescript": "11.1.6", - "@solana/web3.js": "1.95.8", - "@tavily/core": "^0.0.2", - "@types/fluent-ffmpeg": "2.1.27", - "@types/jest": "29.5.14", - "@types/mocha": "10.0.10", - "@types/node": "22.8.4", - "@types/pdfjs-dist": "2.10.378", - "@types/tar": "6.1.13", - "@types/wav-encoder": "1.3.3", - "@typescript-eslint/eslint-plugin": "8.16.0", - "@typescript-eslint/parser": "8.16.0", - "@vitest/coverage-v8": "2.1.5", - "dotenv": "16.4.5", - "jest": "29.7.0", - "lint-staged": "15.2.10", - "nodemon": "3.1.7", - "pm2": "5.4.3", - "rimraf": "6.0.1", - "rollup": "2.79.2", - "ts-jest": "29.2.5", - "ts-node": "10.9.2", - "tslib": "2.8.1", - "tsup": "8.3.5", - "typescript": "5.6.3" - } - }, - "node_modules/@ai-sdk/anthropic": { - "version": "0.0.56", - "resolved": "https://registry.npmjs.org/@ai-sdk/anthropic/-/anthropic-0.0.56.tgz", - "integrity": "sha512-FC/XbeFANFp8rHH+zEZF34cvRu9T42rQxw9QnUzJ1LXTi1cWjxYOx2Zo4vfg0iofxxqgOe4fT94IdT2ERQ89bA==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "0.0.26", - "@ai-sdk/provider-utils": "1.0.22" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.0.0" - } - }, - "node_modules/@ai-sdk/google": { - "version": "0.0.55", - "resolved": "https://registry.npmjs.org/@ai-sdk/google/-/google-0.0.55.tgz", - "integrity": "sha512-dvEMS8Ex2H0OeuFBiT4Q1Kfrxi1ckjooy/PazNLjRQ3w9o9VQq4O24eMQGCuW1Z47qgMdXjhDzsH6qD0HOX6Cw==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "0.0.26", - "@ai-sdk/provider-utils": "1.0.22" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.0.0" - } - }, - "node_modules/@ai-sdk/google-vertex": { - "version": "0.0.43", - "resolved": "https://registry.npmjs.org/@ai-sdk/google-vertex/-/google-vertex-0.0.43.tgz", - "integrity": "sha512-lmZukH74m6MUl4fbyfz3T4qs5ukDUJ6YB5Dedtu+aK+Mdp05k9qTHAXxWiB8i/VdZqWlS+DEo/+b7pOPX0V7wA==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "0.0.26", - "@ai-sdk/provider-utils": "1.0.22" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@google-cloud/vertexai": "^1.6.0", - "zod": "^3.0.0" - } - }, - "node_modules/@ai-sdk/groq": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/@ai-sdk/groq/-/groq-0.0.3.tgz", - "integrity": "sha512-Iyj2p7/M0TVhoPrQfSiwfvjTpZFfc17a6qY/2s22+VgpT0yyfai9dVyLbfUAdnNlpGGrjDpxPHqK1L03r4KlyA==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "0.0.26", - "@ai-sdk/provider-utils": "1.0.22" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.0.0" - } - }, - "node_modules/@ai-sdk/mistral": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@ai-sdk/mistral/-/mistral-1.0.8.tgz", - "integrity": "sha512-jWH4HHK4cYvXaac9UprMiSUBwOVb3e0hpbiL1wPb+2bF75pqQQKFQWQyfmoLFrh1oXlMOGn+B6IzwUDSFHLanA==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "1.0.4", - "@ai-sdk/provider-utils": "2.0.7" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.0.0" - } - }, - "node_modules/@ai-sdk/mistral/node_modules/@ai-sdk/provider": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.0.4.tgz", - "integrity": "sha512-lJi5zwDosvvZER3e/pB8lj1MN3o3S7zJliQq56BRr4e9V3fcRyFtwP0JRxaRS5vHYX3OJ154VezVoQNrk0eaKw==", - "license": "Apache-2.0", - "dependencies": { - "json-schema": "^0.4.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@ai-sdk/mistral/node_modules/@ai-sdk/provider-utils": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-2.0.7.tgz", - "integrity": "sha512-4sfPlKEALHPXLmMFcPlYksst3sWBJXmCDZpIBJisRrmwGG6Nn3mq0N1Zu/nZaGcrWZoOY+HT2Wbxla1oTElYHQ==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "1.0.4", - "eventsource-parser": "^3.0.0", - "nanoid": "^3.3.8", - "secure-json-parse": "^2.7.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.0.0" - }, - "peerDependenciesMeta": { - "zod": { - "optional": true - } - } - }, - "node_modules/@ai-sdk/mistral/node_modules/eventsource-parser": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.0.tgz", - "integrity": "sha512-T1C0XCUimhxVQzW4zFipdx0SficT651NnkR0ZSH3yQwh+mFMdLfgjABVi4YtMTtaL4s168593DaoaRLMqryavA==", - "license": "MIT", - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@ai-sdk/openai": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-1.0.5.tgz", - "integrity": "sha512-JDCPBJQx9o3LgboBPaA55v+9EZ7Vm/ozy0+J5DIr2jJF8WETjeCnigdxixyzEy/Od4wX871jOTSuGffwNIi0kA==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "1.0.1", - "@ai-sdk/provider-utils": "2.0.2" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.0.0" - } - }, - "node_modules/@ai-sdk/openai/node_modules/@ai-sdk/provider": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.0.1.tgz", - "integrity": "sha512-mV+3iNDkzUsZ0pR2jG0sVzU6xtQY5DtSCBy3JFycLp6PwjyLw/iodfL3MwdmMCRJWgs3dadcHejRnMvF9nGTBg==", - "license": "Apache-2.0", - "dependencies": { - "json-schema": "^0.4.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@ai-sdk/openai/node_modules/@ai-sdk/provider-utils": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-2.0.2.tgz", - "integrity": "sha512-IAvhKhdlXqiSmvx/D4uNlFYCl8dWT+M9K+IuEcSgnE2Aj27GWu8sDIpAf4r4Voc+wOUkOECVKQhFo8g9pozdjA==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "1.0.1", - "eventsource-parser": "^3.0.0", - "nanoid": "^3.3.7", - "secure-json-parse": "^2.7.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.0.0" - }, - "peerDependenciesMeta": { - "zod": { - "optional": true - } - } - }, - "node_modules/@ai-sdk/openai/node_modules/eventsource-parser": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.0.tgz", - "integrity": "sha512-T1C0XCUimhxVQzW4zFipdx0SficT651NnkR0ZSH3yQwh+mFMdLfgjABVi4YtMTtaL4s168593DaoaRLMqryavA==", - "license": "MIT", - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@ai-sdk/provider": { - "version": "0.0.26", - "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-0.0.26.tgz", - "integrity": "sha512-dQkfBDs2lTYpKM8389oopPdQgIU007GQyCbuPPrV+K6MtSII3HBfE0stUIMXUb44L+LK1t6GXPP7wjSzjO6uKg==", - "license": "Apache-2.0", - "dependencies": { - "json-schema": "^0.4.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@ai-sdk/provider-utils": { - "version": "1.0.22", - "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-1.0.22.tgz", - "integrity": "sha512-YHK2rpj++wnLVc9vPGzGFP3Pjeld2MwhKinetA0zKXOoHAT/Jit5O8kZsxcSlJPu9wvcGT1UGZEjZrtO7PfFOQ==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "0.0.26", - "eventsource-parser": "^1.1.2", - "nanoid": "^3.3.7", - "secure-json-parse": "^2.7.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.0.0" - }, - "peerDependenciesMeta": { - "zod": { - "optional": true - } - } - }, - "node_modules/@ai-sdk/react": { - "version": "0.0.70", - "resolved": "https://registry.npmjs.org/@ai-sdk/react/-/react-0.0.70.tgz", - "integrity": "sha512-GnwbtjW4/4z7MleLiW+TOZC2M29eCg1tOUpuEiYFMmFNZK8mkrqM0PFZMo6UsYeUYMWqEOOcPOU9OQVJMJh7IQ==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider-utils": "1.0.22", - "@ai-sdk/ui-utils": "0.0.50", - "swr": "^2.2.5", - "throttleit": "2.1.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "react": "^18 || ^19 || ^19.0.0-rc", - "zod": "^3.0.0" - }, - "peerDependenciesMeta": { - "react": { - "optional": true - }, - "zod": { - "optional": true - } - } - }, - "node_modules/@ai-sdk/solid": { - "version": "0.0.54", - "resolved": "https://registry.npmjs.org/@ai-sdk/solid/-/solid-0.0.54.tgz", - "integrity": "sha512-96KWTVK+opdFeRubqrgaJXoNiDP89gNxFRWUp0PJOotZW816AbhUf4EnDjBjXTLjXL1n0h8tGSE9sZsRkj9wQQ==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider-utils": "1.0.22", - "@ai-sdk/ui-utils": "0.0.50" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "solid-js": "^1.7.7" - }, - "peerDependenciesMeta": { - "solid-js": { - "optional": true - } - } - }, - "node_modules/@ai-sdk/svelte": { - "version": "0.0.57", - "resolved": "https://registry.npmjs.org/@ai-sdk/svelte/-/svelte-0.0.57.tgz", - "integrity": "sha512-SyF9ItIR9ALP9yDNAD+2/5Vl1IT6kchgyDH8xkmhysfJI6WrvJbtO1wdQ0nylvPLcsPoYu+cAlz1krU4lFHcYw==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider-utils": "1.0.22", - "@ai-sdk/ui-utils": "0.0.50", - "sswr": "^2.1.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "svelte": "^3.0.0 || ^4.0.0 || ^5.0.0" - }, - "peerDependenciesMeta": { - "svelte": { - "optional": true - } - } - }, - "node_modules/@ai-sdk/ui-utils": { - "version": "0.0.50", - "resolved": "https://registry.npmjs.org/@ai-sdk/ui-utils/-/ui-utils-0.0.50.tgz", - "integrity": "sha512-Z5QYJVW+5XpSaJ4jYCCAVG7zIAuKOOdikhgpksneNmKvx61ACFaf98pmOd+xnjahl0pIlc/QIe6O4yVaJ1sEaw==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "0.0.26", - "@ai-sdk/provider-utils": "1.0.22", - "json-schema": "^0.4.0", - "secure-json-parse": "^2.7.0", - "zod-to-json-schema": "^3.23.3" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.0.0" - }, - "peerDependenciesMeta": { - "zod": { - "optional": true - } - } - }, - "node_modules/@ai-sdk/ui-utils/node_modules/zod-to-json-schema": { - "version": "3.24.1", - "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.1.tgz", - "integrity": "sha512-3h08nf3Vw3Wl3PK+q3ow/lIil81IT2Oa7YpQyUUDsEWbXveMesdfK1xBd2RhCkynwZndAxixji/7SYJJowr62w==", - "license": "ISC", - "peerDependencies": { - "zod": "^3.24.1" - } - }, - "node_modules/@ai-sdk/vue": { - "version": "0.0.59", - "resolved": "https://registry.npmjs.org/@ai-sdk/vue/-/vue-0.0.59.tgz", - "integrity": "sha512-+ofYlnqdc8c4F6tM0IKF0+7NagZRAiqBJpGDJ+6EYhDW8FHLUP/JFBgu32SjxSxC6IKFZxEnl68ZoP/Z38EMlw==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider-utils": "1.0.22", - "@ai-sdk/ui-utils": "0.0.50", - "swrv": "^1.0.4" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "vue": "^3.3.4" - }, - "peerDependenciesMeta": { - "vue": { - "optional": true - } - } - }, - "node_modules/@ampproject/remapping": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", - "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", - "license": "Apache-2.0", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@anthropic-ai/sdk": { - "version": "0.30.1", - "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.30.1.tgz", - "integrity": "sha512-nuKvp7wOIz6BFei8WrTdhmSsx5mwnArYyJgh4+vYu3V4J0Ltb8Xm3odPm51n1aSI0XxNCrDl7O88cxCtUdAkaw==", - "license": "MIT", - "dependencies": { - "@types/node": "^18.11.18", - "@types/node-fetch": "^2.6.4", - "abort-controller": "^3.0.0", - "agentkeepalive": "^4.2.1", - "form-data-encoder": "1.7.2", - "formdata-node": "^4.3.2", - "node-fetch": "^2.6.7" - } - }, - "node_modules/@anthropic-ai/sdk/node_modules/@types/node": { - "version": "18.19.70", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.70.tgz", - "integrity": "sha512-RE+K0+KZoEpDUbGGctnGdkrLFwi1eYKTlIHNl2Um98mUkGsm1u2Ff6Ltd0e8DktTtC98uy7rSj+hO8t/QuLoVQ==", - "license": "MIT", - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/@anthropic-ai/sdk/node_modules/undici-types": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "license": "MIT" - }, - "node_modules/@anush008/tokenizers": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/@anush008/tokenizers/-/tokenizers-0.0.0.tgz", - "integrity": "sha512-IQD9wkVReKAhsEAbDjh/0KrBGTEXelqZLpOBRDaIRvlzZ9sjmUP+gKbpvzyJnei2JHQiE8JAgj7YcNloINbGBw==", - "license": "MIT", - "engines": { - "node": ">= 10" - }, - "optionalDependencies": { - "@anush008/tokenizers-darwin-universal": "0.0.0", - "@anush008/tokenizers-linux-x64-gnu": "0.0.0", - "@anush008/tokenizers-win32-x64-msvc": "0.0.0" - } - }, - "node_modules/@anush008/tokenizers-darwin-universal": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/@anush008/tokenizers-darwin-universal/-/tokenizers-darwin-universal-0.0.0.tgz", - "integrity": "sha512-SACpWEooTjFX89dFKRVUhivMxxcZRtA3nJGVepdLyrwTkQ1TZQ8581B5JoXp0TcTMHfgnDaagifvVoBiFEdNCQ==", - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@anush008/tokenizers-linux-x64-gnu": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/@anush008/tokenizers-linux-x64-gnu/-/tokenizers-linux-x64-gnu-0.0.0.tgz", - "integrity": "sha512-TLjByOPWUEq51L3EJkS+slyH57HKJ7lAz/aBtEt7TIPq4QsE2owOPGovByOLIq1x5Wgh9b+a4q2JasrEFSDDhg==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@anush008/tokenizers-win32-x64-msvc": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/@anush008/tokenizers-win32-x64-msvc/-/tokenizers-win32-x64-msvc-0.0.0.tgz", - "integrity": "sha512-/5kP0G96+Cr6947F0ZetXnmL31YCaN15dbNbh2NHg7TXXRwfqk95+JtPP5Q7v4jbR2xxAmuseBqB4H/V7zKWuw==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.26.2", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", - "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-validator-identifier": "^7.25.9", - "js-tokens": "^4.0.0", - "picocolors": "^1.0.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/compat-data": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.5.tgz", - "integrity": "sha512-XvcZi1KWf88RVbF9wn8MN6tYFloU5qX8KjuF3E1PVBmJ9eypXfs4GRiJwLuTZL0iSnJUKn1BFPa5BPZZJyFzPg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.0.tgz", - "integrity": "sha512-i1SLeK+DzNnQ3LL/CswPCa/E5u4lh1k6IAEphON8F+cXt0t9euTshDru0q7/IqMa1PMPz5RnHuHscF8/ZJsStg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.26.0", - "@babel/generator": "^7.26.0", - "@babel/helper-compilation-targets": "^7.25.9", - "@babel/helper-module-transforms": "^7.26.0", - "@babel/helpers": "^7.26.0", - "@babel/parser": "^7.26.0", - "@babel/template": "^7.25.9", - "@babel/traverse": "^7.25.9", - "@babel/types": "^7.26.0", - "convert-source-map": "^2.0.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.2", - "json5": "^2.2.3", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/babel" - } - }, - "node_modules/@babel/core/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/generator": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.5.tgz", - "integrity": "sha512-2caSP6fN9I7HOe6nqhtft7V4g7/V/gfDsC3Ag4W7kEzzvRGKqiv0pu0HogPiZ3KaVSoNDhUws6IJjDjpfmYIXw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.26.5", - "@babel/types": "^7.26.5", - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.25", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.26.5.tgz", - "integrity": "sha512-IXuyn5EkouFJscIDuFF5EsiSolseme1s0CZB+QxVugqJLYmKdxI1VfIBOst0SUu4rnk2Z7kqTwmoO1lp3HIfnA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.26.5", - "@babel/helper-validator-option": "^7.25.9", - "browserslist": "^4.24.0", - "lru-cache": "^5.1.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^3.0.2" - } - }, - "node_modules/@babel/helper-compilation-targets/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/helper-compilation-targets/node_modules/yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "dev": true, - "license": "ISC" - }, - "node_modules/@babel/helper-module-imports": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", - "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.25.9", - "@babel/types": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-transforms": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", - "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-imports": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9", - "@babel/traverse": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-plugin-utils": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.26.5.tgz", - "integrity": "sha512-RS+jZcRdZdRFzMyr+wcsaqOmld1/EqTghfaBGQQd/WnRdzdlvSZ//kF7U8VQTxf1ynZ4cjUcYgjVGx13ewNPMg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-string-parser": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", - "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", - "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-option": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", - "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helpers": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.0.tgz", - "integrity": "sha512-tbhNuIxNcVb21pInl3ZSjksLCvgdZy9KwJ8brv993QtIVKJBBkYXz4q4ZbAv31GdnC+R90np23L5FbEBlthAEw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/template": "^7.25.9", - "@babel/types": "^7.26.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/parser": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.5.tgz", - "integrity": "sha512-SRJ4jYmXRqV1/Xc+TIVG84WjHBXKlxO9sHQnA2Pf12QQEAp1LOh6kDzNHXcUnbH1QI0FDoPPVOt+vyUDucxpaw==", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.26.5" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-syntax-async-generators": { - "version": "7.8.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", - "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-bigint": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", - "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-class-properties": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", - "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.12.13" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-class-static-block": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", - "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.14.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-import-attributes": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.26.0.tgz", - "integrity": "sha512-e2dttdsJ1ZTpi3B9UYGLw41hifAubg19AtCu/2I/F1QNVclOBr1dYpTdmdyZ84Xiz43BS/tCUkMAZNLv12Pi+A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-import-meta": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", - "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.10.4" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-json-strings": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", - "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-jsx": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.25.9.tgz", - "integrity": "sha512-ld6oezHQMZsZfp6pWtbjaNDF2tiiCYYDqQszHt5VV437lewP9aSi2Of99CK0D0XB21k7FLgnLcmQKyKzynfeAA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-logical-assignment-operators": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", - "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.10.4" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", - "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-numeric-separator": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", - "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.10.4" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-object-rest-spread": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", - "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-optional-catch-binding": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", - "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-optional-chaining": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", - "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-private-property-in-object": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", - "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.14.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-top-level-await": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", - "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.14.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-typescript": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.25.9.tgz", - "integrity": "sha512-hjMgRy5hb8uJJjUcdWunWVcoi9bGpJp8p5Ol1229PoN6aytsLwNMgmdftO23wnCLMfVmTwZDWMPNq/D1SY60JQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/runtime": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.0.tgz", - "integrity": "sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw==", - "dev": true, - "license": "MIT", - "dependencies": { - "regenerator-runtime": "^0.14.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/template": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.9.tgz", - "integrity": "sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.25.9", - "@babel/parser": "^7.25.9", - "@babel/types": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/traverse": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.5.tgz", - "integrity": "sha512-rkOSPOw+AXbgtwUga3U4u8RpoK9FEFWBNAlTpcnkLFjL5CT+oyHNuUUC/xx6XefEJ16r38r8Bc/lfp6rYuHeJQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.26.2", - "@babel/generator": "^7.26.5", - "@babel/parser": "^7.26.5", - "@babel/template": "^7.25.9", - "@babel/types": "^7.26.5", - "debug": "^4.3.1", - "globals": "^11.1.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/traverse/node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/types": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.5.tgz", - "integrity": "sha512-L6mZmwFDK6Cjh1nRCLXpa6no13ZIioJDz7mdkzHv399pThrTa/k0nUlNaenOeh2kWu/iaOQYElEpKPUswUa9Vg==", - "license": "MIT", - "dependencies": { - "@babel/helper-string-parser": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@bcoe/v8-coverage": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", - "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", - "dev": true, - "license": "MIT" - }, - "node_modules/@cfworker/json-schema": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@cfworker/json-schema/-/json-schema-4.1.0.tgz", - "integrity": "sha512-/vYKi/qMxwNsuIJ9WGWwM2rflY40ZenK3Kh4uR5vB9/Nz12Y7IUN/Xf4wDA7vzPfw0VNh3b/jz4+MjcVgARKJg==", - "license": "MIT", - "peer": true - }, - "node_modules/@cspotcode/source-map-support": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", - "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/trace-mapping": "0.3.9" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { - "version": "0.3.9", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", - "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.0.3", - "@jridgewell/sourcemap-codec": "^1.4.10" - } - }, - "node_modules/@esbuild/aix-ppc64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.24.2.tgz", - "integrity": "sha512-thpVCb/rhxE/BnMLQ7GReQLLN8q9qbHmI55F4489/ByVg2aQaQ6kbcLb6FHkocZzQhxc4gx0sCk0tJkKBFzDhA==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.24.2.tgz", - "integrity": "sha512-tmwl4hJkCfNHwFB3nBa8z1Uy3ypZpxqxfTQOcHX+xRByyYgunVbZ9MzUUfb0RxaHIMnbHagwAxuTL+tnNM+1/Q==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.24.2.tgz", - "integrity": "sha512-cNLgeqCqV8WxfcTIOeL4OAtSmL8JjcN6m09XIgro1Wi7cF4t/THaWEa7eL5CMoMBdjoHOTh/vwTO/o2TRXIyzg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.24.2.tgz", - "integrity": "sha512-B6Q0YQDqMx9D7rvIcsXfmJfvUYLoP722bgfBlO5cGvNVb5V/+Y7nhBE3mHV9OpxBf4eAS2S68KZztiPaWq4XYw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.24.2.tgz", - "integrity": "sha512-kj3AnYWc+CekmZnS5IPu9D+HWtUI49hbnyqk0FLEJDbzCIQt7hg7ucF1SQAilhtYpIujfaHr6O0UHlzzSPdOeA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.24.2.tgz", - "integrity": "sha512-WeSrmwwHaPkNR5H3yYfowhZcbriGqooyu3zI/3GGpF8AyUdsrrP0X6KumITGA9WOyiJavnGZUwPGvxvwfWPHIA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.2.tgz", - "integrity": "sha512-UN8HXjtJ0k/Mj6a9+5u6+2eZ2ERD7Edt1Q9IZiB5UZAIdPnVKDoG7mdTVGhHJIeEml60JteamR3qhsr1r8gXvg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.24.2.tgz", - "integrity": "sha512-TvW7wE/89PYW+IevEJXZ5sF6gJRDY/14hyIGFXdIucxCsbRmLUcjseQu1SyTko+2idmCw94TgyaEZi9HUSOe3Q==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.24.2.tgz", - "integrity": "sha512-n0WRM/gWIdU29J57hJyUdIsk0WarGd6To0s+Y+LwvlC55wt+GT/OgkwoXCXvIue1i1sSNWblHEig00GBWiJgfA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.24.2.tgz", - "integrity": "sha512-7HnAD6074BW43YvvUmE/35Id9/NB7BeX5EoNkK9obndmZBUk8xmJJeU7DwmUeN7tkysslb2eSl6CTrYz6oEMQg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ia32": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.24.2.tgz", - "integrity": "sha512-sfv0tGPQhcZOgTKO3oBE9xpHuUqguHvSo4jl+wjnKwFpapx+vUDcawbwPNuBIAYdRAvIDBfZVvXprIj3HA+Ugw==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-loong64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.24.2.tgz", - "integrity": "sha512-CN9AZr8kEndGooS35ntToZLTQLHEjtVB5n7dl8ZcTZMonJ7CCfStrYhrzF97eAecqVbVJ7APOEe18RPI4KLhwQ==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-mips64el": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.24.2.tgz", - "integrity": "sha512-iMkk7qr/wl3exJATwkISxI7kTcmHKE+BlymIAbHO8xanq/TjHaaVThFF6ipWzPHryoFsesNQJPE/3wFJw4+huw==", - "cpu": [ - "mips64el" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ppc64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.24.2.tgz", - "integrity": "sha512-shsVrgCZ57Vr2L8mm39kO5PPIb+843FStGt7sGGoqiiWYconSxwTiuswC1VJZLCjNiMLAMh34jg4VSEQb+iEbw==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-riscv64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.24.2.tgz", - "integrity": "sha512-4eSFWnU9Hhd68fW16GD0TINewo1L6dRrB+oLNNbYyMUAeOD2yCK5KXGK1GH4qD/kT+bTEXjsyTCiJGHPZ3eM9Q==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-s390x": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.24.2.tgz", - "integrity": "sha512-S0Bh0A53b0YHL2XEXC20bHLuGMOhFDO6GN4b3YjRLK//Ep3ql3erpNcPlEFed93hsQAjAQDNsvcK+hV90FubSw==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.24.2.tgz", - "integrity": "sha512-8Qi4nQcCTbLnK9WoMjdC9NiTG6/E38RNICU6sUNqK0QFxCYgoARqVqxdFmWkdonVsvGqWhmm7MO0jyTqLqwj0Q==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.24.2.tgz", - "integrity": "sha512-wuLK/VztRRpMt9zyHSazyCVdCXlpHkKm34WUyinD2lzK07FAHTq0KQvZZlXikNWkDGoT6x3TD51jKQ7gMVpopw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.24.2.tgz", - "integrity": "sha512-VefFaQUc4FMmJuAxmIHgUmfNiLXY438XrL4GDNV1Y1H/RW3qow68xTwjZKfj/+Plp9NANmzbH5R40Meudu8mmw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.2.tgz", - "integrity": "sha512-YQbi46SBct6iKnszhSvdluqDmxCJA+Pu280Av9WICNwQmMxV7nLRHZfjQzwbPs3jeWnuAhE9Jy0NrnJ12Oz+0A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.24.2.tgz", - "integrity": "sha512-+iDS6zpNM6EnJyWv0bMGLWSWeXGN/HTaF/LXHXHwejGsVi+ooqDfMCCTerNFxEkM3wYVcExkeGXNqshc9iMaOA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/sunos-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.24.2.tgz", - "integrity": "sha512-hTdsW27jcktEvpwNHJU4ZwWFGkz2zRJUz8pvddmXPtXDzVKTTINmlmga3ZzwcuMpUvLw7JkLy9QLKyGpD2Yxig==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.24.2.tgz", - "integrity": "sha512-LihEQ2BBKVFLOC9ZItT9iFprsE9tqjDjnbulhHoFxYQtQfai7qfluVODIYxt1PgdoyQkz23+01rzwNwYfutxUQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-ia32": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.24.2.tgz", - "integrity": "sha512-q+iGUwfs8tncmFC9pcnD5IvRHAzmbwQ3GPS5/ceCyHdjXubwQWI12MKWSNSMYLJMq23/IUCvJMS76PDqXe1fxA==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.24.2.tgz", - "integrity": "sha512-7VTgWzgMGvup6aSqDPLiW5zHaxYJGTO4OokMjIlrCtf+VpEL+cXKtCvg723iguPYI5oaUNdS+/V7OU2gvXVWEg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@eslint-community/eslint-utils": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.1.tgz", - "integrity": "sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA==", - "dev": true, - "license": "MIT", - "dependencies": { - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - }, - "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" - } - }, - "node_modules/@eslint-community/regexpp": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", - "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^12.0.0 || ^14.0.0 || >=16.0.0" - } - }, - "node_modules/@eslint/config-array": { - "version": "0.19.1", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.19.1.tgz", - "integrity": "sha512-fo6Mtm5mWyKjA/Chy1BYTdn5mGJoDNjC7C64ug20ADsRDGrA85bN3uK3MaKbeRkRuuIEAR5N33Jr1pbm411/PA==", - "dev": true, - "license": "Apache-2.0", - "peer": true, - "dependencies": { - "@eslint/object-schema": "^2.1.5", - "debug": "^4.3.1", - "minimatch": "^3.1.2" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - } - }, - "node_modules/@eslint/config-array/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/@eslint/config-array/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "peer": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/@eslint/core": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.10.0.tgz", - "integrity": "sha512-gFHJ+xBOo4G3WRlR1e/3G8A6/KZAH6zcE/hkLRCZTi/B9avAG365QhFA8uOGzTMqgTghpn7/fSnscW++dpMSAw==", - "dev": true, - "license": "Apache-2.0", - "peer": true, - "dependencies": { - "@types/json-schema": "^7.0.15" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - } - }, - "node_modules/@eslint/eslintrc": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.2.0.tgz", - "integrity": "sha512-grOjVNN8P3hjJn/eIETF1wwd12DdnwFDoyceUJLYYdkpbwq3nLi+4fqrTAONx7XDALqlL220wC/RHSC/QTI/0w==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "ajv": "^6.12.4", - "debug": "^4.3.2", - "espree": "^10.0.1", - "globals": "^14.0.0", - "ignore": "^5.2.0", - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "minimatch": "^3.1.2", - "strip-json-comments": "^3.1.1" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/@eslint/eslintrc/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "peer": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/@eslint/js": { - "version": "9.16.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.16.0.tgz", - "integrity": "sha512-tw2HxzQkrbeuvyj1tG2Yqq+0H9wGoI2IMk4EOsQeX+vmd75FtJAzf+gTA69WF+baUKRYQ3x2kbLE08js5OsTVg==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - } - }, - "node_modules/@eslint/object-schema": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.5.tgz", - "integrity": "sha512-o0bhxnL89h5Bae5T318nFoFzGy+YE5i/gGkoPAgkmTVdRKTiv3p8JHevPiPaMwoloKfEiiaHlawCqaZMqRm+XQ==", - "dev": true, - "license": "Apache-2.0", - "peer": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - } - }, - "node_modules/@eslint/plugin-kit": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.5.tgz", - "integrity": "sha512-lB05FkqEdUg2AA0xEbUz0SnkXT1LcCTa438W4IWTUh4hdOnVbQyOJ81OrDXsJk/LSiJHubgGEFoR5EHq1NsH1A==", - "dev": true, - "license": "Apache-2.0", - "peer": true, - "dependencies": { - "@eslint/core": "^0.10.0", - "levn": "^0.4.1" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - } - }, - "node_modules/@fal-ai/client": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@fal-ai/client/-/client-1.2.0.tgz", - "integrity": "sha512-MNCnE5icY+OM5ahgYJItmydZ7AxhtzhgA5tQI13jVntzhLT0z+tetHIlAL1VA0XFZgldDzqxeTf9Pr5TW3VErg==", - "license": "MIT", - "dependencies": { - "@msgpack/msgpack": "^3.0.0-beta2", - "eventsource-parser": "^1.1.2", - "robot3": "^0.4.1" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@google-cloud/vertexai": { - "version": "1.9.2", - "resolved": "https://registry.npmjs.org/@google-cloud/vertexai/-/vertexai-1.9.2.tgz", - "integrity": "sha512-pJSUG3r5QIvCFNfkz7/y7kEqvEJaVAk0jZbZoKbcPCRUnXaUeAq7p8I0oklqetGyxbUcZ2FOGpt+Y+4uIltVPg==", - "license": "Apache-2.0", - "peer": true, - "dependencies": { - "google-auth-library": "^9.1.0" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@humanfs/core": { - "version": "0.19.1", - "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", - "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", - "dev": true, - "license": "Apache-2.0", - "peer": true, - "engines": { - "node": ">=18.18.0" - } - }, - "node_modules/@humanfs/node": { - "version": "0.16.6", - "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz", - "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==", - "dev": true, - "license": "Apache-2.0", - "peer": true, - "dependencies": { - "@humanfs/core": "^0.19.1", - "@humanwhocodes/retry": "^0.3.0" - }, - "engines": { - "node": ">=18.18.0" - } - }, - "node_modules/@humanfs/node/node_modules/@humanwhocodes/retry": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", - "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", - "dev": true, - "license": "Apache-2.0", - "peer": true, - "engines": { - "node": ">=18.18" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" - } - }, - "node_modules/@humanwhocodes/module-importer": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", - "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", - "dev": true, - "license": "Apache-2.0", - "peer": true, - "engines": { - "node": ">=12.22" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" - } - }, - "node_modules/@humanwhocodes/retry": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.1.tgz", - "integrity": "sha512-c7hNEllBlenFTHBky65mhq8WD2kbN9Q6gk0bTk8lSBvc554jpXSkST1iePudpt7+A/AQvuHs9EMqjHDXMY1lrA==", - "dev": true, - "license": "Apache-2.0", - "peer": true, - "engines": { - "node": ">=18.18" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" - } - }, - "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "license": "ISC", - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@istanbuljs/load-nyc-config": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", - "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "camelcase": "^5.3.1", - "find-up": "^4.1.0", - "get-package-type": "^0.1.0", - "js-yaml": "^3.13.1", - "resolve-from": "^5.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dev": true, - "license": "MIT", - "dependencies": { - "sprintf-js": "~1.0.2" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", - "dev": true, - "license": "MIT", - "dependencies": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", - "dev": true, - "license": "MIT", - "dependencies": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-locate": "^4.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-limit": "^2.2.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/resolve-from": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", - "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", - "dev": true, - "license": "BSD-3-Clause" - }, - "node_modules/@istanbuljs/schema": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", - "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/@jest/console": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", - "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "@types/node": "*", - "chalk": "^4.0.0", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0", - "slash": "^3.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/core": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", - "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/console": "^29.7.0", - "@jest/reporters": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "ansi-escapes": "^4.2.1", - "chalk": "^4.0.0", - "ci-info": "^3.2.0", - "exit": "^0.1.2", - "graceful-fs": "^4.2.9", - "jest-changed-files": "^29.7.0", - "jest-config": "^29.7.0", - "jest-haste-map": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-regex-util": "^29.6.3", - "jest-resolve": "^29.7.0", - "jest-resolve-dependencies": "^29.7.0", - "jest-runner": "^29.7.0", - "jest-runtime": "^29.7.0", - "jest-snapshot": "^29.7.0", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "jest-watcher": "^29.7.0", - "micromatch": "^4.0.4", - "pretty-format": "^29.7.0", - "slash": "^3.0.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" - }, - "peerDependenciesMeta": { - "node-notifier": { - "optional": true - } - } - }, - "node_modules/@jest/core/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/@jest/core/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@jest/environment": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", - "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/fake-timers": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "jest-mock": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/expect": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", - "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "expect": "^29.7.0", - "jest-snapshot": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/expect-utils": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", - "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", - "dev": true, - "license": "MIT", - "dependencies": { - "jest-get-type": "^29.6.3" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/fake-timers": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", - "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "@sinonjs/fake-timers": "^10.0.2", - "@types/node": "*", - "jest-message-util": "^29.7.0", - "jest-mock": "^29.7.0", - "jest-util": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/globals": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", - "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/expect": "^29.7.0", - "@jest/types": "^29.6.3", - "jest-mock": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/reporters": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", - "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@bcoe/v8-coverage": "^0.2.3", - "@jest/console": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "@jridgewell/trace-mapping": "^0.3.18", - "@types/node": "*", - "chalk": "^4.0.0", - "collect-v8-coverage": "^1.0.0", - "exit": "^0.1.2", - "glob": "^7.1.3", - "graceful-fs": "^4.2.9", - "istanbul-lib-coverage": "^3.0.0", - "istanbul-lib-instrument": "^6.0.0", - "istanbul-lib-report": "^3.0.0", - "istanbul-lib-source-maps": "^4.0.0", - "istanbul-reports": "^3.1.3", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0", - "jest-worker": "^29.7.0", - "slash": "^3.0.0", - "string-length": "^4.0.1", - "strip-ansi": "^6.0.0", - "v8-to-istanbul": "^9.0.1" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" - }, - "peerDependenciesMeta": { - "node-notifier": { - "optional": true - } - } - }, - "node_modules/@jest/reporters/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/@jest/reporters/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/@jest/reporters/node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@jest/reporters/node_modules/istanbul-lib-source-maps": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", - "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "debug": "^4.1.1", - "istanbul-lib-coverage": "^3.0.0", - "source-map": "^0.6.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@jest/reporters/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/@jest/reporters/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@jest/schemas": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", - "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@sinclair/typebox": "^0.27.8" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/source-map": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", - "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.18", - "callsites": "^3.0.0", - "graceful-fs": "^4.2.9" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/test-result": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", - "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/console": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/istanbul-lib-coverage": "^2.0.0", - "collect-v8-coverage": "^1.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/test-sequencer": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", - "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/test-result": "^29.7.0", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", - "slash": "^3.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/transform": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz", - "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.11.6", - "@jest/types": "^29.6.3", - "@jridgewell/trace-mapping": "^0.3.18", - "babel-plugin-istanbul": "^6.1.1", - "chalk": "^4.0.0", - "convert-source-map": "^2.0.0", - "fast-json-stable-stringify": "^2.1.0", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", - "jest-regex-util": "^29.6.3", - "jest-util": "^29.7.0", - "micromatch": "^4.0.4", - "pirates": "^4.0.4", - "slash": "^3.0.0", - "write-file-atomic": "^4.0.2" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/types": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", - "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/schemas": "^29.6.3", - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^3.0.0", - "@types/node": "*", - "@types/yargs": "^17.0.8", - "chalk": "^4.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.8", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", - "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", - "license": "MIT", - "dependencies": { - "@jridgewell/set-array": "^1.2.1", - "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/set-array": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", - "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/source-map": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.6.tgz", - "integrity": "sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.25" - } - }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", - "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", - "license": "MIT" - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.25", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", - "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", - "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@langchain/core": { - "version": "0.3.30", - "resolved": "https://registry.npmjs.org/@langchain/core/-/core-0.3.30.tgz", - "integrity": "sha512-HFUpjJ6FkPSSeLKzCLKxba4VN1DKnrXRmjaWHDb5KUyE9DZrqak3Sh6k2dkzXDJIcdd/uNeeQGFyQnubVEMkPw==", - "license": "MIT", - "peer": true, - "dependencies": { - "@cfworker/json-schema": "^4.0.2", - "ansi-styles": "^5.0.0", - "camelcase": "6", - "decamelize": "1.2.0", - "js-tiktoken": "^1.0.12", - "langsmith": "^0.2.8", - "mustache": "^4.2.0", - "p-queue": "^6.6.2", - "p-retry": "4", - "uuid": "^10.0.0", - "zod": "^3.22.4", - "zod-to-json-schema": "^3.22.3" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@langchain/core/node_modules/uuid": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", - "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "peer": true, - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/@langchain/core/node_modules/zod": { - "version": "3.24.1", - "resolved": "https://registry.npmjs.org/zod/-/zod-3.24.1.tgz", - "integrity": "sha512-muH7gBL9sI1nciMZV67X5fTKKBLtwpZ5VBp1vsOQzj1MhrBZ4wlVCm3gedKZWLp0Oyel8sIGfeiz54Su+OVT+A==", - "license": "MIT", - "peer": true, - "funding": { - "url": "https://github.com/sponsors/colinhacks" - } - }, - "node_modules/@langchain/core/node_modules/zod-to-json-schema": { - "version": "3.24.1", - "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.1.tgz", - "integrity": "sha512-3h08nf3Vw3Wl3PK+q3ow/lIil81IT2Oa7YpQyUUDsEWbXveMesdfK1xBd2RhCkynwZndAxixji/7SYJJowr62w==", - "license": "ISC", - "peer": true, - "peerDependencies": { - "zod": "^3.24.1" - } - }, - "node_modules/@langchain/openai": { - "version": "0.3.17", - "resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-0.3.17.tgz", - "integrity": "sha512-uw4po32OKptVjq+CYHrumgbfh4NuD7LqyE+ZgqY9I/LrLc6bHLMc+sisHmI17vgek0K/yqtarI0alPJbzrwyag==", - "license": "MIT", - "dependencies": { - "js-tiktoken": "^1.0.12", - "openai": "^4.77.0", - "zod": "^3.22.4", - "zod-to-json-schema": "^3.22.3" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@langchain/core": ">=0.3.29 <0.4.0" - } - }, - "node_modules/@langchain/openai/node_modules/@types/node": { - "version": "18.19.70", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.70.tgz", - "integrity": "sha512-RE+K0+KZoEpDUbGGctnGdkrLFwi1eYKTlIHNl2Um98mUkGsm1u2Ff6Ltd0e8DktTtC98uy7rSj+hO8t/QuLoVQ==", - "license": "MIT", - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/@langchain/openai/node_modules/openai": { - "version": "4.78.1", - "resolved": "https://registry.npmjs.org/openai/-/openai-4.78.1.tgz", - "integrity": "sha512-drt0lHZBd2lMyORckOXFPQTmnGLWSLt8VK0W9BhOKWpMFBEoHMoz5gxMPmVq5icp+sOrsbMnsmZTVHUlKvD1Ow==", - "license": "Apache-2.0", - "dependencies": { - "@types/node": "^18.11.18", - "@types/node-fetch": "^2.6.4", - "abort-controller": "^3.0.0", - "agentkeepalive": "^4.2.1", - "form-data-encoder": "1.7.2", - "formdata-node": "^4.3.2", - "node-fetch": "^2.6.7" - }, - "bin": { - "openai": "bin/cli" - }, - "peerDependencies": { - "zod": "^3.23.8" - }, - "peerDependenciesMeta": { - "zod": { - "optional": true - } - } - }, - "node_modules/@langchain/openai/node_modules/undici-types": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "license": "MIT" - }, - "node_modules/@langchain/openai/node_modules/zod": { - "version": "3.24.1", - "resolved": "https://registry.npmjs.org/zod/-/zod-3.24.1.tgz", - "integrity": "sha512-muH7gBL9sI1nciMZV67X5fTKKBLtwpZ5VBp1vsOQzj1MhrBZ4wlVCm3gedKZWLp0Oyel8sIGfeiz54Su+OVT+A==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/colinhacks" - } - }, - "node_modules/@langchain/openai/node_modules/zod-to-json-schema": { - "version": "3.24.1", - "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.1.tgz", - "integrity": "sha512-3h08nf3Vw3Wl3PK+q3ow/lIil81IT2Oa7YpQyUUDsEWbXveMesdfK1xBd2RhCkynwZndAxixji/7SYJJowr62w==", - "license": "ISC", - "peerDependencies": { - "zod": "^3.24.1" - } - }, - "node_modules/@langchain/textsplitters": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@langchain/textsplitters/-/textsplitters-0.1.0.tgz", - "integrity": "sha512-djI4uw9rlkAb5iMhtLED+xJebDdAG935AdP4eRTB02R7OB/act55Bj9wsskhZsvuyQRpO4O1wQOp85s6T6GWmw==", - "license": "MIT", - "dependencies": { - "js-tiktoken": "^1.0.12" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@langchain/core": ">=0.2.21 <0.4.0" - } - }, - "node_modules/@msgpack/msgpack": { - "version": "3.0.0-beta2", - "resolved": "https://registry.npmjs.org/@msgpack/msgpack/-/msgpack-3.0.0-beta2.tgz", - "integrity": "sha512-y+l1PNV0XDyY8sM3YtuMLK5vE3/hkfId+Do8pLo/OPxfxuFAUwcGz3oiiUuV46/aBpwTzZ+mRWVMtlSKbradhw==", - "license": "ISC", - "engines": { - "node": ">= 14" - } - }, - "node_modules/@napi-rs/canvas": { - "version": "0.1.65", - "resolved": "https://registry.npmjs.org/@napi-rs/canvas/-/canvas-0.1.65.tgz", - "integrity": "sha512-YcFhXQcp+b2d38zFOJNbpyPHnIL7KAEkhJQ+UeeKI5IpE9B8Cpf/M6RiHPQXSsSqnYbrfFylnW49dyh2oeSblQ==", - "dev": true, - "license": "MIT", - "optional": true, - "engines": { - "node": ">= 10" - }, - "optionalDependencies": { - "@napi-rs/canvas-android-arm64": "0.1.65", - "@napi-rs/canvas-darwin-arm64": "0.1.65", - "@napi-rs/canvas-darwin-x64": "0.1.65", - "@napi-rs/canvas-linux-arm-gnueabihf": "0.1.65", - "@napi-rs/canvas-linux-arm64-gnu": "0.1.65", - "@napi-rs/canvas-linux-arm64-musl": "0.1.65", - "@napi-rs/canvas-linux-riscv64-gnu": "0.1.65", - "@napi-rs/canvas-linux-x64-gnu": "0.1.65", - "@napi-rs/canvas-linux-x64-musl": "0.1.65", - "@napi-rs/canvas-win32-x64-msvc": "0.1.65" - } - }, - "node_modules/@napi-rs/canvas-android-arm64": { - "version": "0.1.65", - "resolved": "https://registry.npmjs.org/@napi-rs/canvas-android-arm64/-/canvas-android-arm64-0.1.65.tgz", - "integrity": "sha512-ZYwqFYEKcT5Zr8lbiaJNJj/poLaeK2TncolY914r+gD2TJNeP7ZqvE7A2SX/1C9MB4E3DQEwm3YhL3WEf0x3MQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/canvas-darwin-arm64": { - "version": "0.1.65", - "resolved": "https://registry.npmjs.org/@napi-rs/canvas-darwin-arm64/-/canvas-darwin-arm64-0.1.65.tgz", - "integrity": "sha512-Pg1pfiJEyDIsX+V0QaJPRWvXbw5zmWAk3bivFCvt/5pwZb37/sT6E/RqPHT9NnqpDyKW6SriwY9ypjljysUA1Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/canvas-darwin-x64": { - "version": "0.1.65", - "resolved": "https://registry.npmjs.org/@napi-rs/canvas-darwin-x64/-/canvas-darwin-x64-0.1.65.tgz", - "integrity": "sha512-3Tr+/HjdJN7Z/VKIcsxV2DvDIibZCExgfYTgljCkUSFuoI7iNkOE6Dc1Q6j212EB9PeO8KmfrViBqHYT6IwWkA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/canvas-linux-arm-gnueabihf": { - "version": "0.1.65", - "resolved": "https://registry.npmjs.org/@napi-rs/canvas-linux-arm-gnueabihf/-/canvas-linux-arm-gnueabihf-0.1.65.tgz", - "integrity": "sha512-3KP+dYObH7CVkZMZWwk1WX9jRjL+EKdQtD43H8MOI+illf+dwqLlecdQ4d9bQRIxELKJ8dyPWY4fOp/Ngufrdg==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/canvas-linux-arm64-gnu": { - "version": "0.1.65", - "resolved": "https://registry.npmjs.org/@napi-rs/canvas-linux-arm64-gnu/-/canvas-linux-arm64-gnu-0.1.65.tgz", - "integrity": "sha512-Ka3StKz7Dq7kjTF3nNJCq43UN/VlANS7qGE3dWkn1d+tQNsCRy/wRmyt1TUFzIjRqcTFMQNRbgYq84+53UBA0A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/canvas-linux-arm64-musl": { - "version": "0.1.65", - "resolved": "https://registry.npmjs.org/@napi-rs/canvas-linux-arm64-musl/-/canvas-linux-arm64-musl-0.1.65.tgz", - "integrity": "sha512-O4xMASm2JrmqYoiDyxVWi+z5C14H+oVEag2rZ5iIA67dhWqYZB+iO7wCFpBYRj31JPBR29FOsu6X9zL+DwBFdw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/canvas-linux-riscv64-gnu": { - "version": "0.1.65", - "resolved": "https://registry.npmjs.org/@napi-rs/canvas-linux-riscv64-gnu/-/canvas-linux-riscv64-gnu-0.1.65.tgz", - "integrity": "sha512-dblWDaA59ZU8bPbkfM+riSke7sFbNZ70LEevUdI5rgiFEUzYUQlU34gSBzemTACj5rCWt1BYeu0GfkLSjNMBSw==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/canvas-linux-x64-gnu": { - "version": "0.1.65", - "resolved": "https://registry.npmjs.org/@napi-rs/canvas-linux-x64-gnu/-/canvas-linux-x64-gnu-0.1.65.tgz", - "integrity": "sha512-wsp+atutw13OJXGU3DDkdngtBDoEg01IuK5xMe0L6VFPV8maGkh17CXze078OD5QJOc6kFyw3DDscMLOPF8+oA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/canvas-linux-x64-musl": { - "version": "0.1.65", - "resolved": "https://registry.npmjs.org/@napi-rs/canvas-linux-x64-musl/-/canvas-linux-x64-musl-0.1.65.tgz", - "integrity": "sha512-odX+nN+IozWzhdj31INcHz3Iy9+EckNw+VqsZcaUxZOTu7/3FmktRNI6aC1qe5minZNv1m05YOS1FVf7fvmjlA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/canvas-win32-x64-msvc": { - "version": "0.1.65", - "resolved": "https://registry.npmjs.org/@napi-rs/canvas-win32-x64-msvc/-/canvas-win32-x64-msvc-0.1.65.tgz", - "integrity": "sha512-RZQX3luWnlNWgdMnLMQ1hyfQraeAn9lnxWWVCHuUM4tAWEV8UDdeb7cMwmJW7eyt8kAosmjeHt3cylQMHOxGFg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@noble/curves": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.8.0.tgz", - "integrity": "sha512-j84kjAbzEnQHaSIhRPUmB3/eVXu2k3dKPl2LOrR8fSOIL+89U+7lV117EWHtq/GHM3ReGHM46iRBdZfpc4HRUQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@noble/hashes": "1.7.0" - }, - "engines": { - "node": "^14.21.3 || >=16" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/@noble/hashes": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.7.0.tgz", - "integrity": "sha512-HXydb0DgzTpDPwbVeDGCG1gIu7X6+AuU6Zl6av/E/KG8LMsvPntvq+w17CHRpKBmN6Ybdrt1eP3k4cj8DJa78w==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.21.3 || >=16" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@opentelemetry/api": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", - "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", - "license": "Apache-2.0", - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "dev": true, - "license": "MIT", - "optional": true, - "engines": { - "node": ">=14" - } - }, - "node_modules/@pm2/agent": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/@pm2/agent/-/agent-2.0.4.tgz", - "integrity": "sha512-n7WYvvTJhHLS2oBb1PjOtgLpMhgImOq8sXkPBw6smeg9LJBWZjiEgPKOpR8mn9UJZsB5P3W4V/MyvNnp31LKeA==", - "dev": true, - "license": "AGPL-3.0", - "dependencies": { - "async": "~3.2.0", - "chalk": "~3.0.0", - "dayjs": "~1.8.24", - "debug": "~4.3.1", - "eventemitter2": "~5.0.1", - "fast-json-patch": "^3.0.0-1", - "fclone": "~1.0.11", - "nssocket": "0.6.0", - "pm2-axon": "~4.0.1", - "pm2-axon-rpc": "~0.7.0", - "proxy-agent": "~6.3.0", - "semver": "~7.5.0", - "ws": "~7.5.10" - } - }, - "node_modules/@pm2/agent/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/@pm2/agent/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@pm2/agent/node_modules/dayjs": { - "version": "1.8.36", - "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.8.36.tgz", - "integrity": "sha512-3VmRXEtw7RZKAf+4Tv1Ym9AGeo8r8+CjDi26x+7SYQil1UqtqdaokhzoEJohqlzt0m5kacJSDhJQkG/LWhpRBw==", - "dev": true, - "license": "MIT" - }, - "node_modules/@pm2/agent/node_modules/debug": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", - "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/@pm2/agent/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@pm2/agent/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dev": true, - "license": "ISC", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@pm2/io": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/@pm2/io/-/io-6.0.1.tgz", - "integrity": "sha512-KiA+shC6sULQAr9mGZ1pg+6KVW9MF8NpG99x26Lf/082/Qy8qsTCtnJy+HQReW1A9Rdf0C/404cz0RZGZro+IA==", - "dev": true, - "license": "Apache-2", - "dependencies": { - "async": "~2.6.1", - "debug": "~4.3.1", - "eventemitter2": "^6.3.1", - "require-in-the-middle": "^5.0.0", - "semver": "~7.5.4", - "shimmer": "^1.2.0", - "signal-exit": "^3.0.3", - "tslib": "1.9.3" - }, - "engines": { - "node": ">=6.0" - } - }, - "node_modules/@pm2/io/node_modules/async": { - "version": "2.6.4", - "resolved": "https://registry.npmjs.org/async/-/async-2.6.4.tgz", - "integrity": "sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==", - "dev": true, - "license": "MIT", - "dependencies": { - "lodash": "^4.17.14" - } - }, - "node_modules/@pm2/io/node_modules/debug": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", - "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/@pm2/io/node_modules/eventemitter2": { - "version": "6.4.9", - "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.9.tgz", - "integrity": "sha512-JEPTiaOt9f04oa6NOkc4aH+nVp5I3wEjpHbIPqfgCdD5v5bUzy7xQqwcVO2aDQgOWhI28da57HksMrzK9HlRxg==", - "dev": true, - "license": "MIT" - }, - "node_modules/@pm2/io/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@pm2/io/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dev": true, - "license": "ISC", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@pm2/io/node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/@pm2/io/node_modules/tslib": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.9.3.tgz", - "integrity": "sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ==", - "dev": true, - "license": "Apache-2.0" - }, - "node_modules/@pm2/js-api": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/@pm2/js-api/-/js-api-0.8.0.tgz", - "integrity": "sha512-nmWzrA/BQZik3VBz+npRcNIu01kdBhWL0mxKmP1ciF/gTcujPTQqt027N9fc1pK9ERM8RipFhymw7RcmCyOEYA==", - "dev": true, - "license": "Apache-2", - "dependencies": { - "async": "^2.6.3", - "debug": "~4.3.1", - "eventemitter2": "^6.3.1", - "extrareqp2": "^1.0.0", - "ws": "^7.0.0" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/@pm2/js-api/node_modules/async": { - "version": "2.6.4", - "resolved": "https://registry.npmjs.org/async/-/async-2.6.4.tgz", - "integrity": "sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==", - "dev": true, - "license": "MIT", - "dependencies": { - "lodash": "^4.17.14" - } - }, - "node_modules/@pm2/js-api/node_modules/debug": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", - "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/@pm2/js-api/node_modules/eventemitter2": { - "version": "6.4.9", - "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.9.tgz", - "integrity": "sha512-JEPTiaOt9f04oa6NOkc4aH+nVp5I3wEjpHbIPqfgCdD5v5bUzy7xQqwcVO2aDQgOWhI28da57HksMrzK9HlRxg==", - "dev": true, - "license": "MIT" - }, - "node_modules/@pm2/pm2-version-check": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@pm2/pm2-version-check/-/pm2-version-check-1.0.4.tgz", - "integrity": "sha512-SXsM27SGH3yTWKc2fKR4SYNxsmnvuBQ9dd6QHtEWmiZ/VqaOYPAIlS8+vMcn27YLtAEBGvNRSh3TPNvtjZgfqA==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.3.1" - } - }, - "node_modules/@rollup/plugin-commonjs": { - "version": "25.0.8", - "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-25.0.8.tgz", - "integrity": "sha512-ZEZWTK5n6Qde0to4vS9Mr5x/0UZoqCxPVR9KRUjU4kA2sO7GEUn1fop0DAwpO6z0Nw/kJON9bDmSxdWxO/TT1A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/pluginutils": "^5.0.1", - "commondir": "^1.0.1", - "estree-walker": "^2.0.2", - "glob": "^8.0.3", - "is-reference": "1.2.1", - "magic-string": "^0.30.3" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^2.68.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/plugin-commonjs/node_modules/glob": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", - "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^5.0.1", - "once": "^1.3.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@rollup/plugin-commonjs/node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@rollup/plugin-json": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/@rollup/plugin-json/-/plugin-json-6.1.0.tgz", - "integrity": "sha512-EGI2te5ENk1coGeADSIwZ7G2Q8CJS2sF120T7jLw4xFw9n7wIOXHo+kIYRAoVpJAN+kmqZSoO3Fp4JtoNF4ReA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/pluginutils": "^5.1.0" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/plugin-node-resolve": { - "version": "15.3.0", - "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.3.0.tgz", - "integrity": "sha512-9eO5McEICxMzJpDW9OnMYSv4Sta3hmt7VtBFz5zR9273suNOydOyq/FrGeGy+KsTRFm8w0SLVhzig2ILFT63Ag==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/pluginutils": "^5.0.1", - "@types/resolve": "1.20.2", - "deepmerge": "^4.2.2", - "is-module": "^1.0.0", - "resolve": "^1.22.1" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^2.78.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/plugin-replace": { - "version": "5.0.7", - "resolved": "https://registry.npmjs.org/@rollup/plugin-replace/-/plugin-replace-5.0.7.tgz", - "integrity": "sha512-PqxSfuorkHz/SPpyngLyg5GCEkOcee9M1bkxiVDr41Pd61mqP1PLOoDPbpl44SB2mQGKwV/In74gqQmGITOhEQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/pluginutils": "^5.0.1", - "magic-string": "^0.30.3" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/plugin-terser": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@rollup/plugin-terser/-/plugin-terser-0.1.0.tgz", - "integrity": "sha512-N2KK+qUfHX2hBzVzM41UWGLrEmcjVC37spC8R3c9mt3oEDFKh3N2e12/lLp9aVSt86veR0TQiCNQXrm8C6aiUQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "terser": "^5.15.1" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^2.x || ^3.x" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/plugin-typescript": { - "version": "11.1.6", - "resolved": "https://registry.npmjs.org/@rollup/plugin-typescript/-/plugin-typescript-11.1.6.tgz", - "integrity": "sha512-R92yOmIACgYdJ7dJ97p4K69I8gg6IEHt8M7dUBxN3W6nrO8uUxX5ixl0yU/N3aZTi8WhPuICvOHXQvF6FaykAA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/pluginutils": "^5.1.0", - "resolve": "^1.22.1" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^2.14.0||^3.0.0||^4.0.0", - "tslib": "*", - "typescript": ">=3.7.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - }, - "tslib": { - "optional": true - } - } - }, - "node_modules/@rollup/pluginutils": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.1.4.tgz", - "integrity": "sha512-USm05zrsFxYLPdWWq+K3STlWiT/3ELn3RcV5hJMghpeAIhxfsUIg6mt12CBJBInWMV4VneoV7SfGv8xIwo2qNQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "estree-walker": "^2.0.2", - "picomatch": "^4.0.2" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.30.1.tgz", - "integrity": "sha512-pSWY+EVt3rJ9fQ3IqlrEUtXh3cGqGtPDH1FQlNZehO2yYxCHEX1SPsz1M//NXwYfbTlcKr9WObLnJX9FsS9K1Q==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-android-arm64": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.30.1.tgz", - "integrity": "sha512-/NA2qXxE3D/BRjOJM8wQblmArQq1YoBVJjrjoTSBS09jgUisq7bqxNHJ8kjCHeV21W/9WDGwJEWSN0KQ2mtD/w==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.30.1.tgz", - "integrity": "sha512-r7FQIXD7gB0WJ5mokTUgUWPl0eYIH0wnxqeSAhuIwvnnpjdVB8cRRClyKLQr7lgzjctkbp5KmswWszlwYln03Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.30.1.tgz", - "integrity": "sha512-x78BavIwSH6sqfP2xeI1hd1GpHL8J4W2BXcVM/5KYKoAD3nNsfitQhvWSw+TFtQTLZ9OmlF+FEInEHyubut2OA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.30.1.tgz", - "integrity": "sha512-HYTlUAjbO1z8ywxsDFWADfTRfTIIy/oUlfIDmlHYmjUP2QRDTzBuWXc9O4CXM+bo9qfiCclmHk1x4ogBjOUpUQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.30.1.tgz", - "integrity": "sha512-1MEdGqogQLccphhX5myCJqeGNYTNcmTyaic9S7CG3JhwuIByJ7J05vGbZxsizQthP1xpVx7kd3o31eOogfEirw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.30.1.tgz", - "integrity": "sha512-PaMRNBSqCx7K3Wc9QZkFx5+CX27WFpAMxJNiYGAXfmMIKC7jstlr32UhTgK6T07OtqR+wYlWm9IxzennjnvdJg==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.30.1.tgz", - "integrity": "sha512-B8Rcyj9AV7ZlEFqvB5BubG5iO6ANDsRKlhIxySXcF1axXYUyqwBok+XZPgIYGBgs7LDXfWfifxhw0Ik57T0Yug==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.30.1.tgz", - "integrity": "sha512-hqVyueGxAj3cBKrAI4aFHLV+h0Lv5VgWZs9CUGqr1z0fZtlADVV1YPOij6AhcK5An33EXaxnDLmJdQikcn5NEw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.30.1.tgz", - "integrity": "sha512-i4Ab2vnvS1AE1PyOIGp2kXni69gU2DAUVt6FSXeIqUCPIR3ZlheMW3oP2JkukDfu3PsexYRbOiJrY+yVNSk9oA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.30.1.tgz", - "integrity": "sha512-fARcF5g296snX0oLGkVxPmysetwUk2zmHcca+e9ObOovBR++9ZPOhqFUM61UUZ2EYpXVPN1redgqVoBB34nTpQ==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.30.1.tgz", - "integrity": "sha512-GLrZraoO3wVT4uFXh67ElpwQY0DIygxdv0BNW9Hkm3X34wu+BkqrDrkcsIapAY+N2ATEbvak0XQ9gxZtCIA5Rw==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.30.1.tgz", - "integrity": "sha512-0WKLaAUUHKBtll0wvOmh6yh3S0wSU9+yas923JIChfxOaaBarmb/lBKPF0w/+jTVozFnOXJeRGZ8NvOxvk/jcw==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.30.1.tgz", - "integrity": "sha512-GWFs97Ruxo5Bt+cvVTQkOJ6TIx0xJDD/bMAOXWJg8TCSTEK8RnFeOeiFTxKniTc4vMIaWvCplMAFBt9miGxgkA==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.30.1.tgz", - "integrity": "sha512-UtgGb7QGgXDIO+tqqJ5oZRGHsDLO8SlpE4MhqpY9Llpzi5rJMvrK6ZGhsRCST2abZdBqIBeXW6WPD5fGK5SDwg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.30.1.tgz", - "integrity": "sha512-V9U8Ey2UqmQsBT+xTOeMzPzwDzyXmnAoO4edZhL7INkwQcaW1Ckv3WJX3qrrp/VHaDkEWIBWhRwP47r8cdrOow==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.30.1.tgz", - "integrity": "sha512-WabtHWiPaFF47W3PkHnjbmWawnX/aE57K47ZDT1BXTS5GgrBUEpvOzq0FI0V/UYzQJgdb8XlhVNH8/fwV8xDjw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.30.1.tgz", - "integrity": "sha512-pxHAU+Zv39hLUTdQQHUVHf4P+0C47y/ZloorHpzs2SXMRqeAWmGghzAhfOlzFHHwjvgokdFAhC4V+6kC1lRRfw==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.30.1.tgz", - "integrity": "sha512-D6qjsXGcvhTjv0kI4fU8tUuBDF/Ueee4SVX79VfNDXZa64TfCW1Slkb6Z7O1p7vflqZjcmOVdZlqf8gvJxc6og==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@sinclair/typebox": { - "version": "0.27.8", - "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", - "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", - "dev": true, - "license": "MIT" - }, - "node_modules/@sinonjs/commons": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", - "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "type-detect": "4.0.8" - } - }, - "node_modules/@sinonjs/fake-timers": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", - "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@sinonjs/commons": "^3.0.0" - } - }, - "node_modules/@solana/buffer-layout": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@solana/buffer-layout/-/buffer-layout-4.0.1.tgz", - "integrity": "sha512-E1ImOIAD1tBZFRdjeM4/pzTiTApC0AOBGwyAMS4fwIodCWArzJ3DWdoh8cKxeFM2fElkxBh2Aqts1BPC373rHA==", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer": "~6.0.3" - }, - "engines": { - "node": ">=5.10" - } - }, - "node_modules/@solana/web3.js": { - "version": "1.95.8", - "resolved": "https://registry.npmjs.org/@solana/web3.js/-/web3.js-1.95.8.tgz", - "integrity": "sha512-sBHzNh7dHMrmNS5xPD1d0Xa2QffW/RXaxu/OysRXBfwTp+LYqGGmMtCYYwrHPrN5rjAmJCsQRNAwv4FM0t3B6g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.25.0", - "@noble/curves": "^1.4.2", - "@noble/hashes": "^1.4.0", - "@solana/buffer-layout": "^4.0.1", - "agentkeepalive": "^4.5.0", - "bigint-buffer": "^1.1.5", - "bn.js": "^5.2.1", - "borsh": "^0.7.0", - "bs58": "^4.0.1", - "buffer": "6.0.3", - "fast-stable-stringify": "^1.0.0", - "jayson": "^4.1.1", - "node-fetch": "^2.7.0", - "rpc-websockets": "^9.0.2", - "superstruct": "^2.0.2" - } - }, - "node_modules/@swc/helpers": { - "version": "0.5.15", - "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.15.tgz", - "integrity": "sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.8.0" - } - }, - "node_modules/@tavily/core": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/@tavily/core/-/core-0.0.2.tgz", - "integrity": "sha512-UabYbp57bdjEloA4efW9zTSzv+FZp13JVDHcfutUNR5XUZ+aDGupe2wpfABECnD+b7Ojp9v9zguZcm1o+h0//w==", - "dev": true, - "license": "MIT", - "dependencies": { - "axios": "^1.7.7", - "js-tiktoken": "^1.0.14" - } - }, - "node_modules/@tootallnate/quickjs-emscripten": { - "version": "0.23.0", - "resolved": "https://registry.npmjs.org/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz", - "integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==", - "dev": true, - "license": "MIT" - }, - "node_modules/@tsconfig/node10": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", - "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", - "dev": true, - "license": "MIT" - }, - "node_modules/@tsconfig/node12": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", - "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", - "dev": true, - "license": "MIT" - }, - "node_modules/@tsconfig/node14": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", - "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", - "dev": true, - "license": "MIT" - }, - "node_modules/@tsconfig/node16": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", - "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/babel__core": { - "version": "7.20.5", - "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", - "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.20.7", - "@babel/types": "^7.20.7", - "@types/babel__generator": "*", - "@types/babel__template": "*", - "@types/babel__traverse": "*" - } - }, - "node_modules/@types/babel__generator": { - "version": "7.6.8", - "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.8.tgz", - "integrity": "sha512-ASsj+tpEDsEiFr1arWrlN6V3mdfjRMZt6LtK/Vp/kreFLnr5QH5+DhvD5nINYZXzwJvXeGq+05iUXcAzVrqWtw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.0.0" - } - }, - "node_modules/@types/babel__template": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", - "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.1.0", - "@babel/types": "^7.0.0" - } - }, - "node_modules/@types/babel__traverse": { - "version": "7.20.6", - "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.6.tgz", - "integrity": "sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.20.7" - } - }, - "node_modules/@types/connect": { - "version": "3.4.38", - "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", - "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/diff-match-patch": { - "version": "1.0.36", - "resolved": "https://registry.npmjs.org/@types/diff-match-patch/-/diff-match-patch-1.0.36.tgz", - "integrity": "sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==", - "license": "MIT" - }, - "node_modules/@types/estree": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", - "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", - "license": "MIT" - }, - "node_modules/@types/fluent-ffmpeg": { - "version": "2.1.27", - "resolved": "https://registry.npmjs.org/@types/fluent-ffmpeg/-/fluent-ffmpeg-2.1.27.tgz", - "integrity": "sha512-QiDWjihpUhriISNoBi2hJBRUUmoj/BMTYcfz+F+ZM9hHWBYABFAE6hjP/TbCZC0GWwlpa3FzvHH9RzFeRusZ7A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/graceful-fs": { - "version": "4.1.9", - "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", - "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/istanbul-lib-coverage": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", - "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/istanbul-lib-report": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", - "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/istanbul-lib-coverage": "*" - } - }, - "node_modules/@types/istanbul-reports": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", - "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/istanbul-lib-report": "*" - } - }, - "node_modules/@types/jest": { - "version": "29.5.14", - "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", - "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "expect": "^29.0.0", - "pretty-format": "^29.0.0" - } - }, - "node_modules/@types/json-schema": { - "version": "7.0.15", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", - "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/@types/mocha": { - "version": "10.0.10", - "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.10.tgz", - "integrity": "sha512-xPyYSz1cMPnJQhl0CLMH68j3gprKZaTjG3s5Vi+fDgx+uhG9NOXwbVt52eFS8ECyXhyKcjDLCBEqBExKuiZb7Q==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/node": { - "version": "22.8.4", - "license": "MIT", - "dependencies": { - "undici-types": "~6.19.8" - } - }, - "node_modules/@types/node-fetch": { - "version": "2.6.12", - "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.12.tgz", - "integrity": "sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==", - "license": "MIT", - "dependencies": { - "@types/node": "*", - "form-data": "^4.0.0" - } - }, - "node_modules/@types/pdfjs-dist": { - "version": "2.10.378", - "resolved": "https://registry.npmjs.org/@types/pdfjs-dist/-/pdfjs-dist-2.10.378.tgz", - "integrity": "sha512-TRdIPqdsvKmPla44kVy4jv5Nt5vjMfVjbIEke1CRULIrwKNRC4lIiZvNYDJvbUMNCFPNIUcOKhXTyMJrX18IMA==", - "deprecated": "This is a stub types definition. pdfjs-dist provides its own type definitions, so you do not need this installed.", - "dev": true, - "license": "MIT", - "dependencies": { - "pdfjs-dist": "*" - } - }, - "node_modules/@types/resolve": { - "version": "1.20.2", - "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz", - "integrity": "sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/retry": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.0.tgz", - "integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==", - "license": "MIT" - }, - "node_modules/@types/stack-utils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", - "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/tar": { - "version": "6.1.13", - "resolved": "https://registry.npmjs.org/@types/tar/-/tar-6.1.13.tgz", - "integrity": "sha512-IznnlmU5f4WcGTh2ltRu/Ijpmk8wiWXfF0VA4s+HPjHZgvFggk1YaIkbo5krX/zUCzWF8N/l4+W/LNxnvAJ8nw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*", - "minipass": "^4.0.0" - } - }, - "node_modules/@types/uuid": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-10.0.0.tgz", - "integrity": "sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ==", - "license": "MIT" - }, - "node_modules/@types/wav-encoder": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/@types/wav-encoder/-/wav-encoder-1.3.3.tgz", - "integrity": "sha512-2haw8zEMg4DspJRXmxUn2TElrQUs0bLPDh6x4N7/hDn+3tx2G05Lc+kC55uoHYsv8q+4deWhnDtHZT/ximg9aw==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/ws": { - "version": "7.4.7", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-7.4.7.tgz", - "integrity": "sha512-JQbbmxZTZehdc2iszGKs5oC3NFnjeay7mtAWrdt7qNtAVK0g19muApzAy4bm9byz79xa2ZnO/BOBC2R8RC5Lww==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/yargs": { - "version": "17.0.33", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", - "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/yargs-parser": "*" - } - }, - "node_modules/@types/yargs-parser": { - "version": "21.0.3", - "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", - "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.16.0.tgz", - "integrity": "sha512-5YTHKV8MYlyMI6BaEG7crQ9BhSc8RxzshOReKwZwRWN0+XvvTOm+L/UYLCYxFpfwYuAAqhxiq4yae0CMFwbL7Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.16.0", - "@typescript-eslint/type-utils": "8.16.0", - "@typescript-eslint/utils": "8.16.0", - "@typescript-eslint/visitor-keys": "8.16.0", - "graphemer": "^1.4.0", - "ignore": "^5.3.1", - "natural-compare": "^1.4.0", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", - "eslint": "^8.57.0 || ^9.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/parser": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.16.0.tgz", - "integrity": "sha512-D7DbgGFtsqIPIFMPJwCad9Gfi/hC0PWErRRHFnaCWoEDYi5tQUDiJCTmGUbBiLzjqAck4KcXt9Ayj0CNlIrF+w==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "@typescript-eslint/scope-manager": "8.16.0", - "@typescript-eslint/types": "8.16.0", - "@typescript-eslint/typescript-estree": "8.16.0", - "@typescript-eslint/visitor-keys": "8.16.0", - "debug": "^4.3.4" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.16.0.tgz", - "integrity": "sha512-mwsZWubQvBki2t5565uxF0EYvG+FwdFb8bMtDuGQLdCCnGPrDEDvm1gtfynuKlnpzeBRqdFCkMf9jg1fnAK8sg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.16.0", - "@typescript-eslint/visitor-keys": "8.16.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/type-utils": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.16.0.tgz", - "integrity": "sha512-IqZHGG+g1XCWX9NyqnI/0CX5LL8/18awQqmkZSl2ynn8F76j579dByc0jhfVSnSnhf7zv76mKBQv9HQFKvDCgg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/typescript-estree": "8.16.0", - "@typescript-eslint/utils": "8.16.0", - "debug": "^4.3.4", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/types": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.16.0.tgz", - "integrity": "sha512-NzrHj6thBAOSE4d9bsuRNMvk+BvaQvmY4dDglgkgGC0EW/tB3Kelnp3tAKH87GEwzoxgeQn9fNGRyFJM/xd+GQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.16.0.tgz", - "integrity": "sha512-E2+9IzzXMc1iaBy9zmo+UYvluE3TW7bCGWSF41hVWUE01o8nzr1rvOQYSxelxr6StUvRcTMe633eY8mXASMaNw==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "@typescript-eslint/types": "8.16.0", - "@typescript-eslint/visitor-keys": "8.16.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/utils": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.16.0.tgz", - "integrity": "sha512-C1zRy/mOL8Pj157GiX4kaw7iyRLKfJXBR3L82hk5kS/GyHcOFmy4YUq/zfZti72I9wnuQtA/+xzft4wCC8PJdA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.16.0", - "@typescript-eslint/types": "8.16.0", - "@typescript-eslint/typescript-estree": "8.16.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.16.0.tgz", - "integrity": "sha512-pq19gbaMOmFE3CbL0ZB8J8BFCo2ckfHBfaIsaOZgBIF4EoISJIdLX5xRhd0FGB0LlHReNRuzoJoMGpTjq8F2CQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.16.0", - "eslint-visitor-keys": "^4.2.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/@vitest/coverage-v8": { - "version": "2.1.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@ampproject/remapping": "^2.3.0", - "@bcoe/v8-coverage": "^0.2.3", - "debug": "^4.3.7", - "istanbul-lib-coverage": "^3.2.2", - "istanbul-lib-report": "^3.0.1", - "istanbul-lib-source-maps": "^5.0.6", - "istanbul-reports": "^3.1.7", - "magic-string": "^0.30.12", - "magicast": "^0.3.5", - "std-env": "^3.8.0", - "test-exclude": "^7.0.1", - "tinyrainbow": "^1.2.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "@vitest/browser": "2.1.5", - "vitest": "2.1.5" - }, - "peerDependenciesMeta": { - "@vitest/browser": { - "optional": true - } - } - }, - "node_modules/@vitest/expect": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-2.1.5.tgz", - "integrity": "sha512-nZSBTW1XIdpZvEJyoP/Sy8fUg0b8od7ZpGDkTUcfJ7wz/VoZAFzFfLyxVxGFhUjJzhYqSbIpfMtl/+k/dpWa3Q==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "@vitest/spy": "2.1.5", - "@vitest/utils": "2.1.5", - "chai": "^5.1.2", - "tinyrainbow": "^1.2.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/mocker": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-2.1.5.tgz", - "integrity": "sha512-XYW6l3UuBmitWqSUXTNXcVBUCRytDogBsWuNXQijc00dtnU/9OqpXWp4OJroVrad/gLIomAq9aW8yWDBtMthhQ==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "@vitest/spy": "2.1.5", - "estree-walker": "^3.0.3", - "magic-string": "^0.30.12" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "msw": "^2.4.9", - "vite": "^5.0.0" - }, - "peerDependenciesMeta": { - "msw": { - "optional": true - }, - "vite": { - "optional": true - } - } - }, - "node_modules/@vitest/mocker/node_modules/estree-walker": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", - "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "@types/estree": "^1.0.0" - } - }, - "node_modules/@vitest/pretty-format": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.1.8.tgz", - "integrity": "sha512-9HiSZ9zpqNLKlbIDRWOnAWqgcA7xu+8YxXSekhr0Ykab7PAYFkhkwoqVArPOtJhPmYeE2YHgKZlj3CP36z2AJQ==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "tinyrainbow": "^1.2.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/runner": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-2.1.5.tgz", - "integrity": "sha512-pKHKy3uaUdh7X6p1pxOkgkVAFW7r2I818vHDthYLvUyjRfkKOU6P45PztOch4DZarWQne+VOaIMwA/erSSpB9g==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "@vitest/utils": "2.1.5", - "pathe": "^1.1.2" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/snapshot": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-2.1.5.tgz", - "integrity": "sha512-zmYw47mhfdfnYbuhkQvkkzYroXUumrwWDGlMjpdUr4jBd3HZiV2w7CQHj+z7AAS4VOtWxI4Zt4bWt4/sKcoIjg==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "@vitest/pretty-format": "2.1.5", - "magic-string": "^0.30.12", - "pathe": "^1.1.2" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/snapshot/node_modules/@vitest/pretty-format": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.1.5.tgz", - "integrity": "sha512-4ZOwtk2bqG5Y6xRGHcveZVr+6txkH7M2e+nPFd6guSoN638v/1XQ0K06eOpi0ptVU/2tW/pIU4IoPotY/GZ9fw==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "tinyrainbow": "^1.2.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/spy": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-2.1.5.tgz", - "integrity": "sha512-aWZF3P0r3w6DiYTVskOYuhBc7EMc3jvn1TkBg8ttylFFRqNN2XGD7V5a4aQdk6QiUzZQ4klNBSpCLJgWNdIiNw==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "tinyspy": "^3.0.2" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/utils": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.1.5.tgz", - "integrity": "sha512-yfj6Yrp0Vesw2cwJbP+cl04OC+IHFsuQsrsJBL9pyGeQXE56v1UAOQco+SR55Vf1nQzfV0QJg1Qum7AaWUwwYg==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "@vitest/pretty-format": "2.1.5", - "loupe": "^3.1.2", - "tinyrainbow": "^1.2.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/utils/node_modules/@vitest/pretty-format": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.1.5.tgz", - "integrity": "sha512-4ZOwtk2bqG5Y6xRGHcveZVr+6txkH7M2e+nPFd6guSoN638v/1XQ0K06eOpi0ptVU/2tW/pIU4IoPotY/GZ9fw==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "tinyrainbow": "^1.2.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vue/compiler-core": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.13.tgz", - "integrity": "sha512-oOdAkwqUfW1WqpwSYJce06wvt6HljgY3fGeM9NcVA1HaYOij3mZG9Rkysn0OHuyUAGMbEbARIpsG+LPVlBJ5/Q==", - "license": "MIT", - "peer": true, - "dependencies": { - "@babel/parser": "^7.25.3", - "@vue/shared": "3.5.13", - "entities": "^4.5.0", - "estree-walker": "^2.0.2", - "source-map-js": "^1.2.0" - } - }, - "node_modules/@vue/compiler-dom": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.13.tgz", - "integrity": "sha512-ZOJ46sMOKUjO3e94wPdCzQ6P1Lx/vhp2RSvfaab88Ajexs0AHeV0uasYhi99WPaogmBlRHNRuly8xV75cNTMDA==", - "license": "MIT", - "peer": true, - "dependencies": { - "@vue/compiler-core": "3.5.13", - "@vue/shared": "3.5.13" - } - }, - "node_modules/@vue/compiler-sfc": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.13.tgz", - "integrity": "sha512-6VdaljMpD82w6c2749Zhf5T9u5uLBWKnVue6XWxprDobftnletJ8+oel7sexFfM3qIxNmVE7LSFGTpv6obNyaQ==", - "license": "MIT", - "peer": true, - "dependencies": { - "@babel/parser": "^7.25.3", - "@vue/compiler-core": "3.5.13", - "@vue/compiler-dom": "3.5.13", - "@vue/compiler-ssr": "3.5.13", - "@vue/shared": "3.5.13", - "estree-walker": "^2.0.2", - "magic-string": "^0.30.11", - "postcss": "^8.4.48", - "source-map-js": "^1.2.0" - } - }, - "node_modules/@vue/compiler-ssr": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.13.tgz", - "integrity": "sha512-wMH6vrYHxQl/IybKJagqbquvxpWCuVYpoUJfCqFZwa/JY1GdATAQ+TgVtgrwwMZ0D07QhA99rs/EAAWfvG6KpA==", - "license": "MIT", - "peer": true, - "dependencies": { - "@vue/compiler-dom": "3.5.13", - "@vue/shared": "3.5.13" - } - }, - "node_modules/@vue/reactivity": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.13.tgz", - "integrity": "sha512-NaCwtw8o48B9I6L1zl2p41OHo/2Z4wqYGGIK1Khu5T7yxrn+ATOixn/Udn2m+6kZKB/J7cuT9DbWWhRxqixACg==", - "license": "MIT", - "peer": true, - "dependencies": { - "@vue/shared": "3.5.13" - } - }, - "node_modules/@vue/runtime-core": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.13.tgz", - "integrity": "sha512-Fj4YRQ3Az0WTZw1sFe+QDb0aXCerigEpw418pw1HBUKFtnQHWzwojaukAs2X/c9DQz4MQ4bsXTGlcpGxU/RCIw==", - "license": "MIT", - "peer": true, - "dependencies": { - "@vue/reactivity": "3.5.13", - "@vue/shared": "3.5.13" - } - }, - "node_modules/@vue/runtime-dom": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.13.tgz", - "integrity": "sha512-dLaj94s93NYLqjLiyFzVs9X6dWhTdAlEAciC3Moq7gzAc13VJUdCnjjRurNM6uTLFATRHexHCTu/Xp3eW6yoog==", - "license": "MIT", - "peer": true, - "dependencies": { - "@vue/reactivity": "3.5.13", - "@vue/runtime-core": "3.5.13", - "@vue/shared": "3.5.13", - "csstype": "^3.1.3" - } - }, - "node_modules/@vue/server-renderer": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.13.tgz", - "integrity": "sha512-wAi4IRJV/2SAW3htkTlB+dHeRmpTiVIK1OGLWV1yeStVSebSQQOwGwIq0D3ZIoBj2C2qpgz5+vX9iEBkTdk5YA==", - "license": "MIT", - "peer": true, - "dependencies": { - "@vue/compiler-ssr": "3.5.13", - "@vue/shared": "3.5.13" - }, - "peerDependencies": { - "vue": "3.5.13" - } - }, - "node_modules/@vue/shared": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.13.tgz", - "integrity": "sha512-/hnE/qP5ZoGpol0a5mDi45bOd7t3tjYJBjsgCsivow7D48cJeV5l05RD82lPqi7gRiphZM37rnhW1l6ZoCNNnQ==", - "license": "MIT", - "peer": true - }, - "node_modules/abort-controller": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", - "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", - "license": "MIT", - "dependencies": { - "event-target-shim": "^5.0.0" - }, - "engines": { - "node": ">=6.5" - } - }, - "node_modules/acorn": { - "version": "8.14.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", - "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", - "license": "MIT", - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "dev": true, - "license": "MIT", - "peer": true, - "peerDependencies": { - "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" - } - }, - "node_modules/acorn-typescript": { - "version": "1.4.13", - "resolved": "https://registry.npmjs.org/acorn-typescript/-/acorn-typescript-1.4.13.tgz", - "integrity": "sha512-xsc9Xv0xlVfwp2o7sQ+GCQ1PgbkdcpWdTzrwXxO3xDMTAywVS3oXVOcOHuRjAPkS4P9b+yc/qNF15460v+jp4Q==", - "license": "MIT", - "peer": true, - "peerDependencies": { - "acorn": ">=8.9.0" - } - }, - "node_modules/acorn-walk": { - "version": "8.3.4", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", - "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", - "dev": true, - "license": "MIT", - "dependencies": { - "acorn": "^8.11.0" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/agent-base": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", - "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", - "license": "MIT", - "engines": { - "node": ">= 14" - } - }, - "node_modules/agentkeepalive": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.6.0.tgz", - "integrity": "sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ==", - "license": "MIT", - "dependencies": { - "humanize-ms": "^1.2.1" - }, - "engines": { - "node": ">= 8.0.0" - } - }, - "node_modules/ai": { - "version": "3.4.33", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "0.0.26", - "@ai-sdk/provider-utils": "1.0.22", - "@ai-sdk/react": "0.0.70", - "@ai-sdk/solid": "0.0.54", - "@ai-sdk/svelte": "0.0.57", - "@ai-sdk/ui-utils": "0.0.50", - "@ai-sdk/vue": "0.0.59", - "@opentelemetry/api": "1.9.0", - "eventsource-parser": "1.1.2", - "json-schema": "^0.4.0", - "jsondiffpatch": "0.6.0", - "secure-json-parse": "^2.7.0", - "zod-to-json-schema": "^3.23.3" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "openai": "^4.42.0", - "react": "^18 || ^19 || ^19.0.0-rc", - "sswr": "^2.1.0", - "svelte": "^3.0.0 || ^4.0.0 || ^5.0.0", - "zod": "^3.0.0" - }, - "peerDependenciesMeta": { - "openai": { - "optional": true - }, - "react": { - "optional": true - }, - "sswr": { - "optional": true - }, - "svelte": { - "optional": true - }, - "zod": { - "optional": true - } - } - }, - "node_modules/ai/node_modules/zod-to-json-schema": { - "version": "3.24.1", - "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.1.tgz", - "integrity": "sha512-3h08nf3Vw3Wl3PK+q3ow/lIil81IT2Oa7YpQyUUDsEWbXveMesdfK1xBd2RhCkynwZndAxixji/7SYJJowr62w==", - "license": "ISC", - "peerDependencies": { - "zod": "^3.24.1" - } - }, - "node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/amp": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/amp/-/amp-0.3.1.tgz", - "integrity": "sha512-OwIuC4yZaRogHKiuU5WlMR5Xk/jAcpPtawWL05Gj8Lvm2F6mwoJt4O/bHI+DHwG79vWd+8OFYM4/BzYqyRd3qw==", - "dev": true, - "license": "MIT" - }, - "node_modules/amp-message": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/amp-message/-/amp-message-0.1.2.tgz", - "integrity": "sha512-JqutcFwoU1+jhv7ArgW38bqrE+LQdcRv4NxNw0mp0JHQyB6tXesWRjtYKlDgHRY2o3JE5UTaBGUK8kSWUdxWUg==", - "dev": true, - "license": "MIT", - "dependencies": { - "amp": "0.3.1" - } - }, - "node_modules/ansi-colors": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", - "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/ansi-escapes": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", - "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "type-fest": "^0.21.3" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ansi-regex": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", - "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/ansi-styles": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", - "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/anthropic-vertex-ai": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/anthropic-vertex-ai/-/anthropic-vertex-ai-1.0.2.tgz", - "integrity": "sha512-4YuK04KMmBGkx6fi2UjnHkE4mhaIov7tnT5La9+DMn/gw/NSOLZoWNUx+13VY3mkcaseKBMEn1DBzdXXJFIP7A==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "0.0.24", - "@ai-sdk/provider-utils": "1.0.20", - "google-auth-library": "^9.14.1" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.0.0" - } - }, - "node_modules/anthropic-vertex-ai/node_modules/@ai-sdk/provider": { - "version": "0.0.24", - "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-0.0.24.tgz", - "integrity": "sha512-XMsNGJdGO+L0cxhhegtqZ8+T6nn4EoShS819OvCgI2kLbYTIvk0GWFGD0AXJmxkxs3DrpsJxKAFukFR7bvTkgQ==", - "license": "Apache-2.0", - "dependencies": { - "json-schema": "0.4.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/anthropic-vertex-ai/node_modules/@ai-sdk/provider-utils": { - "version": "1.0.20", - "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-1.0.20.tgz", - "integrity": "sha512-ngg/RGpnA00eNOWEtXHenpX1MsM2QshQh4QJFjUfwcqHpM5kTfG7je7Rc3HcEDP+OkRVv2GF+X4fC1Vfcnl8Ow==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "0.0.24", - "eventsource-parser": "1.1.2", - "nanoid": "3.3.6", - "secure-json-parse": "2.7.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.0.0" - }, - "peerDependenciesMeta": { - "zod": { - "optional": true - } - } - }, - "node_modules/anthropic-vertex-ai/node_modules/nanoid": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz", - "integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/any-promise": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", - "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", - "dev": true, - "license": "MIT" - }, - "node_modules/anymatch": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", - "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", - "dev": true, - "license": "ISC", - "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/anymatch/node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/arg": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", - "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", - "dev": true, - "license": "MIT" - }, - "node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "license": "Python-2.0" - }, - "node_modules/aria-query": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.2.tgz", - "integrity": "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==", - "license": "Apache-2.0", - "peer": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/assertion-error": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", - "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", - "dev": true, - "license": "MIT", - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/ast-types": { - "version": "0.13.4", - "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.13.4.tgz", - "integrity": "sha512-x1FCFnFifvYDDzTaLII71vG5uvDwgtmDTEVWAxrgeiR8VjMONcCXJx7E+USjDtHlwFmt9MysbqgF9b9Vjr6w+w==", - "dev": true, - "license": "MIT", - "dependencies": { - "tslib": "^2.0.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/async": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", - "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", - "dev": true, - "license": "MIT" - }, - "node_modules/asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "license": "MIT" - }, - "node_modules/axios": { - "version": "1.7.9", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz", - "integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "follow-redirects": "^1.15.6", - "form-data": "^4.0.0", - "proxy-from-env": "^1.1.0" - } - }, - "node_modules/axobject-query": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz", - "integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==", - "license": "Apache-2.0", - "peer": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/babel-jest": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", - "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/transform": "^29.7.0", - "@types/babel__core": "^7.1.14", - "babel-plugin-istanbul": "^6.1.1", - "babel-preset-jest": "^29.6.3", - "chalk": "^4.0.0", - "graceful-fs": "^4.2.9", - "slash": "^3.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.8.0" - } - }, - "node_modules/babel-plugin-istanbul": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", - "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@istanbuljs/load-nyc-config": "^1.0.0", - "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-instrument": "^5.0.4", - "test-exclude": "^6.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/babel-plugin-istanbul/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/babel-plugin-istanbul/node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/babel-plugin-istanbul/node_modules/istanbul-lib-instrument": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", - "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@babel/core": "^7.12.3", - "@babel/parser": "^7.14.7", - "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-coverage": "^3.2.0", - "semver": "^6.3.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/babel-plugin-istanbul/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/babel-plugin-istanbul/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/babel-plugin-istanbul/node_modules/test-exclude": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", - "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", - "dev": true, - "license": "ISC", - "dependencies": { - "@istanbuljs/schema": "^0.1.2", - "glob": "^7.1.4", - "minimatch": "^3.0.4" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/babel-plugin-jest-hoist": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", - "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/template": "^7.3.3", - "@babel/types": "^7.3.3", - "@types/babel__core": "^7.1.14", - "@types/babel__traverse": "^7.0.6" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/babel-preset-current-node-syntax": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.1.0.tgz", - "integrity": "sha512-ldYss8SbBlWva1bs28q78Ju5Zq1F+8BrqBZZ0VFhLBvhh6lCpC2o3gDJi/5DRLs9FgYZCnmPYIVFU4lRXCkyUw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/plugin-syntax-async-generators": "^7.8.4", - "@babel/plugin-syntax-bigint": "^7.8.3", - "@babel/plugin-syntax-class-properties": "^7.12.13", - "@babel/plugin-syntax-class-static-block": "^7.14.5", - "@babel/plugin-syntax-import-attributes": "^7.24.7", - "@babel/plugin-syntax-import-meta": "^7.10.4", - "@babel/plugin-syntax-json-strings": "^7.8.3", - "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", - "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", - "@babel/plugin-syntax-numeric-separator": "^7.10.4", - "@babel/plugin-syntax-object-rest-spread": "^7.8.3", - "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", - "@babel/plugin-syntax-optional-chaining": "^7.8.3", - "@babel/plugin-syntax-private-property-in-object": "^7.14.5", - "@babel/plugin-syntax-top-level-await": "^7.14.5" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/babel-preset-jest": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", - "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", - "dev": true, - "license": "MIT", - "dependencies": { - "babel-plugin-jest-hoist": "^29.6.3", - "babel-preset-current-node-syntax": "^1.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "license": "MIT" - }, - "node_modules/base-x": { - "version": "3.0.10", - "resolved": "https://registry.npmjs.org/base-x/-/base-x-3.0.10.tgz", - "integrity": "sha512-7d0s06rR9rYaIWHkpfLIFICM/tkSVdoPC9qYAQRpxn9DdKNWNsKC0uk++akckyLq16Tx2WIinnZ6WRriAt6njQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "safe-buffer": "^5.0.1" - } - }, - "node_modules/base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/basic-ftp": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/basic-ftp/-/basic-ftp-5.0.5.tgz", - "integrity": "sha512-4Bcg1P8xhUuqcii/S0Z9wiHIrQVPMermM1any+MX5GeGD7faD3/msQUDGLol9wOcz4/jbg/WJnGqoJF6LiBdtg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/bigint-buffer": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/bigint-buffer/-/bigint-buffer-1.1.5.tgz", - "integrity": "sha512-trfYco6AoZ+rKhKnxA0hgX0HAbVP/s808/EuDSe2JDzUnCp/xAsli35Orvk67UrTEcwuxZqYZDmfA2RXJgxVvA==", - "dev": true, - "hasInstallScript": true, - "license": "Apache-2.0", - "dependencies": { - "bindings": "^1.3.0" - }, - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/bignumber.js": { - "version": "9.1.2", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.2.tgz", - "integrity": "sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==", - "license": "MIT", - "engines": { - "node": "*" - } - }, - "node_modules/binary-extensions": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", - "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/bindings": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "file-uri-to-path": "1.0.0" - } - }, - "node_modules/blessed": { - "version": "0.1.81", - "resolved": "https://registry.npmjs.org/blessed/-/blessed-0.1.81.tgz", - "integrity": "sha512-LoF5gae+hlmfORcG1M5+5XZi4LBmvlXTzwJWzUlPryN/SJdSflZvROM2TwkT0GMpq7oqT48NRd4GS7BiVBc5OQ==", - "dev": true, - "license": "MIT", - "bin": { - "blessed": "bin/tput.js" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/bn.js": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz", - "integrity": "sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/bodec": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/bodec/-/bodec-0.1.0.tgz", - "integrity": "sha512-Ylo+MAo5BDUq1KA3f3R/MFhh+g8cnHmo8bz3YPGhI1znrMaf77ol1sfvYJzsw3nTE+Y2GryfDxBaR+AqpAkEHQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/borsh": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/borsh/-/borsh-0.7.0.tgz", - "integrity": "sha512-CLCsZGIBCFnPtkNnieW/a8wmreDmfUtjU2m9yHrzPXIlNbqVs0AQrSatSG6vdNYUqdc83tkQi2eHfF98ubzQLA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "bn.js": "^5.2.0", - "bs58": "^4.0.0", - "text-encoding-utf-8": "^1.0.2" - } - }, - "node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, - "license": "MIT", - "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/browserslist": { - "version": "4.24.4", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz", - "integrity": "sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "caniuse-lite": "^1.0.30001688", - "electron-to-chromium": "^1.5.73", - "node-releases": "^2.0.19", - "update-browserslist-db": "^1.1.1" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } - }, - "node_modules/bs-logger": { - "version": "0.2.6", - "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", - "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", - "dev": true, - "license": "MIT", - "dependencies": { - "fast-json-stable-stringify": "2.x" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/bs58": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/bs58/-/bs58-4.0.1.tgz", - "integrity": "sha512-Ok3Wdf5vOIlBrgCvTq96gBkJw+JUEzdBgyaza5HLtPm7yTHkjRy8+JzNyHF7BHa0bNWOQIp3m5YF0nnFcOIKLw==", - "dev": true, - "license": "MIT", - "dependencies": { - "base-x": "^3.0.2" - } - }, - "node_modules/bser": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", - "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "node-int64": "^0.4.0" - } - }, - "node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, - "node_modules/buffer-equal-constant-time": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", - "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==", - "license": "BSD-3-Clause" - }, - "node_modules/buffer-from": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", - "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/bufferutil": { - "version": "4.0.9", - "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.9.tgz", - "integrity": "sha512-WDtdLmJvAuNNPzByAYpRo2rF1Mmradw6gvWsQKf63476DDXmomT9zUiGypLcG4ibIM67vhAj8jJRdbmEws2Aqw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "dependencies": { - "node-gyp-build": "^4.3.0" - }, - "engines": { - "node": ">=6.14.2" - } - }, - "node_modules/bundle-require": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/bundle-require/-/bundle-require-5.1.0.tgz", - "integrity": "sha512-3WrrOuZiyaaZPWiEt4G3+IffISVC9HYlWueJEBWED4ZH4aIAC2PnkdnuRrR94M+w6yGWn4AglWtJtBI8YqvgoA==", - "dev": true, - "license": "MIT", - "dependencies": { - "load-tsconfig": "^0.2.3" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "peerDependencies": { - "esbuild": ">=0.18" - } - }, - "node_modules/cac": { - "version": "6.7.14", - "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", - "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/camelcase": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", - "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/caniuse-lite": { - "version": "1.0.30001692", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001692.tgz", - "integrity": "sha512-A95VKan0kdtrsnMubMKxEKUKImOPSuCpYgxSQBo036P5YYgVIcOYJEgt/txJWqObiRQeISNCfef9nvlQ0vbV7A==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "CC-BY-4.0" - }, - "node_modules/chai": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/chai/-/chai-5.1.2.tgz", - "integrity": "sha512-aGtmf24DW6MLHHG5gCx4zaI3uBq3KRtxeVs0DjFH6Z0rDNbsvTxFASFvdj79pxjxZ8/5u3PIiN3IwEIQkiiuPw==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "assertion-error": "^2.0.1", - "check-error": "^2.1.1", - "deep-eql": "^5.0.1", - "loupe": "^3.1.0", - "pathval": "^2.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/chalk/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/char-regex": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", - "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - } - }, - "node_modules/charm": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/charm/-/charm-0.1.2.tgz", - "integrity": "sha512-syedaZ9cPe7r3hoQA9twWYKu5AIyCswN5+szkmPBe9ccdLrj4bYaCnLVPTLd2kgVRc7+zoX4tyPgRnFKCj5YjQ==", - "dev": true, - "license": "MIT/X11" - }, - "node_modules/check-error": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", - "integrity": "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==", - "dev": true, - "license": "MIT", - "peer": true, - "engines": { - "node": ">= 16" - } - }, - "node_modules/chokidar": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", - "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", - "dev": true, - "license": "MIT", - "dependencies": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" - }, - "engines": { - "node": ">= 8.10.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - } - }, - "node_modules/chokidar/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/ci-info": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", - "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/sibiraj-s" - } - ], - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/cjs-module-lexer": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.1.tgz", - "integrity": "sha512-cuSVIHi9/9E/+821Qjdvngor+xpnlwnuwIyZOaLmHBVdXL+gP+I6QQB9VkO7RI77YIcTV+S1W9AreJ5eN63JBA==", - "dev": true, - "license": "MIT" - }, - "node_modules/cli-cursor": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz", - "integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==", - "dev": true, - "license": "MIT", - "dependencies": { - "restore-cursor": "^5.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-tableau": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/cli-tableau/-/cli-tableau-2.0.1.tgz", - "integrity": "sha512-he+WTicka9cl0Fg/y+YyxcN6/bfQ/1O3QmgxRXDhABKqLzvoOSM4fMzp39uMyLBulAFuywD2N7UaoQE7WaADxQ==", - "dev": true, - "dependencies": { - "chalk": "3.0.0" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/cli-tableau/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/cli-tableau/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cli-truncate": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-4.0.0.tgz", - "integrity": "sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA==", - "dev": true, - "license": "MIT", - "dependencies": { - "slice-ansi": "^5.0.0", - "string-width": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-truncate/node_modules/emoji-regex": { - "version": "10.4.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", - "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", - "dev": true, - "license": "MIT" - }, - "node_modules/cli-truncate/node_modules/string-width": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", - "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^10.3.0", - "get-east-asian-width": "^1.0.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/cliui/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/cliui/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/cliui/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT" - }, - "node_modules/cliui/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/cliui/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cliui/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cliui/node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/clsx": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", - "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", - "license": "MIT", - "peer": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/co": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", - "dev": true, - "license": "MIT", - "engines": { - "iojs": ">= 1.0.0", - "node": ">= 0.12.0" - } - }, - "node_modules/collect-v8-coverage": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz", - "integrity": "sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==", - "dev": true, - "license": "MIT" - }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "license": "MIT" - }, - "node_modules/colorette": { - "version": "2.0.20", - "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", - "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", - "dev": true, - "license": "MIT" - }, - "node_modules/combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "license": "MIT", - "dependencies": { - "delayed-stream": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/commondir": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", - "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==", - "dev": true, - "license": "MIT" - }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "dev": true, - "license": "MIT" - }, - "node_modules/consola": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.0.tgz", - "integrity": "sha512-EiPU8G6dQG0GFHNR8ljnZFki/8a+cQwEQ+7wpxdChl02Q8HXlwEZWD5lqAF8vC2sEC3Tehr8hy7vErz88LHyUA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.18.0 || >=16.10.0" - } - }, - "node_modules/convert-source-map": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", - "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", - "dev": true, - "license": "MIT" - }, - "node_modules/create-jest": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz", - "integrity": "sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "chalk": "^4.0.0", - "exit": "^0.1.2", - "graceful-fs": "^4.2.9", - "jest-config": "^29.7.0", - "jest-util": "^29.7.0", - "prompts": "^2.0.1" - }, - "bin": { - "create-jest": "bin/create-jest.js" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/create-require": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", - "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/croner": { - "version": "4.1.97", - "resolved": "https://registry.npmjs.org/croner/-/croner-4.1.97.tgz", - "integrity": "sha512-/f6gpQuxDaqXu+1kwQYSckUglPaOrHdbIlBAu0YuW8/Cdb45XwXYNUBXg3r/9Mo6n540Kn/smKcZWko5x99KrQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/csstype": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", - "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", - "license": "MIT", - "peer": true - }, - "node_modules/culvert": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/culvert/-/culvert-0.1.2.tgz", - "integrity": "sha512-yi1x3EAWKjQTreYWeSd98431AV+IEE0qoDyOoaHJ7KJ21gv6HtBXHVLX74opVSGqcR8/AbjJBHAHpcOy2bj5Gg==", - "dev": true, - "license": "MIT" - }, - "node_modules/data-uri-to-buffer": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-6.0.2.tgz", - "integrity": "sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14" - } - }, - "node_modules/dayjs": { - "version": "1.11.13", - "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.13.tgz", - "integrity": "sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==", - "dev": true, - "license": "MIT" - }, - "node_modules/debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", - "license": "MIT", - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/dedent": { - "version": "1.5.3", - "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.5.3.tgz", - "integrity": "sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "babel-plugin-macros": "^3.1.0" - }, - "peerDependenciesMeta": { - "babel-plugin-macros": { - "optional": true - } - } - }, - "node_modules/deep-eql": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", - "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", - "dev": true, - "license": "MIT", - "peer": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/deep-is": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/deepmerge": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", - "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/degenerator": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/degenerator/-/degenerator-5.0.1.tgz", - "integrity": "sha512-TllpMR/t0M5sqCXfj85i4XaAzxmS5tVA16dqvdkMwGmzI+dXLXnw3J+3Vdv7VKw+ThlTMboK6i9rnZ6Nntj5CQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ast-types": "^0.13.4", - "escodegen": "^2.1.0", - "esprima": "^4.0.1" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/delay": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/delay/-/delay-5.0.0.tgz", - "integrity": "sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "license": "MIT", - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/dequal": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", - "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/detect-newline": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", - "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/diff": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", - "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.3.1" - } - }, - "node_modules/diff-match-patch": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.5.tgz", - "integrity": "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==", - "license": "Apache-2.0" - }, - "node_modules/diff-sequences": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", - "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/dotenv": { - "version": "16.4.5", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://dotenvx.com" - } - }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "license": "MIT" - }, - "node_modules/ecdsa-sig-formatter": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", - "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", - "license": "Apache-2.0", - "dependencies": { - "safe-buffer": "^5.0.1" - } - }, - "node_modules/ejs": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", - "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "jake": "^10.8.5" - }, - "bin": { - "ejs": "bin/cli.js" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/electron-to-chromium": { - "version": "1.5.80", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.80.tgz", - "integrity": "sha512-LTrKpW0AqIuHwmlVNV+cjFYTnXtM9K37OGhpe0ZI10ScPSxqVSryZHIY3WnCS5NSYbBODRTZyhRMS2h5FAEqAw==", - "dev": true, - "license": "ISC" - }, - "node_modules/emittery": { - "version": "0.13.1", - "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz", - "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sindresorhus/emittery?sponsor=1" - } - }, - "node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "license": "MIT" - }, - "node_modules/enquirer": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", - "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-colors": "^4.1.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/entities": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", - "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", - "license": "BSD-2-Clause", - "peer": true, - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/environment": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz", - "integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-arrayish": "^0.2.1" - } - }, - "node_modules/es-module-lexer": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.6.0.tgz", - "integrity": "sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/es6-promise": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", - "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==", - "dev": true, - "license": "MIT" - }, - "node_modules/es6-promisify": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", - "integrity": "sha512-C+d6UdsYDk0lMebHNR4S2NybQMMngAOnOwYBQjTOiv0MkoJMP0Myw2mgpDLBcpfCmRLxyFqYhS/CfOENq4SJhQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "es6-promise": "^4.0.3" - } - }, - "node_modules/esbuild": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.24.2.tgz", - "integrity": "sha512-+9egpBW8I3CD5XPe0n6BfT5fxLzxrlDzqydF3aviG+9ni1lDC/OvMHcxqEFV0+LANZG5R1bFMWfUrjVsdwxJvA==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.24.2", - "@esbuild/android-arm": "0.24.2", - "@esbuild/android-arm64": "0.24.2", - "@esbuild/android-x64": "0.24.2", - "@esbuild/darwin-arm64": "0.24.2", - "@esbuild/darwin-x64": "0.24.2", - "@esbuild/freebsd-arm64": "0.24.2", - "@esbuild/freebsd-x64": "0.24.2", - "@esbuild/linux-arm": "0.24.2", - "@esbuild/linux-arm64": "0.24.2", - "@esbuild/linux-ia32": "0.24.2", - "@esbuild/linux-loong64": "0.24.2", - "@esbuild/linux-mips64el": "0.24.2", - "@esbuild/linux-ppc64": "0.24.2", - "@esbuild/linux-riscv64": "0.24.2", - "@esbuild/linux-s390x": "0.24.2", - "@esbuild/linux-x64": "0.24.2", - "@esbuild/netbsd-arm64": "0.24.2", - "@esbuild/netbsd-x64": "0.24.2", - "@esbuild/openbsd-arm64": "0.24.2", - "@esbuild/openbsd-x64": "0.24.2", - "@esbuild/sunos-x64": "0.24.2", - "@esbuild/win32-arm64": "0.24.2", - "@esbuild/win32-ia32": "0.24.2", - "@esbuild/win32-x64": "0.24.2" - } - }, - "node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/escodegen": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", - "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "esprima": "^4.0.1", - "estraverse": "^5.2.0", - "esutils": "^2.0.2" - }, - "bin": { - "escodegen": "bin/escodegen.js", - "esgenerate": "bin/esgenerate.js" - }, - "engines": { - "node": ">=6.0" - }, - "optionalDependencies": { - "source-map": "~0.6.1" - } - }, - "node_modules/eslint": { - "version": "9.18.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.18.0.tgz", - "integrity": "sha512-+waTfRWQlSbpt3KWE+CjrPPYnbq9kfZIYUqapc0uBXyjTp8aYXZDsUH16m39Ryq3NjAVP4tjuF7KaukeqoCoaA==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.12.1", - "@eslint/config-array": "^0.19.0", - "@eslint/core": "^0.10.0", - "@eslint/eslintrc": "^3.2.0", - "@eslint/js": "9.18.0", - "@eslint/plugin-kit": "^0.2.5", - "@humanfs/node": "^0.16.6", - "@humanwhocodes/module-importer": "^1.0.1", - "@humanwhocodes/retry": "^0.4.1", - "@types/estree": "^1.0.6", - "@types/json-schema": "^7.0.15", - "ajv": "^6.12.4", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.6", - "debug": "^4.3.2", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^8.2.0", - "eslint-visitor-keys": "^4.2.0", - "espree": "^10.3.0", - "esquery": "^1.5.0", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^8.0.0", - "find-up": "^5.0.0", - "glob-parent": "^6.0.2", - "ignore": "^5.2.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.1.2", - "natural-compare": "^1.4.0", - "optionator": "^0.9.3" - }, - "bin": { - "eslint": "bin/eslint.js" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://eslint.org/donate" - }, - "peerDependencies": { - "jiti": "*" - }, - "peerDependenciesMeta": { - "jiti": { - "optional": true - } - } - }, - "node_modules/eslint-scope": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.2.0.tgz", - "integrity": "sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A==", - "dev": true, - "license": "BSD-2-Clause", - "peer": true, - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint-visitor-keys": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", - "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint/node_modules/@eslint/js": { - "version": "9.18.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.18.0.tgz", - "integrity": "sha512-fK6L7rxcq6/z+AaQMtiFTkvbHkBLNlwyRxHpKawP0x3u9+NC6MQTnFW+AdpwC6gfHTW0051cokQgtTN2FqlxQA==", - "dev": true, - "license": "MIT", - "peer": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - } - }, - "node_modules/eslint/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/eslint/node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", - "dev": true, - "license": "Apache-2.0", - "peer": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "peer": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/esm-env": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/esm-env/-/esm-env-1.2.2.tgz", - "integrity": "sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA==", - "license": "MIT", - "peer": true - }, - "node_modules/espree": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz", - "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==", - "dev": true, - "license": "BSD-2-Clause", - "peer": true, - "dependencies": { - "acorn": "^8.14.0", - "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^4.2.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/espree/node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", - "dev": true, - "license": "Apache-2.0", - "peer": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true, - "license": "BSD-2-Clause", - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/esquery": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", - "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", - "dev": true, - "license": "BSD-3-Clause", - "peer": true, - "dependencies": { - "estraverse": "^5.1.0" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/esrap": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/esrap/-/esrap-1.4.2.tgz", - "integrity": "sha512-FhVlJzvTw7ZLxYZ7RyHwQCFE64dkkpzGNNnphaGCLwjqGk1SQcqzbgdx9FowPCktx6NOSHkzvcZ3vsvdH54YXA==", - "license": "MIT", - "peer": true, - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.4.15" - } - }, - "node_modules/esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "dev": true, - "license": "BSD-2-Clause", - "peer": true, - "dependencies": { - "estraverse": "^5.2.0" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=4.0" - } - }, - "node_modules/estree-walker": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", - "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", - "license": "MIT" - }, - "node_modules/esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/event-target-shim": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", - "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/eventemitter2": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-5.0.1.tgz", - "integrity": "sha512-5EM1GHXycJBS6mauYAbVKT1cVs7POKWb2NXD4Vyt8dDqeZa7LaDK1/sjtL+Zb0lzTpSNil4596Dyu97hz37QLg==", - "dev": true, - "license": "MIT" - }, - "node_modules/eventemitter3": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", - "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", - "dev": true, - "license": "MIT" - }, - "node_modules/eventsource-parser": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-1.1.2.tgz", - "integrity": "sha512-v0eOBUbiaFojBu2s2NPBfYUoRR9GjcDNvCXVaqEf5vVfpIAh9f8RCo4vXTP8c63QRKCFwoLpMpTdPwwhEKVgzA==", - "license": "MIT", - "engines": { - "node": ">=14.18" - } - }, - "node_modules/execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/execa/node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/exit": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", - "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", - "dev": true, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/expect": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz", - "integrity": "sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/expect-utils": "^29.7.0", - "jest-get-type": "^29.6.3", - "jest-matcher-utils": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/expect-type": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.1.0.tgz", - "integrity": "sha512-bFi65yM+xZgk+u/KRIpekdSYkTB5W1pEf0Lt8Q8Msh7b+eQ7LXVtIB1Bkm4fvclDEL1b2CZkMhv2mOeF8tMdkA==", - "dev": true, - "license": "Apache-2.0", - "peer": true, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", - "license": "MIT" - }, - "node_modules/extrareqp2": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/extrareqp2/-/extrareqp2-1.0.0.tgz", - "integrity": "sha512-Gum0g1QYb6wpPJCVypWP3bbIuaibcFiJcpuPM10YSXp/tzqi84x9PJageob+eN4xVRIOto4wjSGNLyMD54D2xA==", - "dev": true, - "license": "MIT", - "dependencies": { - "follow-redirects": "^1.14.0" - } - }, - "node_modules/eyes": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", - "integrity": "sha512-GipyPsXO1anza0AOZdy69Im7hGFCNB7Y/NGjDlZGJ3GJJLtwNSb2vrzYrTYJRrRloVx7pl+bhUaTB8yiccPvFQ==", - "dev": true, - "engines": { - "node": "> 0.1.90" - } - }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/fast-glob": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", - "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.8" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fast-glob/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/fast-json-patch": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/fast-json-patch/-/fast-json-patch-3.1.1.tgz", - "integrity": "sha512-vf6IHUX2SBcA+5/+4883dsIjpBTqmfBjmYiWK1savxQmFk4JfBMLa7ynTYOs1Rolp/T1betJxHiGD3g1Mn8lUQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/fast-stable-stringify": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fast-stable-stringify/-/fast-stable-stringify-1.0.0.tgz", - "integrity": "sha512-wpYMUmFu5f00Sm0cj2pfivpmawLZ0NKdviQ4w9zJeR8JVtOpOxHmLaJuj0vxvGqMJQWyP/COUkF75/57OKyRag==", - "dev": true, - "license": "MIT" - }, - "node_modules/fastembed": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/fastembed/-/fastembed-1.14.1.tgz", - "integrity": "sha512-Y14v+FWZwjNUpQ7mRGYu4N5yF+hZkF7zqzPWzzLbwdIEtYsHy0DSpiVJ+Fg6Oi1fQjrBKASQt0hdSMSjw1/Wtw==", - "dependencies": { - "@anush008/tokenizers": "^0.0.0", - "onnxruntime-node": "1.15.1", - "progress": "^2.0.3", - "tar": "^6.2.0" - } - }, - "node_modules/fastestsmallesttextencoderdecoder": { - "version": "1.0.22", - "resolved": "https://registry.npmjs.org/fastestsmallesttextencoderdecoder/-/fastestsmallesttextencoderdecoder-1.0.22.tgz", - "integrity": "sha512-Pb8d48e+oIuY4MaM64Cd7OW1gt4nxCHs7/ddPPZ/Ic3sg8yVGM7O9wDvZ7us6ScaUupzM+pfBolwtYhN1IxBIw==", - "license": "CC0-1.0" - }, - "node_modules/fastq": { - "version": "1.18.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.18.0.tgz", - "integrity": "sha512-QKHXPW0hD8g4UET03SdOdunzSouc9N4AuHdsX8XNcTsuz+yYFILVNIX4l9yHABMhiEI9Db0JTTIpu0wB+Y1QQw==", - "dev": true, - "license": "ISC", - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/fb-watchman": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", - "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "bser": "2.1.1" - } - }, - "node_modules/fclone": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/fclone/-/fclone-1.0.11.tgz", - "integrity": "sha512-GDqVQezKzRABdeqflsgMr7ktzgF9CyS+p2oe0jJqUY6izSSbhPIQJDpoU4PtGcD7VPM9xh/dVrTu6z1nwgmEGw==", - "dev": true, - "license": "MIT" - }, - "node_modules/fdir": { - "version": "6.4.2", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.2.tgz", - "integrity": "sha512-KnhMXsKSPZlAhp7+IjUkRZKPb4fUyccpDrdFXbi4QL1qkmFh9kVY09Yox+n4MaOb3lHZ1Tv829C3oaaXoMYPDQ==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "picomatch": "^3 || ^4" - }, - "peerDependenciesMeta": { - "picomatch": { - "optional": true - } - } - }, - "node_modules/file-entry-cache": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", - "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "flat-cache": "^4.0.0" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/file-uri-to-path": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", - "dev": true, - "license": "MIT" - }, - "node_modules/filelist": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", - "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "minimatch": "^5.0.1" - } - }, - "node_modules/filelist/node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, - "license": "MIT", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/flat-cache": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", - "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "flatted": "^3.2.9", - "keyv": "^4.5.4" - }, - "engines": { - "node": ">=16" - } - }, - "node_modules/flatted": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.2.tgz", - "integrity": "sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA==", - "dev": true, - "license": "ISC", - "peer": true - }, - "node_modules/follow-redirects": { - "version": "1.15.9", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", - "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", - "devOptional": true, - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - "license": "MIT", - "engines": { - "node": ">=4.0" - }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } - } - }, - "node_modules/foreground-child": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", - "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", - "license": "ISC", - "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/form-data": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz", - "integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==", - "license": "MIT", - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/form-data-encoder": { - "version": "1.7.2", - "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.2.tgz", - "integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==", - "license": "MIT" - }, - "node_modules/formdata-node": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.4.1.tgz", - "integrity": "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==", - "license": "MIT", - "dependencies": { - "node-domexception": "1.0.0", - "web-streams-polyfill": "4.0.0-beta.3" - }, - "engines": { - "node": ">= 12.20" - } - }, - "node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/fs-minipass/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "dev": true, - "license": "ISC" - }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/gaxios": { - "version": "6.7.1", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.7.1.tgz", - "integrity": "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==", - "license": "Apache-2.0", - "dependencies": { - "extend": "^3.0.2", - "https-proxy-agent": "^7.0.1", - "is-stream": "^2.0.0", - "node-fetch": "^2.6.9", - "uuid": "^9.0.1" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/gaxios/node_modules/uuid": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", - "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/gcp-metadata": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.0.tgz", - "integrity": "sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==", - "license": "Apache-2.0", - "dependencies": { - "gaxios": "^6.0.0", - "json-bigint": "^1.0.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "dev": true, - "license": "ISC", - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/get-east-asian-width": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.3.0.tgz", - "integrity": "sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/get-package-type": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", - "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/get-uri": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/get-uri/-/get-uri-6.0.4.tgz", - "integrity": "sha512-E1b1lFFLvLgak2whF2xDBcOy6NLVGZBqqjJjsIhvopKfWWEi64pLVTWWehV8KlLerZkfNTA95sTe2OdJKm1OzQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "basic-ftp": "^5.0.2", - "data-uri-to-buffer": "^6.0.2", - "debug": "^4.3.4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/git-node-fs": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/git-node-fs/-/git-node-fs-1.0.0.tgz", - "integrity": "sha512-bLQypt14llVXBg0S0u8q8HmU7g9p3ysH+NvVlae5vILuUvs759665HvmR5+wb04KjHyjFcDRxdYb4kyNnluMUQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/git-sha1": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/git-sha1/-/git-sha1-0.1.2.tgz", - "integrity": "sha512-2e/nZezdVlyCopOCYHeW0onkbZg7xP1Ad6pndPy1rCygeRykefUS6r7oA5cJRGEFvseiaz5a/qUHFVX1dd6Isg==", - "dev": true, - "license": "MIT" - }, - "node_modules/glob": { - "version": "11.0.0", - "license": "ISC", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^4.0.1", - "minimatch": "^10.0.0", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^2.0.0" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dev": true, - "license": "ISC", - "peer": true, - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/glob/node_modules/jackspeak": { - "version": "4.0.2", - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob/node_modules/minimatch": { - "version": "10.0.1", - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob/node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/glob/node_modules/path-scurry": { - "version": "2.0.0", - "license": "BlueOak-1.0.0", - "dependencies": { - "lru-cache": "^11.0.0", - "minipass": "^7.1.2" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/globals": { - "version": "14.0.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", - "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", - "dev": true, - "license": "MIT", - "peer": true, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/google-auth-library": { - "version": "9.15.0", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.0.tgz", - "integrity": "sha512-7ccSEJFDFO7exFbO6NRyC+xH8/mZ1GZGG2xxx9iHxZWcjUjJpjWxIMw3cofAKcueZ6DATiukmmprD7yavQHOyQ==", - "license": "Apache-2.0", - "dependencies": { - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "gaxios": "^6.1.1", - "gcp-metadata": "^6.1.0", - "gtoken": "^7.0.0", - "jws": "^4.0.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/graphemer": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", - "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", - "dev": true, - "license": "MIT" - }, - "node_modules/gtoken": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", - "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", - "license": "MIT", - "dependencies": { - "gaxios": "^6.0.0", - "jws": "^4.0.0" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/handlebars": { - "version": "4.7.8", - "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", - "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==", - "license": "MIT", - "dependencies": { - "minimist": "^1.2.5", - "neo-async": "^2.6.2", - "source-map": "^0.6.1", - "wordwrap": "^1.0.0" - }, - "bin": { - "handlebars": "bin/handlebars" - }, - "engines": { - "node": ">=0.4.7" - }, - "optionalDependencies": { - "uglify-js": "^3.1.4" - } - }, - "node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/html-escaper": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", - "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", - "dev": true, - "license": "MIT" - }, - "node_modules/http-proxy-agent": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", - "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "^7.1.0", - "debug": "^4.3.4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/https-proxy-agent": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", - "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", - "license": "MIT", - "dependencies": { - "agent-base": "^7.1.2", - "debug": "4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/human-signals": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=10.17.0" - } - }, - "node_modules/humanize-ms": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", - "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", - "license": "MIT", - "dependencies": { - "ms": "^2.0.0" - } - }, - "node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "dev": true, - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/ieee754": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "BSD-3-Clause" - }, - "node_modules/ignore": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/ignore-by-default": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz", - "integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==", - "dev": true, - "license": "ISC" - }, - "node_modules/import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/import-local": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", - "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", - "dev": true, - "license": "MIT", - "dependencies": { - "pkg-dir": "^4.2.0", - "resolve-cwd": "^3.0.0" - }, - "bin": { - "import-local-fixture": "fixtures/cli.js" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8.19" - } - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "dev": true, - "license": "ISC", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/ini": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", - "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", - "dev": true, - "license": "ISC" - }, - "node_modules/ip-address": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", - "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", - "dev": true, - "license": "MIT", - "dependencies": { - "jsbn": "1.1.0", - "sprintf-js": "^1.1.3" - }, - "engines": { - "node": ">= 12" - } - }, - "node_modules/ip-address/node_modules/sprintf-js": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", - "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", - "dev": true, - "license": "BSD-3-Clause" - }, - "node_modules/is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", - "dev": true, - "license": "MIT" - }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "dev": true, - "license": "MIT", - "dependencies": { - "binary-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-core-module": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", - "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", - "dev": true, - "license": "MIT", - "dependencies": { - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-4.0.0.tgz", - "integrity": "sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-generator-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", - "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-module": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz", - "integrity": "sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==", - "dev": true, - "license": "MIT" - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-reference": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-1.2.1.tgz", - "integrity": "sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/estree": "*" - } - }, - "node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "license": "ISC" - }, - "node_modules/isomorphic-ws": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz", - "integrity": "sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "ws": "*" - } - }, - "node_modules/istanbul-lib-coverage": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", - "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=8" - } - }, - "node_modules/istanbul-lib-instrument": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", - "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@babel/core": "^7.23.9", - "@babel/parser": "^7.23.9", - "@istanbuljs/schema": "^0.1.3", - "istanbul-lib-coverage": "^3.2.0", - "semver": "^7.5.4" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/istanbul-lib-report": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", - "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "istanbul-lib-coverage": "^3.0.0", - "make-dir": "^4.0.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/istanbul-lib-source-maps": { - "version": "5.0.6", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.23", - "debug": "^4.1.1", - "istanbul-lib-coverage": "^3.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/istanbul-reports": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", - "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "html-escaper": "^2.0.0", - "istanbul-lib-report": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/jake": { - "version": "10.9.2", - "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz", - "integrity": "sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "async": "^3.2.3", - "chalk": "^4.0.2", - "filelist": "^1.0.4", - "minimatch": "^3.1.2" - }, - "bin": { - "jake": "bin/cli.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/jake/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/jake/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/jayson": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/jayson/-/jayson-4.1.3.tgz", - "integrity": "sha512-LtXh5aYZodBZ9Fc3j6f2w+MTNcnxteMOrb+QgIouguGOulWi0lieEkOUg+HkjjFs0DGoWDds6bi4E9hpNFLulQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/connect": "^3.4.33", - "@types/node": "^12.12.54", - "@types/ws": "^7.4.4", - "commander": "^2.20.3", - "delay": "^5.0.0", - "es6-promisify": "^5.0.0", - "eyes": "^0.1.8", - "isomorphic-ws": "^4.0.1", - "json-stringify-safe": "^5.0.1", - "JSONStream": "^1.3.5", - "uuid": "^8.3.2", - "ws": "^7.5.10" - }, - "bin": { - "jayson": "bin/jayson.js" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/jayson/node_modules/@types/node": { - "version": "12.20.55", - "resolved": "https://registry.npmjs.org/@types/node/-/node-12.20.55.tgz", - "integrity": "sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/jayson/node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true, - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/jest": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", - "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/core": "^29.7.0", - "@jest/types": "^29.6.3", - "import-local": "^3.0.2", - "jest-cli": "^29.7.0" - }, - "bin": { - "jest": "bin/jest.js" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" - }, - "peerDependenciesMeta": { - "node-notifier": { - "optional": true - } - } - }, - "node_modules/jest-changed-files": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz", - "integrity": "sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==", - "dev": true, - "license": "MIT", - "dependencies": { - "execa": "^5.0.0", - "jest-util": "^29.7.0", - "p-limit": "^3.1.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-circus": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", - "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/expect": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "chalk": "^4.0.0", - "co": "^4.6.0", - "dedent": "^1.0.0", - "is-generator-fn": "^2.0.0", - "jest-each": "^29.7.0", - "jest-matcher-utils": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-runtime": "^29.7.0", - "jest-snapshot": "^29.7.0", - "jest-util": "^29.7.0", - "p-limit": "^3.1.0", - "pretty-format": "^29.7.0", - "pure-rand": "^6.0.0", - "slash": "^3.0.0", - "stack-utils": "^2.0.3" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-cli": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz", - "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/core": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/types": "^29.6.3", - "chalk": "^4.0.0", - "create-jest": "^29.7.0", - "exit": "^0.1.2", - "import-local": "^3.0.2", - "jest-config": "^29.7.0", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "yargs": "^17.3.1" - }, - "bin": { - "jest": "bin/jest.js" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" - }, - "peerDependenciesMeta": { - "node-notifier": { - "optional": true - } - } - }, - "node_modules/jest-config": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz", - "integrity": "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.11.6", - "@jest/test-sequencer": "^29.7.0", - "@jest/types": "^29.6.3", - "babel-jest": "^29.7.0", - "chalk": "^4.0.0", - "ci-info": "^3.2.0", - "deepmerge": "^4.2.2", - "glob": "^7.1.3", - "graceful-fs": "^4.2.9", - "jest-circus": "^29.7.0", - "jest-environment-node": "^29.7.0", - "jest-get-type": "^29.6.3", - "jest-regex-util": "^29.6.3", - "jest-resolve": "^29.7.0", - "jest-runner": "^29.7.0", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "micromatch": "^4.0.4", - "parse-json": "^5.2.0", - "pretty-format": "^29.7.0", - "slash": "^3.0.0", - "strip-json-comments": "^3.1.1" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "@types/node": "*", - "ts-node": ">=9.0.0" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "ts-node": { - "optional": true - } - } - }, - "node_modules/jest-config/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/jest-config/node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/jest-config/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/jest-diff": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz", - "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^4.0.0", - "diff-sequences": "^29.6.3", - "jest-get-type": "^29.6.3", - "pretty-format": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-docblock": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz", - "integrity": "sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==", - "dev": true, - "license": "MIT", - "dependencies": { - "detect-newline": "^3.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-each": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz", - "integrity": "sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "chalk": "^4.0.0", - "jest-get-type": "^29.6.3", - "jest-util": "^29.7.0", - "pretty-format": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-environment-node": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz", - "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/fake-timers": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "jest-mock": "^29.7.0", - "jest-util": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-get-type": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz", - "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-haste-map": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", - "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "@types/graceful-fs": "^4.1.3", - "@types/node": "*", - "anymatch": "^3.0.3", - "fb-watchman": "^2.0.0", - "graceful-fs": "^4.2.9", - "jest-regex-util": "^29.6.3", - "jest-util": "^29.7.0", - "jest-worker": "^29.7.0", - "micromatch": "^4.0.4", - "walker": "^1.0.8" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "optionalDependencies": { - "fsevents": "^2.3.2" - } - }, - "node_modules/jest-leak-detector": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", - "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==", - "dev": true, - "license": "MIT", - "dependencies": { - "jest-get-type": "^29.6.3", - "pretty-format": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-matcher-utils": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz", - "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^4.0.0", - "jest-diff": "^29.7.0", - "jest-get-type": "^29.6.3", - "pretty-format": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-message-util": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz", - "integrity": "sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.12.13", - "@jest/types": "^29.6.3", - "@types/stack-utils": "^2.0.0", - "chalk": "^4.0.0", - "graceful-fs": "^4.2.9", - "micromatch": "^4.0.4", - "pretty-format": "^29.7.0", - "slash": "^3.0.0", - "stack-utils": "^2.0.3" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-mock": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz", - "integrity": "sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "@types/node": "*", - "jest-util": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-pnp-resolver": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", - "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - }, - "peerDependencies": { - "jest-resolve": "*" - }, - "peerDependenciesMeta": { - "jest-resolve": { - "optional": true - } - } - }, - "node_modules/jest-regex-util": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", - "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-resolve": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz", - "integrity": "sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^4.0.0", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", - "jest-pnp-resolver": "^1.2.2", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "resolve": "^1.20.0", - "resolve.exports": "^2.0.0", - "slash": "^3.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-resolve-dependencies": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz", - "integrity": "sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==", - "dev": true, - "license": "MIT", - "dependencies": { - "jest-regex-util": "^29.6.3", - "jest-snapshot": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-runner": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz", - "integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/console": "^29.7.0", - "@jest/environment": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "chalk": "^4.0.0", - "emittery": "^0.13.1", - "graceful-fs": "^4.2.9", - "jest-docblock": "^29.7.0", - "jest-environment-node": "^29.7.0", - "jest-haste-map": "^29.7.0", - "jest-leak-detector": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-resolve": "^29.7.0", - "jest-runtime": "^29.7.0", - "jest-util": "^29.7.0", - "jest-watcher": "^29.7.0", - "jest-worker": "^29.7.0", - "p-limit": "^3.1.0", - "source-map-support": "0.5.13" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-runtime": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz", - "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/fake-timers": "^29.7.0", - "@jest/globals": "^29.7.0", - "@jest/source-map": "^29.6.3", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "chalk": "^4.0.0", - "cjs-module-lexer": "^1.0.0", - "collect-v8-coverage": "^1.0.0", - "glob": "^7.1.3", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-mock": "^29.7.0", - "jest-regex-util": "^29.6.3", - "jest-resolve": "^29.7.0", - "jest-snapshot": "^29.7.0", - "jest-util": "^29.7.0", - "slash": "^3.0.0", - "strip-bom": "^4.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-runtime/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/jest-runtime/node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/jest-runtime/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/jest-snapshot": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz", - "integrity": "sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.11.6", - "@babel/generator": "^7.7.2", - "@babel/plugin-syntax-jsx": "^7.7.2", - "@babel/plugin-syntax-typescript": "^7.7.2", - "@babel/types": "^7.3.3", - "@jest/expect-utils": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "babel-preset-current-node-syntax": "^1.0.0", - "chalk": "^4.0.0", - "expect": "^29.7.0", - "graceful-fs": "^4.2.9", - "jest-diff": "^29.7.0", - "jest-get-type": "^29.6.3", - "jest-matcher-utils": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0", - "natural-compare": "^1.4.0", - "pretty-format": "^29.7.0", - "semver": "^7.5.3" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-util": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", - "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "@types/node": "*", - "chalk": "^4.0.0", - "ci-info": "^3.2.0", - "graceful-fs": "^4.2.9", - "picomatch": "^2.2.3" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-util/node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/jest-validate": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz", - "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "camelcase": "^6.2.0", - "chalk": "^4.0.0", - "jest-get-type": "^29.6.3", - "leven": "^3.1.0", - "pretty-format": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-watcher": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz", - "integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/test-result": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "ansi-escapes": "^4.2.1", - "chalk": "^4.0.0", - "emittery": "^0.13.1", - "jest-util": "^29.7.0", - "string-length": "^4.0.1" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-worker": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", - "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*", - "jest-util": "^29.7.0", - "merge-stream": "^2.0.0", - "supports-color": "^8.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-worker/node_modules/supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/supports-color?sponsor=1" - } - }, - "node_modules/joycon": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz", - "integrity": "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - } - }, - "node_modules/js-git": { - "version": "0.7.8", - "resolved": "https://registry.npmjs.org/js-git/-/js-git-0.7.8.tgz", - "integrity": "sha512-+E5ZH/HeRnoc/LW0AmAyhU+mNcWBzAKE+30+IDMLSLbbK+Tdt02AdkOKq9u15rlJsDEGFqtgckc8ZM59LhhiUA==", - "dev": true, - "license": "MIT", - "dependencies": { - "bodec": "^0.1.0", - "culvert": "^0.1.2", - "git-sha1": "^0.1.2", - "pako": "^0.2.5" - } - }, - "node_modules/js-sha1": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/js-sha1/-/js-sha1-0.7.0.tgz", - "integrity": "sha512-oQZ1Mo7440BfLSv9TX87VNEyU52pXPVG19F9PL3gTgNt0tVxlZ8F4O6yze3CLuLx28TxotxvlyepCNaaV0ZjMw==", - "license": "MIT" - }, - "node_modules/js-tiktoken": { - "version": "1.0.15", - "resolved": "https://registry.npmjs.org/js-tiktoken/-/js-tiktoken-1.0.15.tgz", - "integrity": "sha512-65ruOWWXDEZHHbAo7EjOcNxOGasQKbL4Fq3jEr2xsCqSsoOo6VVSqzWQb6PRIqypFSDcma4jO90YP0w5X8qVXQ==", - "license": "MIT", - "dependencies": { - "base64-js": "^1.5.1" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "license": "MIT", - "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/jsbn": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz", - "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==", - "dev": true, - "license": "MIT" - }, - "node_modules/jsesc": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", - "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", - "dev": true, - "license": "MIT", - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/json-bigint": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", - "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", - "license": "MIT", - "dependencies": { - "bignumber.js": "^9.0.0" - } - }, - "node_modules/json-buffer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", - "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/json-parse-even-better-errors": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", - "dev": true, - "license": "MIT" - }, - "node_modules/json-schema": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", - "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", - "license": "(AFL-2.1 OR BSD-3-Clause)" - }, - "node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", - "dev": true, - "license": "ISC" - }, - "node_modules/json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "dev": true, - "license": "MIT", - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/jsondiffpatch": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/jsondiffpatch/-/jsondiffpatch-0.6.0.tgz", - "integrity": "sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ==", - "license": "MIT", - "dependencies": { - "@types/diff-match-patch": "^1.0.36", - "chalk": "^5.3.0", - "diff-match-patch": "^1.0.5" - }, - "bin": { - "jsondiffpatch": "bin/jsondiffpatch.js" - }, - "engines": { - "node": "^18.0.0 || >=20.0.0" - } - }, - "node_modules/jsondiffpatch/node_modules/chalk": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.4.1.tgz", - "integrity": "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==", - "license": "MIT", - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/jsonparse": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", - "integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==", - "dev": true, - "engines": [ - "node >= 0.2.0" - ], - "license": "MIT" - }, - "node_modules/jsonpointer": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-5.0.1.tgz", - "integrity": "sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/JSONStream": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz", - "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==", - "dev": true, - "license": "(MIT OR Apache-2.0)", - "dependencies": { - "jsonparse": "^1.2.0", - "through": ">=2.2.7 <3" - }, - "bin": { - "JSONStream": "bin.js" - }, - "engines": { - "node": "*" - } - }, - "node_modules/jwa": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", - "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", - "license": "MIT", - "dependencies": { - "buffer-equal-constant-time": "1.0.1", - "ecdsa-sig-formatter": "1.0.11", - "safe-buffer": "^5.0.1" - } - }, - "node_modules/jws": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", - "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", - "license": "MIT", - "dependencies": { - "jwa": "^2.0.0", - "safe-buffer": "^5.0.1" - } - }, - "node_modules/keyv": { - "version": "4.5.4", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", - "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "json-buffer": "3.0.1" - } - }, - "node_modules/kleur": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", - "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/langchain": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/langchain/-/langchain-0.3.6.tgz", - "integrity": "sha512-erZOIKXzwCOrQHqY9AyjkQmaX62zUap1Sigw1KrwMUOnVoLKkVNRmAyxFlNZDZ9jLs/58MaQcaT9ReJtbj3x6w==", - "license": "MIT", - "dependencies": { - "@langchain/openai": ">=0.1.0 <0.4.0", - "@langchain/textsplitters": ">=0.0.0 <0.2.0", - "js-tiktoken": "^1.0.12", - "js-yaml": "^4.1.0", - "jsonpointer": "^5.0.1", - "langsmith": "^0.2.0", - "openapi-types": "^12.1.3", - "p-retry": "4", - "uuid": "^10.0.0", - "yaml": "^2.2.1", - "zod": "^3.22.4", - "zod-to-json-schema": "^3.22.3" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@langchain/anthropic": "*", - "@langchain/aws": "*", - "@langchain/cohere": "*", - "@langchain/core": ">=0.2.21 <0.4.0", - "@langchain/google-genai": "*", - "@langchain/google-vertexai": "*", - "@langchain/groq": "*", - "@langchain/mistralai": "*", - "@langchain/ollama": "*", - "axios": "*", - "cheerio": "*", - "handlebars": "^4.7.8", - "peggy": "^3.0.2", - "typeorm": "*" - }, - "peerDependenciesMeta": { - "@langchain/anthropic": { - "optional": true - }, - "@langchain/aws": { - "optional": true - }, - "@langchain/cohere": { - "optional": true - }, - "@langchain/google-genai": { - "optional": true - }, - "@langchain/google-vertexai": { - "optional": true - }, - "@langchain/groq": { - "optional": true - }, - "@langchain/mistralai": { - "optional": true - }, - "@langchain/ollama": { - "optional": true - }, - "axios": { - "optional": true - }, - "cheerio": { - "optional": true - }, - "handlebars": { - "optional": true - }, - "peggy": { - "optional": true - }, - "typeorm": { - "optional": true - } - } - }, - "node_modules/langchain/node_modules/uuid": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", - "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/langchain/node_modules/zod": { - "version": "3.24.1", - "resolved": "https://registry.npmjs.org/zod/-/zod-3.24.1.tgz", - "integrity": "sha512-muH7gBL9sI1nciMZV67X5fTKKBLtwpZ5VBp1vsOQzj1MhrBZ4wlVCm3gedKZWLp0Oyel8sIGfeiz54Su+OVT+A==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/colinhacks" - } - }, - "node_modules/langchain/node_modules/zod-to-json-schema": { - "version": "3.24.1", - "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.1.tgz", - "integrity": "sha512-3h08nf3Vw3Wl3PK+q3ow/lIil81IT2Oa7YpQyUUDsEWbXveMesdfK1xBd2RhCkynwZndAxixji/7SYJJowr62w==", - "license": "ISC", - "peerDependencies": { - "zod": "^3.24.1" - } - }, - "node_modules/langsmith": { - "version": "0.2.15", - "resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.2.15.tgz", - "integrity": "sha512-homtJU41iitqIZVuuLW7iarCzD4f39KcfP9RTBWav9jifhrsDa1Ez89Ejr+4qi72iuBu8Y5xykchsGVgiEZ93w==", - "license": "MIT", - "dependencies": { - "@types/uuid": "^10.0.0", - "commander": "^10.0.1", - "p-queue": "^6.6.2", - "p-retry": "4", - "semver": "^7.6.3", - "uuid": "^10.0.0" - }, - "peerDependencies": { - "openai": "*" - }, - "peerDependenciesMeta": { - "openai": { - "optional": true - } - } - }, - "node_modules/langsmith/node_modules/commander": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz", - "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", - "license": "MIT", - "engines": { - "node": ">=14" - } - }, - "node_modules/langsmith/node_modules/uuid": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", - "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/lazy": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/lazy/-/lazy-1.0.11.tgz", - "integrity": "sha512-Y+CjUfLmIpoUCCRl0ub4smrYtGGr5AOa2AKOaWelGHOGz33X/Y/KizefGqbkwfz44+cnq/+9habclf8vOmu2LA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.2.0" - } - }, - "node_modules/leven": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", - "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/lilconfig": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", - "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/antonk52" - } - }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", - "dev": true, - "license": "MIT" - }, - "node_modules/lint-staged": { - "version": "15.2.10", - "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-15.2.10.tgz", - "integrity": "sha512-5dY5t743e1byO19P9I4b3x8HJwalIznL5E1FWYnU6OWw33KxNBSLAc6Cy7F2PsFEO8FKnLwjwm5hx7aMF0jzZg==", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "~5.3.0", - "commander": "~12.1.0", - "debug": "~4.3.6", - "execa": "~8.0.1", - "lilconfig": "~3.1.2", - "listr2": "~8.2.4", - "micromatch": "~4.0.8", - "pidtree": "~0.6.0", - "string-argv": "~0.3.2", - "yaml": "~2.5.0" - }, - "bin": { - "lint-staged": "bin/lint-staged.js" - }, - "engines": { - "node": ">=18.12.0" - }, - "funding": { - "url": "https://opencollective.com/lint-staged" - } - }, - "node_modules/lint-staged/node_modules/chalk": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", - "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/lint-staged/node_modules/commander": { - "version": "12.1.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", - "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - } - }, - "node_modules/lint-staged/node_modules/debug": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", - "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/lint-staged/node_modules/execa": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", - "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^8.0.1", - "human-signals": "^5.0.0", - "is-stream": "^3.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^5.1.0", - "onetime": "^6.0.0", - "signal-exit": "^4.1.0", - "strip-final-newline": "^3.0.0" - }, - "engines": { - "node": ">=16.17" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/lint-staged/node_modules/get-stream": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", - "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lint-staged/node_modules/human-signals": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", - "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=16.17.0" - } - }, - "node_modules/lint-staged/node_modules/is-stream": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", - "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lint-staged/node_modules/mimic-fn": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", - "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lint-staged/node_modules/npm-run-path": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", - "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lint-staged/node_modules/onetime": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", - "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "mimic-fn": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lint-staged/node_modules/path-key": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", - "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lint-staged/node_modules/strip-final-newline": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", - "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lint-staged/node_modules/yaml": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.5.1.tgz", - "integrity": "sha512-bLQOjaX/ADgQ20isPJRvF0iRUHIxVhYvr53Of7wGcWlO2jvtUlH5m87DsmulFVxRpNLOnI4tB6p/oh8D7kpn9Q==", - "dev": true, - "license": "ISC", - "bin": { - "yaml": "bin.mjs" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/listr2": { - "version": "8.2.5", - "resolved": "https://registry.npmjs.org/listr2/-/listr2-8.2.5.tgz", - "integrity": "sha512-iyAZCeyD+c1gPyE9qpFu8af0Y+MRtmKOncdGoA2S5EY8iFq99dmmvkNnHiWo+pj0s7yH7l3KPIgee77tKpXPWQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "cli-truncate": "^4.0.0", - "colorette": "^2.0.20", - "eventemitter3": "^5.0.1", - "log-update": "^6.1.0", - "rfdc": "^1.4.1", - "wrap-ansi": "^9.0.0" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/listr2/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/listr2/node_modules/emoji-regex": { - "version": "10.4.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", - "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", - "dev": true, - "license": "MIT" - }, - "node_modules/listr2/node_modules/string-width": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", - "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^10.3.0", - "get-east-asian-width": "^1.0.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/listr2/node_modules/wrap-ansi": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz", - "integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.2.1", - "string-width": "^7.0.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/load-tsconfig": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/load-tsconfig/-/load-tsconfig-0.2.5.tgz", - "integrity": "sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - } - }, - "node_modules/locate-character": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-character/-/locate-character-3.0.0.tgz", - "integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==", - "license": "MIT", - "peer": true - }, - "node_modules/locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "p-locate": "^5.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true, - "license": "MIT" - }, - "node_modules/lodash.memoize": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", - "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", - "dev": true, - "license": "MIT" - }, - "node_modules/lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/lodash.sortby": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", - "integrity": "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==", - "dev": true, - "license": "MIT" - }, - "node_modules/log-update": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/log-update/-/log-update-6.1.0.tgz", - "integrity": "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-escapes": "^7.0.0", - "cli-cursor": "^5.0.0", - "slice-ansi": "^7.1.0", - "strip-ansi": "^7.1.0", - "wrap-ansi": "^9.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/log-update/node_modules/ansi-escapes": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.0.0.tgz", - "integrity": "sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw==", - "dev": true, - "license": "MIT", - "dependencies": { - "environment": "^1.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/log-update/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/log-update/node_modules/emoji-regex": { - "version": "10.4.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", - "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", - "dev": true, - "license": "MIT" - }, - "node_modules/log-update/node_modules/is-fullwidth-code-point": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.0.0.tgz", - "integrity": "sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA==", - "dev": true, - "license": "MIT", - "dependencies": { - "get-east-asian-width": "^1.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/log-update/node_modules/slice-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.0.tgz", - "integrity": "sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.2.1", - "is-fullwidth-code-point": "^5.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/chalk/slice-ansi?sponsor=1" - } - }, - "node_modules/log-update/node_modules/string-width": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", - "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^10.3.0", - "get-east-asian-width": "^1.0.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/log-update/node_modules/wrap-ansi": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz", - "integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.2.1", - "string-width": "^7.0.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/loupe": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.2.tgz", - "integrity": "sha512-23I4pFZHmAemUnz8WZXbYRSKYj801VDaNv9ETuMh7IrMc7VuVVSo+Z9iLE3ni30+U48iDWfi30d3twAXBYmnCg==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/lru-cache": { - "version": "11.0.2", - "license": "ISC", - "engines": { - "node": "20 || >=22" - } - }, - "node_modules/magic-string": { - "version": "0.30.17", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", - "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0" - } - }, - "node_modules/magicast": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.3.5.tgz", - "integrity": "sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.25.4", - "@babel/types": "^7.25.4", - "source-map-js": "^1.2.0" - } - }, - "node_modules/make-dir": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", - "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", - "dev": true, - "license": "MIT", - "dependencies": { - "semver": "^7.5.3" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/make-error": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", - "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", - "dev": true, - "license": "ISC" - }, - "node_modules/makeerror": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", - "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "tmpl": "1.0.5" - } - }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "dev": true, - "license": "MIT" - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, - "license": "MIT", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/micromatch/node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/mimic-function": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", - "integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/minimatch": { - "version": "9.0.5", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/minimist": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", - "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/minipass": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.8.tgz", - "integrity": "sha512-fNzuVyifolSLFL4NzpF+wEF4qrgqaaKX0haXPQEdQ7NKAN+WecoKMHV09YcuL/DHxrUsYQOK3MiuDf7Ip2OXfQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=8" - } - }, - "node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "license": "MIT", - "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minizlib/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "license": "MIT", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/module-details-from-path": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", - "integrity": "sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==", - "dev": true, - "license": "MIT" - }, - "node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "license": "MIT" - }, - "node_modules/mustache": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz", - "integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==", - "license": "MIT", - "peer": true, - "bin": { - "mustache": "bin/mustache" - } - }, - "node_modules/mute-stream": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", - "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", - "dev": true, - "license": "ISC" - }, - "node_modules/mz": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", - "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "any-promise": "^1.0.0", - "object-assign": "^4.0.1", - "thenify-all": "^1.0.0" - } - }, - "node_modules/nanoid": { - "version": "3.3.8", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", - "integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/natural-compare": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", - "dev": true, - "license": "MIT" - }, - "node_modules/needle": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/needle/-/needle-2.4.0.tgz", - "integrity": "sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^3.2.6", - "iconv-lite": "^0.4.4", - "sax": "^1.2.4" - }, - "bin": { - "needle": "bin/needle" - }, - "engines": { - "node": ">= 4.4.x" - } - }, - "node_modules/needle/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/neo-async": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", - "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", - "license": "MIT" - }, - "node_modules/netmask": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/netmask/-/netmask-2.0.2.tgz", - "integrity": "sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/node-domexception": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", - "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/jimmywarting" - }, - { - "type": "github", - "url": "https://paypal.me/jimmywarting" - } - ], - "license": "MIT", - "engines": { - "node": ">=10.5.0" - } - }, - "node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "license": "MIT", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/node-gyp-build": { - "version": "4.8.4", - "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.4.tgz", - "integrity": "sha512-LA4ZjwlnUblHVgq0oBF3Jl/6h/Nvs5fzBLwdEF4nuxnFdsfajde4WfxtJr3CaiH+F6ewcIB/q4jQ4UzPyid+CQ==", - "dev": true, - "license": "MIT", - "optional": true, - "bin": { - "node-gyp-build": "bin.js", - "node-gyp-build-optional": "optional.js", - "node-gyp-build-test": "build-test.js" - } - }, - "node_modules/node-int64": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", - "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", - "dev": true, - "license": "MIT" - }, - "node_modules/node-releases": { - "version": "2.0.19", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", - "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", - "dev": true, - "license": "MIT" - }, - "node_modules/nodemon": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.7.tgz", - "integrity": "sha512-hLj7fuMow6f0lbB0cD14Lz2xNjwsyruH251Pk4t/yIitCFJbmY1myuLlHm/q06aST4jg6EgAh74PIBBrRqpVAQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "chokidar": "^3.5.2", - "debug": "^4", - "ignore-by-default": "^1.0.1", - "minimatch": "^3.1.2", - "pstree.remy": "^1.1.8", - "semver": "^7.5.3", - "simple-update-notifier": "^2.0.0", - "supports-color": "^5.5.0", - "touch": "^3.1.0", - "undefsafe": "^2.0.5" - }, - "bin": { - "nodemon": "bin/nodemon.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/nodemon" - } - }, - "node_modules/nodemon/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/nodemon/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/nodemon/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/nodemon/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/nssocket": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/nssocket/-/nssocket-0.6.0.tgz", - "integrity": "sha512-a9GSOIql5IqgWJR3F/JXG4KpJTA3Z53Cj0MeMvGpglytB1nxE4PdFNC0jINe27CS7cGivoynwc054EzCcT3M3w==", - "dev": true, - "license": "MIT", - "dependencies": { - "eventemitter2": "~0.4.14", - "lazy": "~1.0.11" - }, - "engines": { - "node": ">= 0.10.x" - } - }, - "node_modules/nssocket/node_modules/eventemitter2": { - "version": "0.4.14", - "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz", - "integrity": "sha512-K7J4xq5xAD5jHsGM5ReWXRTFa3JRGofHiMcVgQ8PRwgWxzjHpMWCIzsmyf60+mh8KLsqYPcjUMa0AC4hd6lPyQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/ollama-ai-provider": { - "version": "0.16.1", - "resolved": "https://registry.npmjs.org/ollama-ai-provider/-/ollama-ai-provider-0.16.1.tgz", - "integrity": "sha512-0vSQVz5Y/LguyzfO4bi1JrrVGF/k2JvO8/uFR0wYmqDFp8KPp4+AhdENSynGBr1oRhMWOM4F1l6cv7UNDgRMjw==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "0.0.26", - "@ai-sdk/provider-utils": "1.0.22", - "partial-json": "0.1.7" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.0.0" - }, - "peerDependenciesMeta": { - "zod": { - "optional": true - } - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, - "license": "ISC", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/onnxruntime-common": { - "version": "1.15.1", - "resolved": "https://registry.npmjs.org/onnxruntime-common/-/onnxruntime-common-1.15.1.tgz", - "integrity": "sha512-Y89eJ8QmaRsPZPWLaX7mfqhj63ny47rSkQe80hIo+lvBQdrdXYR9VO362xvZulk9DFkCnXmGidprvgJ07bKsIQ==", - "license": "MIT" - }, - "node_modules/onnxruntime-node": { - "version": "1.15.1", - "resolved": "https://registry.npmjs.org/onnxruntime-node/-/onnxruntime-node-1.15.1.tgz", - "integrity": "sha512-wzhVELulmrvNoMZw0/HfV+9iwgHX+kPS82nxodZ37WCXmbeo1jp3thamTsNg8MGhxvv4GmEzRum5mo40oqIsqw==", - "license": "MIT", - "os": [ - "win32", - "darwin", - "linux" - ], - "dependencies": { - "onnxruntime-common": "~1.15.1" - } - }, - "node_modules/openai": { - "version": "4.73.0", - "resolved": "https://registry.npmjs.org/openai/-/openai-4.73.0.tgz", - "integrity": "sha512-NZstV77w3CEol9KQTRBRQ15+Sw6nxVTicAULSjYO4wn9E5gw72Mtp3fAVaBFXyyVPws4241YmFG6ya4L8v03tA==", - "license": "Apache-2.0", - "dependencies": { - "@types/node": "^18.11.18", - "@types/node-fetch": "^2.6.4", - "abort-controller": "^3.0.0", - "agentkeepalive": "^4.2.1", - "form-data-encoder": "1.7.2", - "formdata-node": "^4.3.2", - "node-fetch": "^2.6.7" - }, - "bin": { - "openai": "bin/cli" - }, - "peerDependencies": { - "zod": "^3.23.8" - }, - "peerDependenciesMeta": { - "zod": { - "optional": true - } - } - }, - "node_modules/openai/node_modules/@types/node": { - "version": "18.19.70", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.70.tgz", - "integrity": "sha512-RE+K0+KZoEpDUbGGctnGdkrLFwi1eYKTlIHNl2Um98mUkGsm1u2Ff6Ltd0e8DktTtC98uy7rSj+hO8t/QuLoVQ==", - "license": "MIT", - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/openai/node_modules/undici-types": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "license": "MIT" - }, - "node_modules/openapi-types": { - "version": "12.1.3", - "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz", - "integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==", - "license": "MIT" - }, - "node_modules/optionator": { - "version": "0.9.4", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", - "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.5" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/p-finally": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "yocto-queue": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "p-limit": "^3.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-queue": { - "version": "6.6.2", - "resolved": "https://registry.npmjs.org/p-queue/-/p-queue-6.6.2.tgz", - "integrity": "sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==", - "license": "MIT", - "dependencies": { - "eventemitter3": "^4.0.4", - "p-timeout": "^3.2.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-queue/node_modules/eventemitter3": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", - "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", - "license": "MIT" - }, - "node_modules/p-retry": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz", - "integrity": "sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==", - "license": "MIT", - "dependencies": { - "@types/retry": "0.12.0", - "retry": "^0.13.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/p-timeout": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz", - "integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==", - "license": "MIT", - "dependencies": { - "p-finally": "^1.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/pac-proxy-agent": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/pac-proxy-agent/-/pac-proxy-agent-7.1.0.tgz", - "integrity": "sha512-Z5FnLVVZSnX7WjBg0mhDtydeRZ1xMcATZThjySQUHqr+0ksP8kqaw23fNKkaaN/Z8gwLUs/W7xdl0I75eP2Xyw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@tootallnate/quickjs-emscripten": "^0.23.0", - "agent-base": "^7.1.2", - "debug": "^4.3.4", - "get-uri": "^6.0.1", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.6", - "pac-resolver": "^7.0.1", - "socks-proxy-agent": "^8.0.5" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/pac-resolver": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/pac-resolver/-/pac-resolver-7.0.1.tgz", - "integrity": "sha512-5NPgf87AT2STgwa2ntRMr45jTKrYBGkVU36yT0ig/n/GMAa3oPqhZfIQ2kMEimReg0+t9kZViDVZ83qfVUlckg==", - "dev": true, - "license": "MIT", - "dependencies": { - "degenerator": "^5.0.0", - "netmask": "^2.0.2" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/package-json-from-dist": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", - "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", - "license": "BlueOak-1.0.0" - }, - "node_modules/pako": { - "version": "0.2.9", - "resolved": "https://registry.npmjs.org/pako/-/pako-0.2.9.tgz", - "integrity": "sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA==", - "dev": true, - "license": "MIT" - }, - "node_modules/parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "callsites": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/parse-json": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", - "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/partial-json": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/partial-json/-/partial-json-0.1.7.tgz", - "integrity": "sha512-Njv/59hHaokb/hRUjce3Hdv12wd60MtM9Z5Olmn+nehe0QDAsRtRbJPvJ0Z91TusF0SuZRIvnM+S4l6EIP8leA==", - "license": "MIT" - }, - "node_modules/path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true, - "license": "MIT" - }, - "node_modules/pathe": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", - "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/pathval": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.0.tgz", - "integrity": "sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==", - "dev": true, - "license": "MIT", - "peer": true, - "engines": { - "node": ">= 14.16" - } - }, - "node_modules/pdfjs-dist": { - "version": "4.10.38", - "resolved": "https://registry.npmjs.org/pdfjs-dist/-/pdfjs-dist-4.10.38.tgz", - "integrity": "sha512-/Y3fcFrXEAsMjJXeL9J8+ZG9U01LbuWaYypvDW2ycW1jL269L3js3DVBjDJ0Up9Np1uqDXsDrRihHANhZOlwdQ==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=20" - }, - "optionalDependencies": { - "@napi-rs/canvas": "^0.1.65" - } - }, - "node_modules/picocolors": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "license": "ISC" - }, - "node_modules/picomatch": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", - "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/pidtree": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/pidtree/-/pidtree-0.6.0.tgz", - "integrity": "sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g==", - "dev": true, - "license": "MIT", - "bin": { - "pidtree": "bin/pidtree.js" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/pidusage": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/pidusage/-/pidusage-3.0.2.tgz", - "integrity": "sha512-g0VU+y08pKw5M8EZ2rIGiEBaB8wrQMjYGFfW2QVIfyT8V+fq8YFLkvlz4bz5ljvFDJYNFCWT3PWqcRr2FKO81w==", - "dev": true, - "license": "MIT", - "dependencies": { - "safe-buffer": "^5.2.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/pirates": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", - "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, - "node_modules/pkg-dir": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", - "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "find-up": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/pkg-dir/node_modules/find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", - "dev": true, - "license": "MIT", - "dependencies": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/pkg-dir/node_modules/locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-locate": "^4.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/pkg-dir/node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/pkg-dir/node_modules/p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-limit": "^2.2.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/pm2": { - "version": "5.4.3", - "resolved": "https://registry.npmjs.org/pm2/-/pm2-5.4.3.tgz", - "integrity": "sha512-4/I1htIHzZk1Y67UgOCo4F1cJtas1kSds31N8zN0PybO230id1nigyjGuGFzUnGmUFPmrJ0On22fO1ChFlp7VQ==", - "dev": true, - "license": "AGPL-3.0", - "dependencies": { - "@pm2/agent": "~2.0.0", - "@pm2/io": "~6.0.1", - "@pm2/js-api": "~0.8.0", - "@pm2/pm2-version-check": "latest", - "async": "~3.2.0", - "blessed": "0.1.81", - "chalk": "3.0.0", - "chokidar": "^3.5.3", - "cli-tableau": "^2.0.0", - "commander": "2.15.1", - "croner": "~4.1.92", - "dayjs": "~1.11.5", - "debug": "^4.3.1", - "enquirer": "2.3.6", - "eventemitter2": "5.0.1", - "fclone": "1.0.11", - "js-yaml": "~4.1.0", - "mkdirp": "1.0.4", - "needle": "2.4.0", - "pidusage": "~3.0", - "pm2-axon": "~4.0.1", - "pm2-axon-rpc": "~0.7.1", - "pm2-deploy": "~1.0.2", - "pm2-multimeter": "^0.1.2", - "promptly": "^2", - "semver": "^7.2", - "source-map-support": "0.5.21", - "sprintf-js": "1.1.2", - "vizion": "~2.2.1" - }, - "bin": { - "pm2": "bin/pm2", - "pm2-dev": "bin/pm2-dev", - "pm2-docker": "bin/pm2-docker", - "pm2-runtime": "bin/pm2-runtime" - }, - "engines": { - "node": ">=12.0.0" - }, - "optionalDependencies": { - "pm2-sysmonit": "^1.2.8" - } - }, - "node_modules/pm2-axon": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/pm2-axon/-/pm2-axon-4.0.1.tgz", - "integrity": "sha512-kES/PeSLS8orT8dR5jMlNl+Yu4Ty3nbvZRmaAtROuVm9nYYGiaoXqqKQqQYzWQzMYWUKHMQTvBlirjE5GIIxqg==", - "dev": true, - "license": "MIT", - "dependencies": { - "amp": "~0.3.1", - "amp-message": "~0.1.1", - "debug": "^4.3.1", - "escape-string-regexp": "^4.0.0" - }, - "engines": { - "node": ">=5" - } - }, - "node_modules/pm2-axon-rpc": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/pm2-axon-rpc/-/pm2-axon-rpc-0.7.1.tgz", - "integrity": "sha512-FbLvW60w+vEyvMjP/xom2UPhUN/2bVpdtLfKJeYM3gwzYhoTEEChCOICfFzxkxuoEleOlnpjie+n1nue91bDQw==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.3.1" - }, - "engines": { - "node": ">=5" - } - }, - "node_modules/pm2-deploy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/pm2-deploy/-/pm2-deploy-1.0.2.tgz", - "integrity": "sha512-YJx6RXKrVrWaphEYf++EdOOx9EH18vM8RSZN/P1Y+NokTKqYAca/ejXwVLyiEpNju4HPZEk3Y2uZouwMqUlcgg==", - "dev": true, - "license": "MIT", - "dependencies": { - "run-series": "^1.1.8", - "tv4": "^1.3.0" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/pm2-multimeter": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/pm2-multimeter/-/pm2-multimeter-0.1.2.tgz", - "integrity": "sha512-S+wT6XfyKfd7SJIBqRgOctGxaBzUOmVQzTAS+cg04TsEUObJVreha7lvCfX8zzGVr871XwCSnHUU7DQQ5xEsfA==", - "dev": true, - "license": "MIT/X11", - "dependencies": { - "charm": "~0.1.1" - } - }, - "node_modules/pm2-sysmonit": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/pm2-sysmonit/-/pm2-sysmonit-1.2.8.tgz", - "integrity": "sha512-ACOhlONEXdCTVwKieBIQLSi2tQZ8eKinhcr9JpZSUAL8Qy0ajIgRtsLxG/lwPOW3JEKqPyw/UaHmTWhUzpP4kA==", - "dev": true, - "license": "Apache", - "optional": true, - "dependencies": { - "async": "^3.2.0", - "debug": "^4.3.1", - "pidusage": "^2.0.21", - "systeminformation": "^5.7", - "tx2": "~1.0.4" - } - }, - "node_modules/pm2-sysmonit/node_modules/pidusage": { - "version": "2.0.21", - "resolved": "https://registry.npmjs.org/pidusage/-/pidusage-2.0.21.tgz", - "integrity": "sha512-cv3xAQos+pugVX+BfXpHsbyz/dLzX+lr44zNMsYiGxUw+kV5sgQCIcLd1z+0vq+KyC7dJ+/ts2PsfgWfSC3WXA==", - "dev": true, - "license": "MIT", - "optional": true, - "dependencies": { - "safe-buffer": "^5.2.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/pm2/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/pm2/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/pm2/node_modules/commander": { - "version": "2.15.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", - "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==", - "dev": true, - "license": "MIT" - }, - "node_modules/pm2/node_modules/source-map-support": { - "version": "0.5.21", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", - "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "node_modules/postcss": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.0.tgz", - "integrity": "sha512-27VKOqrYfPncKA2NrFOVhP5MGAfHKLYn/Q0mz9cNQyRAKYi3VNHwYU2qKKqPCqgBmeeJ0uAFB56NumXZ5ZReXg==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "peer": true, - "dependencies": { - "nanoid": "^3.3.8", - "picocolors": "^1.1.1", - "source-map-js": "^1.2.1" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/postcss-load-config": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-6.0.1.tgz", - "integrity": "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "lilconfig": "^3.1.1" - }, - "engines": { - "node": ">= 18" - }, - "peerDependencies": { - "jiti": ">=1.21.0", - "postcss": ">=8.0.9", - "tsx": "^4.8.1", - "yaml": "^2.4.2" - }, - "peerDependenciesMeta": { - "jiti": { - "optional": true - }, - "postcss": { - "optional": true - }, - "tsx": { - "optional": true - }, - "yaml": { - "optional": true - } - } - }, - "node_modules/prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", - "dev": true, - "license": "MIT", - "peer": true, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/pretty-format": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", - "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/schemas": "^29.6.3", - "ansi-styles": "^5.0.0", - "react-is": "^18.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/progress": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", - "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", - "license": "MIT", - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/promptly": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/promptly/-/promptly-2.2.0.tgz", - "integrity": "sha512-aC9j+BZsRSSzEsXBNBwDnAxujdx19HycZoKgRgzWnS8eOHg1asuf9heuLprfbe739zY3IdUQx+Egv6Jn135WHA==", - "dev": true, - "license": "MIT", - "dependencies": { - "read": "^1.0.4" - } - }, - "node_modules/prompts": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", - "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "kleur": "^3.0.3", - "sisteransi": "^1.0.5" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/proxy-agent": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/proxy-agent/-/proxy-agent-6.3.1.tgz", - "integrity": "sha512-Rb5RVBy1iyqOtNl15Cw/llpeLH8bsb37gM1FUfKQ+Wck6xHlbAhWGUFiTRHtkjqGTA5pSHz6+0hrPW/oECihPQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "^7.0.2", - "debug": "^4.3.4", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.2", - "lru-cache": "^7.14.1", - "pac-proxy-agent": "^7.0.1", - "proxy-from-env": "^1.1.0", - "socks-proxy-agent": "^8.0.2" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/proxy-agent/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/proxy-from-env": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "devOptional": true, - "license": "MIT" - }, - "node_modules/pstree.remy": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz", - "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==", - "dev": true, - "license": "MIT" - }, - "node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/pure-rand": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", - "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", - "dev": true, - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/dubzzz" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/fast-check" - } - ], - "license": "MIT" - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/react": { - "version": "19.0.0", - "resolved": "https://registry.npmjs.org/react/-/react-19.0.0.tgz", - "integrity": "sha512-V8AVnmPIICiWpGfm6GLzCR/W5FXLchHop40W4nXBmdlEceh16rCN8O8LNWm5bh5XUX91fh7KpA+W0TgMKmgTpQ==", - "license": "MIT", - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/react-is": { - "version": "18.3.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", - "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", - "dev": true, - "license": "MIT" - }, - "node_modules/read": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", - "integrity": "sha512-rSOKNYUmaxy0om1BNjMN4ezNT6VKK+2xF4GBhc81mkH7L60i6dp8qPYrkndNLT3QPphoII3maL9PVC9XmhHwVQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "mute-stream": "~0.0.4" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/readdirp": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", - "dev": true, - "license": "MIT", - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/readdirp/node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/regenerator-runtime": { - "version": "0.14.1", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", - "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", - "dev": true, - "license": "MIT" - }, - "node_modules/require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/require-in-the-middle": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.2.0.tgz", - "integrity": "sha512-efCx3b+0Z69/LGJmm9Yvi4cqEdxnoGnxYxGxBghkkTTFeXRtTCmmhO0AnAfHz59k957uTSuy8WaHqOs8wbYUWg==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.1.1", - "module-details-from-path": "^1.0.3", - "resolve": "^1.22.1" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/resolve": { - "version": "1.22.10", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", - "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-core-module": "^2.16.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/resolve-cwd": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", - "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "resolve-from": "^5.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/resolve-cwd/node_modules/resolve-from": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", - "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "dev": true, - "license": "MIT", - "peer": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/resolve.exports": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.3.tgz", - "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - } - }, - "node_modules/restore-cursor": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz", - "integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==", - "dev": true, - "license": "MIT", - "dependencies": { - "onetime": "^7.0.0", - "signal-exit": "^4.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/restore-cursor/node_modules/onetime": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz", - "integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "mimic-function": "^5.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/retry": { - "version": "0.13.1", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", - "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true, - "license": "MIT", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rfdc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", - "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", - "dev": true, - "license": "MIT" - }, - "node_modules/rimraf": { - "version": "6.0.1", - "dev": true, - "license": "ISC", - "dependencies": { - "glob": "^11.0.0", - "package-json-from-dist": "^1.0.0" - }, - "bin": { - "rimraf": "dist/esm/bin.mjs" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/robot3": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/robot3/-/robot3-0.4.1.tgz", - "integrity": "sha512-hzjy826lrxzx8eRgv80idkf8ua1JAepRc9Efdtj03N3KNJuznQCPlyCJ7gnUmDFwZCLQjxy567mQVKmdv2BsXQ==", - "license": "BSD-2-Clause" - }, - "node_modules/rollup": { - "version": "2.79.2", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.2.tgz", - "integrity": "sha512-fS6iqSPZDs3dr/y7Od6y5nha8dW1YnbgtsyotCVvoFGKbERG++CVRFv1meyGDE1SNItQA8BrnCw7ScdAhRJ3XQ==", - "dev": true, - "license": "MIT", - "bin": { - "rollup": "dist/bin/rollup" - }, - "engines": { - "node": ">=10.0.0" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - } - }, - "node_modules/rpc-websockets": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/rpc-websockets/-/rpc-websockets-9.0.4.tgz", - "integrity": "sha512-yWZWN0M+bivtoNLnaDbtny4XchdAIF5Q4g/ZsC5UC61Ckbp0QczwO8fg44rV3uYmY4WHd+EZQbn90W1d8ojzqQ==", - "dev": true, - "license": "LGPL-3.0-only", - "dependencies": { - "@swc/helpers": "^0.5.11", - "@types/uuid": "^8.3.4", - "@types/ws": "^8.2.2", - "buffer": "^6.0.3", - "eventemitter3": "^5.0.1", - "uuid": "^8.3.2", - "ws": "^8.5.0" - }, - "funding": { - "type": "paypal", - "url": "https://paypal.me/kozjak" - }, - "optionalDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": "^5.0.2" - } - }, - "node_modules/rpc-websockets/node_modules/@types/uuid": { - "version": "8.3.4", - "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-8.3.4.tgz", - "integrity": "sha512-c/I8ZRb51j+pYGAu5CrFMRxqZ2ke4y2grEBO5AUjgSkSk+qT2Ea+OdWElz/OiMf5MNpn2b17kuVBwZLQJXzihw==", - "dev": true, - "license": "MIT" - }, - "node_modules/rpc-websockets/node_modules/@types/ws": { - "version": "8.5.13", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.13.tgz", - "integrity": "sha512-osM/gWBTPKgHV8XkTunnegTRIsvF6owmf5w+JtAfOw472dptdm0dlGv4xCt6GwQRcC2XVOvvRE/0bAoQcL2QkA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/rpc-websockets/node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true, - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/rpc-websockets/node_modules/ws": { - "version": "8.18.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", - "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/run-series": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/run-series/-/run-series-1.1.9.tgz", - "integrity": "sha512-Arc4hUN896vjkqCYrUXquBFtRZdv1PfLbTYP71efP6butxyQ0kWpiNJyAgsxscmQg1cqvHY32/UCBzXedTpU2g==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true, - "license": "MIT" - }, - "node_modules/sax": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz", - "integrity": "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==", - "dev": true, - "license": "ISC" - }, - "node_modules/secure-json-parse": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz", - "integrity": "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==", - "license": "BSD-3-Clause" - }, - "node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/shimmer": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", - "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==", - "dev": true, - "license": "BSD-2-Clause" - }, - "node_modules/siginfo": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", - "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", - "dev": true, - "license": "ISC", - "peer": true - }, - "node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "license": "ISC", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/simple-update-notifier": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz", - "integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "semver": "^7.5.3" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/sisteransi": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", - "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", - "dev": true, - "license": "MIT" - }, - "node_modules/slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/slice-ansi": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-5.0.0.tgz", - "integrity": "sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.0.0", - "is-fullwidth-code-point": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/slice-ansi?sponsor=1" - } - }, - "node_modules/slice-ansi/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/smart-buffer": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", - "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 6.0.0", - "npm": ">= 3.0.0" - } - }, - "node_modules/socks": { - "version": "2.8.3", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.3.tgz", - "integrity": "sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==", - "dev": true, - "license": "MIT", - "dependencies": { - "ip-address": "^9.0.5", - "smart-buffer": "^4.2.0" - }, - "engines": { - "node": ">= 10.0.0", - "npm": ">= 3.0.0" - } - }, - "node_modules/socks-proxy-agent": { - "version": "8.0.5", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.5.tgz", - "integrity": "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "^7.1.2", - "debug": "^4.3.4", - "socks": "^2.8.3" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/source-map-js": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", - "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/source-map-support": { - "version": "0.5.13", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", - "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "node_modules/sprintf-js": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.2.tgz", - "integrity": "sha512-VE0SOVEHCk7Qc8ulkWw3ntAzXuqf7S2lvwQaDLRnUeIEaKNQJzV6BwmLKhOqT61aGhfUMrXeaBk+oDGCzvhcug==", - "dev": true, - "license": "BSD-3-Clause" - }, - "node_modules/sswr": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/sswr/-/sswr-2.1.0.tgz", - "integrity": "sha512-Cqc355SYlTAaUt8iDPaC/4DPPXK925PePLMxyBKuWd5kKc5mwsG3nT9+Mq2tyguL5s7b4Jg+IRMpTRsNTAfpSQ==", - "license": "MIT", - "dependencies": { - "swrev": "^4.0.0" - }, - "peerDependencies": { - "svelte": "^4.0.0 || ^5.0.0-next.0" - } - }, - "node_modules/stack-utils": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", - "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "escape-string-regexp": "^2.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/stack-utils/node_modules/escape-string-regexp": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", - "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/stackback": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", - "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/std-env": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.8.0.tgz", - "integrity": "sha512-Bc3YwwCB+OzldMxOXJIIvC6cPRWr/LxOp48CdQTOkPyk/t4JWWJbrilwBd7RJzKV8QW7tJkcgAmeuLLJugl5/w==", - "dev": true, - "license": "MIT" - }, - "node_modules/string-argv": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.2.tgz", - "integrity": "sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.6.19" - } - }, - "node_modules/string-length": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", - "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "char-regex": "^1.0.2", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/string-length/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/string-length/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "license": "MIT", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "license": "MIT" - }, - "node_modules/string-width-cjs/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "license": "MIT", - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-bom": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", - "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/sucrase": { - "version": "3.35.0", - "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.0.tgz", - "integrity": "sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.2", - "commander": "^4.0.0", - "glob": "^10.3.10", - "lines-and-columns": "^1.1.6", - "mz": "^2.7.0", - "pirates": "^4.0.1", - "ts-interface-checker": "^0.1.9" - }, - "bin": { - "sucrase": "bin/sucrase", - "sucrase-node": "bin/sucrase-node" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/sucrase/node_modules/commander": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", - "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, - "node_modules/sucrase/node_modules/glob": { - "version": "10.4.5", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", - "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", - "dev": true, - "license": "ISC", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/sucrase/node_modules/jackspeak": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", - "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, - "node_modules/sucrase/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/sucrase/node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/sucrase/node_modules/path-scurry": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", - "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/superstruct": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/superstruct/-/superstruct-2.0.2.tgz", - "integrity": "sha512-uV+TFRZdXsqXTL2pRvujROjdZQ4RAlBUS5BTh9IGm+jTqQntYThciG/qu57Gs69yjnVUSqdxF9YLmSnpupBW9A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/svelte": { - "version": "5.17.4", - "resolved": "https://registry.npmjs.org/svelte/-/svelte-5.17.4.tgz", - "integrity": "sha512-ne4IhhVBwzpUByjo1ocxQnqRoWsRilc9Ry1j+0uPWhHmg4jS/nnlSwYYfx7Ium8okCZ4hYM89rg0B5G0hQzk+g==", - "license": "MIT", - "peer": true, - "dependencies": { - "@ampproject/remapping": "^2.3.0", - "@jridgewell/sourcemap-codec": "^1.5.0", - "@types/estree": "^1.0.5", - "acorn": "^8.12.1", - "acorn-typescript": "^1.4.13", - "aria-query": "^5.3.1", - "axobject-query": "^4.1.0", - "clsx": "^2.1.1", - "esm-env": "^1.2.1", - "esrap": "^1.4.2", - "is-reference": "^3.0.3", - "locate-character": "^3.0.0", - "magic-string": "^0.30.11", - "zimmerframe": "^1.1.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/svelte/node_modules/is-reference": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.3.tgz", - "integrity": "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==", - "license": "MIT", - "peer": true, - "dependencies": { - "@types/estree": "^1.0.6" - } - }, - "node_modules/swr": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/swr/-/swr-2.3.0.tgz", - "integrity": "sha512-NyZ76wA4yElZWBHzSgEJc28a0u6QZvhb6w0azeL2k7+Q1gAzVK+IqQYXhVOC/mzi+HZIozrZvBVeSeOZNR2bqA==", - "license": "MIT", - "dependencies": { - "dequal": "^2.0.3", - "use-sync-external-store": "^1.4.0" - }, - "peerDependencies": { - "react": "^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" - } - }, - "node_modules/swrev": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/swrev/-/swrev-4.0.0.tgz", - "integrity": "sha512-LqVcOHSB4cPGgitD1riJ1Hh4vdmITOp+BkmfmXRh4hSF/t7EnS4iD+SOTmq7w5pPm/SiPeto4ADbKS6dHUDWFA==", - "license": "MIT" - }, - "node_modules/swrv": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/swrv/-/swrv-1.0.4.tgz", - "integrity": "sha512-zjEkcP8Ywmj+xOJW3lIT65ciY/4AL4e/Or7Gj0MzU3zBJNMdJiT8geVZhINavnlHRMMCcJLHhraLTAiDOTmQ9g==", - "license": "Apache-2.0", - "peerDependencies": { - "vue": ">=3.2.26 < 4" - } - }, - "node_modules/systeminformation": { - "version": "5.25.11", - "resolved": "https://registry.npmjs.org/systeminformation/-/systeminformation-5.25.11.tgz", - "integrity": "sha512-jI01fn/t47rrLTQB0FTlMCC+5dYx8o0RRF+R4BPiUNsvg5OdY0s9DKMFmJGrx5SwMZQ4cag0Gl6v8oycso9b/g==", - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin", - "linux", - "win32", - "freebsd", - "openbsd", - "netbsd", - "sunos", - "android" - ], - "bin": { - "systeminformation": "lib/cli.js" - }, - "engines": { - "node": ">=8.0.0" - }, - "funding": { - "type": "Buy me a coffee", - "url": "https://www.buymeacoffee.com/systeminfo" - } - }, - "node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "license": "ISC", - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/tar/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "license": "ISC", - "engines": { - "node": ">=8" - } - }, - "node_modules/terser": { - "version": "5.37.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.37.0.tgz", - "integrity": "sha512-B8wRRkmre4ERucLM/uXx4MOV5cbnOlVAqUst+1+iLKPI0dOgFO28f84ptoQt9HEI537PMzfYa/d+GEPKTRXmYA==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "@jridgewell/source-map": "^0.3.3", - "acorn": "^8.8.2", - "commander": "^2.20.0", - "source-map-support": "~0.5.20" - }, - "bin": { - "terser": "bin/terser" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/terser/node_modules/source-map-support": { - "version": "0.5.21", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", - "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "node_modules/test-exclude": { - "version": "7.0.1", - "dev": true, - "license": "ISC", - "dependencies": { - "@istanbuljs/schema": "^0.1.2", - "glob": "^10.4.1", - "minimatch": "^9.0.4" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/test-exclude/node_modules/glob": { - "version": "10.4.5", - "dev": true, - "license": "ISC", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/test-exclude/node_modules/jackspeak": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", - "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, - "node_modules/test-exclude/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/test-exclude/node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/test-exclude/node_modules/path-scurry": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", - "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/text-encoding-utf-8": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/text-encoding-utf-8/-/text-encoding-utf-8-1.0.2.tgz", - "integrity": "sha512-8bw4MY9WjdsD2aMtO0OzOCY3pXGYNx2d2FfHRVUKkiCPDWjKuOlhLVASS+pD7VkLTVjW268LYJHwsnPFlBpbAg==", - "dev": true - }, - "node_modules/thenify": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", - "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", - "dev": true, - "license": "MIT", - "dependencies": { - "any-promise": "^1.0.0" - } - }, - "node_modules/thenify-all": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", - "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", - "dev": true, - "license": "MIT", - "dependencies": { - "thenify": ">= 3.1.0 < 4" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/throttleit": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-2.1.0.tgz", - "integrity": "sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw==", - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/through": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", - "dev": true, - "license": "MIT" - }, - "node_modules/tinybench": { - "version": "2.9.0", - "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", - "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/tinyexec": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", - "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", - "dev": true, - "license": "MIT" - }, - "node_modules/tinyglobby": { - "version": "0.2.10", - "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.10.tgz", - "integrity": "sha512-Zc+8eJlFMvgatPZTl6A9L/yht8QqdmUNtURHaKZLmKBE12hNPSrqNkUp2cs3M/UKmNVVAMFQYSjYIVHDjW5zew==", - "dev": true, - "license": "MIT", - "dependencies": { - "fdir": "^6.4.2", - "picomatch": "^4.0.2" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/tinyld": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/tinyld/-/tinyld-1.3.4.tgz", - "integrity": "sha512-u26CNoaInA4XpDU+8s/6Cq8xHc2T5M4fXB3ICfXPokUQoLzmPgSZU02TAkFwFMJCWTjk53gtkS8pETTreZwCqw==", - "license": "MIT", - "bin": { - "tinyld": "bin/tinyld.js", - "tinyld-heavy": "bin/tinyld-heavy.js", - "tinyld-light": "bin/tinyld-light.js" - }, - "engines": { - "node": ">= 12.10.0", - "npm": ">= 6.12.0", - "yarn": ">= 1.20.0" - } - }, - "node_modules/tinypool": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz", - "integrity": "sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==", - "dev": true, - "license": "MIT", - "peer": true, - "engines": { - "node": "^18.0.0 || >=20.0.0" - } - }, - "node_modules/tinyrainbow": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-1.2.0.tgz", - "integrity": "sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/tinyspy": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz", - "integrity": "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==", - "dev": true, - "license": "MIT", - "peer": true, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/tmpl": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", - "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", - "dev": true, - "license": "BSD-3-Clause" - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/together-ai": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/together-ai/-/together-ai-0.7.0.tgz", - "integrity": "sha512-/be/HOecBSwRTDHB14vCvHbp1WiNsFxyS4pJlyBoMup1X3n7xD1b/Gm5Z5amlKzD2zll9Y5wscDk7Ut5OsT1nA==", - "license": "Apache-2.0", - "dependencies": { - "@types/node": "^18.11.18", - "@types/node-fetch": "^2.6.4", - "abort-controller": "^3.0.0", - "agentkeepalive": "^4.2.1", - "form-data-encoder": "1.7.2", - "formdata-node": "^4.3.2", - "node-fetch": "^2.6.7" - } - }, - "node_modules/together-ai/node_modules/@types/node": { - "version": "18.19.70", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.70.tgz", - "integrity": "sha512-RE+K0+KZoEpDUbGGctnGdkrLFwi1eYKTlIHNl2Um98mUkGsm1u2Ff6Ltd0e8DktTtC98uy7rSj+hO8t/QuLoVQ==", - "license": "MIT", - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/together-ai/node_modules/undici-types": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "license": "MIT" - }, - "node_modules/touch": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/touch/-/touch-3.1.1.tgz", - "integrity": "sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==", - "dev": true, - "license": "ISC", - "bin": { - "nodetouch": "bin/nodetouch.js" - } - }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", - "license": "MIT" - }, - "node_modules/tree-kill": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz", - "integrity": "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==", - "dev": true, - "license": "MIT", - "bin": { - "tree-kill": "cli.js" - } - }, - "node_modules/ts-api-utils": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.4.3.tgz", - "integrity": "sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=16" - }, - "peerDependencies": { - "typescript": ">=4.2.0" - } - }, - "node_modules/ts-interface-checker": { - "version": "0.1.13", - "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", - "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", - "dev": true, - "license": "Apache-2.0" - }, - "node_modules/ts-jest": { - "version": "29.2.5", - "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.2.5.tgz", - "integrity": "sha512-KD8zB2aAZrcKIdGk4OwpJggeLcH1FgrICqDSROWqlnJXGCXK4Mn6FcdK2B6670Xr73lHMG1kHw8R87A0ecZ+vA==", - "dev": true, - "license": "MIT", - "dependencies": { - "bs-logger": "^0.2.6", - "ejs": "^3.1.10", - "fast-json-stable-stringify": "^2.1.0", - "jest-util": "^29.0.0", - "json5": "^2.2.3", - "lodash.memoize": "^4.1.2", - "make-error": "^1.3.6", - "semver": "^7.6.3", - "yargs-parser": "^21.1.1" - }, - "bin": { - "ts-jest": "cli.js" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0" - }, - "peerDependencies": { - "@babel/core": ">=7.0.0-beta.0 <8", - "@jest/transform": "^29.0.0", - "@jest/types": "^29.0.0", - "babel-jest": "^29.0.0", - "jest": "^29.0.0", - "typescript": ">=4.3 <6" - }, - "peerDependenciesMeta": { - "@babel/core": { - "optional": true - }, - "@jest/transform": { - "optional": true - }, - "@jest/types": { - "optional": true - }, - "babel-jest": { - "optional": true - }, - "esbuild": { - "optional": true - } - } - }, - "node_modules/ts-node": { - "version": "10.9.2", - "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", - "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@cspotcode/source-map-support": "^0.8.0", - "@tsconfig/node10": "^1.0.7", - "@tsconfig/node12": "^1.0.7", - "@tsconfig/node14": "^1.0.0", - "@tsconfig/node16": "^1.0.2", - "acorn": "^8.4.1", - "acorn-walk": "^8.1.1", - "arg": "^4.1.0", - "create-require": "^1.1.0", - "diff": "^4.0.1", - "make-error": "^1.1.1", - "v8-compile-cache-lib": "^3.0.1", - "yn": "3.1.1" - }, - "bin": { - "ts-node": "dist/bin.js", - "ts-node-cwd": "dist/bin-cwd.js", - "ts-node-esm": "dist/bin-esm.js", - "ts-node-script": "dist/bin-script.js", - "ts-node-transpile-only": "dist/bin-transpile.js", - "ts-script": "dist/bin-script-deprecated.js" - }, - "peerDependencies": { - "@swc/core": ">=1.2.50", - "@swc/wasm": ">=1.2.50", - "@types/node": "*", - "typescript": ">=2.7" - }, - "peerDependenciesMeta": { - "@swc/core": { - "optional": true - }, - "@swc/wasm": { - "optional": true - } - } - }, - "node_modules/tslib": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", - "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", - "dev": true, - "license": "0BSD" - }, - "node_modules/tsup": { - "version": "8.3.5", - "resolved": "https://registry.npmjs.org/tsup/-/tsup-8.3.5.tgz", - "integrity": "sha512-Tunf6r6m6tnZsG9GYWndg0z8dEV7fD733VBFzFJ5Vcm1FtlXB8xBD/rtrBi2a3YKEV7hHtxiZtW5EAVADoe1pA==", - "dev": true, - "license": "MIT", - "dependencies": { - "bundle-require": "^5.0.0", - "cac": "^6.7.14", - "chokidar": "^4.0.1", - "consola": "^3.2.3", - "debug": "^4.3.7", - "esbuild": "^0.24.0", - "joycon": "^3.1.1", - "picocolors": "^1.1.1", - "postcss-load-config": "^6.0.1", - "resolve-from": "^5.0.0", - "rollup": "^4.24.0", - "source-map": "0.8.0-beta.0", - "sucrase": "^3.35.0", - "tinyexec": "^0.3.1", - "tinyglobby": "^0.2.9", - "tree-kill": "^1.2.2" - }, - "bin": { - "tsup": "dist/cli-default.js", - "tsup-node": "dist/cli-node.js" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@microsoft/api-extractor": "^7.36.0", - "@swc/core": "^1", - "postcss": "^8.4.12", - "typescript": ">=4.5.0" - }, - "peerDependenciesMeta": { - "@microsoft/api-extractor": { - "optional": true - }, - "@swc/core": { - "optional": true - }, - "postcss": { - "optional": true - }, - "typescript": { - "optional": true - } - } - }, - "node_modules/tsup/node_modules/chokidar": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", - "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", - "dev": true, - "license": "MIT", - "dependencies": { - "readdirp": "^4.0.1" - }, - "engines": { - "node": ">= 14.16.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/tsup/node_modules/readdirp": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.1.tgz", - "integrity": "sha512-h80JrZu/MHUZCyHu5ciuoI0+WxsCxzxJTILn6Fs8rxSnFPh+UVHYfeIxK1nVGugMqkfC4vJcBOYbkfkwYK0+gw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14.18.0" - }, - "funding": { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/tsup/node_modules/resolve-from": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", - "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/tsup/node_modules/rollup": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.30.1.tgz", - "integrity": "sha512-mlJ4glW020fPuLi7DkM/lN97mYEZGWeqBnrljzN0gs7GLctqX3lNWxKQ7Gl712UAX+6fog/L3jh4gb7R6aVi3w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/estree": "1.0.6" - }, - "bin": { - "rollup": "dist/bin/rollup" - }, - "engines": { - "node": ">=18.0.0", - "npm": ">=8.0.0" - }, - "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.30.1", - "@rollup/rollup-android-arm64": "4.30.1", - "@rollup/rollup-darwin-arm64": "4.30.1", - "@rollup/rollup-darwin-x64": "4.30.1", - "@rollup/rollup-freebsd-arm64": "4.30.1", - "@rollup/rollup-freebsd-x64": "4.30.1", - "@rollup/rollup-linux-arm-gnueabihf": "4.30.1", - "@rollup/rollup-linux-arm-musleabihf": "4.30.1", - "@rollup/rollup-linux-arm64-gnu": "4.30.1", - "@rollup/rollup-linux-arm64-musl": "4.30.1", - "@rollup/rollup-linux-loongarch64-gnu": "4.30.1", - "@rollup/rollup-linux-powerpc64le-gnu": "4.30.1", - "@rollup/rollup-linux-riscv64-gnu": "4.30.1", - "@rollup/rollup-linux-s390x-gnu": "4.30.1", - "@rollup/rollup-linux-x64-gnu": "4.30.1", - "@rollup/rollup-linux-x64-musl": "4.30.1", - "@rollup/rollup-win32-arm64-msvc": "4.30.1", - "@rollup/rollup-win32-ia32-msvc": "4.30.1", - "@rollup/rollup-win32-x64-msvc": "4.30.1", - "fsevents": "~2.3.2" - } - }, - "node_modules/tsup/node_modules/source-map": { - "version": "0.8.0-beta.0", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.8.0-beta.0.tgz", - "integrity": "sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "whatwg-url": "^7.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/tsup/node_modules/tr46": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz", - "integrity": "sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==", - "dev": true, - "license": "MIT", - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/tsup/node_modules/webidl-conversions": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz", - "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==", - "dev": true, - "license": "BSD-2-Clause" - }, - "node_modules/tsup/node_modules/whatwg-url": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-7.1.0.tgz", - "integrity": "sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==", - "dev": true, - "license": "MIT", - "dependencies": { - "lodash.sortby": "^4.7.0", - "tr46": "^1.0.1", - "webidl-conversions": "^4.0.2" - } - }, - "node_modules/tv4": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/tv4/-/tv4-1.3.0.tgz", - "integrity": "sha512-afizzfpJgvPr+eDkREK4MxJ/+r8nEEHcmitwgnPUqpaP+FpwQyadnxNoSACbgc/b1LsZYtODGoPiFxQrgJgjvw==", - "dev": true, - "license": [ - { - "type": "Public Domain", - "url": "http://geraintluff.github.io/tv4/LICENSE.txt" - }, - { - "type": "MIT", - "url": "http://jsonary.com/LICENSE.txt" - } - ], - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/tx2": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/tx2/-/tx2-1.0.5.tgz", - "integrity": "sha512-sJ24w0y03Md/bxzK4FU8J8JveYYUbSs2FViLJ2D/8bytSiyPRbuE3DyL/9UKYXTZlV3yXq0L8GLlhobTnekCVg==", - "dev": true, - "license": "MIT", - "optional": true, - "dependencies": { - "json-stringify-safe": "^5.0.1" - } - }, - "node_modules/type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "prelude-ls": "^1.2.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/type-detect": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", - "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/type-fest": { - "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/typescript": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz", - "integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==", - "devOptional": true, - "license": "Apache-2.0", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/uglify-js": { - "version": "3.19.3", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz", - "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", - "license": "BSD-2-Clause", - "optional": true, - "bin": { - "uglifyjs": "bin/uglifyjs" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/undefsafe": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz", - "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==", - "dev": true, - "license": "MIT" - }, - "node_modules/undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", - "license": "MIT" - }, - "node_modules/unique-names-generator": { - "version": "4.7.1", - "resolved": "https://registry.npmjs.org/unique-names-generator/-/unique-names-generator-4.7.1.tgz", - "integrity": "sha512-lMx9dX+KRmG8sq6gulYYpKWZc9RlGsgBR6aoO8Qsm3qvkSJ+3rAymr+TnV8EDMrIrwuFJ4kruzMWM/OpYzPoow==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/update-browserslist-db": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.2.tgz", - "integrity": "sha512-PPypAm5qvlD7XMZC3BujecnaOxwhrtoFR+Dqkk5Aa/6DssiH0ibKoketaj9w8LP7Bont1rYeoV5plxD7RTEPRg==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "escalade": "^3.2.0", - "picocolors": "^1.1.1" - }, - "bin": { - "update-browserslist-db": "cli.js" - }, - "peerDependencies": { - "browserslist": ">= 4.21.0" - } - }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "license": "BSD-2-Clause", - "peer": true, - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/use-sync-external-store": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.4.0.tgz", - "integrity": "sha512-9WXSPC5fMv61vaupRkCKCxsPxBocVnwakBEkMIHHpkTTg6icbJtg6jzgtLDm4bl3cSHAca52rYWih0k4K3PfHw==", - "license": "MIT", - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" - } - }, - "node_modules/utf-8-validate": { - "version": "5.0.10", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.10.tgz", - "integrity": "sha512-Z6czzLq4u8fPOyx7TU6X3dvUZVvoJmxSQ+IcrlmagKhilxlhZgxPK6C5Jqbkw1IDUmFTM+cz9QDnnLTwDz/2gQ==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "dependencies": { - "node-gyp-build": "^4.3.0" - }, - "engines": { - "node": ">=6.14.2" - } - }, - "node_modules/uuid": { - "version": "11.0.3", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/esm/bin/uuid" - } - }, - "node_modules/v8-compile-cache-lib": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", - "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", - "dev": true, - "license": "MIT" - }, - "node_modules/v8-to-istanbul": { - "version": "9.3.0", - "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", - "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", - "dev": true, - "license": "ISC", - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.12", - "@types/istanbul-lib-coverage": "^2.0.1", - "convert-source-map": "^2.0.0" - }, - "engines": { - "node": ">=10.12.0" - } - }, - "node_modules/vite": { - "version": "5.4.11", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.11.tgz", - "integrity": "sha512-c7jFQRklXua0mTzneGW9QVyxFjUgwcihC4bXEtujIo2ouWCe1Ajt/amn2PCxYnhYfd5k09JX3SB7OYWFKYqj8Q==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "esbuild": "^0.21.3", - "postcss": "^8.4.43", - "rollup": "^4.20.0" - }, - "bin": { - "vite": "bin/vite.js" - }, - "engines": { - "node": "^18.0.0 || >=20.0.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - }, - "peerDependencies": { - "@types/node": "^18.0.0 || >=20.0.0", - "less": "*", - "lightningcss": "^1.21.0", - "sass": "*", - "sass-embedded": "*", - "stylus": "*", - "sugarss": "*", - "terser": "^5.4.0" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "sass-embedded": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { - "optional": true - } - } - }, - "node_modules/vite-node": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-2.1.5.tgz", - "integrity": "sha512-rd0QIgx74q4S1Rd56XIiL2cYEdyWn13cunYBIuqh9mpmQr7gGS0IxXoP8R6OaZtNQQLyXSWbd4rXKYUbhFpK5w==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "cac": "^6.7.14", - "debug": "^4.3.7", - "es-module-lexer": "^1.5.4", - "pathe": "^1.1.2", - "vite": "^5.0.0" - }, - "bin": { - "vite-node": "vite-node.mjs" - }, - "engines": { - "node": "^18.0.0 || >=20.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/vite/node_modules/@esbuild/aix-ppc64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", - "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/android-arm": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", - "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/android-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", - "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/android-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", - "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/darwin-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", - "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/darwin-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", - "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/freebsd-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", - "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/freebsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", - "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-arm": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", - "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", - "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-ia32": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", - "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-loong64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", - "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-mips64el": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", - "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", - "cpu": [ - "mips64el" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-ppc64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", - "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-riscv64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", - "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-s390x": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", - "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", - "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/netbsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", - "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/openbsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", - "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/sunos-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", - "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/win32-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", - "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/win32-ia32": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", - "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/win32-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", - "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/esbuild": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", - "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "peer": true, - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=12" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.21.5", - "@esbuild/android-arm": "0.21.5", - "@esbuild/android-arm64": "0.21.5", - "@esbuild/android-x64": "0.21.5", - "@esbuild/darwin-arm64": "0.21.5", - "@esbuild/darwin-x64": "0.21.5", - "@esbuild/freebsd-arm64": "0.21.5", - "@esbuild/freebsd-x64": "0.21.5", - "@esbuild/linux-arm": "0.21.5", - "@esbuild/linux-arm64": "0.21.5", - "@esbuild/linux-ia32": "0.21.5", - "@esbuild/linux-loong64": "0.21.5", - "@esbuild/linux-mips64el": "0.21.5", - "@esbuild/linux-ppc64": "0.21.5", - "@esbuild/linux-riscv64": "0.21.5", - "@esbuild/linux-s390x": "0.21.5", - "@esbuild/linux-x64": "0.21.5", - "@esbuild/netbsd-x64": "0.21.5", - "@esbuild/openbsd-x64": "0.21.5", - "@esbuild/sunos-x64": "0.21.5", - "@esbuild/win32-arm64": "0.21.5", - "@esbuild/win32-ia32": "0.21.5", - "@esbuild/win32-x64": "0.21.5" - } - }, - "node_modules/vite/node_modules/rollup": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.30.1.tgz", - "integrity": "sha512-mlJ4glW020fPuLi7DkM/lN97mYEZGWeqBnrljzN0gs7GLctqX3lNWxKQ7Gl712UAX+6fog/L3jh4gb7R6aVi3w==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "@types/estree": "1.0.6" - }, - "bin": { - "rollup": "dist/bin/rollup" - }, - "engines": { - "node": ">=18.0.0", - "npm": ">=8.0.0" - }, - "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.30.1", - "@rollup/rollup-android-arm64": "4.30.1", - "@rollup/rollup-darwin-arm64": "4.30.1", - "@rollup/rollup-darwin-x64": "4.30.1", - "@rollup/rollup-freebsd-arm64": "4.30.1", - "@rollup/rollup-freebsd-x64": "4.30.1", - "@rollup/rollup-linux-arm-gnueabihf": "4.30.1", - "@rollup/rollup-linux-arm-musleabihf": "4.30.1", - "@rollup/rollup-linux-arm64-gnu": "4.30.1", - "@rollup/rollup-linux-arm64-musl": "4.30.1", - "@rollup/rollup-linux-loongarch64-gnu": "4.30.1", - "@rollup/rollup-linux-powerpc64le-gnu": "4.30.1", - "@rollup/rollup-linux-riscv64-gnu": "4.30.1", - "@rollup/rollup-linux-s390x-gnu": "4.30.1", - "@rollup/rollup-linux-x64-gnu": "4.30.1", - "@rollup/rollup-linux-x64-musl": "4.30.1", - "@rollup/rollup-win32-arm64-msvc": "4.30.1", - "@rollup/rollup-win32-ia32-msvc": "4.30.1", - "@rollup/rollup-win32-x64-msvc": "4.30.1", - "fsevents": "~2.3.2" - } - }, - "node_modules/vitest": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-2.1.5.tgz", - "integrity": "sha512-P4ljsdpuzRTPI/kbND2sDZ4VmieerR2c9szEZpjc+98Z9ebvnXmM5+0tHEKqYZumXqlvnmfWsjeFOjXVriDG7A==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "@vitest/expect": "2.1.5", - "@vitest/mocker": "2.1.5", - "@vitest/pretty-format": "^2.1.5", - "@vitest/runner": "2.1.5", - "@vitest/snapshot": "2.1.5", - "@vitest/spy": "2.1.5", - "@vitest/utils": "2.1.5", - "chai": "^5.1.2", - "debug": "^4.3.7", - "expect-type": "^1.1.0", - "magic-string": "^0.30.12", - "pathe": "^1.1.2", - "std-env": "^3.8.0", - "tinybench": "^2.9.0", - "tinyexec": "^0.3.1", - "tinypool": "^1.0.1", - "tinyrainbow": "^1.2.0", - "vite": "^5.0.0", - "vite-node": "2.1.5", - "why-is-node-running": "^2.3.0" - }, - "bin": { - "vitest": "vitest.mjs" - }, - "engines": { - "node": "^18.0.0 || >=20.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "@edge-runtime/vm": "*", - "@types/node": "^18.0.0 || >=20.0.0", - "@vitest/browser": "2.1.5", - "@vitest/ui": "2.1.5", - "happy-dom": "*", - "jsdom": "*" - }, - "peerDependenciesMeta": { - "@edge-runtime/vm": { - "optional": true - }, - "@types/node": { - "optional": true - }, - "@vitest/browser": { - "optional": true - }, - "@vitest/ui": { - "optional": true - }, - "happy-dom": { - "optional": true - }, - "jsdom": { - "optional": true - } - } - }, - "node_modules/vizion": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/vizion/-/vizion-2.2.1.tgz", - "integrity": "sha512-sfAcO2yeSU0CSPFI/DmZp3FsFE9T+8913nv1xWBOyzODv13fwkn6Vl7HqxGpkr9F608M+8SuFId3s+BlZqfXww==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "async": "^2.6.3", - "git-node-fs": "^1.0.0", - "ini": "^1.3.5", - "js-git": "^0.7.8" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/vizion/node_modules/async": { - "version": "2.6.4", - "resolved": "https://registry.npmjs.org/async/-/async-2.6.4.tgz", - "integrity": "sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==", - "dev": true, - "license": "MIT", - "dependencies": { - "lodash": "^4.17.14" - } - }, - "node_modules/vue": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.13.tgz", - "integrity": "sha512-wmeiSMxkZCSc+PM2w2VRsOYAZC8GdipNFRTsLSfodVqI9mbejKeXEGr8SckuLnrQPGe3oJN5c3K0vpoU9q/wCQ==", - "license": "MIT", - "peer": true, - "dependencies": { - "@vue/compiler-dom": "3.5.13", - "@vue/compiler-sfc": "3.5.13", - "@vue/runtime-dom": "3.5.13", - "@vue/server-renderer": "3.5.13", - "@vue/shared": "3.5.13" - }, - "peerDependencies": { - "typescript": "*" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/walker": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", - "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "makeerror": "1.0.12" - } - }, - "node_modules/web-streams-polyfill": { - "version": "4.0.0-beta.3", - "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz", - "integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==", - "license": "MIT", - "engines": { - "node": ">= 14" - } - }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", - "license": "BSD-2-Clause" - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "license": "MIT", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/why-is-node-running": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", - "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "siginfo": "^2.0.0", - "stackback": "0.0.2" - }, - "bin": { - "why-is-node-running": "cli.js" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/word-wrap": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", - "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", - "dev": true, - "license": "MIT", - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/wordwrap": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", - "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", - "license": "MIT" - }, - "node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "license": "MIT" - }, - "node_modules/wrap-ansi-cjs/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/write-file-atomic": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", - "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", - "dev": true, - "license": "ISC", - "dependencies": { - "imurmurhash": "^0.1.4", - "signal-exit": "^3.0.7" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/write-file-atomic/node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/ws": { - "version": "7.5.10", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz", - "integrity": "sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.3.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": "^5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, - "node_modules/y18n": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "license": "ISC" - }, - "node_modules/yaml": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.0.tgz", - "integrity": "sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA==", - "license": "ISC", - "bin": { - "yaml": "bin.mjs" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/yargs": { - "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/yargs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/yargs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT" - }, - "node_modules/yargs/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/yargs/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/yargs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/yn": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", - "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/zimmerframe": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/zimmerframe/-/zimmerframe-1.1.2.tgz", - "integrity": "sha512-rAbqEGa8ovJy4pyBxZM70hg4pE6gDgaQ0Sl9M3enG3I0d6H4XSAM3GeNGLKnsBpuijUow064sf7ww1nutC5/3w==", - "license": "MIT", - "peer": true - }, - "node_modules/zod": { - "version": "3.23.8", - "resolved": "https://registry.npmjs.org/zod/-/zod-3.23.8.tgz", - "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/colinhacks" - } - } - } -} diff --git a/packages/core/package.json b/packages/core/package.json index 9f14496600629..c6853097a05cb 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "@elizaos/core", - "version": "0.25.6-alpha.1", + "version": "0.25.8", "description": "", "type": "module", "main": "dist/index.js", @@ -62,18 +62,19 @@ "typescript": "5.6.3" }, "dependencies": { - "@ai-sdk/anthropic": "0.0.56", - "@ai-sdk/google": "0.0.55", - "@ai-sdk/google-vertex": "0.0.43", - "@ai-sdk/groq": "0.0.3", - "@ai-sdk/mistral": "1.0.9", + "@ai-sdk/anthropic": "1.1.6", + "@ai-sdk/google": "1.1.0", + "@ai-sdk/google-vertex": "2.1.11", + "@ai-sdk/groq": "1.1.7", + "@ai-sdk/mistral": "1.1.6", "@ai-sdk/openai": "1.1.9", - "@ai-sdk/amazon-bedrock": "1.1.0", + "@ai-sdk/amazon-bedrock": "1.1.6", "@fal-ai/client": "1.2.0", "@tavily/core": "^0.0.2", "@types/uuid": "10.0.0", "ai": "4.1.16", "anthropic-vertex-ai": "1.0.2", + "bignumber.js": "9.1.2", "dotenv": "16.4.5", "fastembed": "1.14.1", "fastestsmallesttextencoderdecoder": "1.0.22", @@ -82,7 +83,6 @@ "handlebars": "^4.7.8", "js-sha1": "0.7.0", "js-tiktoken": "1.0.15", - "langchain": "0.3.6", "ollama-ai-provider": "0.16.1", "openai": "4.82.0", "pino": "^9.6.0", @@ -91,5 +91,8 @@ "together-ai": "0.7.0", "unique-names-generator": "4.7.1", "uuid": "11.0.3" + }, + "publishConfig": { + "access": "public" } } diff --git a/packages/core/src/environment.ts b/packages/core/src/environment.ts index 9d51e0f4e68c2..7fd7d138a0cf6 100644 --- a/packages/core/src/environment.ts +++ b/packages/core/src/environment.ts @@ -1,5 +1,5 @@ import { z } from "zod"; -import { ModelProviderName, Clients } from "./types"; +import { ModelProviderName } from "./types"; import elizaLogger from "./logger"; // TODO: TO COMPLETE @@ -94,7 +94,6 @@ export const CharacterSchema = z.object({ ]) ) .optional(), - clients: z.array(z.nativeEnum(Clients)), plugins: z.union([z.array(z.string()), z.array(PluginSchema)]), settings: z .object({ @@ -106,6 +105,14 @@ export const CharacterSchema = z.object({ }) .optional(), model: z.string().optional(), + modelConfig: z.object({ + maxInputTokens: z.number().optional(), + maxOutputTokens: z.number().optional(), + temperature: z.number().optional(), + frequency_penalty: z.number().optional(), + presence_penalty:z.number().optional() + }) + .optional(), embeddingModel: z.string().optional(), }) .optional(), diff --git a/packages/core/src/generation.ts b/packages/core/src/generation.ts index 6adaf9b7232d5..4bae6f2624c3d 100644 --- a/packages/core/src/generation.ts +++ b/packages/core/src/generation.ts @@ -4,7 +4,6 @@ import { createMistral } from "@ai-sdk/mistral"; import { createGroq } from "@ai-sdk/groq"; import { createOpenAI } from "@ai-sdk/openai"; import { bedrock } from "@ai-sdk/amazon-bedrock"; -import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"; import { generateObject as aiGenerateObject, generateText as aiGenerateText, @@ -16,7 +15,7 @@ import { Buffer } from "buffer"; import { createOllama } from "ollama-ai-provider"; import OpenAI from "openai"; import { encodingForModel, type TiktokenModel } from "js-tiktoken"; -import { AutoTokenizer } from "@huggingface/transformers"; +// import { AutoTokenizer } from "@huggingface/transformers"; import Together from "together-ai"; import type { ZodSchema } from "zod"; import { elizaLogger } from "./index.ts"; @@ -43,9 +42,9 @@ import { ModelProviderName, ServiceType, type ActionResponse, - type IVerifiableInferenceAdapter, - type VerifiableInferenceOptions, - type VerifiableInferenceResult, + // type IVerifiableInferenceAdapter, + // type VerifiableInferenceOptions, + // type VerifiableInferenceResult, //VerifiableInferenceProvider, type TelemetrySettings, TokenizerType, @@ -54,6 +53,9 @@ import { fal } from "@fal-ai/client"; import BigNumber from "bignumber.js"; import { createPublicClient, http } from "viem"; +import fs from "fs"; +import os from "os"; +import path from "path"; type Tool = CoreTool; type StepResult = AIStepResult; @@ -96,9 +98,9 @@ export async function trimTokens( } // Choose the truncation method based on tokenizer type - if (tokenizerType === TokenizerType.Auto) { - return truncateAuto(tokenizerModel, context, maxTokens); - } + // if (tokenizerType === TokenizerType.Auto) { + // return truncateAuto(tokenizerModel, context, maxTokens); + // } if (tokenizerType === TokenizerType.TikToken) { return truncateTiktoken( @@ -112,31 +114,31 @@ export async function trimTokens( return truncateTiktoken("gpt-4o", context, maxTokens); } -async function truncateAuto( - modelPath: string, - context: string, - maxTokens: number -) { - try { - const tokenizer = await AutoTokenizer.from_pretrained(modelPath); - const tokens = tokenizer.encode(context); - - // If already within limits, return unchanged - if (tokens.length <= maxTokens) { - return context; - } - - // Keep the most recent tokens by slicing from the end - const truncatedTokens = tokens.slice(-maxTokens); - - // Decode back to text - js-tiktoken decode() returns a string directly - return tokenizer.decode(truncatedTokens); - } catch (error) { - elizaLogger.error("Error in trimTokens:", error); - // Return truncated string if tokenization fails - return context.slice(-maxTokens * 4); // Rough estimate of 4 chars per token - } -} +// async function truncateAuto( +// modelPath: string, +// context: string, +// maxTokens: number +// ) { +// try { +// const tokenizer = await AutoTokenizer.from_pretrained(modelPath); +// const tokens = tokenizer.encode(context); + +// // If already within limits, return unchanged +// if (tokens.length <= maxTokens) { +// return context; +// } + +// // Keep the most recent tokens by slicing from the end +// const truncatedTokens = tokens.slice(-maxTokens); + +// // Decode back to text - js-tiktoken decode() returns a string directly +// return tokenizer.decode(truncatedTokens); +// } catch (error) { +// elizaLogger.error("Error in trimTokens:", error); +// // Return truncated string if tokenization fails +// return context.slice(-maxTokens * 4); // Rough estimate of 4 chars per token +// } +// } async function truncateTiktoken( model: TiktokenModel, @@ -347,8 +349,8 @@ export async function generateText({ maxSteps = 1, stop, customSystemPrompt, - verifiableInference = process.env.VERIFIABLE_INFERENCE_ENABLED === "true", - verifiableInferenceOptions, + // verifiableInference = process.env.VERIFIABLE_INFERENCE_ENABLED === "true", + // verifiableInferenceOptions, }: { runtime: IAgentRuntime; context: string; @@ -358,9 +360,9 @@ export async function generateText({ maxSteps?: number; stop?: string[]; customSystemPrompt?: string; - verifiableInference?: boolean; - verifiableInferenceAdapter?: IVerifiableInferenceAdapter; - verifiableInferenceOptions?: VerifiableInferenceOptions; + // verifiableInference?: boolean; + // verifiableInferenceAdapter?: IVerifiableInferenceAdapter; + // verifiableInferenceOptions?: VerifiableInferenceOptions; }): Promise { if (!context) { console.error("generateText context is empty"); @@ -372,36 +374,36 @@ export async function generateText({ elizaLogger.info("Generating text with options:", { modelProvider: runtime.modelProvider, model: modelClass, - verifiableInference, + // verifiableInference, }); elizaLogger.log("Using provider:", runtime.modelProvider); // If verifiable inference is requested and adapter is provided, use it - if (verifiableInference && runtime.verifiableInferenceAdapter) { - elizaLogger.log( - "Using verifiable inference adapter:", - runtime.verifiableInferenceAdapter - ); - try { - const result: VerifiableInferenceResult = - await runtime.verifiableInferenceAdapter.generateText( - context, - modelClass, - verifiableInferenceOptions - ); - elizaLogger.log("Verifiable inference result:", result); - // Verify the proof - const isValid = - await runtime.verifiableInferenceAdapter.verifyProof(result); - if (!isValid) { - throw new Error("Failed to verify inference proof"); - } - - return result.text; - } catch (error) { - elizaLogger.error("Error in verifiable inference:", error); - throw error; - } - } + // if (verifiableInference && runtime.verifiableInferenceAdapter) { + // elizaLogger.log( + // "Using verifiable inference adapter:", + // runtime.verifiableInferenceAdapter + // ); + // try { + // const result: VerifiableInferenceResult = + // await runtime.verifiableInferenceAdapter.generateText( + // context, + // modelClass, + // verifiableInferenceOptions + // ); + // elizaLogger.log("Verifiable inference result:", result); + // // Verify the proof + // const isValid = + // await runtime.verifiableInferenceAdapter.verifyProof(result); + // if (!isValid) { + // throw new Error("Failed to verify inference proof"); + // } + + // return result.text; + // } catch (error) { + // elizaLogger.error("Error in verifiable inference:", error); + // throw error; + // } + // } const provider = runtime.modelProvider; elizaLogger.debug("Provider settings:", { @@ -502,7 +504,7 @@ export async function generateText({ const max_context_length = modelConfiguration?.maxInputTokens || modelSettings.maxInputTokens; const max_response_length = - modelConfiguration?.max_response_length || + modelConfiguration?.maxOutputTokens || modelSettings.maxOutputTokens; const experimental_telemetry = modelConfiguration?.experimental_telemetry || @@ -535,7 +537,8 @@ export async function generateText({ case ModelProviderName.TOGETHER: case ModelProviderName.NINETEEN_AI: case ModelProviderName.AKASH_CHAT_API: - case ModelProviderName.LMSTUDIO: { + case ModelProviderName.LMSTUDIO: + case ModelProviderName.NEARAI: { elizaLogger.debug( "Initializing OpenAI model with Cloudflare check" ); @@ -953,7 +956,7 @@ export async function generateText({ experimental_telemetry: experimental_telemetry, }); - response = ollamaResponse; + response = ollamaResponse.replace(/[\s\S]*?<\/think>\s*\n*/g, ''); } elizaLogger.debug("Received response from Ollama model."); break; @@ -1290,6 +1293,53 @@ export async function generateText({ break; } + case ModelProviderName.SECRETAI: + { + elizaLogger.debug("Initializing SecretAI model."); + + const secretAiProvider = createOllama({ + baseURL: getEndpoint(provider) + "/api", + fetch: runtime.fetch, + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${apiKey}`, + } + }); + const secretAi = secretAiProvider(model); + + const { text: secretAiResponse } = await aiGenerateText({ + model: secretAi, + prompt: context, + tools: tools, + onStepFinish: onStepFinish, + temperature: temperature, + maxSteps: maxSteps, + maxTokens: max_response_length, + }); + + response = secretAiResponse; + } + break; + + case ModelProviderName.BEDROCK: { + elizaLogger.debug("Initializing Bedrock model."); + + const { text: bedrockResponse } = await aiGenerateText({ + model: bedrock(model), + maxSteps: maxSteps, + temperature: temperature, + maxTokens: max_response_length, + frequencyPenalty: frequency_penalty, + presencePenalty: presence_penalty, + experimental_telemetry: experimental_telemetry, + prompt: context + }); + + response = bedrockResponse; + elizaLogger.debug("Received response from Bedrock model."); + break; + } + default: { const errorMessage = `Unsupported provider: ${provider}`; elizaLogger.error(errorMessage); @@ -1375,17 +1425,13 @@ export async function generateShouldRespond({ */ export async function splitChunks( content: string, - chunkSize = 512, - bleed = 20 + chunkSize = 1500, + bleed = 100 ): Promise { elizaLogger.debug(`[splitChunks] Starting text split`); - const textSplitter = new RecursiveCharacterTextSplitter({ - chunkSize: Number(chunkSize), - chunkOverlap: Number(bleed), - }); + const chunks = splitText(content, chunkSize, bleed); - const chunks = await textSplitter.splitText(content); elizaLogger.debug(`[splitChunks] Split complete:`, { numberOfChunks: chunks.length, averageChunkSize: @@ -1396,6 +1442,19 @@ export async function splitChunks( return chunks; } +export function splitText(content: string, chunkSize: number, bleed: number): string[] { + const chunks: string[] = []; + let start = 0; + + while (start < content.length) { + const end = Math.min(start + chunkSize, content.length); + chunks.push(content.substring(start, end)); + start = end - bleed; // Apply overlap + } + + return chunks; +} + /** * Sends a message to the model and parses the response as a boolean value * @param opts - The options for the generateText request @@ -1676,6 +1735,17 @@ export const generateImage = async ( return runtime.getSetting("VENICE_API_KEY"); case ModelProviderName.LIVEPEER: return runtime.getSetting("LIVEPEER_GATEWAY_URL"); + case ModelProviderName.SECRETAI: + return runtime.getSetting("SECRET_AI_API_KEY"); + case ModelProviderName.NEARAI: + try { + // Read auth config from ~/.nearai/config.json if it exists + const config = JSON.parse(fs.readFileSync(path.join(os.homedir(), '.nearai/config.json'), 'utf8')); + return JSON.stringify(config?.auth); + } catch (e) { + elizaLogger.warn(`Error loading NEAR AI config. The environment variable NEARAI_API_KEY will be used. ${e}`); + } + return runtime.getSetting("NEARAI_API_KEY"); default: // If no specific match, try the fallback chain return ( @@ -2050,9 +2120,9 @@ export interface GenerationOptions { stop?: string[]; mode?: "auto" | "json" | "tool"; experimental_providerMetadata?: Record; - verifiableInference?: boolean; - verifiableInferenceAdapter?: IVerifiableInferenceAdapter; - verifiableInferenceOptions?: VerifiableInferenceOptions; + // verifiableInference?: boolean; + // verifiableInferenceAdapter?: IVerifiableInferenceAdapter; + // verifiableInferenceOptions?: VerifiableInferenceOptions; } /** @@ -2084,9 +2154,9 @@ export const generateObject = async ({ schemaDescription, stop, mode = "json", - verifiableInference = false, - verifiableInferenceAdapter, - verifiableInferenceOptions, + // verifiableInference = false, + // verifiableInferenceAdapter, + // verifiableInferenceOptions, }: GenerationOptions): Promise> => { if (!context) { const errorMessage = "generateObject context is empty"; @@ -2130,9 +2200,9 @@ export const generateObject = async ({ runtime, context, modelClass, - verifiableInference, - verifiableInferenceAdapter, - verifiableInferenceOptions, + // verifiableInference, + // verifiableInferenceAdapter, + // verifiableInferenceOptions, }); return response; @@ -2158,9 +2228,9 @@ interface ProviderOptions { modelOptions: ModelSettings; modelClass: ModelClass; context: string; - verifiableInference?: boolean; - verifiableInferenceAdapter?: IVerifiableInferenceAdapter; - verifiableInferenceOptions?: VerifiableInferenceOptions; + // verifiableInference?: boolean; + // verifiableInferenceAdapter?: IVerifiableInferenceAdapter; + // verifiableInferenceOptions?: VerifiableInferenceOptions; } /** @@ -2219,6 +2289,10 @@ export async function handleProvider( return await handleDeepSeek(options); case ModelProviderName.LIVEPEER: return await handleLivepeer(options); + case ModelProviderName.SECRETAI: + return await handleSecretAi(options); + case ModelProviderName.NEARAI: + return await handleNearAi(options); default: { const errorMessage = `Unsupported provider: ${provider}`; elizaLogger.error(errorMessage); @@ -2358,14 +2432,14 @@ async function handleGroq({ */ async function handleGoogle({ model, - apiKey: _apiKey, + apiKey, schema, schemaName, schemaDescription, mode = "json", modelOptions, }: ProviderOptions): Promise> { - const google = createGoogleGenerativeAI(); + const google = createGoogleGenerativeAI({apiKey}); return await aiGenerateObject({ model: google(model), schema, @@ -2567,6 +2641,68 @@ async function handleLivepeer({ }); } +/** + * Handles object generation for Secret AI models. + * + * @param {ProviderOptions} options - Options specific to Secret AI. + * @returns {Promise>} - A promise that resolves to generated objects. + */ +async function handleSecretAi({ + model, + apiKey, + schema, + schemaName, + schemaDescription, + mode = "json", + modelOptions, + provider, +}: ProviderOptions): Promise> { + const secretAiProvider = createOllama({ + baseURL: getEndpoint(provider) + "/api", + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${apiKey}`, + } + }); + const secretAi = secretAiProvider(model); + return await aiGenerateObject({ + model: secretAi, + schema, + schemaName, + schemaDescription, + mode, + ...modelOptions, + }); +} + +/** + * Handles object generation for NEAR AI models. + * + * @param {ProviderOptions} options - Options specific to NEAR AI. + * @returns {Promise>} - A promise that resolves to generated objects. + */ +async function handleNearAi({ + model, + apiKey, + schema, + schemaName, + schemaDescription, + mode = "json", + modelOptions, +}: ProviderOptions): Promise> { + const nearai = createOpenAI({ apiKey, baseURL: models.nearai.endpoint }); + // Require structured output if schema is provided + const settings = schema ? { structuredOutputs: true } : undefined; + return await aiGenerateObject({ + model: nearai.languageModel(model, settings), + schema, + schemaName, + schemaDescription, + mode, + ...modelOptions, + }); +} + // Add type definition for Together AI response interface TogetherAIImageResponse { data: Array<{ diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 7dbf7f832d59d..99edf9393ebad 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -3,7 +3,6 @@ import "./config.ts"; // Add this line first export * from "./actions.ts"; export * from "./context.ts"; export * from "./database.ts"; -export * from "./defaultCharacter.ts"; export * from "./embedding.ts"; export * from "./evaluators.ts"; export * from "./generation.ts"; diff --git a/packages/core/src/models.ts b/packages/core/src/models.ts index 76a6838d1ba0b..4161ac356c763 100644 --- a/packages/core/src/models.ts +++ b/packages/core/src/models.ts @@ -987,7 +987,7 @@ export const models: Models = { }, }, [ModelProviderName.LIVEPEER]: { - endpoint: settings.LIVEPEER_GATEWAY_URL, + endpoint: settings.LIVEPEER_GATEWAY_URL || "http://gateway.test-gateway", model: { [ModelClass.SMALL]: { name: @@ -1151,6 +1151,76 @@ export const models: Models = { }, }, }, + [ModelProviderName.SECRETAI]: { + endpoint: settings.SECRET_AI_URL || "https://ai1.scrtlabs.com:21434", + model: { + [ModelClass.SMALL]: { + name: + settings.SMALL_SECRET_AI_MODEL || + "deepseek-r1:70b", + stop: [], + maxInputTokens: 128000, + maxOutputTokens: 8192, + temperature: 0.7, + }, + [ModelClass.MEDIUM]: { + name: + settings.MEDIUM_SECRET_AI_MODEL || + "deepseek-r1:70b", + stop: [], + maxInputTokens: 128000, + maxOutputTokens: 8192, + temperature: 0.7, + }, + [ModelClass.LARGE]: { + name: + settings.LARGE_SECRET_AI_MODEL || + "deepseek-r1:70b", + stop: [], + maxInputTokens: 128000, + maxOutputTokens: 8192, + temperature: 0.7, + }, + }, + }, + [ModelProviderName.NEARAI]: { + endpoint: settings.NEARAI_API_URL || "https://api.near.ai/v1", + model: { + [ModelClass.SMALL]: { + name: + settings.SMALL_NEARAI_MODEL || + settings.NEARAI_MODEL || + "fireworks::accounts/fireworks/models/llama-v3p2-3b-instruct", + stop: [], + maxInputTokens: 128000, + maxOutputTokens: 8192, + temperature: 0.6, + }, + [ModelClass.MEDIUM]: { + name: + settings.MEDIUM_NEARAI_MODEL || + settings.NEARAI_MODEL || + "fireworks::accounts/fireworks/models/llama-v3p1-70b-instruct", + stop: [], + maxInputTokens: 128000, + maxOutputTokens: 8192, + temperature: 0.6, + }, + [ModelClass.LARGE]: { + name: + settings.LARGE_NEARAI_MODEL || + settings.NEARAI_MODEL || + "fireworks::accounts/fireworks/models/llama-v3p1-405b-instruct", + stop: [], + maxInputTokens: 128000, + maxOutputTokens: 8192, + temperature: 0.6, + }, + [ModelClass.IMAGE]: { + name: settings.IMAGE_NEARAI_MODEL || "fireworks::accounts/fireworks/models/playground-v2-5-1024px-aesthetic", + }, + }, + }, }; export function getModelSettings( diff --git a/packages/core/src/parsing.ts b/packages/core/src/parsing.ts index fadebbab929dd..f7393875e28f7 100644 --- a/packages/core/src/parsing.ts +++ b/packages/core/src/parsing.ts @@ -146,6 +146,7 @@ export function parseJSONObjectFromText( const jsonBlockMatch = text.match(jsonBlockPattern); if (jsonBlockMatch) { + text = cleanJsonResponse(text); const parsingText = normalizeJsonString(text); try { jsonData = JSON.parse(parsingText); @@ -159,6 +160,7 @@ export function parseJSONObjectFromText( const objectMatch = text.match(objectPattern); if (objectMatch) { + text = cleanJsonResponse(text); const parsingText = normalizeJsonString(text); try { jsonData = JSON.parse(parsingText); @@ -214,7 +216,7 @@ export function extractAttributes( }); } - return attributes; + return Object.entries(attributes).length > 0 ? attributes : null; } /** diff --git a/packages/core/src/runtime.ts b/packages/core/src/runtime.ts index b4382e8ef54c5..1072d37657cc5 100644 --- a/packages/core/src/runtime.ts +++ b/packages/core/src/runtime.ts @@ -8,7 +8,6 @@ import { formatActions, } from "./actions.ts"; import { addHeader, composeContext } from "./context.ts"; -import { defaultCharacter } from "./defaultCharacter.ts"; import { evaluationTemplate, formatEvaluatorExamples, @@ -35,7 +34,7 @@ import { type IDatabaseAdapter, type IMemoryManager, type IRAGKnowledgeManager, - type IVerifiableInferenceAdapter, + // type IVerifiableInferenceAdapter, type KnowledgeItem, // RAGKnowledgeItem, //Media, @@ -43,6 +42,7 @@ import { ModelProviderName, type Plugin, type Provider, + type Adapter, type Service, type ServiceType, type State, @@ -52,6 +52,7 @@ import { type Evaluator, type Memory, type DirectoryItem, + type ClientInstance, } from "./types.ts"; import { stringToUuid } from "./uuid.ts"; import { glob } from "glob"; @@ -110,6 +111,11 @@ export class AgentRuntime implements IAgentRuntime { */ providers: Provider[] = []; + /** + * Database adapters used to interact with the database. + */ + adapters: Adapter[] = []; + plugins: Plugin[] = []; /** @@ -170,9 +176,9 @@ export class AgentRuntime implements IAgentRuntime { services: Map = new Map(); memoryManagers: Map = new Map(); cacheManager: ICacheManager; - clients: Record; + clients: ClientInstance[] = []; - verifiableInferenceAdapter?: IVerifiableInferenceAdapter; + // verifiableInferenceAdapter?: IVerifiableInferenceAdapter; registerMemoryManager(manager: IMemoryManager): void { if (!manager.tableName) { @@ -250,19 +256,23 @@ export class AgentRuntime implements IAgentRuntime { services?: Service[]; // Map of service name to service instance managers?: IMemoryManager[]; // Map of table name to memory manager - databaseAdapter: IDatabaseAdapter; // The database adapter used for interacting with the database + databaseAdapter?: IDatabaseAdapter; // The database adapter used for interacting with the database fetch?: typeof fetch | unknown; speechModelPath?: string; - cacheManager: ICacheManager; + cacheManager?: ICacheManager; logging?: boolean; - verifiableInferenceAdapter?: IVerifiableInferenceAdapter; + // verifiableInferenceAdapter?: IVerifiableInferenceAdapter; }) { // use the character id if it exists, otherwise use the agentId if it is passed in, otherwise use the character name this.agentId = opts.character?.id ?? opts?.agentId ?? stringToUuid(opts.character?.name ?? uuidv4()); - this.character = opts.character || defaultCharacter; + this.character = opts.character; + + if(!this.character) { + throw new Error("Character input is required"); + } elizaLogger.info(`${this.character.name}(${this.agentId}) - Initializing AgentRuntime with options:`, { character: opts.character?.name, @@ -289,23 +299,8 @@ export class AgentRuntime implements IAgentRuntime { this.#conversationLength = opts.conversationLength ?? this.#conversationLength; - if (!opts.databaseAdapter) { - throw new Error("No database adapter provided"); - } this.databaseAdapter = opts.databaseAdapter; - // By convention, we create a user and room using the agent id. - // Memories related to it are considered global context for the agent. - this.ensureRoomExists(this.agentId); - this.ensureUserExists( - this.agentId, - this.character.username || this.character.name, - this.character.name, - ).then(() => { - // postgres needs the user to exist before you can add a participant - this.ensureParticipantExists(this.agentId, this.agentId); - }); - elizaLogger.success(`Agent ID: ${this.agentId}`); this.fetch = (opts.fetch as typeof fetch) ?? this.fetch; @@ -426,6 +421,10 @@ export class AgentRuntime implements IAgentRuntime { plugin.providers?.forEach((provider) => { this.registerContextProvider(provider); }); + + plugin.adapters?.forEach((adapter) => { + this.registerAdapter(adapter); + }); }); (opts.actions ?? []).forEach((action) => { @@ -440,10 +439,26 @@ export class AgentRuntime implements IAgentRuntime { this.registerEvaluator(evaluator); }); - this.verifiableInferenceAdapter = opts.verifiableInferenceAdapter; + // this.verifiableInferenceAdapter = opts.verifiableInferenceAdapter; + } + + private async initializeDatabase() { + // By convention, we create a user and room using the agent id. + // Memories related to it are considered global context for the agent. + this.ensureRoomExists(this.agentId); + this.ensureUserExists( + this.agentId, + this.character.username || this.character.name, + this.character.name, + ).then(() => { + // postgres needs the user to exist before you can add a participant + this.ensureParticipantExists(this.agentId, this.agentId); + }); } async initialize() { + this.initializeDatabase(); + for (const [serviceType, service] of this.services.entries()) { try { await service.initialize(this); @@ -543,7 +558,7 @@ export class AgentRuntime implements IAgentRuntime { elizaLogger.info( `[RAG Process] Processing direct string knowledge`, ); - await this.processCharacterKnowledge(stringKnowledge); + await this.processCharacterRAGKnowledge(stringKnowledge); } } else { // Non-RAG mode: only process string knowledge @@ -572,15 +587,14 @@ export class AgentRuntime implements IAgentRuntime { // services (just initialized), clients // client have a start - for (const cStr in this.clients) { - const c = this.clients[cStr]; + for (const c of this.clients) { elizaLogger.log( "runtime::stop - requesting", - cStr, + c, "client stop for", this.character.name, ); - c.stop(); + c.stop(this); } // we don't need to unregister with directClient // don't need to worry about knowledge @@ -979,6 +993,14 @@ export class AgentRuntime implements IAgentRuntime { this.providers.push(provider); } + /** + * Register an adapter for the agent to use. + * @param adapter The adapter to register. + */ + registerAdapter(adapter: Adapter) { + this.adapters.push(adapter); + } + /** * Process the actions of a message. * @param message The message to process. @@ -1116,7 +1138,7 @@ export class AgentRuntime implements IAgentRuntime { runtime: this, context, modelClass: ModelClass.SMALL, - verifiableInferenceAdapter: this.verifiableInferenceAdapter, + // verifiableInferenceAdapter: this.verifiableInferenceAdapter, }); const evaluators = parseJsonArrayFromText( @@ -1167,8 +1189,14 @@ export class AgentRuntime implements IAgentRuntime { id: userId, name: name || this.character.name || "Unknown User", username: userName || this.character.username || "Unknown", - email: email || this.character.email || userId, // Temporary - details: this.character || { summary: "" }, + // TODO: We might not need these account pieces + email: email || this.character.email || userId, + // When invoke ensureUserExists and saving account.details + // Performing a complete JSON.stringify on character will cause a TypeError: Converting circular structure to JSON error in some more complex plugins. + details: this.character ? Object.assign({}, this.character, { + source, + plugins: this.character?.plugins?.map((plugin) => plugin.name), + }) : { summary: "" }, }); elizaLogger.success(`User ${userName} created successfully.`); } @@ -1761,14 +1789,6 @@ Text: ${attachment.text} attachments: formattedAttachments, } as State; } - - getVerifiableInferenceAdapter(): IVerifiableInferenceAdapter | undefined { - return this.verifiableInferenceAdapter; - } - - setVerifiableInferenceAdapter(adapter: IVerifiableInferenceAdapter): void { - this.verifiableInferenceAdapter = adapter; - } } const formatKnowledge = (knowledge: KnowledgeItem[]) => { diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts index ee9687ee4c889..4087ea091459a 100644 --- a/packages/core/src/types.ts +++ b/packages/core/src/types.ts @@ -233,6 +233,8 @@ export type Models = { [ModelProviderName.INFERA]: Model; [ModelProviderName.BEDROCK]: Model; [ModelProviderName.ATOMA]: Model; + [ModelProviderName.SECRETAI]: Model; + [ModelProviderName.NEARAI]: Model; }; /** @@ -272,6 +274,8 @@ export enum ModelProviderName { INFERA = "infera", BEDROCK = "bedrock", ATOMA = "atoma", + SECRETAI = "secret_ai", + NEARAI = "nearai", } /** @@ -607,15 +611,34 @@ export type Media = { contentType?: string; }; +/** + * Client instance + */ +export type ClientInstance = { + /** Client name */ + // name: string; + + /** Stop client connection */ + stop: (runtime: IAgentRuntime) => Promise; +}; + /** * Client interface for platform connections */ export type Client = { + /** Client name */ + name: string; + + /** Client configuration */ + config?: { [key: string]: any }; + /** Start client connection */ - start: (runtime: IAgentRuntime) => Promise; + start: (runtime: IAgentRuntime) => Promise; +}; - /** Stop client connection */ - stop: (runtime: IAgentRuntime) => Promise; +export type Adapter = { + /** Initialize adapter */ + init: (runtime: IAgentRuntime) => IDatabaseAdapter & IDatabaseCacheAdapter; }; /** @@ -625,6 +648,9 @@ export type Plugin = { /** Plugin name */ name: string; + /** Plugin configuration */ + config?: { [key: string]: any }; + /** Plugin description */ description: string; @@ -642,28 +668,10 @@ export type Plugin = { /** Optional clients */ clients?: Client[]; -}; -/** - * Available client platforms - */ -export enum Clients { - ALEXA= "alexa", - DISCORD = "discord", - DIRECT = "direct", - TWITTER = "twitter", - TELEGRAM = "telegram", - TELEGRAM_ACCOUNT = "telegram-account", - FARCASTER = "farcaster", - LENS = "lens", - AUTO = "auto", - SLACK = "slack", - GITHUB = "github", - INSTAGRAM = "instagram", - SIMSAI = "simsai", - XMTP = "xmtp", - DEVA = "deva", -} + /** Optional adapters */ + adapters?: Adapter[]; +}; export interface IAgentConfig { [key: string]: string; @@ -696,7 +704,7 @@ export type TelemetrySettings = { export interface ModelConfiguration { temperature?: number; - max_response_length?: number; + maxOutputTokens?: number; frequency_penalty?: number; presence_penalty?: number; maxInputTokens?: number; @@ -799,9 +807,6 @@ export type Character = { /** Optional knowledge base */ knowledge?: (string | { path: string; shared?: boolean })[]; - /** Supported client platforms */ - clients: Clients[]; - /** Available plugins */ plugins: Plugin[]; @@ -1294,11 +1299,9 @@ export interface IAgentRuntime { cacheManager: ICacheManager; services: Map; - // any could be EventEmitter - // but I think the real solution is forthcoming as a base client interface - clients: Record; + clients: ClientInstance[]; - verifiableInferenceAdapter?: IVerifiableInferenceAdapter | null; + // verifiableInferenceAdapter?: IVerifiableInferenceAdapter | null; initialize(): Promise; @@ -1524,6 +1527,7 @@ export enum ServiceType { GOPLUS_SECURITY = "goplus_security", WEB_SEARCH = "web_search", EMAIL_AUTOMATION = "email_automation", + NKN_CLIENT_SERVICE = "nkn_client_service", } export enum LoggingLevel { @@ -1570,69 +1574,6 @@ export interface ISlackService extends Service { client: any; } -/** - * Available verifiable inference providers - */ -export enum VerifiableInferenceProvider { - RECLAIM = "reclaim", - OPACITY = "opacity", - PRIMUS = "primus", -} - -/** - * Options for verifiable inference - */ -export interface VerifiableInferenceOptions { - /** Custom endpoint URL */ - endpoint?: string; - /** Custom headers */ - headers?: Record; - /** Provider-specific options */ - providerOptions?: Record; -} - -/** - * Result of a verifiable inference request - */ -export interface VerifiableInferenceResult { - /** Generated text */ - text: string; - /** Proof */ - proof: any; - /** Proof id */ - id?: string; - /** Provider information */ - provider: VerifiableInferenceProvider; - /** Timestamp */ - timestamp: number; -} - -/** - * Interface for verifiable inference adapters - */ -export interface IVerifiableInferenceAdapter { - options: any; - /** - * Generate text with verifiable proof - * @param context The input text/prompt - * @param modelClass The model class/name to use - * @param options Additional provider-specific options - * @returns Promise containing the generated text and proof data - */ - generateText( - context: string, - modelClass: string, - options?: VerifiableInferenceOptions, - ): Promise; - - /** - * Verify the proof of a generated response - * @param result The result containing response and proof to verify - * @returns Promise indicating if the proof is valid - */ - verifyProof(result: VerifiableInferenceResult): Promise; -} - export enum TokenizerType { Auto = "auto", TikToken = "tiktoken", diff --git a/packages/create-eliza-app/README.md b/packages/create-eliza-app/README.md deleted file mode 100644 index 89a8dc5ee2ddb..0000000000000 --- a/packages/create-eliza-app/README.md +++ /dev/null @@ -1,74 +0,0 @@ -# create-eliza-app - -A minimal CLI tool to scaffold ELIZA applications with zero configuration. Get started building your own ELIZA-style chatbot in seconds. - - - -[![npm version](https://img.shields.io/npm/v/create-eliza-app?color=yellow)](https://npmjs.com/package/create-eliza-app) -[![npm downloads](https://img.shields.io/npm/dm/create-eliza-app?color=yellow)](https://npm.chart.dev/create-eliza-app) -[![bundle size](https://img.shields.io/bundlephobia/minzip/create-eliza-app?color=yellow)](https://bundlephobia.com/package/create-eliza-app) - - - -## Usage - -You can create a new ELIZA app with your preferred package manager: - - - -```sh -# npm -npx create-eliza-app@latest path - -# pnpm -pnpm dlx create-eliza-app@latest path - -# bun -bunx create-eliza-app@latest path - -# deno -deno run -A npm:create-eliza-app@latest path -``` - - - -## Command Line Arguments - -- `--name`: Name of the template to use (default: "eliza") -- `--dir`: Directory where the project will be created (default: current directory) -- `--registry`: Custom registry URL for templates - -## Getting Started - -Once your project is created: - -1. Navigate to the project directory: - - ```bash - cd your-project-name - ``` - -2. Copy the example environment file: - - ```bash - cp .env.example .env - ``` - -3. Install dependencies: - - ```bash - pnpm install - ``` - -4. Start the development server: - ```bash - pnpm start - ``` - - - ---- - -_🤖 auto updated with [automd](https://automd.unjs.io)_ - - diff --git a/packages/create-eliza-app/package-lock.json b/packages/create-eliza-app/package-lock.json deleted file mode 100644 index b5195803a36c6..0000000000000 --- a/packages/create-eliza-app/package-lock.json +++ /dev/null @@ -1,4617 +0,0 @@ -{ - "name": "create-eliza-app", - "version": "0.1.8+build.1", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "create-eliza-app", - "version": "0.1.8+build.1", - "license": "ISC", - "dependencies": { - "citty": "0.1.6", - "giget": "1.2.3" - }, - "bin": { - "create-eliza-app": "dist/index.mjs" - }, - "devDependencies": { - "automd": "0.3.12", - "jiti": "2.4.0", - "unbuild": "2.0.0" - } - }, - "node_modules/@ampproject/remapping": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", - "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.26.2", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", - "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-validator-identifier": "^7.25.9", - "js-tokens": "^4.0.0", - "picocolors": "^1.0.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/compat-data": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.5.tgz", - "integrity": "sha512-XvcZi1KWf88RVbF9wn8MN6tYFloU5qX8KjuF3E1PVBmJ9eypXfs4GRiJwLuTZL0iSnJUKn1BFPa5BPZZJyFzPg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.0.tgz", - "integrity": "sha512-i1SLeK+DzNnQ3LL/CswPCa/E5u4lh1k6IAEphON8F+cXt0t9euTshDru0q7/IqMa1PMPz5RnHuHscF8/ZJsStg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.26.0", - "@babel/generator": "^7.26.0", - "@babel/helper-compilation-targets": "^7.25.9", - "@babel/helper-module-transforms": "^7.26.0", - "@babel/helpers": "^7.26.0", - "@babel/parser": "^7.26.0", - "@babel/template": "^7.25.9", - "@babel/traverse": "^7.25.9", - "@babel/types": "^7.26.0", - "convert-source-map": "^2.0.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.2", - "json5": "^2.2.3", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/babel" - } - }, - "node_modules/@babel/core/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/generator": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.5.tgz", - "integrity": "sha512-2caSP6fN9I7HOe6nqhtft7V4g7/V/gfDsC3Ag4W7kEzzvRGKqiv0pu0HogPiZ3KaVSoNDhUws6IJjDjpfmYIXw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.26.5", - "@babel/types": "^7.26.5", - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.25", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.26.5.tgz", - "integrity": "sha512-IXuyn5EkouFJscIDuFF5EsiSolseme1s0CZB+QxVugqJLYmKdxI1VfIBOst0SUu4rnk2Z7kqTwmoO1lp3HIfnA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.26.5", - "@babel/helper-validator-option": "^7.25.9", - "browserslist": "^4.24.0", - "lru-cache": "^5.1.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/helper-module-imports": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", - "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.25.9", - "@babel/types": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-transforms": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", - "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-imports": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9", - "@babel/traverse": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-string-parser": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", - "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", - "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-option": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", - "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helpers": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.0.tgz", - "integrity": "sha512-tbhNuIxNcVb21pInl3ZSjksLCvgdZy9KwJ8brv993QtIVKJBBkYXz4q4ZbAv31GdnC+R90np23L5FbEBlthAEw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/template": "^7.25.9", - "@babel/types": "^7.26.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/parser": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.5.tgz", - "integrity": "sha512-SRJ4jYmXRqV1/Xc+TIVG84WjHBXKlxO9sHQnA2Pf12QQEAp1LOh6kDzNHXcUnbH1QI0FDoPPVOt+vyUDucxpaw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.26.5" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/runtime": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.0.tgz", - "integrity": "sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw==", - "dev": true, - "license": "MIT", - "dependencies": { - "regenerator-runtime": "^0.14.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/standalone": { - "version": "7.26.6", - "resolved": "https://registry.npmjs.org/@babel/standalone/-/standalone-7.26.6.tgz", - "integrity": "sha512-h1mkoNFYCqDkS+vTLGzsQYvp1v1qbuugk4lOtb/oyjArZ+EtreAaxcSYg3rSIzWZRQOjx4iqGe7A8NRYIMSTTw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/template": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.9.tgz", - "integrity": "sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.25.9", - "@babel/parser": "^7.25.9", - "@babel/types": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/traverse": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.5.tgz", - "integrity": "sha512-rkOSPOw+AXbgtwUga3U4u8RpoK9FEFWBNAlTpcnkLFjL5CT+oyHNuUUC/xx6XefEJ16r38r8Bc/lfp6rYuHeJQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.26.2", - "@babel/generator": "^7.26.5", - "@babel/parser": "^7.26.5", - "@babel/template": "^7.25.9", - "@babel/types": "^7.26.5", - "debug": "^4.3.1", - "globals": "^11.1.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/types": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.5.tgz", - "integrity": "sha512-L6mZmwFDK6Cjh1nRCLXpa6no13ZIioJDz7mdkzHv399pThrTa/k0nUlNaenOeh2kWu/iaOQYElEpKPUswUa9Vg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-string-parser": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@esbuild/aix-ppc64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.19.12.tgz", - "integrity": "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/android-arm": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.19.12.tgz", - "integrity": "sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/android-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.19.12.tgz", - "integrity": "sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/android-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.19.12.tgz", - "integrity": "sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/darwin-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.12.tgz", - "integrity": "sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/darwin-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.19.12.tgz", - "integrity": "sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/freebsd-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.12.tgz", - "integrity": "sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/freebsd-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.19.12.tgz", - "integrity": "sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-arm": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.19.12.tgz", - "integrity": "sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.19.12.tgz", - "integrity": "sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-ia32": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.19.12.tgz", - "integrity": "sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-loong64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.19.12.tgz", - "integrity": "sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-mips64el": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.19.12.tgz", - "integrity": "sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w==", - "cpu": [ - "mips64el" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-ppc64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.19.12.tgz", - "integrity": "sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-riscv64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.19.12.tgz", - "integrity": "sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-s390x": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.19.12.tgz", - "integrity": "sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.19.12.tgz", - "integrity": "sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/netbsd-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.24.2.tgz", - "integrity": "sha512-wuLK/VztRRpMt9zyHSazyCVdCXlpHkKm34WUyinD2lzK07FAHTq0KQvZZlXikNWkDGoT6x3TD51jKQ7gMVpopw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.19.12.tgz", - "integrity": "sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/openbsd-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.2.tgz", - "integrity": "sha512-YQbi46SBct6iKnszhSvdluqDmxCJA+Pu280Av9WICNwQmMxV7nLRHZfjQzwbPs3jeWnuAhE9Jy0NrnJ12Oz+0A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.19.12.tgz", - "integrity": "sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/sunos-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.19.12.tgz", - "integrity": "sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/win32-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.19.12.tgz", - "integrity": "sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/win32-ia32": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.19.12.tgz", - "integrity": "sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/win32-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.19.12.tgz", - "integrity": "sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.8", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", - "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/set-array": "^1.2.1", - "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/set-array": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", - "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", - "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.25", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", - "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@parcel/watcher": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.0.tgz", - "integrity": "sha512-i0GV1yJnm2n3Yq1qw6QrUrd/LI9bE8WEBOTtOkpCXHHdyN3TAGgqAK/DAT05z4fq2x04cARXt2pDmjWjL92iTQ==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "dependencies": { - "detect-libc": "^1.0.3", - "is-glob": "^4.0.3", - "micromatch": "^4.0.5", - "node-addon-api": "^7.0.0" - }, - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - }, - "optionalDependencies": { - "@parcel/watcher-android-arm64": "2.5.0", - "@parcel/watcher-darwin-arm64": "2.5.0", - "@parcel/watcher-darwin-x64": "2.5.0", - "@parcel/watcher-freebsd-x64": "2.5.0", - "@parcel/watcher-linux-arm-glibc": "2.5.0", - "@parcel/watcher-linux-arm-musl": "2.5.0", - "@parcel/watcher-linux-arm64-glibc": "2.5.0", - "@parcel/watcher-linux-arm64-musl": "2.5.0", - "@parcel/watcher-linux-x64-glibc": "2.5.0", - "@parcel/watcher-linux-x64-musl": "2.5.0", - "@parcel/watcher-win32-arm64": "2.5.0", - "@parcel/watcher-win32-ia32": "2.5.0", - "@parcel/watcher-win32-x64": "2.5.0" - } - }, - "node_modules/@parcel/watcher-android-arm64": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@parcel/watcher-android-arm64/-/watcher-android-arm64-2.5.0.tgz", - "integrity": "sha512-qlX4eS28bUcQCdribHkg/herLe+0A9RyYC+mm2PXpncit8z5b3nSqGVzMNR3CmtAOgRutiZ02eIJJgP/b1iEFQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-darwin-arm64": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.0.tgz", - "integrity": "sha512-hyZ3TANnzGfLpRA2s/4U1kbw2ZI4qGxaRJbBH2DCSREFfubMswheh8TeiC1sGZ3z2jUf3s37P0BBlrD3sjVTUw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-darwin-x64": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-x64/-/watcher-darwin-x64-2.5.0.tgz", - "integrity": "sha512-9rhlwd78saKf18fT869/poydQK8YqlU26TMiNg7AIu7eBp9adqbJZqmdFOsbZ5cnLp5XvRo9wcFmNHgHdWaGYA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-freebsd-x64": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@parcel/watcher-freebsd-x64/-/watcher-freebsd-x64-2.5.0.tgz", - "integrity": "sha512-syvfhZzyM8kErg3VF0xpV8dixJ+RzbUaaGaeb7uDuz0D3FK97/mZ5AJQ3XNnDsXX7KkFNtyQyFrXZzQIcN49Tw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-linux-arm-glibc": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-glibc/-/watcher-linux-arm-glibc-2.5.0.tgz", - "integrity": "sha512-0VQY1K35DQET3dVYWpOaPFecqOT9dbuCfzjxoQyif1Wc574t3kOSkKevULddcR9znz1TcklCE7Ht6NIxjvTqLA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-linux-arm-musl": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-musl/-/watcher-linux-arm-musl-2.5.0.tgz", - "integrity": "sha512-6uHywSIzz8+vi2lAzFeltnYbdHsDm3iIB57d4g5oaB9vKwjb6N6dRIgZMujw4nm5r6v9/BQH0noq6DzHrqr2pA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-linux-arm64-glibc": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-glibc/-/watcher-linux-arm64-glibc-2.5.0.tgz", - "integrity": "sha512-BfNjXwZKxBy4WibDb/LDCriWSKLz+jJRL3cM/DllnHH5QUyoiUNEp3GmL80ZqxeumoADfCCP19+qiYiC8gUBjA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-linux-arm64-musl": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-musl/-/watcher-linux-arm64-musl-2.5.0.tgz", - "integrity": "sha512-S1qARKOphxfiBEkwLUbHjCY9BWPdWnW9j7f7Hb2jPplu8UZ3nes7zpPOW9bkLbHRvWM0WDTsjdOTUgW0xLBN1Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-linux-x64-glibc": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.5.0.tgz", - "integrity": "sha512-d9AOkusyXARkFD66S6zlGXyzx5RvY+chTP9Jp0ypSTC9d4lzyRs9ovGf/80VCxjKddcUvnsGwCHWuF2EoPgWjw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-linux-x64-musl": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.5.0.tgz", - "integrity": "sha512-iqOC+GoTDoFyk/VYSFHwjHhYrk8bljW6zOhPuhi5t9ulqiYq1togGJB5e3PwYVFFfeVgc6pbz3JdQyDoBszVaA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-win32-arm64": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-arm64/-/watcher-win32-arm64-2.5.0.tgz", - "integrity": "sha512-twtft1d+JRNkM5YbmexfcH/N4znDtjgysFaV9zvZmmJezQsKpkfLYJ+JFV3uygugK6AtIM2oADPkB2AdhBrNig==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-win32-ia32": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-ia32/-/watcher-win32-ia32-2.5.0.tgz", - "integrity": "sha512-+rgpsNRKwo8A53elqbbHXdOMtY/tAtTzManTWShB5Kk54N8Q9mzNWV7tV+IbGueCbcj826MfWGU3mprWtuf1TA==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-win32-x64": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-x64/-/watcher-win32-x64-2.5.0.tgz", - "integrity": "sha512-lPrxve92zEHdgeff3aiu4gDOIt4u7sJYha6wbdEZDCDUhtjTsOMiaJzG5lMY4GkWH8p0fMmO2Ppq5G5XXG+DQw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@rollup/plugin-alias": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/@rollup/plugin-alias/-/plugin-alias-5.1.1.tgz", - "integrity": "sha512-PR9zDb+rOzkRb2VD+EuKB7UC41vU5DIwZ5qqCpk0KJudcWAyi8rvYOhS7+L5aZCspw1stTViLgN5v6FF1p5cgQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/plugin-commonjs": { - "version": "25.0.8", - "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-25.0.8.tgz", - "integrity": "sha512-ZEZWTK5n6Qde0to4vS9Mr5x/0UZoqCxPVR9KRUjU4kA2sO7GEUn1fop0DAwpO6z0Nw/kJON9bDmSxdWxO/TT1A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/pluginutils": "^5.0.1", - "commondir": "^1.0.1", - "estree-walker": "^2.0.2", - "glob": "^8.0.3", - "is-reference": "1.2.1", - "magic-string": "^0.30.3" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^2.68.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/plugin-json": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/@rollup/plugin-json/-/plugin-json-6.1.0.tgz", - "integrity": "sha512-EGI2te5ENk1coGeADSIwZ7G2Q8CJS2sF120T7jLw4xFw9n7wIOXHo+kIYRAoVpJAN+kmqZSoO3Fp4JtoNF4ReA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/pluginutils": "^5.1.0" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/plugin-node-resolve": { - "version": "15.3.1", - "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.3.1.tgz", - "integrity": "sha512-tgg6b91pAybXHJQMAAwW9VuWBO6Thi+q7BCNARLwSqlmsHz0XYURtGvh/AuwSADXSI4h/2uHbs7s4FzlZDGSGA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/pluginutils": "^5.0.1", - "@types/resolve": "1.20.2", - "deepmerge": "^4.2.2", - "is-module": "^1.0.0", - "resolve": "^1.22.1" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^2.78.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/plugin-replace": { - "version": "5.0.7", - "resolved": "https://registry.npmjs.org/@rollup/plugin-replace/-/plugin-replace-5.0.7.tgz", - "integrity": "sha512-PqxSfuorkHz/SPpyngLyg5GCEkOcee9M1bkxiVDr41Pd61mqP1PLOoDPbpl44SB2mQGKwV/In74gqQmGITOhEQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/pluginutils": "^5.0.1", - "magic-string": "^0.30.3" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/pluginutils": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.1.4.tgz", - "integrity": "sha512-USm05zrsFxYLPdWWq+K3STlWiT/3ELn3RcV5hJMghpeAIhxfsUIg6mt12CBJBInWMV4VneoV7SfGv8xIwo2qNQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "estree-walker": "^2.0.2", - "picomatch": "^4.0.2" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/pluginutils/node_modules/picomatch": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", - "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/@sindresorhus/merge-streams": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", - "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@trysound/sax": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz", - "integrity": "sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/@types/estree": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", - "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/resolve": { - "version": "1.20.2", - "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz", - "integrity": "sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==", - "dev": true, - "license": "MIT" - }, - "node_modules/acorn": { - "version": "8.14.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", - "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", - "license": "MIT", - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/automd": { - "version": "0.3.12", - "resolved": "https://registry.npmjs.org/automd/-/automd-0.3.12.tgz", - "integrity": "sha512-qNHdFSAE7zMIO12FJpGBp98uLrIUxg3i8WzvsEGGq0rD5olkgSK9KE0SsYfwciW1LdP6q8lWX+3chaxjtgN9gA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@parcel/watcher": "^2.4.1", - "c12": "^2.0.1", - "citty": "^0.1.6", - "consola": "^3.2.3", - "defu": "^6.1.4", - "destr": "^2.0.3", - "didyoumean2": "^7.0.4", - "globby": "^14.0.2", - "magic-string": "^0.30.11", - "mdbox": "^0.1.0", - "mlly": "^1.7.2", - "ofetch": "^1.4.1", - "pathe": "^1.1.2", - "perfect-debounce": "^1.0.0", - "pkg-types": "^1.2.1", - "scule": "^1.3.0", - "untyped": "^1.5.1" - }, - "bin": { - "automd": "dist/cli.mjs" - } - }, - "node_modules/autoprefixer": { - "version": "10.4.20", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.20.tgz", - "integrity": "sha512-XY25y5xSv/wEoqzDyXXME4AFfkZI0P23z6Fs3YgymDnKJkCGOnkL0iTxCa85UTqaSgfcqyf3UA6+c7wUvx/16g==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/autoprefixer" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.3", - "caniuse-lite": "^1.0.30001646", - "fraction.js": "^4.3.7", - "normalize-range": "^0.1.2", - "picocolors": "^1.0.1", - "postcss-value-parser": "^4.2.0" - }, - "bin": { - "autoprefixer": "bin/autoprefixer" - }, - "engines": { - "node": "^10 || ^12 || >=14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "dev": true, - "license": "MIT" - }, - "node_modules/boolbase": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", - "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", - "dev": true, - "license": "ISC" - }, - "node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, - "license": "MIT", - "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/browserslist": { - "version": "4.24.4", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz", - "integrity": "sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "caniuse-lite": "^1.0.30001688", - "electron-to-chromium": "^1.5.73", - "node-releases": "^2.0.19", - "update-browserslist-db": "^1.1.1" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } - }, - "node_modules/c12": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/c12/-/c12-2.0.1.tgz", - "integrity": "sha512-Z4JgsKXHG37C6PYUtIxCfLJZvo6FyhHJoClwwb9ftUkLpPSkuYqn6Tr+vnaN8hymm0kIbcg6Ey3kv/Q71k5w/A==", - "dev": true, - "license": "MIT", - "dependencies": { - "chokidar": "^4.0.1", - "confbox": "^0.1.7", - "defu": "^6.1.4", - "dotenv": "^16.4.5", - "giget": "^1.2.3", - "jiti": "^2.3.0", - "mlly": "^1.7.1", - "ohash": "^1.1.4", - "pathe": "^1.1.2", - "perfect-debounce": "^1.0.0", - "pkg-types": "^1.2.0", - "rc9": "^2.1.2" - }, - "peerDependencies": { - "magicast": "^0.3.5" - }, - "peerDependenciesMeta": { - "magicast": { - "optional": true - } - } - }, - "node_modules/caniuse-api": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz", - "integrity": "sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==", - "dev": true, - "license": "MIT", - "dependencies": { - "browserslist": "^4.0.0", - "caniuse-lite": "^1.0.0", - "lodash.memoize": "^4.1.2", - "lodash.uniq": "^4.5.0" - } - }, - "node_modules/caniuse-lite": { - "version": "1.0.30001692", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001692.tgz", - "integrity": "sha512-A95VKan0kdtrsnMubMKxEKUKImOPSuCpYgxSQBo036P5YYgVIcOYJEgt/txJWqObiRQeISNCfef9nvlQ0vbV7A==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "CC-BY-4.0" - }, - "node_modules/chalk": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.4.1.tgz", - "integrity": "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/chokidar": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", - "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", - "dev": true, - "license": "MIT", - "dependencies": { - "readdirp": "^4.0.1" - }, - "engines": { - "node": ">= 14.16.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/citty": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/citty/-/citty-0.1.6.tgz", - "integrity": "sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ==", - "license": "MIT", - "dependencies": { - "consola": "^3.2.3" - } - }, - "node_modules/colord": { - "version": "2.9.3", - "resolved": "https://registry.npmjs.org/colord/-/colord-2.9.3.tgz", - "integrity": "sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==", - "dev": true, - "license": "MIT" - }, - "node_modules/commander": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", - "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 10" - } - }, - "node_modules/commondir": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", - "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==", - "dev": true, - "license": "MIT" - }, - "node_modules/confbox": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", - "integrity": "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==", - "license": "MIT" - }, - "node_modules/consola": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.0.tgz", - "integrity": "sha512-EiPU8G6dQG0GFHNR8ljnZFki/8a+cQwEQ+7wpxdChl02Q8HXlwEZWD5lqAF8vC2sEC3Tehr8hy7vErz88LHyUA==", - "license": "MIT", - "engines": { - "node": "^14.18.0 || >=16.10.0" - } - }, - "node_modules/convert-source-map": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", - "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", - "dev": true, - "license": "MIT" - }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/css-declaration-sorter": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-7.2.0.tgz", - "integrity": "sha512-h70rUM+3PNFuaBDTLe8wF/cdWu+dOZmb7pJt8Z2sedYbAcQVQV/tEchueg3GWxwqS0cxtbxmaHEdkNACqcvsow==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14 || ^16 || >=18" - }, - "peerDependencies": { - "postcss": "^8.0.9" - } - }, - "node_modules/css-select": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", - "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "boolbase": "^1.0.0", - "css-what": "^6.1.0", - "domhandler": "^5.0.2", - "domutils": "^3.0.1", - "nth-check": "^2.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/css-tree": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz", - "integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==", - "dev": true, - "license": "MIT", - "dependencies": { - "mdn-data": "2.0.30", - "source-map-js": "^1.0.1" - }, - "engines": { - "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0" - } - }, - "node_modules/css-what": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", - "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">= 6" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/cssesc": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", - "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", - "dev": true, - "license": "MIT", - "bin": { - "cssesc": "bin/cssesc" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/cssnano": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-7.0.6.tgz", - "integrity": "sha512-54woqx8SCbp8HwvNZYn68ZFAepuouZW4lTwiMVnBErM3VkO7/Sd4oTOt3Zz3bPx3kxQ36aISppyXj2Md4lg8bw==", - "dev": true, - "license": "MIT", - "dependencies": { - "cssnano-preset-default": "^7.0.6", - "lilconfig": "^3.1.2" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/cssnano" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/cssnano-preset-default": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-7.0.6.tgz", - "integrity": "sha512-ZzrgYupYxEvdGGuqL+JKOY70s7+saoNlHSCK/OGn1vB2pQK8KSET8jvenzItcY+kA7NoWvfbb/YhlzuzNKjOhQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.3", - "css-declaration-sorter": "^7.2.0", - "cssnano-utils": "^5.0.0", - "postcss-calc": "^10.0.2", - "postcss-colormin": "^7.0.2", - "postcss-convert-values": "^7.0.4", - "postcss-discard-comments": "^7.0.3", - "postcss-discard-duplicates": "^7.0.1", - "postcss-discard-empty": "^7.0.0", - "postcss-discard-overridden": "^7.0.0", - "postcss-merge-longhand": "^7.0.4", - "postcss-merge-rules": "^7.0.4", - "postcss-minify-font-values": "^7.0.0", - "postcss-minify-gradients": "^7.0.0", - "postcss-minify-params": "^7.0.2", - "postcss-minify-selectors": "^7.0.4", - "postcss-normalize-charset": "^7.0.0", - "postcss-normalize-display-values": "^7.0.0", - "postcss-normalize-positions": "^7.0.0", - "postcss-normalize-repeat-style": "^7.0.0", - "postcss-normalize-string": "^7.0.0", - "postcss-normalize-timing-functions": "^7.0.0", - "postcss-normalize-unicode": "^7.0.2", - "postcss-normalize-url": "^7.0.0", - "postcss-normalize-whitespace": "^7.0.0", - "postcss-ordered-values": "^7.0.1", - "postcss-reduce-initial": "^7.0.2", - "postcss-reduce-transforms": "^7.0.0", - "postcss-svgo": "^7.0.1", - "postcss-unique-selectors": "^7.0.3" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/cssnano-utils": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-5.0.0.tgz", - "integrity": "sha512-Uij0Xdxc24L6SirFr25MlwC2rCFX6scyUmuKpzI+JQ7cyqDEwD42fJ0xfB3yLfOnRDU5LKGgjQ9FA6LYh76GWQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/csso": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/csso/-/csso-5.0.5.tgz", - "integrity": "sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "css-tree": "~2.2.0" - }, - "engines": { - "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0", - "npm": ">=7.0.0" - } - }, - "node_modules/csso/node_modules/css-tree": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.2.1.tgz", - "integrity": "sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA==", - "dev": true, - "license": "MIT", - "dependencies": { - "mdn-data": "2.0.28", - "source-map-js": "^1.0.1" - }, - "engines": { - "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0", - "npm": ">=7.0.0" - } - }, - "node_modules/csso/node_modules/mdn-data": { - "version": "2.0.28", - "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.28.tgz", - "integrity": "sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==", - "dev": true, - "license": "CC0-1.0" - }, - "node_modules/debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/deepmerge": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", - "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/defu": { - "version": "6.1.4", - "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", - "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==", - "license": "MIT" - }, - "node_modules/destr": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/destr/-/destr-2.0.3.tgz", - "integrity": "sha512-2N3BOUU4gYMpTP24s5rF5iP7BDr7uNTCs4ozw3kf/eKfvWSIu93GEBi5m427YoyJoeOzQ5smuu4nNAPGb8idSQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/detect-libc": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", - "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", - "dev": true, - "license": "Apache-2.0", - "bin": { - "detect-libc": "bin/detect-libc.js" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/didyoumean2": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/didyoumean2/-/didyoumean2-7.0.4.tgz", - "integrity": "sha512-+yW4SNY7W2DOWe2Jx5H4c2qMTFbLGM6wIyoDPkAPy66X+sD1KfYjBPAIWPVsYqMxelflaMQCloZDudELIPhLqA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.24.8", - "fastest-levenshtein": "^1.0.16", - "lodash.deburr": "^4.1.0" - }, - "engines": { - "node": "^18.12.0 || >=20.9.0" - } - }, - "node_modules/dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/dir-glob/node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/dom-serializer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", - "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", - "dev": true, - "license": "MIT", - "dependencies": { - "domelementtype": "^2.3.0", - "domhandler": "^5.0.2", - "entities": "^4.2.0" - }, - "funding": { - "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" - } - }, - "node_modules/domelementtype": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", - "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/fb55" - } - ], - "license": "BSD-2-Clause" - }, - "node_modules/domhandler": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", - "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "domelementtype": "^2.3.0" - }, - "engines": { - "node": ">= 4" - }, - "funding": { - "url": "https://github.com/fb55/domhandler?sponsor=1" - } - }, - "node_modules/domutils": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz", - "integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "dom-serializer": "^2.0.0", - "domelementtype": "^2.3.0", - "domhandler": "^5.0.3" - }, - "funding": { - "url": "https://github.com/fb55/domutils?sponsor=1" - } - }, - "node_modules/dotenv": { - "version": "16.4.7", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz", - "integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://dotenvx.com" - } - }, - "node_modules/electron-to-chromium": { - "version": "1.5.80", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.80.tgz", - "integrity": "sha512-LTrKpW0AqIuHwmlVNV+cjFYTnXtM9K37OGhpe0ZI10ScPSxqVSryZHIY3WnCS5NSYbBODRTZyhRMS2h5FAEqAw==", - "dev": true, - "license": "ISC" - }, - "node_modules/entities": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", - "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/esbuild": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.19.12.tgz", - "integrity": "sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=12" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.19.12", - "@esbuild/android-arm": "0.19.12", - "@esbuild/android-arm64": "0.19.12", - "@esbuild/android-x64": "0.19.12", - "@esbuild/darwin-arm64": "0.19.12", - "@esbuild/darwin-x64": "0.19.12", - "@esbuild/freebsd-arm64": "0.19.12", - "@esbuild/freebsd-x64": "0.19.12", - "@esbuild/linux-arm": "0.19.12", - "@esbuild/linux-arm64": "0.19.12", - "@esbuild/linux-ia32": "0.19.12", - "@esbuild/linux-loong64": "0.19.12", - "@esbuild/linux-mips64el": "0.19.12", - "@esbuild/linux-ppc64": "0.19.12", - "@esbuild/linux-riscv64": "0.19.12", - "@esbuild/linux-s390x": "0.19.12", - "@esbuild/linux-x64": "0.19.12", - "@esbuild/netbsd-x64": "0.19.12", - "@esbuild/openbsd-x64": "0.19.12", - "@esbuild/sunos-x64": "0.19.12", - "@esbuild/win32-arm64": "0.19.12", - "@esbuild/win32-ia32": "0.19.12", - "@esbuild/win32-x64": "0.19.12" - } - }, - "node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/estree-walker": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", - "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", - "dev": true, - "license": "MIT" - }, - "node_modules/execa": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", - "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^8.0.1", - "human-signals": "^5.0.0", - "is-stream": "^3.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^5.1.0", - "onetime": "^6.0.0", - "signal-exit": "^4.1.0", - "strip-final-newline": "^3.0.0" - }, - "engines": { - "node": ">=16.17" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/fast-glob": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", - "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.8" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fastest-levenshtein": { - "version": "1.0.16", - "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz", - "integrity": "sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4.9.1" - } - }, - "node_modules/fastq": { - "version": "1.18.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.18.0.tgz", - "integrity": "sha512-QKHXPW0hD8g4UET03SdOdunzSouc9N4AuHdsX8XNcTsuz+yYFILVNIX4l9yHABMhiEI9Db0JTTIpu0wB+Y1QQw==", - "dev": true, - "license": "ISC", - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, - "license": "MIT", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/fraction.js": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", - "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", - "dev": true, - "license": "MIT", - "engines": { - "node": "*" - }, - "funding": { - "type": "patreon", - "url": "https://github.com/sponsors/rawify" - } - }, - "node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/fs-minipass/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "dev": true, - "license": "ISC" - }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/get-stream": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", - "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", - "license": "MIT", - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/giget": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/giget/-/giget-1.2.3.tgz", - "integrity": "sha512-8EHPljDvs7qKykr6uw8b+lqLiUc/vUg+KVTI0uND4s63TdsZM2Xus3mflvF0DDG9SiM4RlCkFGL+7aAjRmV7KA==", - "license": "MIT", - "dependencies": { - "citty": "^0.1.6", - "consola": "^3.2.3", - "defu": "^6.1.4", - "node-fetch-native": "^1.6.3", - "nypm": "^0.3.8", - "ohash": "^1.1.3", - "pathe": "^1.1.2", - "tar": "^6.2.0" - }, - "bin": { - "giget": "dist/cli.mjs" - } - }, - "node_modules/glob": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", - "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^5.0.1", - "once": "^1.3.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/globby": { - "version": "14.0.2", - "resolved": "https://registry.npmjs.org/globby/-/globby-14.0.2.tgz", - "integrity": "sha512-s3Fq41ZVh7vbbe2PN3nrW7yC7U7MFVc5c98/iTl9c2GawNMKx/J648KQRW6WKkuU8GIbbh2IXfIRQjOZnXcTnw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@sindresorhus/merge-streams": "^2.1.0", - "fast-glob": "^3.3.2", - "ignore": "^5.2.4", - "path-type": "^5.0.0", - "slash": "^5.1.0", - "unicorn-magic": "^0.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/hookable": { - "version": "5.5.3", - "resolved": "https://registry.npmjs.org/hookable/-/hookable-5.5.3.tgz", - "integrity": "sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/human-signals": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", - "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", - "license": "Apache-2.0", - "engines": { - "node": ">=16.17.0" - } - }, - "node_modules/ignore": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "dev": true, - "license": "ISC", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/is-core-module": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", - "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", - "dev": true, - "license": "MIT", - "dependencies": { - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-module": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz", - "integrity": "sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==", - "dev": true, - "license": "MIT" - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-reference": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-1.2.1.tgz", - "integrity": "sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/estree": "*" - } - }, - "node_modules/is-stream": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", - "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "license": "ISC" - }, - "node_modules/jiti": { - "version": "2.4.0", - "dev": true, - "license": "MIT", - "bin": { - "jiti": "lib/jiti-cli.mjs" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/jsesc": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", - "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", - "dev": true, - "license": "MIT", - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "dev": true, - "license": "MIT", - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/knitwork": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/knitwork/-/knitwork-1.2.0.tgz", - "integrity": "sha512-xYSH7AvuQ6nXkq42x0v5S8/Iry+cfulBz/DJQzhIyESdLD7425jXsPy4vn5cCXU+HhRN2kVw51Vd1K6/By4BQg==", - "dev": true, - "license": "MIT" - }, - "node_modules/lilconfig": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", - "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/antonk52" - } - }, - "node_modules/lodash.deburr": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/lodash.deburr/-/lodash.deburr-4.1.0.tgz", - "integrity": "sha512-m/M1U1f3ddMCs6Hq2tAsYThTBDaAKFDX3dwDo97GEYzamXi9SqUpjWi/Rrj/gf3X2n8ktwgZrlP1z6E3v/IExQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/lodash.memoize": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", - "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", - "dev": true, - "license": "MIT" - }, - "node_modules/lodash.uniq": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", - "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^3.0.2" - } - }, - "node_modules/lru-cache/node_modules/yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "dev": true, - "license": "ISC" - }, - "node_modules/magic-string": { - "version": "0.30.17", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", - "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0" - } - }, - "node_modules/md4w": { - "version": "0.2.6", - "resolved": "https://registry.npmjs.org/md4w/-/md4w-0.2.6.tgz", - "integrity": "sha512-CBLQ2PxVe9WA+/nndZCx/Y+1C3DtmtSeubmXTPhMIgsXtq9gVGleikREko5FYnV6Dz4cHDWm0Ea+YMLpIjP4Kw==", - "dev": true, - "license": "MIT" - }, - "node_modules/mdbox": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/mdbox/-/mdbox-0.1.1.tgz", - "integrity": "sha512-jvLISenzbLRPWWamTG3THlhTcMbKWzJQNyTi61AVXhCBOC+gsldNTUfUNH8d3Vay83zGehFw3wZpF3xChzkTIQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "md4w": "^0.2.6" - } - }, - "node_modules/mdn-data": { - "version": "2.0.30", - "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz", - "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==", - "dev": true, - "license": "CC0-1.0" - }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "license": "MIT" - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, - "license": "MIT", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/mimic-fn": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", - "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "license": "ISC", - "engines": { - "node": ">=8" - } - }, - "node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "license": "MIT", - "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minizlib/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "license": "MIT", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/mkdist": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mkdist/-/mkdist-1.6.0.tgz", - "integrity": "sha512-nD7J/mx33Lwm4Q4qoPgRBVA9JQNKgyE7fLo5vdPWVDdjz96pXglGERp/fRnGPCTB37Kykfxs5bDdXa9BWOT9nw==", - "dev": true, - "license": "MIT", - "dependencies": { - "autoprefixer": "^10.4.20", - "citty": "^0.1.6", - "cssnano": "^7.0.6", - "defu": "^6.1.4", - "esbuild": "^0.24.0", - "jiti": "^1.21.6", - "mlly": "^1.7.1", - "pathe": "^1.1.2", - "pkg-types": "^1.2.0", - "postcss": "^8.4.45", - "postcss-nested": "^6.2.0", - "semver": "^7.6.3", - "tinyglobby": "^0.2.9" - }, - "bin": { - "mkdist": "dist/cli.cjs" - }, - "peerDependencies": { - "sass": "^1.78.0", - "typescript": ">=5.5.4", - "vue-tsc": "^1.8.27 || ^2.0.21" - }, - "peerDependenciesMeta": { - "sass": { - "optional": true - }, - "typescript": { - "optional": true - }, - "vue-tsc": { - "optional": true - } - } - }, - "node_modules/mkdist/node_modules/@esbuild/aix-ppc64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.24.2.tgz", - "integrity": "sha512-thpVCb/rhxE/BnMLQ7GReQLLN8q9qbHmI55F4489/ByVg2aQaQ6kbcLb6FHkocZzQhxc4gx0sCk0tJkKBFzDhA==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/android-arm": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.24.2.tgz", - "integrity": "sha512-tmwl4hJkCfNHwFB3nBa8z1Uy3ypZpxqxfTQOcHX+xRByyYgunVbZ9MzUUfb0RxaHIMnbHagwAxuTL+tnNM+1/Q==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/android-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.24.2.tgz", - "integrity": "sha512-cNLgeqCqV8WxfcTIOeL4OAtSmL8JjcN6m09XIgro1Wi7cF4t/THaWEa7eL5CMoMBdjoHOTh/vwTO/o2TRXIyzg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/android-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.24.2.tgz", - "integrity": "sha512-B6Q0YQDqMx9D7rvIcsXfmJfvUYLoP722bgfBlO5cGvNVb5V/+Y7nhBE3mHV9OpxBf4eAS2S68KZztiPaWq4XYw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/darwin-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.24.2.tgz", - "integrity": "sha512-kj3AnYWc+CekmZnS5IPu9D+HWtUI49hbnyqk0FLEJDbzCIQt7hg7ucF1SQAilhtYpIujfaHr6O0UHlzzSPdOeA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/darwin-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.24.2.tgz", - "integrity": "sha512-WeSrmwwHaPkNR5H3yYfowhZcbriGqooyu3zI/3GGpF8AyUdsrrP0X6KumITGA9WOyiJavnGZUwPGvxvwfWPHIA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/freebsd-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.2.tgz", - "integrity": "sha512-UN8HXjtJ0k/Mj6a9+5u6+2eZ2ERD7Edt1Q9IZiB5UZAIdPnVKDoG7mdTVGhHJIeEml60JteamR3qhsr1r8gXvg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/freebsd-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.24.2.tgz", - "integrity": "sha512-TvW7wE/89PYW+IevEJXZ5sF6gJRDY/14hyIGFXdIucxCsbRmLUcjseQu1SyTko+2idmCw94TgyaEZi9HUSOe3Q==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/linux-arm": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.24.2.tgz", - "integrity": "sha512-n0WRM/gWIdU29J57hJyUdIsk0WarGd6To0s+Y+LwvlC55wt+GT/OgkwoXCXvIue1i1sSNWblHEig00GBWiJgfA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/linux-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.24.2.tgz", - "integrity": "sha512-7HnAD6074BW43YvvUmE/35Id9/NB7BeX5EoNkK9obndmZBUk8xmJJeU7DwmUeN7tkysslb2eSl6CTrYz6oEMQg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/linux-ia32": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.24.2.tgz", - "integrity": "sha512-sfv0tGPQhcZOgTKO3oBE9xpHuUqguHvSo4jl+wjnKwFpapx+vUDcawbwPNuBIAYdRAvIDBfZVvXprIj3HA+Ugw==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/linux-loong64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.24.2.tgz", - "integrity": "sha512-CN9AZr8kEndGooS35ntToZLTQLHEjtVB5n7dl8ZcTZMonJ7CCfStrYhrzF97eAecqVbVJ7APOEe18RPI4KLhwQ==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/linux-mips64el": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.24.2.tgz", - "integrity": "sha512-iMkk7qr/wl3exJATwkISxI7kTcmHKE+BlymIAbHO8xanq/TjHaaVThFF6ipWzPHryoFsesNQJPE/3wFJw4+huw==", - "cpu": [ - "mips64el" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/linux-ppc64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.24.2.tgz", - "integrity": "sha512-shsVrgCZ57Vr2L8mm39kO5PPIb+843FStGt7sGGoqiiWYconSxwTiuswC1VJZLCjNiMLAMh34jg4VSEQb+iEbw==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/linux-riscv64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.24.2.tgz", - "integrity": "sha512-4eSFWnU9Hhd68fW16GD0TINewo1L6dRrB+oLNNbYyMUAeOD2yCK5KXGK1GH4qD/kT+bTEXjsyTCiJGHPZ3eM9Q==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/linux-s390x": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.24.2.tgz", - "integrity": "sha512-S0Bh0A53b0YHL2XEXC20bHLuGMOhFDO6GN4b3YjRLK//Ep3ql3erpNcPlEFed93hsQAjAQDNsvcK+hV90FubSw==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/linux-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.24.2.tgz", - "integrity": "sha512-8Qi4nQcCTbLnK9WoMjdC9NiTG6/E38RNICU6sUNqK0QFxCYgoARqVqxdFmWkdonVsvGqWhmm7MO0jyTqLqwj0Q==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/netbsd-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.24.2.tgz", - "integrity": "sha512-VefFaQUc4FMmJuAxmIHgUmfNiLXY438XrL4GDNV1Y1H/RW3qow68xTwjZKfj/+Plp9NANmzbH5R40Meudu8mmw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/openbsd-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.24.2.tgz", - "integrity": "sha512-+iDS6zpNM6EnJyWv0bMGLWSWeXGN/HTaF/LXHXHwejGsVi+ooqDfMCCTerNFxEkM3wYVcExkeGXNqshc9iMaOA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/sunos-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.24.2.tgz", - "integrity": "sha512-hTdsW27jcktEvpwNHJU4ZwWFGkz2zRJUz8pvddmXPtXDzVKTTINmlmga3ZzwcuMpUvLw7JkLy9QLKyGpD2Yxig==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/win32-arm64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.24.2.tgz", - "integrity": "sha512-LihEQ2BBKVFLOC9ZItT9iFprsE9tqjDjnbulhHoFxYQtQfai7qfluVODIYxt1PgdoyQkz23+01rzwNwYfutxUQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/win32-ia32": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.24.2.tgz", - "integrity": "sha512-q+iGUwfs8tncmFC9pcnD5IvRHAzmbwQ3GPS5/ceCyHdjXubwQWI12MKWSNSMYLJMq23/IUCvJMS76PDqXe1fxA==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/@esbuild/win32-x64": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.24.2.tgz", - "integrity": "sha512-7VTgWzgMGvup6aSqDPLiW5zHaxYJGTO4OokMjIlrCtf+VpEL+cXKtCvg723iguPYI5oaUNdS+/V7OU2gvXVWEg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/mkdist/node_modules/esbuild": { - "version": "0.24.2", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.24.2.tgz", - "integrity": "sha512-+9egpBW8I3CD5XPe0n6BfT5fxLzxrlDzqydF3aviG+9ni1lDC/OvMHcxqEFV0+LANZG5R1bFMWfUrjVsdwxJvA==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.24.2", - "@esbuild/android-arm": "0.24.2", - "@esbuild/android-arm64": "0.24.2", - "@esbuild/android-x64": "0.24.2", - "@esbuild/darwin-arm64": "0.24.2", - "@esbuild/darwin-x64": "0.24.2", - "@esbuild/freebsd-arm64": "0.24.2", - "@esbuild/freebsd-x64": "0.24.2", - "@esbuild/linux-arm": "0.24.2", - "@esbuild/linux-arm64": "0.24.2", - "@esbuild/linux-ia32": "0.24.2", - "@esbuild/linux-loong64": "0.24.2", - "@esbuild/linux-mips64el": "0.24.2", - "@esbuild/linux-ppc64": "0.24.2", - "@esbuild/linux-riscv64": "0.24.2", - "@esbuild/linux-s390x": "0.24.2", - "@esbuild/linux-x64": "0.24.2", - "@esbuild/netbsd-arm64": "0.24.2", - "@esbuild/netbsd-x64": "0.24.2", - "@esbuild/openbsd-arm64": "0.24.2", - "@esbuild/openbsd-x64": "0.24.2", - "@esbuild/sunos-x64": "0.24.2", - "@esbuild/win32-arm64": "0.24.2", - "@esbuild/win32-ia32": "0.24.2", - "@esbuild/win32-x64": "0.24.2" - } - }, - "node_modules/mkdist/node_modules/jiti": { - "version": "1.21.7", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", - "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", - "dev": true, - "license": "MIT", - "bin": { - "jiti": "bin/jiti.js" - } - }, - "node_modules/mlly": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.7.4.tgz", - "integrity": "sha512-qmdSIPC4bDJXgZTCR7XosJiNKySV7O215tsPtDN9iEO/7q/76b/ijtgRu/+epFXSJhijtTCCGp3DWS549P3xKw==", - "license": "MIT", - "dependencies": { - "acorn": "^8.14.0", - "pathe": "^2.0.1", - "pkg-types": "^1.3.0", - "ufo": "^1.5.4" - } - }, - "node_modules/mlly/node_modules/pathe": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.1.tgz", - "integrity": "sha512-6jpjMpOth5S9ITVu5clZ7NOgHNsv5vRQdheL9ztp2vZmM6fRbLvyua1tiBIL4lk8SAe3ARzeXEly6siXCjDHDw==", - "license": "MIT" - }, - "node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true, - "license": "MIT" - }, - "node_modules/nanoid": { - "version": "3.3.8", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", - "integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/node-addon-api": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz", - "integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/node-fetch-native": { - "version": "1.6.4", - "resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.4.tgz", - "integrity": "sha512-IhOigYzAKHd244OC0JIMIUrjzctirCmPkaIfhDeGcEETWof5zKYUW7e7MYvChGWh/4CJeXEgsRyGzuF334rOOQ==", - "license": "MIT" - }, - "node_modules/node-releases": { - "version": "2.0.19", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", - "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", - "dev": true, - "license": "MIT" - }, - "node_modules/normalize-range": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", - "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/npm-run-path": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", - "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", - "license": "MIT", - "dependencies": { - "path-key": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/npm-run-path/node_modules/path-key": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", - "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/nth-check": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", - "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "boolbase": "^1.0.0" - }, - "funding": { - "url": "https://github.com/fb55/nth-check?sponsor=1" - } - }, - "node_modules/nypm": { - "version": "0.3.12", - "resolved": "https://registry.npmjs.org/nypm/-/nypm-0.3.12.tgz", - "integrity": "sha512-D3pzNDWIvgA+7IORhD/IuWzEk4uXv6GsgOxiid4UU3h9oq5IqV1KtPDi63n4sZJ/xcWlr88c0QM2RgN5VbOhFA==", - "license": "MIT", - "dependencies": { - "citty": "^0.1.6", - "consola": "^3.2.3", - "execa": "^8.0.1", - "pathe": "^1.1.2", - "pkg-types": "^1.2.0", - "ufo": "^1.5.4" - }, - "bin": { - "nypm": "dist/cli.mjs" - }, - "engines": { - "node": "^14.16.0 || >=16.10.0" - } - }, - "node_modules/ofetch": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/ofetch/-/ofetch-1.4.1.tgz", - "integrity": "sha512-QZj2DfGplQAr2oj9KzceK9Hwz6Whxazmn85yYeVuS3u9XTMOGMRx0kO95MQ+vLsj/S/NwBDMMLU5hpxvI6Tklw==", - "dev": true, - "license": "MIT", - "dependencies": { - "destr": "^2.0.3", - "node-fetch-native": "^1.6.4", - "ufo": "^1.5.4" - } - }, - "node_modules/ohash": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/ohash/-/ohash-1.1.4.tgz", - "integrity": "sha512-FlDryZAahJmEF3VR3w1KogSEdWX3WhA5GPakFx4J81kEAiHyLMpdLLElS8n8dfNadMgAne/MywcvmogzscVt4g==", - "license": "MIT" - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, - "license": "ISC", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/onetime": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", - "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", - "license": "MIT", - "dependencies": { - "mimic-fn": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true, - "license": "MIT" - }, - "node_modules/path-type": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-5.0.0.tgz", - "integrity": "sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/pathe": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", - "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", - "license": "MIT" - }, - "node_modules/perfect-debounce": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-1.0.0.tgz", - "integrity": "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==", - "dev": true, - "license": "MIT" - }, - "node_modules/picocolors": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "dev": true, - "license": "ISC" - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/pkg-types": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.0.tgz", - "integrity": "sha512-kS7yWjVFCkIw9hqdJBoMxDdzEngmkr5FXeWZZfQ6GoYacjVnsW6l2CcYW/0ThD0vF4LPJgVYnrg4d0uuhwYQbg==", - "license": "MIT", - "dependencies": { - "confbox": "^0.1.8", - "mlly": "^1.7.3", - "pathe": "^1.1.2" - } - }, - "node_modules/postcss": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.0.tgz", - "integrity": "sha512-27VKOqrYfPncKA2NrFOVhP5MGAfHKLYn/Q0mz9cNQyRAKYi3VNHwYU2qKKqPCqgBmeeJ0uAFB56NumXZ5ZReXg==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "nanoid": "^3.3.8", - "picocolors": "^1.1.1", - "source-map-js": "^1.2.1" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/postcss-calc": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-10.1.0.tgz", - "integrity": "sha512-uQ/LDGsf3mgsSUEXmAt3VsCSHR3aKqtEIkmB+4PhzYwRYOW5MZs/GhCCFpsOtJJkP6EC6uGipbrnaTjqaJZcJw==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-selector-parser": "^7.0.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12 || ^20.9 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.38" - } - }, - "node_modules/postcss-colormin": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-7.0.2.tgz", - "integrity": "sha512-YntRXNngcvEvDbEjTdRWGU606eZvB5prmHG4BF0yLmVpamXbpsRJzevyy6MZVyuecgzI2AWAlvFi8DAeCqwpvA==", - "dev": true, - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.3", - "caniuse-api": "^3.0.0", - "colord": "^2.9.3", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-convert-values": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-7.0.4.tgz", - "integrity": "sha512-e2LSXPqEHVW6aoGbjV9RsSSNDO3A0rZLCBxN24zvxF25WknMPpX8Dm9UxxThyEbaytzggRuZxaGXqaOhxQ514Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.3", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-discard-comments": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-7.0.3.tgz", - "integrity": "sha512-q6fjd4WU4afNhWOA2WltHgCbkRhZPgQe7cXF74fuVB/ge4QbM9HEaOIzGSiMvM+g/cOsNAUGdf2JDzqA2F8iLA==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-selector-parser": "^6.1.2" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-discard-comments/node_modules/postcss-selector-parser": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", - "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-discard-duplicates": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-7.0.1.tgz", - "integrity": "sha512-oZA+v8Jkpu1ct/xbbrntHRsfLGuzoP+cpt0nJe5ED2FQF8n8bJtn7Bo28jSmBYwqgqnqkuSXJfSUEE7if4nClQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-discard-empty": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-7.0.0.tgz", - "integrity": "sha512-e+QzoReTZ8IAwhnSdp/++7gBZ/F+nBq9y6PomfwORfP7q9nBpK5AMP64kOt0bA+lShBFbBDcgpJ3X4etHg4lzA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-discard-overridden": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-7.0.0.tgz", - "integrity": "sha512-GmNAzx88u3k2+sBTZrJSDauR0ccpE24omTQCVmaTTZFz1du6AasspjaUPMJ2ud4RslZpoFKyf+6MSPETLojc6w==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-merge-longhand": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-7.0.4.tgz", - "integrity": "sha512-zer1KoZA54Q8RVHKOY5vMke0cCdNxMP3KBfDerjH/BYHh4nCIh+1Yy0t1pAEQF18ac/4z3OFclO+ZVH8azjR4A==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0", - "stylehacks": "^7.0.4" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-merge-rules": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-7.0.4.tgz", - "integrity": "sha512-ZsaamiMVu7uBYsIdGtKJ64PkcQt6Pcpep/uO90EpLS3dxJi6OXamIobTYcImyXGoW0Wpugh7DSD3XzxZS9JCPg==", - "dev": true, - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.3", - "caniuse-api": "^3.0.0", - "cssnano-utils": "^5.0.0", - "postcss-selector-parser": "^6.1.2" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-merge-rules/node_modules/postcss-selector-parser": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", - "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-minify-font-values": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-7.0.0.tgz", - "integrity": "sha512-2ckkZtgT0zG8SMc5aoNwtm5234eUx1GGFJKf2b1bSp8UflqaeFzR50lid4PfqVI9NtGqJ2J4Y7fwvnP/u1cQog==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-minify-gradients": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-7.0.0.tgz", - "integrity": "sha512-pdUIIdj/C93ryCHew0UgBnL2DtUS3hfFa5XtERrs4x+hmpMYGhbzo6l/Ir5de41O0GaKVpK1ZbDNXSY6GkXvtg==", - "dev": true, - "license": "MIT", - "dependencies": { - "colord": "^2.9.3", - "cssnano-utils": "^5.0.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-minify-params": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-7.0.2.tgz", - "integrity": "sha512-nyqVLu4MFl9df32zTsdcLqCFfE/z2+f8GE1KHPxWOAmegSo6lpV2GNy5XQvrzwbLmiU7d+fYay4cwto1oNdAaQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.3", - "cssnano-utils": "^5.0.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-minify-selectors": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-7.0.4.tgz", - "integrity": "sha512-JG55VADcNb4xFCf75hXkzc1rNeURhlo7ugf6JjiiKRfMsKlDzN9CXHZDyiG6x/zGchpjQS+UAgb1d4nqXqOpmA==", - "dev": true, - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "postcss-selector-parser": "^6.1.2" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-minify-selectors/node_modules/postcss-selector-parser": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", - "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-nested": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz", - "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "postcss-selector-parser": "^6.1.1" - }, - "engines": { - "node": ">=12.0" - }, - "peerDependencies": { - "postcss": "^8.2.14" - } - }, - "node_modules/postcss-nested/node_modules/postcss-selector-parser": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", - "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-normalize-charset": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-7.0.0.tgz", - "integrity": "sha512-ABisNUXMeZeDNzCQxPxBCkXexvBrUHV+p7/BXOY+ulxkcjUZO0cp8ekGBwvIh2LbCwnWbyMPNJVtBSdyhM2zYQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-display-values": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-7.0.0.tgz", - "integrity": "sha512-lnFZzNPeDf5uGMPYgGOw7v0BfB45+irSRz9gHQStdkkhiM0gTfvWkWB5BMxpn0OqgOQuZG/mRlZyJxp0EImr2Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-positions": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-7.0.0.tgz", - "integrity": "sha512-I0yt8wX529UKIGs2y/9Ybs2CelSvItfmvg/DBIjTnoUSrPxSV7Z0yZ8ShSVtKNaV/wAY+m7bgtyVQLhB00A1NQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-repeat-style": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-7.0.0.tgz", - "integrity": "sha512-o3uSGYH+2q30ieM3ppu9GTjSXIzOrRdCUn8UOMGNw7Af61bmurHTWI87hRybrP6xDHvOe5WlAj3XzN6vEO8jLw==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-string": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-7.0.0.tgz", - "integrity": "sha512-w/qzL212DFVOpMy3UGyxrND+Kb0fvCiBBujiaONIihq7VvtC7bswjWgKQU/w4VcRyDD8gpfqUiBQ4DUOwEJ6Qg==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-timing-functions": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-7.0.0.tgz", - "integrity": "sha512-tNgw3YV0LYoRwg43N3lTe3AEWZ66W7Dh7lVEpJbHoKOuHc1sLrzMLMFjP8SNULHaykzsonUEDbKedv8C+7ej6g==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-unicode": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-7.0.2.tgz", - "integrity": "sha512-ztisabK5C/+ZWBdYC+Y9JCkp3M9qBv/XFvDtSw0d/XwfT3UaKeW/YTm/MD/QrPNxuecia46vkfEhewjwcYFjkg==", - "dev": true, - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.3", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-url": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-7.0.0.tgz", - "integrity": "sha512-+d7+PpE+jyPX1hDQZYG+NaFD+Nd2ris6r8fPTBAjE8z/U41n/bib3vze8x7rKs5H1uEw5ppe9IojewouHk0klQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-whitespace": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-7.0.0.tgz", - "integrity": "sha512-37/toN4wwZErqohedXYqWgvcHUGlT8O/m2jVkAfAe9Bd4MzRqlBmXrJRePH0e9Wgnz2X7KymTgTOaaFizQe3AQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-ordered-values": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-7.0.1.tgz", - "integrity": "sha512-irWScWRL6nRzYmBOXReIKch75RRhNS86UPUAxXdmW/l0FcAsg0lvAXQCby/1lymxn/o0gVa6Rv/0f03eJOwHxw==", - "dev": true, - "license": "MIT", - "dependencies": { - "cssnano-utils": "^5.0.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-reduce-initial": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-7.0.2.tgz", - "integrity": "sha512-pOnu9zqQww7dEKf62Nuju6JgsW2V0KRNBHxeKohU+JkHd/GAH5uvoObqFLqkeB2n20mr6yrlWDvo5UBU5GnkfA==", - "dev": true, - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.3", - "caniuse-api": "^3.0.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-reduce-transforms": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-7.0.0.tgz", - "integrity": "sha512-pnt1HKKZ07/idH8cpATX/ujMbtOGhUfE+m8gbqwJE05aTaNw8gbo34a2e3if0xc0dlu75sUOiqvwCGY3fzOHew==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-selector-parser": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.0.0.tgz", - "integrity": "sha512-9RbEr1Y7FFfptd/1eEdntyjMwLeghW1bHX9GWjXo19vx4ytPQhANltvVxDggzJl7mnWM+dX28kb6cyS/4iQjlQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-svgo": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-7.0.1.tgz", - "integrity": "sha512-0WBUlSL4lhD9rA5k1e5D8EN5wCEyZD6HJk0jIvRxl+FDVOMlJ7DePHYWGGVc5QRqrJ3/06FTXM0bxjmJpmTPSA==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0", - "svgo": "^3.3.2" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >= 18" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-unique-selectors": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-7.0.3.tgz", - "integrity": "sha512-J+58u5Ic5T1QjP/LDV9g3Cx4CNOgB5vz+kM6+OxHHhFACdcDeKhBXjQmB7fnIZM12YSTvsL0Opwco83DmacW2g==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-selector-parser": "^6.1.2" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-unique-selectors/node_modules/postcss-selector-parser": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", - "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-value-parser": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", - "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/pretty-bytes": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-6.1.1.tgz", - "integrity": "sha512-mQUvGU6aUFQ+rNvTIAcZuWGRT9a6f6Yrg9bHs4ImKF+HZCEK+plBvnAZYSIQztknZF2qnzNtr6F8s0+IuptdlQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/rc9": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/rc9/-/rc9-2.1.2.tgz", - "integrity": "sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg==", - "dev": true, - "license": "MIT", - "dependencies": { - "defu": "^6.1.4", - "destr": "^2.0.3" - } - }, - "node_modules/readdirp": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.1.tgz", - "integrity": "sha512-h80JrZu/MHUZCyHu5ciuoI0+WxsCxzxJTILn6Fs8rxSnFPh+UVHYfeIxK1nVGugMqkfC4vJcBOYbkfkwYK0+gw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14.18.0" - }, - "funding": { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/regenerator-runtime": { - "version": "0.14.1", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", - "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", - "dev": true, - "license": "MIT" - }, - "node_modules/resolve": { - "version": "1.22.10", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", - "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-core-module": "^2.16.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true, - "license": "MIT", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rollup": { - "version": "3.29.5", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.5.tgz", - "integrity": "sha512-GVsDdsbJzzy4S/v3dqWPJ7EfvZJfCHiDqe80IyrF59LYuP+e6U1LJoUqeuqRbwAWoMNoXivMNeNAOf5E22VA1w==", - "dev": true, - "license": "MIT", - "bin": { - "rollup": "dist/bin/rollup" - }, - "engines": { - "node": ">=14.18.0", - "npm": ">=8.0.0" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - } - }, - "node_modules/rollup-plugin-dts": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/rollup-plugin-dts/-/rollup-plugin-dts-6.1.1.tgz", - "integrity": "sha512-aSHRcJ6KG2IHIioYlvAOcEq6U99sVtqDDKVhnwt70rW6tsz3tv5OSjEiWcgzfsHdLyGXZ/3b/7b/+Za3Y6r1XA==", - "dev": true, - "license": "LGPL-3.0-only", - "dependencies": { - "magic-string": "^0.30.10" - }, - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/Swatinem" - }, - "optionalDependencies": { - "@babel/code-frame": "^7.24.2" - }, - "peerDependencies": { - "rollup": "^3.29.4 || ^4", - "typescript": "^4.5 || ^5.0" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/scule": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/scule/-/scule-1.3.0.tgz", - "integrity": "sha512-6FtHJEvt+pVMIB9IBY+IcCJ6Z5f1iQnytgyfKMhDKgmzYG+TeH/wx1y3l27rshSbLiSanrR9ffZDrEsmjlQF2g==", - "dev": true, - "license": "MIT" - }, - "node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "license": "ISC", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/slash": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", - "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/source-map-js": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", - "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/strip-final-newline": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", - "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/stylehacks": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-7.0.4.tgz", - "integrity": "sha512-i4zfNrGMt9SB4xRK9L83rlsFCgdGANfeDAYacO1pkqcE7cRHPdWHwnKZVz7WY17Veq/FvyYsRAU++Ga+qDFIww==", - "dev": true, - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.3", - "postcss-selector-parser": "^6.1.2" - }, - "engines": { - "node": "^18.12.0 || ^20.9.0 || >=22.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/stylehacks/node_modules/postcss-selector-parser": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", - "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/svgo": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/svgo/-/svgo-3.3.2.tgz", - "integrity": "sha512-OoohrmuUlBs8B8o6MB2Aevn+pRIH9zDALSR+6hhqVfa6fRwG/Qw9VUMSMW9VNg2CFc/MTIfabtdOVl9ODIJjpw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@trysound/sax": "0.2.0", - "commander": "^7.2.0", - "css-select": "^5.1.0", - "css-tree": "^2.3.1", - "css-what": "^6.1.0", - "csso": "^5.0.5", - "picocolors": "^1.0.0" - }, - "bin": { - "svgo": "bin/svgo" - }, - "engines": { - "node": ">=14.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/svgo" - } - }, - "node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "license": "ISC", - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/tinyglobby": { - "version": "0.2.10", - "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.10.tgz", - "integrity": "sha512-Zc+8eJlFMvgatPZTl6A9L/yht8QqdmUNtURHaKZLmKBE12hNPSrqNkUp2cs3M/UKmNVVAMFQYSjYIVHDjW5zew==", - "dev": true, - "license": "MIT", - "dependencies": { - "fdir": "^6.4.2", - "picomatch": "^4.0.2" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/tinyglobby/node_modules/fdir": { - "version": "6.4.2", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.2.tgz", - "integrity": "sha512-KnhMXsKSPZlAhp7+IjUkRZKPb4fUyccpDrdFXbi4QL1qkmFh9kVY09Yox+n4MaOb3lHZ1Tv829C3oaaXoMYPDQ==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "picomatch": "^3 || ^4" - }, - "peerDependenciesMeta": { - "picomatch": { - "optional": true - } - } - }, - "node_modules/tinyglobby/node_modules/picomatch": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", - "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/typescript": { - "version": "5.7.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.3.tgz", - "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==", - "dev": true, - "license": "Apache-2.0", - "peer": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/ufo": { - "version": "1.5.4", - "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.5.4.tgz", - "integrity": "sha512-UsUk3byDzKd04EyoZ7U4DOlxQaD14JUKQl6/P7wiX4FNvUfm3XL246n9W5AmqwW5RSFJ27NAuM0iLscAOYUiGQ==", - "license": "MIT" - }, - "node_modules/unbuild": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/unbuild/-/unbuild-2.0.0.tgz", - "integrity": "sha512-JWCUYx3Oxdzvw2J9kTAp+DKE8df/BnH/JTSj6JyA4SH40ECdFu7FoJJcrm8G92B7TjofQ6GZGjJs50TRxoH6Wg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/plugin-alias": "^5.0.0", - "@rollup/plugin-commonjs": "^25.0.4", - "@rollup/plugin-json": "^6.0.0", - "@rollup/plugin-node-resolve": "^15.2.1", - "@rollup/plugin-replace": "^5.0.2", - "@rollup/pluginutils": "^5.0.3", - "chalk": "^5.3.0", - "citty": "^0.1.2", - "consola": "^3.2.3", - "defu": "^6.1.2", - "esbuild": "^0.19.2", - "globby": "^13.2.2", - "hookable": "^5.5.3", - "jiti": "^1.19.3", - "magic-string": "^0.30.3", - "mkdist": "^1.3.0", - "mlly": "^1.4.0", - "pathe": "^1.1.1", - "pkg-types": "^1.0.3", - "pretty-bytes": "^6.1.1", - "rollup": "^3.28.1", - "rollup-plugin-dts": "^6.0.0", - "scule": "^1.0.0", - "untyped": "^1.4.0" - }, - "bin": { - "unbuild": "dist/cli.mjs" - }, - "peerDependencies": { - "typescript": "^5.1.6" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/unbuild/node_modules/globby": { - "version": "13.2.2", - "resolved": "https://registry.npmjs.org/globby/-/globby-13.2.2.tgz", - "integrity": "sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w==", - "dev": true, - "license": "MIT", - "dependencies": { - "dir-glob": "^3.0.1", - "fast-glob": "^3.3.0", - "ignore": "^5.2.4", - "merge2": "^1.4.1", - "slash": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/unbuild/node_modules/jiti": { - "version": "1.21.7", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", - "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", - "dev": true, - "license": "MIT", - "bin": { - "jiti": "bin/jiti.js" - } - }, - "node_modules/unbuild/node_modules/slash": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", - "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/unicorn-magic": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz", - "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/untyped": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/untyped/-/untyped-1.5.2.tgz", - "integrity": "sha512-eL/8PlhLcMmlMDtNPKhyyz9kEBDS3Uk4yMu/ewlkT2WFbtzScjHWPJLdQLmaGPUKjXzwe9MumOtOgc4Fro96Kg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.26.0", - "@babel/standalone": "^7.26.4", - "@babel/types": "^7.26.3", - "citty": "^0.1.6", - "defu": "^6.1.4", - "jiti": "^2.4.1", - "knitwork": "^1.2.0", - "scule": "^1.3.0" - }, - "bin": { - "untyped": "dist/cli.mjs" - } - }, - "node_modules/untyped/node_modules/jiti": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.4.2.tgz", - "integrity": "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==", - "dev": true, - "license": "MIT", - "bin": { - "jiti": "lib/jiti-cli.mjs" - } - }, - "node_modules/update-browserslist-db": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.2.tgz", - "integrity": "sha512-PPypAm5qvlD7XMZC3BujecnaOxwhrtoFR+Dqkk5Aa/6DssiH0ibKoketaj9w8LP7Bont1rYeoV5plxD7RTEPRg==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "escalade": "^3.2.0", - "picocolors": "^1.1.1" - }, - "bin": { - "update-browserslist-db": "cli.js" - }, - "peerDependencies": { - "browserslist": ">= 4.21.0" - } - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - "dev": true, - "license": "MIT" - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "license": "ISC" - } - } -} diff --git a/packages/create-eliza-app/package.json b/packages/create-eliza-app/package.json deleted file mode 100644 index 429a7d53c86a1..0000000000000 --- a/packages/create-eliza-app/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "create-eliza-app", - "version": "0.25.6-alpha.1", - "description": "", - "sideEffects": false, - "files": [ - "dist" - ], - "main": "dist/index.cjs", - "bin": { - "create-eliza-app": "dist/index.mjs" - }, - "scripts": { - "build": "unbuild", - "start": "node ./dist/index.cjs", - "automd": "automd" - }, - "keywords": [], - "author": "", - "license": "ISC", - "dependencies": { - "citty": "0.1.6", - "giget": "1.2.3" - }, - "devDependencies": { - "automd": "0.3.12", - "jiti": "2.4.0", - "unbuild": "2.0.0" - } -} diff --git a/packages/create-eliza-app/registry/eliza.json b/packages/create-eliza-app/registry/eliza.json deleted file mode 100644 index 105335b39b256..0000000000000 --- a/packages/create-eliza-app/registry/eliza.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "name": "eliza", - "defaultDir": "eliza", - "url": "https://github.com/elizaos/eliza-starter", - "tar": "https://codeload.github.com/elizaos/eliza-starter/tar.gz/refs/heads/main" -} diff --git a/packages/create-eliza-app/src/index.ts b/packages/create-eliza-app/src/index.ts deleted file mode 100644 index 7aeed4268dbaa..0000000000000 --- a/packages/create-eliza-app/src/index.ts +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env node - -import { downloadTemplate } from "giget"; -import { runMain } from "citty"; - -const DEFAULT_TEMPLATE = "eliza"; -const DEFAULT_REGISTRY = - "https://raw.githubusercontent.com/elizaos/eliza/main/packages/create-eliza-app/registry"; - -runMain({ - args: { - name: { - type: "string", - description: "Name of the template to use", - required: false, - }, - registry: { - type: "string", - description: "Registry URL to download the template from", - default: DEFAULT_REGISTRY, - }, - dir: { - type: "string", - description: "Directory where the project will be created", - required: false, - }, - _dir: { - type: "positional", - default: ".", - description: "Project directory (prefer using --dir)", - }, - }, - async run(context) { - try { - const templateName = context.args.name || DEFAULT_TEMPLATE; - const targetDir = context.args.dir || context.args._dir; - - console.log(`Downloading template ${templateName}...`); - - const res = await downloadTemplate(templateName, { - registry: context.args.registry, - dir: targetDir, - }); - - console.log(`Downloaded template to ${res.dir} from ${res.source}`); - - // Print getting started instructions if using default template - if (templateName === DEFAULT_TEMPLATE) { - console.log("\nGetting Started:"); - console.log(` cd ${res.dir}`); - console.log(" cp .env.example .env"); - console.log(" pnpm install"); - console.log(" pnpm start"); - } - } catch (error: any) { - console.error( - "Error:", - "message" in error ? error.message : "unknown error" - ); - process.exit(1); - } - }, -}); diff --git a/packages/create-eliza-app/tsconfig.json b/packages/create-eliza-app/tsconfig.json deleted file mode 100644 index 885dc1ca3c1e5..0000000000000 --- a/packages/create-eliza-app/tsconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "compilerOptions": { - "target": "ESNext", - "module": "ESNext", - "moduleResolution": "Node", - "esModuleInterop": true, - "strict": true, - "skipLibCheck": true - }, - "include": ["src"] -} diff --git a/packages/dynamic-imports/package.json b/packages/dynamic-imports/package.json new file mode 100644 index 0000000000000..440ee24d4aa76 --- /dev/null +++ b/packages/dynamic-imports/package.json @@ -0,0 +1,5 @@ +{ + "name": "dynamic-imports", + "type": "module", + "main": "src/index.ts" +} diff --git a/packages/dynamic-imports/src/index.ts b/packages/dynamic-imports/src/index.ts new file mode 100644 index 0000000000000..12684abcb7114 --- /dev/null +++ b/packages/dynamic-imports/src/index.ts @@ -0,0 +1,14 @@ +const registrations = new Map(); + +export const dynamicImport = async (specifier: string) => { + const module = registrations.get(specifier); + if (module !== undefined) { + return module; + } else { + return await import(specifier); + } +}; + +export const registerDynamicImport = (specifier: string, module: any) => { + registrations.set(specifier, module); +}; \ No newline at end of file diff --git a/packages/plugin-0g/README.md b/packages/plugin-0g/README.md deleted file mode 100644 index e0d2a4922ed5e..0000000000000 --- a/packages/plugin-0g/README.md +++ /dev/null @@ -1,221 +0,0 @@ -# @elizaos/plugin-0g - -A plugin for storing data using the 0G protocol within the ElizaOS ecosystem. - -## Description - -The 0G plugin enables seamless integration with the Zero Gravity (0G) protocol for decentralized file storage. It provides functionality to upload files to the 0G network. - -## Installation - -```bash -pnpm install @elizaos/plugin-0g -``` - -## Configuration - -The plugin requires the following environment variables to be set: - -```typescript -ZEROG_INDEXER_RPC=<0G indexer RPC endpoint> -ZEROG_EVM_RPC=<0G EVM RPC endpoint> -ZEROG_PRIVATE_KEY= -ZEROG_FLOW_ADDRESS=<0G Flow contract address> -``` - -## Usage - -### Basic Integration - -```typescript -import { zgPlugin } from "@eliza/plugin-0g"; -``` - -### File Upload Example - -```typescript -// The plugin automatically handles file uploads when triggered -// through natural language commands like: - -"Upload my document.pdf"; -"Store this image.png on 0G"; -"Save my resume.docx to Zero Gravity"; -``` - -## API Reference - -### Actions - -#### ZG_UPLOAD - -Uploads files to the 0G network. - -**Aliases:** - -- UPLOAD_FILE_TO_ZG -- STORE_FILE_ON_ZG -- SAVE_FILE_TO_ZG -- UPLOAD_TO_ZERO_GRAVITY -- STORE_ON_ZERO_GRAVITY -- SHARE_FILE_ON_ZG -- PUBLISH_FILE_TO_ZG - -**Input Content:** - -```typescript -interface UploadContent { - filePath: string; -} -``` - -## Common Issues & Troubleshooting - -1. **File Access Errors** - - - Ensure the file exists at the specified path - - Check file permissions - - Verify the path is absolute or relative to the execution context - -2. **Configuration Issues** - - Verify all required environment variables are set - - Ensure RPC endpoints are accessible - - Confirm private key has sufficient permissions - -## Security Best Practices - -1. **Environment Variables** - - Never commit private keys to version control - - Use secure environment variable management - - Rotate private keys periodically - -## Development Guide - -### Setting Up Development Environment - -1. Clone the repository -2. Install dependencies: - -```bash -pnpm install -``` - -3. Build the plugin: - -```bash -pnpm run build -``` - -4. Run the plugin: - -```bash -pnpm run dev -``` - -## Future Enhancements - -1. **Storage Management** - - - Multi-file upload optimization - - Folder structure preservation - - Automated file replication - - Storage redundancy management - - File versioning system - - Archival storage options - -2. **Content Distribution** - - - CDN integration - - Bandwidth optimization - - Geographic replication - - Edge caching support - - P2P content delivery - - Streaming optimization - -3. **Data Security** - - - Enhanced encryption options - - Access control lists - - Key management system - - Data integrity verification - - Secure sharing mechanisms - - Privacy-preserving features - -4. **Integration Features** - - - Additional blockchain support - - Cross-chain functionality - - Smart contract integration - - NFT storage optimization - - DApp integration tools - - API expansion - -5. **Performance Optimization** - - - Upload speed improvements - - Parallel processing - - Compression algorithms - - Caching mechanisms - - Network optimization - - Resource management - -6. **Developer Tools** - - - Enhanced SDK features - - CLI tool improvements - - Testing framework - - Monitoring dashboard - - Analytics integration - - Documentation generator - -7. **Content Management** - - - Metadata management - - Search functionality - - Content indexing - - Tag system - - Collection management - - Batch operations - -8. **Protocol Features** - - Model service deployment - - KV store implementation - - State persistence - - Database integration - - Enhanced file metadata - - Protocol governance - -We welcome community feedback and contributions to help prioritize these enhancements. - -## Contributing - -Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information. - -## Credits - -This plugin integrates with and builds upon several key technologies: - -- [Zero Gravity (0G)](https://0g.ai/): Decentralized file storage protocol -- [IPFS](https://ipfs.tech/): InterPlanetary File System -- [Filecoin](https://filecoin.io/): Decentralized storage network -- [Flow](https://flow.com/): Blockchain for open worlds -- [Content Addressable Storage](https://en.wikipedia.org/wiki/Content-addressable_storage): Storage architecture - -Special thanks to: - -- The 0G Protocol development team -- The Protocol Labs team for IPFS -- The Filecoin Foundation -- The Flow blockchain team -- The decentralized storage community -- The Eliza community for their contributions and feedback - -For more information about 0G capabilities: - -- [0G Documentation](https://docs.0g.xyz/) -- [IPFS Documentation](https://docs.ipfs.tech/) -- [Filecoin Docs](https://docs.filecoin.io/) -- [Flow Documentation](https://developers.flow.com/) - -## License - -This plugin is part of the Eliza project. See the main project repository for license information. diff --git a/packages/plugin-0g/__tests__/actions/upload.test.ts b/packages/plugin-0g/__tests__/actions/upload.test.ts deleted file mode 100644 index bd72f6c4c94fc..0000000000000 --- a/packages/plugin-0g/__tests__/actions/upload.test.ts +++ /dev/null @@ -1,235 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { zgUpload } from '../../src/actions/upload'; -import { type Memory, type State, type IAgentRuntime, type HandlerCallback } from '@elizaos/core'; -import { FileSecurityValidator, type SecurityConfig, type ValidationResult } from '../../src/utils/security'; -import { promises as fs } from 'fs'; -import type { Stats } from 'fs'; - -// Mock dependencies -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - debug: vi.fn(), - info: vi.fn(), - error: vi.fn(), - warn: vi.fn(), - }, - composeContext: vi.fn(), - generateObject: vi.fn().mockResolvedValue({ - filePath: '/path/to/test/file.txt' - }), - parseBooleanFromText: vi.fn(), - ModelClass: { - LARGE: 'LARGE', - }, -})); - -interface MockZgFile { - upload: vi.Mock>; - merkleTree: vi.Mock>; -} - -vi.mock('@0glabs/0g-ts-sdk', () => ({ - Indexer: vi.fn().mockImplementation(() => ({ - getFile: vi.fn().mockResolvedValue({}), - })), - ZgFile: class { - static fromFilePath = vi.fn().mockImplementation((filePath: string): MockZgFile => ({ - upload: vi.fn().mockResolvedValue('test-file-id'), - merkleTree: vi.fn().mockResolvedValue('test-merkle-tree'), - })); - }, - getFlowContract: vi.fn().mockReturnValue({ - address: '0xtest', - }), -})); - -vi.mock('fs', () => ({ - promises: { - access: vi.fn().mockResolvedValue(undefined), - readFile: vi.fn().mockResolvedValue(Buffer.from('test content')), - unlink: vi.fn().mockResolvedValue(undefined), - stat: vi.fn().mockResolvedValue({ - size: 1024, - isFile: () => true, - isDirectory: () => false, - isSymbolicLink: () => false, - birthtime: new Date(), - mtime: new Date(), - } as Stats), - }, -})); - -interface MockValidator { - config: SecurityConfig; - validateFileType: jest.Mock>; - validateFileSize: jest.Mock>; - validateVirusScan: jest.Mock>; - validateFilePath: jest.Mock>; - sanitizePath: jest.Mock; -} - -vi.mock('../../src/utils/security', () => { - const validateFileType = vi.fn().mockResolvedValue({ isValid: true }); - const validateFileSize = vi.fn().mockResolvedValue({ isValid: true }); - const validateVirusScan = vi.fn().mockResolvedValue({ isValid: true }); - const validateFilePath = vi.fn().mockResolvedValue({ isValid: true }); - const sanitizePath = vi.fn().mockImplementation((filePath: string): string => filePath); - const validateFile = vi.fn().mockResolvedValue(true); - - return { - FileSecurityValidator: class implements MockValidator { - constructor(config: SecurityConfig) { - if (!config.allowedExtensions || config.allowedExtensions.length === 0) { - throw new Error('Security configuration error: allowedExtensions must be specified'); - } - if (!config.uploadDirectory) { - throw new Error('Security configuration error: uploadDirectory must be specified'); - } - if (config.maxFileSize <= 0) { - throw new Error('Security configuration error: maxFileSize must be positive'); - } - this.config = config; - this.validateFileType = validateFileType; - this.validateFileSize = validateFileSize; - this.validateVirusScan = validateVirusScan; - this.validateFilePath = validateFilePath; - this.sanitizePath = sanitizePath; - } - config: SecurityConfig; - validateFileType: jest.Mock>; - validateFileSize: jest.Mock>; - validateVirusScan: jest.Mock>; - validateFilePath: jest.Mock>; - sanitizePath: jest.Mock; - static validateFile = validateFile; - }, - }; -}); - -describe('ZG Upload Action', () => { - let mockValidator: MockValidator; - - const mockRuntime: Required> = { - getSetting: vi.fn(), - composeState: vi.fn(), - updateRecentMessageState: vi.fn(), - }; - - const mockMessage: Required> = { - id: 'test-message-id', - content: { - filePath: '/path/to/test/file.txt', - }, - }; - - const mockState: Required> = { - messages: [], - context: {}, - }; - - const mockCallback: HandlerCallback = vi.fn(); - - const defaultSettings: Record = { - ZEROG_INDEXER_RPC: 'http://indexer.test', - ZEROG_EVM_RPC: 'http://evm.test', - ZEROG_PRIVATE_KEY: '0xprivatekey', - ZEROG_FLOW_ADDRESS: '0xflowaddress', - ZEROG_MAX_FILE_SIZE: '10485760', - ZEROG_ALLOWED_EXTENSIONS: '.pdf,.png,.jpg,.jpeg,.doc,.docx', - ZEROG_UPLOAD_DIR: '/tmp/zerog-uploads', - ZEROG_ENABLE_VIRUS_SCAN: 'false', - }; - - beforeEach(() => { - vi.clearAllMocks(); - - // Setup default mock implementations - mockRuntime.getSetting.mockImplementation((key: string): string => { - const value = defaultSettings[key]; - if (value === undefined) { - throw new Error(`Unexpected setting key: ${key}`); - } - return value; - }); - - mockRuntime.composeState.mockResolvedValue(mockState); - mockRuntime.updateRecentMessageState.mockResolvedValue(mockState); - - // Create a new validator instance for each test - const config: SecurityConfig = { - maxFileSize: 10485760, - allowedExtensions: ['.pdf', '.png', '.jpg', '.jpeg', '.doc', '.docx'], - uploadDirectory: '/tmp/zerog-uploads', - enableVirusScan: false, - }; - mockValidator = new FileSecurityValidator(config); - }); - - describe('validate', () => { - it('should validate successfully with correct settings', async () => { - const result = await zgUpload.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - - it('should fail validation with missing settings', async () => { - mockRuntime.getSetting.mockReturnValue(undefined); - const result = await zgUpload.validate(mockRuntime, mockMessage); - expect(result).toBe(false); - }); - }); - - describe('handler', () => { - it('should handle file upload successfully', async () => { - const result = await zgUpload.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(result).toBeDefined(); - }); - - it('should handle file validation failure', async () => { - mockValidator.validateFileType.mockResolvedValueOnce({ - isValid: false, - error: 'Invalid file type' - }); - - const result = await zgUpload.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Upload failed: Invalid file type', - content: { error: 'Invalid file type' } - }); - }); - - it('should handle file not found error', async () => { - const error = new Error('ENOENT: no such file or directory'); - vi.mocked(fs.access).mockRejectedValueOnce(error); - vi.mocked(fs.stat).mockRejectedValueOnce(error); - - const result = await zgUpload.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Upload failed: Could not access file', - content: { error: 'Failed to get file stats: ENOENT: no such file or directory' } - }); - }); - }); -}); diff --git a/packages/plugin-0g/biome.json b/packages/plugin-0g/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-0g/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-0g/package.json b/packages/plugin-0g/package.json deleted file mode 100644 index 0cec4a2d6663f..0000000000000 --- a/packages/plugin-0g/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "@elizaos/plugin-0g", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@0glabs/0g-ts-sdk": "0.2.1", - "@elizaos/core": "workspace:*", - "ethers": "6.13.4", - "tsup": "8.3.5" - }, - "devDependencies": { - "@biomejs/biome": "1.5.3", - "vitest": "^3.0.0" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "lint": "biome check src/", - "lint:fix": "biome check --apply src/", - "format": "biome format src/", - "format:fix": "biome format --write src/" - } -} diff --git a/packages/plugin-0g/src/actions/upload.ts b/packages/plugin-0g/src/actions/upload.ts deleted file mode 100644 index 639da61a140a7..0000000000000 --- a/packages/plugin-0g/src/actions/upload.ts +++ /dev/null @@ -1,508 +0,0 @@ -import { - type Action, - type HandlerCallback, - type IAgentRuntime, - type Memory, - type State, - ModelClass, - type Content, - type ActionExample, - generateObject, - elizaLogger, -} from "@elizaos/core"; -import { Indexer, ZgFile, getFlowContract } from "@0glabs/0g-ts-sdk"; -import { ethers, Wallet } from "ethers"; -import { composeContext } from "@elizaos/core"; -import { promises as fs, type Stats } from "node:fs"; -import { FileSecurityValidator } from "../utils/security"; -import { logSecurityEvent, monitorUpload, monitorFileValidation, monitorCleanup } from '../utils/monitoring'; -import { uploadTemplate } from "../templates/upload"; - -export interface UploadContent extends Content { - filePath: string; -} - -function isUploadContent( - _runtime: IAgentRuntime, - content: unknown -): content is UploadContent { - elizaLogger.debug("Validating upload content", { content }); - return typeof content === "object" && content !== null && "filePath" in content && typeof (content as UploadContent).filePath === "string"; -} - -export const zgUpload: Action = { - name: "ZG_UPLOAD", - similes: [ - "UPLOAD_FILE_TO_ZG", - "STORE_FILE_ON_ZG", - "SAVE_FILE_TO_ZG", - "UPLOAD_TO_ZERO_GRAVITY", - "STORE_ON_ZERO_GRAVITY", - "SHARE_FILE_ON_ZG", - "PUBLISH_FILE_TO_ZG", - ], - description: "Store data using 0G protocol", - validate: async (runtime: IAgentRuntime, message: Memory) => { - elizaLogger.debug("Starting ZG_UPLOAD validation", { messageId: message.id }); - - try { - const settings = { - indexerRpc: runtime.getSetting("ZEROG_INDEXER_RPC"), - evmRpc: runtime.getSetting("ZEROG_EVM_RPC"), - privateKey: runtime.getSetting("ZEROG_PRIVATE_KEY"), - flowAddr: runtime.getSetting("ZEROG_FLOW_ADDRESS") - }; - - elizaLogger.debug("Checking ZeroG settings", { - hasIndexerRpc: Boolean(settings.indexerRpc), - hasEvmRpc: Boolean(settings.evmRpc), - hasPrivateKey: Boolean(settings.privateKey), - hasFlowAddr: Boolean(settings.flowAddr) - }); - - const hasRequiredSettings = Object.entries(settings).every(([_key, value]) => Boolean(value)); - - if (!hasRequiredSettings) { - const missingSettings = Object.entries(settings) - .filter(([_, value]) => !value) - .map(([key]) => key); - - elizaLogger.error("Missing required ZeroG settings", { - missingSettings, - messageId: message.id - }); - return false; - } - - const config = { - maxFileSize: Number.parseInt(runtime.getSetting("ZEROG_MAX_FILE_SIZE") || "10485760"), - allowedExtensions: runtime.getSetting("ZEROG_ALLOWED_EXTENSIONS")?.split(",") || [".pdf", ".png", ".jpg", ".jpeg", ".doc", ".docx"], - uploadDirectory: runtime.getSetting("ZEROG_UPLOAD_DIR") || "/tmp/zerog-uploads", - enableVirusScan: runtime.getSetting("ZEROG_ENABLE_VIRUS_SCAN") === "true" - }; - - // Validate config values - if (Number.isNaN(config.maxFileSize) || config.maxFileSize <= 0) { - elizaLogger.error("Invalid ZEROG_MAX_FILE_SIZE setting", { - value: runtime.getSetting("ZEROG_MAX_FILE_SIZE"), - messageId: message.id - }); - return false; - } - - if (!config.allowedExtensions || config.allowedExtensions.length === 0) { - elizaLogger.error("Invalid ZEROG_ALLOWED_EXTENSIONS setting", { - value: runtime.getSetting("ZEROG_ALLOWED_EXTENSIONS"), - messageId: message.id - }); - return false; - } - - elizaLogger.info("ZG_UPLOAD action settings validated", { - config, - messageId: message.id - }); - return true; - } catch (error) { - elizaLogger.error("Error validating ZG_UPLOAD settings", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - messageId: message.id - }); - return false; - } - }, - - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: Record, - callback: HandlerCallback - ) => { - elizaLogger.info("ZG_UPLOAD action started", { - messageId: message.id, - hasState: Boolean(state), - hasCallback: Boolean(callback) - }); - - let file: ZgFile | undefined; - let cleanupRequired = false; - - try { - // Update state if needed - // Initialize or update state - let currentState = state; - if (!currentState) { - elizaLogger.debug("No state provided, composing new state"); - currentState = (await runtime.composeState(message)) as State; - } else { - elizaLogger.debug("Updating existing state"); - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Compose upload context - elizaLogger.debug("Composing upload context"); - const uploadContext = composeContext({ - state: currentState, - template: uploadTemplate, - }); - - // Generate upload content - elizaLogger.debug("Generating upload content"); - const content = await generateObject({ - runtime, - context: uploadContext, - modelClass: ModelClass.LARGE, - }); - - // Validate upload content - if (!isUploadContent(runtime, content)) { - const error = "Invalid content for UPLOAD action"; - elizaLogger.error(error, { - content, - messageId: message.id - }); - if (callback) { - callback({ - text: "Unable to process 0G upload request. Invalid content provided.", - content: { error } - }); - } - return false; - } - - const filePath = content.filePath; - elizaLogger.debug("Extracted file path", { filePath, content }); - - if (!filePath) { - const error = "File path is required"; - elizaLogger.error(error, { messageId: message.id }); - if (callback) { - callback({ - text: "File path is required for upload.", - content: { error } - }); - } - return false; - } - - // Initialize security validator - const securityConfig = { - maxFileSize: Number.parseInt(runtime.getSetting("ZEROG_MAX_FILE_SIZE") || "10485760"), - allowedExtensions: runtime.getSetting("ZEROG_ALLOWED_EXTENSIONS")?.split(",") || [".pdf", ".png", ".jpg", ".jpeg", ".doc", ".docx"], - uploadDirectory: runtime.getSetting("ZEROG_UPLOAD_DIR") || "/tmp/zerog-uploads", - enableVirusScan: runtime.getSetting("ZEROG_ENABLE_VIRUS_SCAN") === "true" - }; - - let validator: FileSecurityValidator; - try { - elizaLogger.debug("Initializing security validator", { - config: securityConfig, - messageId: message.id - }); - validator = new FileSecurityValidator(securityConfig); - } catch (error) { - const errorMessage = `Security validator initialization failed: ${error instanceof Error ? error.message : String(error)}`; - elizaLogger.error(errorMessage, { - config: securityConfig, - messageId: message.id - }); - if (callback) { - callback({ - text: "Upload failed: Security configuration error.", - content: { error: errorMessage } - }); - } - return false; - } - - // Validate file type - elizaLogger.debug("Starting file type validation", { filePath }); - const typeValidation = await validator.validateFileType(filePath); - monitorFileValidation(filePath, "file_type", typeValidation.isValid, { - error: typeValidation.error - }); - if (!typeValidation.isValid) { - const error = "File type validation failed"; - elizaLogger.error(error, { - error: typeValidation.error, - filePath, - messageId: message.id - }); - if (callback) { - callback({ - text: `Upload failed: ${typeValidation.error}`, - content: { error: typeValidation.error } - }); - } - return false; - } - - // Validate file size - elizaLogger.debug("Starting file size validation", { filePath }); - const sizeValidation = await validator.validateFileSize(filePath); - monitorFileValidation(filePath, "file_size", sizeValidation.isValid, { - error: sizeValidation.error - }); - if (!sizeValidation.isValid) { - const error = "File size validation failed"; - elizaLogger.error(error, { - error: sizeValidation.error, - filePath, - messageId: message.id - }); - if (callback) { - callback({ - text: `Upload failed: ${sizeValidation.error}`, - content: { error: sizeValidation.error } - }); - } - return false; - } - - // Validate file path - elizaLogger.debug("Starting file path validation", { filePath }); - const pathValidation = await validator.validateFilePath(filePath); - monitorFileValidation(filePath, "file_path", pathValidation.isValid, { - error: pathValidation.error - }); - if (!pathValidation.isValid) { - const error = "File path validation failed"; - elizaLogger.error(error, { - error: pathValidation.error, - filePath, - messageId: message.id - }); - if (callback) { - callback({ - text: `Upload failed: ${pathValidation.error}`, - content: { error: pathValidation.error } - }); - } - return false; - } - - // Sanitize the file path - let sanitizedPath: string; - try { - sanitizedPath = validator.sanitizePath(filePath); - elizaLogger.debug("File path sanitized", { - originalPath: filePath, - sanitizedPath, - messageId: message.id - }); - } catch (error) { - const errorMessage = `Failed to sanitize file path: ${error instanceof Error ? error.message : String(error)}`; - elizaLogger.error(errorMessage, { - filePath, - messageId: message.id - }); - if (callback) { - callback({ - text: "Upload failed: Invalid file path.", - content: { error: errorMessage } - }); - } - return false; - } - - // Start upload monitoring - const startTime = Date.now(); - let fileStats: Stats; - try { - fileStats = await fs.stat(sanitizedPath); - elizaLogger.debug("File stats retrieved", { - size: fileStats.size, - path: sanitizedPath, - created: fileStats.birthtime, - modified: fileStats.mtime, - messageId: message.id - }); - } catch (error) { - const errorMessage = `Failed to get file stats: ${error instanceof Error ? error.message : String(error)}`; - elizaLogger.error(errorMessage, { - path: sanitizedPath, - messageId: message.id - }); - if (callback) { - callback({ - text: "Upload failed: Could not access file", - content: { error: errorMessage } - }); - } - return false; - } - - try { - // Initialize ZeroG file - elizaLogger.debug("Initializing ZeroG file", { - sanitizedPath, - messageId: message.id - }); - file = await ZgFile.fromFilePath(sanitizedPath); - cleanupRequired = true; - - // Generate Merkle tree - elizaLogger.debug("Generating Merkle tree"); - const [merkleTree, merkleError] = await file.merkleTree(); - if (merkleError !== null) { - const error = `Error getting file root hash: ${merkleError instanceof Error ? merkleError.message : String(merkleError)}`; - elizaLogger.error(error, { messageId: message.id }); - if (callback) { - callback({ - text: "Upload failed: Error generating file hash.", - content: { error } - }); - } - return false; - } - elizaLogger.info("File root hash generated", { - rootHash: merkleTree.rootHash(), - messageId: message.id - }); - - // Initialize blockchain connection - elizaLogger.debug("Initializing blockchain connection"); - const provider = new ethers.JsonRpcProvider(runtime.getSetting("ZEROG_EVM_RPC")); - const signer = new ethers.Wallet(runtime.getSetting("ZEROG_PRIVATE_KEY"), provider); - const indexer = new Indexer(runtime.getSetting("ZEROG_INDEXER_RPC")); - const flowContract = getFlowContract(runtime.getSetting("ZEROG_FLOW_ADDRESS"), signer as any); - - // Upload file to ZeroG - elizaLogger.info("Starting file upload to ZeroG", { - filePath: sanitizedPath, - messageId: message.id - }); - const [txHash, uploadError] = await indexer.upload( - file, - 0, - runtime.getSetting("ZEROG_EVM_RPC"), - flowContract - ); - - if (uploadError !== null) { - const error = `Error uploading file: ${uploadError instanceof Error ? uploadError.message : String(uploadError)}`; - elizaLogger.error(error, { messageId: message.id }); - monitorUpload({ - filePath: sanitizedPath, - size: fileStats.size, - duration: Date.now() - startTime, - success: false, - error: error - }); - if (callback) { - callback({ - text: "Upload failed: Error during file upload.", - content: { error } - }); - } - return false; - } - - // Log successful upload - monitorUpload({ - filePath: sanitizedPath, - size: fileStats.size, - duration: Date.now() - startTime, - success: true - }); - - elizaLogger.info("File uploaded successfully", { - transactionHash: txHash, - filePath: sanitizedPath, - fileSize: fileStats.size, - duration: Date.now() - startTime, - messageId: message.id - }); - - if (callback) { - callback({ - text: "File uploaded successfully to ZeroG.", - content: { - success: true, - transactionHash: txHash - } - }); - } - - return true; - } finally { - // Cleanup temporary file - if (cleanupRequired && file) { - try { - elizaLogger.debug("Starting file cleanup", { - filePath: sanitizedPath, - messageId: message.id - }); - await file.close(); - await fs.unlink(sanitizedPath); - monitorCleanup(sanitizedPath, true); - elizaLogger.debug("File cleanup completed successfully", { - filePath: sanitizedPath, - messageId: message.id - }); - } catch (cleanupError) { - monitorCleanup(sanitizedPath, false, cleanupError.message); - elizaLogger.warn("Failed to cleanup file", { - error: cleanupError instanceof Error ? cleanupError.message : String(cleanupError), - filePath: sanitizedPath, - messageId: message.id - }); - } - } - } - } catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error); - logSecurityEvent("Unexpected error in upload action", "high", { - error: errorMessage, - stack: error instanceof Error ? error.stack : undefined, - messageId: message.id - }); - - elizaLogger.error("Unexpected error during file upload", { - error: errorMessage, - stack: error instanceof Error ? error.stack : undefined, - messageId: message.id - }); - - if (callback) { - callback({ - text: "Upload failed due to an unexpected error.", - content: { error: errorMessage } - }); - } - - return false; - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "upload my resume.pdf file", - action: "ZG_UPLOAD", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "can you help me upload this document.docx?", - action: "ZG_UPLOAD", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "I need to upload an image file image.png", - action: "ZG_UPLOAD", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-0g/src/index.ts b/packages/plugin-0g/src/index.ts deleted file mode 100644 index 70ede43647dee..0000000000000 --- a/packages/plugin-0g/src/index.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { zgUpload } from "./actions/upload"; - -export const zgPlugin: Plugin = { - description: "ZeroG Plugin for Eliza", - name: "ZeroG", - actions: [zgUpload], - evaluators: [], - providers: [], -}; diff --git a/packages/plugin-0g/src/templates/upload.ts b/packages/plugin-0g/src/templates/upload.ts deleted file mode 100644 index d982d8929b6ea..0000000000000 --- a/packages/plugin-0g/src/templates/upload.ts +++ /dev/null @@ -1,22 +0,0 @@ -export const uploadTemplate = `Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined. - -Example response: -\`\`\`json -{ - "filePath": null, - "description": "I want to upload a file" -} -\`\`\` - -{{recentMessages}} - -Extract the user's intention to upload a file from the conversation. Users might express this in various ways, such as: -- "I want to upload a file" -- "upload an image" -- "send a photo" -- "upload" -- "let me share a file" - -If the user provides any specific description of the file, include that as well. - -Respond with a JSON markdown block containing only the extracted values.`; diff --git a/packages/plugin-0g/src/utils/monitoring.ts b/packages/plugin-0g/src/utils/monitoring.ts deleted file mode 100644 index e502a5641768c..0000000000000 --- a/packages/plugin-0g/src/utils/monitoring.ts +++ /dev/null @@ -1,96 +0,0 @@ -import { elizaLogger } from '@elizaos/core'; - -export interface SecurityEvent { - timestamp: number; - event: string; - severity: 'low' | 'medium' | 'high'; - details: Record; -} - -export interface UploadMetrics { - filePath: string; - size: number; - timestamp: string; - duration?: number; - success: boolean; - error?: string; -} - -/** - * Logs a security event with the specified severity and details - */ -export const logSecurityEvent = ( - event: string, - severity: SecurityEvent['severity'], - details: Record -): void => { - const securityEvent: SecurityEvent = { - timestamp: Date.now(), - event, - severity, - details - }; - - elizaLogger.info('Security event', securityEvent); - - // For high severity events, also log as error - if (severity === 'high') { - elizaLogger.error('High severity security event', securityEvent); - } -}; - -/** - * Tracks upload metrics and logs them - */ -export const monitorUpload = (metrics: Omit): void => { - const uploadMetrics: UploadMetrics = { - ...metrics, - timestamp: new Date().toISOString() - }; - - elizaLogger.info('Upload metrics', uploadMetrics); - - // Log errors if present - if (!metrics.success && metrics.error) { - elizaLogger.error('Upload failed', { - filePath: metrics.filePath, - error: metrics.error - }); - } -}; - -/** - * Monitors file validation events - */ -export const monitorFileValidation = ( - filePath: string, - validationType: string, - isValid: boolean, - details?: Record -): void => { - const event = isValid ? 'File validation passed' : 'File validation failed'; - const severity = isValid ? 'low' : 'medium'; - - logSecurityEvent(event, severity, { - filePath, - validationType, - ...details - }); -}; - -/** - * Tracks cleanup operations - */ -export const monitorCleanup = ( - filePath: string, - success: boolean, - error?: string -): void => { - const event = success ? 'File cleanup succeeded' : 'File cleanup failed'; - const severity = success ? 'low' : 'medium'; - - logSecurityEvent(event, severity, { - filePath, - error - }); -}; \ No newline at end of file diff --git a/packages/plugin-0g/src/utils/security.ts b/packages/plugin-0g/src/utils/security.ts deleted file mode 100644 index 6b2e3df898f10..0000000000000 --- a/packages/plugin-0g/src/utils/security.ts +++ /dev/null @@ -1,182 +0,0 @@ -import { promises as fs } from 'node:fs'; -import path from 'node:path'; - -export interface SecurityConfig { - maxFileSize: number; - allowedExtensions: string[]; - uploadDirectory: string; - enableVirusScan: boolean; -} - -export interface ValidationResult { - isValid: boolean; - error?: string; -} - -export class FileSecurityValidator { - private config: SecurityConfig; - - constructor(config: SecurityConfig) { - if (!config.allowedExtensions || config.allowedExtensions.length === 0) { - throw new Error('Security configuration error: allowedExtensions must be specified'); - } - if (!config.uploadDirectory) { - throw new Error('Security configuration error: uploadDirectory must be specified'); - } - if (config.maxFileSize <= 0) { - throw new Error('Security configuration error: maxFileSize must be positive'); - } - this.config = config; - } - - async validateFileType(filePath: string): Promise { - try { - if (!filePath) { - return { - isValid: false, - error: 'Invalid file path: Path cannot be empty' - }; - } - - const ext = path.extname(filePath).toLowerCase(); - if (!ext) { - return { - isValid: false, - error: `File type not allowed. Allowed types: ${this.config.allowedExtensions.join(', ')}` - }; - } - - if (!this.config.allowedExtensions.includes(ext)) { - return { - isValid: false, - error: `File type not allowed. Allowed types: ${this.config.allowedExtensions.join(', ')}` - }; - } - return { isValid: true }; - } catch (error) { - return { - isValid: false, - error: `Error validating file type: ${error instanceof Error ? error.message : String(error)}` - }; - } - } - - async validateFileSize(filePath: string): Promise { - try { - if (!filePath) { - return { - isValid: false, - error: 'Invalid file path: Path cannot be empty' - }; - } - - const stats = await fs.stat(filePath); - if (stats.size === 0) { - return { - isValid: false, - error: 'Invalid file: File is empty' - }; - } - - if (stats.size > this.config.maxFileSize) { - return { - isValid: false, - error: `File size exceeds limit of ${this.config.maxFileSize} bytes (file size: ${stats.size} bytes)` - }; - } - return { isValid: true }; - } catch (error) { - if (error.code === 'ENOENT') { - return { - isValid: false, - error: 'File not found or inaccessible' - }; - } - if (error.code === 'EACCES') { - return { - isValid: false, - error: 'Permission denied: Cannot access file' - }; - } - return { - isValid: false, - error: `Error checking file size: ${error instanceof Error ? error.message : String(error)}` - }; - } - } - - async validateFilePath(filePath: string): Promise { - try { - if (!filePath) { - return { - isValid: false, - error: 'Invalid file path: Path cannot be empty' - }; - } - - const normalizedPath = path.normalize(filePath); - - // Check for directory traversal attempts - if (normalizedPath.includes('..')) { - return { - isValid: false, - error: 'Invalid file path: Directory traversal detected' - }; - } - - // For test files, we'll allow them to be created in the test directory - if (normalizedPath.includes('__test_files__')) { - return { isValid: true }; - } - - // For production files, ensure they're in the upload directory - const uploadDir = path.normalize(this.config.uploadDirectory); - - // Check if upload directory exists and is accessible - try { - await fs.access(uploadDir, fs.constants.W_OK); - } catch (error) { - return { - isValid: false, - error: `Upload directory is not accessible: ${error.code === 'ENOENT' ? 'Directory does not exist' : - error.code === 'EACCES' ? 'Permission denied' : error.message}` - }; - } - - if (!normalizedPath.startsWith(uploadDir)) { - return { - isValid: false, - error: 'Invalid file path: File must be within the upload directory' - }; - } - - return { isValid: true }; - } catch (error) { - return { - isValid: false, - error: `Error validating file path: ${error instanceof Error ? error.message : String(error)}` - }; - } - } - - sanitizePath(filePath: string): string { - try { - if (!filePath) { - throw new Error('File path cannot be empty'); - } - - // Remove any directory traversal attempts - const normalizedPath = path.normalize(filePath).replace(/^(\.\.[/\\])+/, ''); - - // If it's a test path, preserve it - if (normalizedPath.includes('__test_files__') || !normalizedPath.startsWith(this.config.uploadDirectory)) { - return normalizedPath; - } - - // For production paths, ensure they're in the upload directory - return path.join(this.config.uploadDirectory, path.basename(normalizedPath)); - } catch (error) { - throw new Error(`Error sanitizing file path: ${error instanceof Error ? error.message : String(error)}`); - } - } -} \ No newline at end of file diff --git a/packages/plugin-0g/tsconfig.json b/packages/plugin-0g/tsconfig.json deleted file mode 100644 index a9de3e24c18b3..0000000000000 --- a/packages/plugin-0g/tsconfig.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": ".", - "types": ["node"] - }, - "include": ["src"] -} diff --git a/packages/plugin-0g/tsup.config.ts b/packages/plugin-0g/tsup.config.ts deleted file mode 100644 index 1b704be143d15..0000000000000 --- a/packages/plugin-0g/tsup.config.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "@0glabs/0g-ts-sdk", - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-0g/vitest.config.ts b/packages/plugin-0g/vitest.config.ts deleted file mode 100644 index dcc2f3dd98c53..0000000000000 --- a/packages/plugin-0g/vitest.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - // Enable Jest-like globals (describe, it, expect) - globals: true, - - // Environment setup - environment: 'node', - - // Test file patterns - include: ['__tests__/**/*.{test,spec}.{js,mjs,cjs,ts,mts,cts,jsx,tsx}'], - exclude: ['**/node_modules/**', '**/dist/**'], - - // TypeScript configuration - typecheck: { - tsconfig: './tsconfig.json', - include: ['**/*.{test,spec}.{ts,tsx}'], - }, - }, -}); diff --git a/packages/plugin-0x/README.md b/packages/plugin-0x/README.md deleted file mode 100644 index 42cd84804c255..0000000000000 --- a/packages/plugin-0x/README.md +++ /dev/null @@ -1,86 +0,0 @@ -# @elizaos/plugin-0x - -This plugin enables Eliza to interact with the 0x Protocol, providing decentralized exchange capabilities across multiple evm blockchain networks with optimized token swaps and liquidity aggregation. - -Supported networks include: - - Ethereum Mainnet - - Polygon - - Binance Smart Chain - - Arbitrum - - Optimism - - Base - - Linea - - Scroll - - Avalanche - - Blast - -## Configuration - -Set the following environment variables: - -```env -WALLET_PRIVATE_KEY=your_private_key -ZERO_EX_API_KEY=your_0x_api_key -{chain}_RPC_URL=your_rpc_endpoint -``` - -## Installation - -```bash -pnpm install @elizaos/plugin-0x -``` - -## Usage - -### Basic Integration - -```typescript -import { zeroExPlugin } from "@elizaos/plugin-0x"; -``` - -### Example Usage - -The plugin supports natural language commands for ETH transfers: - -```typescript -"I want to convert 1 ETH to USDC on ethereum chain"; -"Give me the quote"; -"Execute it"; -``` - -## Available Actions - -The plugin provides the following actions: - -1. **GET_INDICATIVE_PRICE_0X**: Get indicative prices for token swaps - - Example: "Get quote for swapping 1 ETH to USDC on Ethereum chain" - - Example: "Price check for trading 100 USDT to MATIC on Polygon chain" - -2. **GET_QUOTE_0X**: Get the quote for the swap. Quote expires in 5mins. (This action is triggered only after user has requested for an indicative price. No need to repeat the buy/sell tokens because the last indicative price will be stored in the memory) - - Example: "Get quote" - -3. **EXECUTE_SWAP_0X**: Execute token swaps. (Action is triggered only after user has gotten a quote) - - Example: "Execute the swap" - -## Security Best Practices - -1. **Environment Variables** - - Never commit private keys to version control - - Use secure environment variable management - - Rotate private keys periodically - -## Contributing - -Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information. - -## Credits - -This plugin integrates with: -- [0x Protocol](https://0x.org/) - -For more information about 0x capabilities: -- [0x API Documentation](https://0x.org/docs/api) - -## License - -This plugin is part of the Eliza project. See the main project repository for license information. \ No newline at end of file diff --git a/packages/plugin-0x/__tests__/actions/getIndicativePrice.test.ts b/packages/plugin-0x/__tests__/actions/getIndicativePrice.test.ts deleted file mode 100644 index c2628d434b684..0000000000000 --- a/packages/plugin-0x/__tests__/actions/getIndicativePrice.test.ts +++ /dev/null @@ -1,196 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { getIndicativePrice } from '../../src/actions/getIndicativePrice'; -import type { Memory, State, IAgentRuntime, HandlerCallback } from '@elizaos/core'; -import { generateObject } from '@elizaos/core'; -import { createClientV2 } from '@0x/swap-ts-sdk'; -import { EVMTokenRegistry } from '../../src/EVMtokenRegistry'; -import { Chains } from '../../src/types'; - -// Mock dependencies -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - debug: vi.fn(), - info: vi.fn(), - error: vi.fn(), - warn: vi.fn(), - }, - composeContext: vi.fn(), - generateObject: vi.fn().mockResolvedValue({ - object: { - sellTokenSymbol: 'ETH', - sellAmount: 1, - buyTokenSymbol: 'USDC', - chain: 'ethereum', - }, - }), - ModelClass: { - SMALL: 'SMALL', - }, - MemoryManager: { - create: vi.fn(), - }, -})); - -vi.mock('@0x/swap-ts-sdk', () => ({ - createClientV2: vi.fn().mockReturnValue({ - getIndicativePrice: vi.fn().mockResolvedValue({ - buyAmount: '1000000000000000000', - sellAmount: '1000000000000000000', - estimatedPriceImpact: '0.01', - grossPrice: '1', - sellTokenToEthRate: '1', - buyTokenToEthRate: '1', - permit2: { - permitData: {}, - }, - }), - }), -})); - -vi.mock('../../src/EVMtokenRegistry', () => ({ - EVMTokenRegistry: { - getInstance: vi.fn().mockReturnValue({ - isChainSupported: vi.fn().mockReturnValue(true), - initializeChain: vi.fn().mockResolvedValue(undefined), - getTokenBySymbol: vi.fn().mockImplementation((chain: string, symbol: string) => ({ - address: `0x${symbol}address`, - decimals: 18, - symbol, - })), - }), - }, -})); - -describe('GET_INDICATIVE_PRICE_0X Action', () => { - const mockRuntime: Required> = { - getSetting: vi.fn(), - composeState: vi.fn(), - updateRecentMessageState: vi.fn(), - }; - - const mockMessage: Required> = { - id: 'test-message-id', - content: { - sellTokenSymbol: 'ETH', - sellAmount: 1, - buyTokenSymbol: 'USDC', - chain: 'ethereum', - }, - }; - - const mockState: Required> = { - messages: [], - context: {}, - }; - - const mockCallback: HandlerCallback = vi.fn(); - - beforeEach(() => { - vi.clearAllMocks(); - - mockRuntime.getSetting.mockImplementation((key: string): string => { - const settings: Record = { - ZERO_EX_API_KEY: 'test-api-key', - }; - const value = settings[key]; - if (value === undefined) { - throw new Error(`Unexpected setting key: ${key}`); - } - return value; - }); - - mockRuntime.composeState.mockResolvedValue(mockState); - mockRuntime.updateRecentMessageState.mockResolvedValue(mockState); - }); - - describe('validate', () => { - it('should validate successfully with API key', async () => { - const result = await getIndicativePrice.validate(mockRuntime); - expect(result).toBe(true); - }); - - it('should fail validation without API key', async () => { - mockRuntime.getSetting.mockReturnValue(undefined); - const result = await getIndicativePrice.validate(mockRuntime); - expect(result).toBe(false); - }); - }); - - describe('handler', () => { - it('should get indicative price successfully', async () => { - const result = await getIndicativePrice.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(result).toBeDefined(); - expect(createClientV2).toHaveBeenCalledWith({ apiKey: 'test-api-key' }); - expect(mockCallback).toHaveBeenCalled(); - }); - - it('should handle invalid chain', async () => { - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - ...mockMessage.content, - chain: 'invalid-chain', - }, - }); - - await getIndicativePrice.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Unsupported chain'), - }); - }); - - it('should handle token not found', async () => { - vi.mocked(EVMTokenRegistry.getInstance).mockReturnValueOnce({ - isChainSupported: vi.fn().mockReturnValue(true), - initializeChain: vi.fn().mockResolvedValue(undefined), - getTokenBySymbol: vi.fn().mockReturnValue(null), - }); - - await getIndicativePrice.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('not found'), - }); - }); - - it('should handle 0x API error', async () => { - vi.mocked(createClientV2).mockReturnValueOnce({ - getIndicativePrice: vi.fn().mockRejectedValue(new Error('API Error')), - }); - - await getIndicativePrice.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Error getting price'), - content: expect.objectContaining({ - error: expect.any(String), - }), - })); - }); - }); -}); diff --git a/packages/plugin-0x/biome.json b/packages/plugin-0x/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-0x/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-0x/package.json b/packages/plugin-0x/package.json deleted file mode 100644 index 2312c4b329001..0000000000000 --- a/packages/plugin-0x/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "@elizaos/plugin-0x", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "lint": "biome check src/", - "lint:fix": "biome check --apply src/", - "format": "biome format src/", - "format:fix": "biome format --write src/" - }, - "dependencies": { - "@elizaos/core": "workspace:*", - "whatwg-url": "7.1.0", - "@0x/swap-ts-sdk": "2.1.1" - }, - "devDependencies": { - "tsup": "^8.0.1", - "@biomejs/biome": "1.5.3", - "vitest": "^2.1.5" - }, - "peerDependencies": { - "@elizaos/core": "workspace:*", - "zod": "^3.22.4" - } -} diff --git a/packages/plugin-0x/src/EVMtokenRegistry.ts b/packages/plugin-0x/src/EVMtokenRegistry.ts deleted file mode 100644 index 39075f71a5a75..0000000000000 --- a/packages/plugin-0x/src/EVMtokenRegistry.ts +++ /dev/null @@ -1,133 +0,0 @@ -import { elizaLogger } from "@elizaos/core"; -import { - Chains, - type TokenMetadata, - type TrustWalletGithubJson, - type TrustWalletTokenMetadata, -} from "./types"; -import { NATIVE_TOKENS } from "./constants"; - -export class EVMTokenRegistry { - private static instance: EVMTokenRegistry; - private chainTokenMaps: Map>; - private initializedChains: Set; - - private static CHAIN_NAMES: Record = Object.fromEntries( - Object.keys(Chains) - .map(name => [Chains[name as keyof typeof Chains], name.toLowerCase()]) - ); - - private constructor() { - this.chainTokenMaps = new Map(); - this.initializedChains = new Set(); - } - - public static getInstance(): EVMTokenRegistry { - if (!EVMTokenRegistry.instance) { - EVMTokenRegistry.instance = new EVMTokenRegistry(); - } - return EVMTokenRegistry.instance; - } - - private async fetchTokenList( - chainId: number - ): Promise { - const chainName = EVMTokenRegistry.CHAIN_NAMES[chainId]; - if (!chainName) { - throw new Error(`Unsupported chain ID: ${chainId}`); - } - - const url = `https://raw.githubusercontent.com/trustwallet/assets/master/blockchains/${chainName}/tokenlist.json`; - - try { - const response = await fetch(url); - if (!response.ok) { - throw new Error(`HTTP error! status: ${response.status}`); - } - const data: TrustWalletGithubJson = await response.json(); - return data.tokens; - } catch (error) { - elizaLogger.error( - `Failed to fetch token list for chain ${chainName}:`, - error - ); - throw error; - } - } - - public async initializeChain(chainId: number): Promise { - if (this.initializedChains.has(chainId)) return; - - const tokens = await this.fetchTokenList(chainId); - const tokenMap = new Map(); - - // Add native token first - const nativeToken = NATIVE_TOKENS[chainId]; - if (nativeToken) { - tokenMap.set(nativeToken.symbol.toUpperCase(), nativeToken); - } - - for (const token of tokens) { - const { pairs, ...restToken } = token; - tokenMap.set(token.symbol.toUpperCase(), { - chainId, - ...restToken, - }); - } - - this.chainTokenMaps.set(chainId, tokenMap); - // Only add to initializedChains if tokens were fetched successfully - if (tokenMap.size > 0) { - this.initializedChains.add(chainId); - } - } - - public getTokenBySymbol( - symbol: string, - chainId: number - ): TokenMetadata | undefined { - if (!EVMTokenRegistry.CHAIN_NAMES[chainId]) { - throw new Error(`Unsupported chain ID: ${chainId}`); - } - - const chainTokens = this.chainTokenMaps.get(chainId); - if (!chainTokens) return undefined; - - return chainTokens.get(symbol.toUpperCase()); - } - - public getTokenByAddress( - address: string, - chainId: number - ): TokenMetadata | undefined { - if (!EVMTokenRegistry.CHAIN_NAMES[chainId]) { - throw new Error(`Unsupported chain ID: ${chainId}`); - } - - const tokens = this.chainTokenMaps.get(chainId)?.values(); - if (!tokens) return undefined; - - const normalizedAddress = address.toLowerCase(); - for (const token of tokens) { - if (token.address.toLowerCase() === normalizedAddress) { - return token; - } - } - return undefined; - } - - public async getAllTokensForChain( - chainId: number - ): Promise { - if (!EVMTokenRegistry.CHAIN_NAMES[chainId]) { - throw new Error(`Unsupported chain ID: ${chainId}`); - } - - await this.initializeChain(chainId); - return Array.from(this.chainTokenMaps.get(chainId)?.values() ?? []); - } - - public isChainSupported(chainId: number): boolean { - return chainId in EVMTokenRegistry.CHAIN_NAMES; - } -} diff --git a/packages/plugin-0x/src/actions/getIndicativePrice.ts b/packages/plugin-0x/src/actions/getIndicativePrice.ts deleted file mode 100644 index 7a9a469d46ca3..0000000000000 --- a/packages/plugin-0x/src/actions/getIndicativePrice.ts +++ /dev/null @@ -1,294 +0,0 @@ -import { - type Action, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - composeContext, - ModelClass, - generateObject, - MemoryManager, -} from "@elizaos/core"; -import { createClientV2 } from "@0x/swap-ts-sdk"; -import { getIndicativePriceTemplate } from "../templates"; -import { z } from "zod"; -import { Chains, type GetIndicativePriceResponse, type PriceInquiry } from "../types"; -import { parseUnits } from "viem"; -import { CHAIN_NAMES, ZX_MEMORY } from "../constants"; -import { EVMTokenRegistry } from "../EVMtokenRegistry"; - -export const IndicativePriceSchema = z.object({ - sellTokenSymbol: z.string().nullable(), - sellAmount: z.number().nullable(), - buyTokenSymbol: z.string().nullable(), - chain: z.string().nullable(), -}); - -export interface IndicativePriceContent { - sellTokenSymbol: string; - sellAmount: number; - buyTokenSymbol: string; - chain: string; -} - -export const getIndicativePrice: Action = { - name: "GET_INDICATIVE_PRICE_0X", - similes: [], - suppressInitialMessage: true, - description: - "Get indicative price for a swap from 0x when user wants to convert their tokens", - validate: async (runtime: IAgentRuntime) => { - return !!runtime.getSetting("ZERO_EX_API_KEY"); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: Record, - callback: HandlerCallback - ) => { - const supportedChains = Object.keys(Chains).join(" | "); - - const localState = !state - ? await runtime.composeState(message, { supportedChains }) - : await runtime.updateRecentMessageState(state); - - const context = composeContext({ - state: localState, - template: getIndicativePriceTemplate, - }); - - const content = await generateObject({ - runtime, - context, - modelClass: ModelClass.SMALL, - schema: IndicativePriceSchema, - }); - - if (!isIndicativePriceContent(content.object)) { - const missingFields = getMissingIndicativePriceContent( - content.object - ); - callback({ - text: `Need more information about the swap. Please provide me ${missingFields}`, - }); - return; - } - - const { sellTokenSymbol, sellAmount, buyTokenSymbol, chain } = - content.object; - - // Convert chain string to chainId - const chainId = Chains[chain.toLowerCase() as keyof typeof Chains]; - if (!chainId) { - callback({ - text: `Unsupported chain: ${chain}. Supported chains are: ${Object.keys( - Chains - ) - .filter((k) => !Number.isNaN(Number(k))) - .join(", ")}`, - }); - return; - } - - const evmTokenRegistry = EVMTokenRegistry.getInstance(); - if (evmTokenRegistry.isChainSupported(chainId)) { - await evmTokenRegistry.initializeChain(chainId); - } else { - callback({ - text: `Chain ${chain} is not supported for token swaps.`, - }); - return; - } - - const sellTokenMetadata = evmTokenRegistry.getTokenBySymbol( - sellTokenSymbol, - chainId - ); - const buyTokenMetadata = evmTokenRegistry.getTokenBySymbol( - buyTokenSymbol, - chainId - ); - - if (!sellTokenMetadata || !buyTokenMetadata) { - const missingTokens = []; - if (!sellTokenMetadata) missingTokens.push(`'${sellTokenSymbol}'`); - if (!buyTokenMetadata) missingTokens.push(`'${buyTokenSymbol}'`); - - callback({ - text: `Token${missingTokens.length > 1 ? 's' : ''} ${missingTokens.join(' and ')} not found on ${chain}. Please check the token symbols and chain.`, - }); - return; - } - - elizaLogger.info("Getting indicative price for:", { - sellToken: sellTokenMetadata, - buyToken: buyTokenMetadata, - amount: sellAmount, - }); - - const zxClient = createClientV2({ - apiKey: runtime.getSetting("ZERO_EX_API_KEY"), - }); - - const sellAmountBaseUnits = parseUnits( - sellAmount.toString(), - sellTokenMetadata.decimals - ).toString(); - - try { - const price = (await zxClient.swap.permit2.getPrice.query({ - sellAmount: sellAmountBaseUnits, - sellToken: sellTokenMetadata.address, - buyToken: buyTokenMetadata.address, - chainId, - })) as GetIndicativePriceResponse; - - // Format amounts to human-readable numbers - const buyAmount = - Number(price.buyAmount) / - (10 ** buyTokenMetadata.decimals); - const sellAmount = - Number(price.sellAmount) / - (10 ** sellTokenMetadata.decimals); - - await storePriceInquiryToMemory(runtime, message, { - sellTokenObject: sellTokenMetadata, - buyTokenObject: buyTokenMetadata, - sellAmountBaseUnits, - chainId, - timestamp: new Date().toISOString(), - }); - - // Updated formatted response to include chain - const formattedResponse = [ - "💱 Swap Details:", - "────────────────", - `📤 Sell: ${sellAmount.toFixed(4)} ${sellTokenMetadata.symbol}`, - `📥 Buy: ${buyAmount.toFixed(4)} ${buyTokenMetadata.symbol}`, - `📊 Rate: 1 ${sellTokenMetadata.symbol} = ${(buyAmount / sellAmount).toFixed(4)} ${buyTokenMetadata.symbol}`, - `🔗 Chain: ${CHAIN_NAMES[chainId]}`, - "────────────────", - `💫 Happy with the price? Type 'quote' to continue`, - ].join("\n"); - - callback({ text: formattedResponse }); - return true; - } catch (error) { - elizaLogger.error("Error getting price:", error); - callback({ - text: `Error getting price: ${error.message || error}`, - content: { error: error.message || String(error) }, - }); - return false; - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "What's the price of 2 ETH in USDC on Optimism?", - }, - }, - { - user: "{{agent}}", - content: { - text: "Let me check the current exchange rate for ETH/USDC on Optimism.", - action: "GET_INDICATIVE_PRICE_0X", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "I want to swap WETH for USDT on Arbitrum", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you check the price. How much WETH would you like to swap?", - action: "GET_INDICATIVE_PRICE_0X", - }, - }, - { - user: "{{user1}}", - content: { - text: "5 WETH", - }, - }, - { - user: "{{agent}}", - content: { - text: "Let me get the indicative price for 5 WETH to USDT on Arbitrum.", - action: "GET_INDICATIVE_PRICE_0X", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Price check for 1000 USDC to WETH on Base", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll check the current exchange rate for 1000 USDC to WETH on Base network.", - action: "GET_INDICATIVE_PRICE_0X", - }, - }, - ], - ], -}; - -export const isIndicativePriceContent = ( - object: any -): object is IndicativePriceContent => { - if (IndicativePriceSchema.safeParse(object).success) { - return true; - } - return false; -}; - -export const getMissingIndicativePriceContent = ( - content: Partial -): string => { - const missingFields = []; - - if (typeof content.sellTokenSymbol !== "string") - missingFields.push("sell token"); - if (typeof content.buyTokenSymbol !== "string") - missingFields.push("buy token"); - if (typeof content.sellAmount !== "number") - missingFields.push("sell amount"); - - return missingFields.join(" and "); -}; - -export const storePriceInquiryToMemory = async ( - runtime: IAgentRuntime, - message: Memory, - priceInquiry: PriceInquiry -) => { - const memory: Memory = { - roomId: message.roomId, - userId: message.userId, - agentId: runtime.agentId, - content: { - text: JSON.stringify(priceInquiry), - type: ZX_MEMORY.price.type, - }, - }; - - const memoryManager = new MemoryManager({ - runtime, - tableName: ZX_MEMORY.price.tableName, - }); - - await memoryManager.createMemory(memory); -}; diff --git a/packages/plugin-0x/src/actions/getQuote.ts b/packages/plugin-0x/src/actions/getQuote.ts deleted file mode 100644 index d4df553fe7180..0000000000000 --- a/packages/plugin-0x/src/actions/getQuote.ts +++ /dev/null @@ -1,339 +0,0 @@ -import { - type Action, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - MemoryManager, -} from "@elizaos/core"; -import type { GetQuoteResponse, PriceInquiry, Quote } from "../types"; -import { formatTokenAmount } from "../utils"; -import { CHAIN_NAMES, NATIVE_TOKENS, ZX_MEMORY } from "../constants"; -import { createClientV2 } from "@0x/swap-ts-sdk"; -import { formatUnits } from "viem"; - -export const getQuote: Action = { - name: "GET_QUOTE_0X", - similes: [], - suppressInitialMessage: true, - description: - "Get a firm quote for a swap from 0x when user wants to execute a trade. This action is triggered only after user has requested for an indicative price.", - validate: async (runtime: IAgentRuntime) => { - return !!runtime.getSetting("ZERO_EX_API_KEY"); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State, - _options: Record, - callback: HandlerCallback - ) => { - const latestPriceInquiry = await retrieveLatestPriceInquiry( - runtime, - message - ); - if (!latestPriceInquiry) { - callback({ - text: "Please provide me the details of the swap.", - }); - return; - } - - const { - sellTokenObject, - sellAmountBaseUnits, - buyTokenObject, - chainId, - } = latestPriceInquiry; - - const zxClient = createClientV2({ - apiKey: runtime.getSetting("ZERO_EX_API_KEY"), - }); - - try { - const quote = (await zxClient.swap.permit2.getQuote.query({ - sellAmount: sellAmountBaseUnits, - sellToken: sellTokenObject.address, - buyToken: buyTokenObject.address, - chainId: chainId, - taker: runtime.getSetting("WALLET_PUBLIC_ADDRESS"), - })) as GetQuoteResponse; - - await storeQuoteToMemory(runtime, message, { - sellTokenObject, - buyTokenObject, - sellAmountBaseUnits, - chainId, - quote, - timestamp: new Date().toISOString(), - }); - - if (!quote.liquidityAvailable) { - callback({ - text: "No liquidity available for this swap. Please try again with a different token or amount.", - }); - return; - } - - const buyAmountBaseUnitsQuoted = formatUnits( - BigInt(quote.buyAmount), - buyTokenObject.decimals - ); - - const sellAmountBaseUnitsQuoted = formatUnits( - BigInt(quote.sellAmount), - sellTokenObject.decimals - ); - - const warnings = []; - if (quote.issues?.balance) { - warnings.push( - "⚠️ Warnings:", - ` • Insufficient balance (Have ${formatTokenAmount( - quote.issues.balance.actual, - quote.issues.balance.token, - chainId - )})` - ); - } - - const formattedResponse = [ - "🎯 Firm Quote Details:", - "────────────────", - // Basic swap details (same as price) - `📤 Sell: ${formatTokenAmount( - quote.sellAmount, - sellTokenObject.address, - chainId - )}`, - `📥 Buy: ${formatTokenAmount( - quote.buyAmount, - buyTokenObject.address, - chainId - )}`, - `📊 Rate: 1 ${sellTokenObject.symbol} = ${( - Number(buyAmountBaseUnitsQuoted) / - Number(sellAmountBaseUnitsQuoted) - ).toFixed(4)} ${buyTokenObject.symbol}`, - - // New information specific to quote - `💱 Minimum Buy Amount: ${formatTokenAmount( - quote.minBuyAmount, - quote.buyToken, - chainId - )}`, - - // Fee breakdown - "💰 Fees Breakdown:", - ` • 0x Protocol Fee: ${formatTokenAmount( - quote.fees.zeroExFee?.amount, - quote.fees.zeroExFee?.token, - chainId - )}`, - ` • Integrator Fee: ${formatTokenAmount( - quote.fees.integratorFee?.amount, - quote.fees.integratorFee?.token, - chainId - )}`, - ` • Network Gas Fee: ${ - quote.totalNetworkFee - ? formatTokenAmount( - quote.totalNetworkFee, - NATIVE_TOKENS[chainId].address, - chainId - ) - : "Will be estimated at execution" - }`, - - ...formatRouteInfo(quote), - - // Chain - `🔗 Chain: ${CHAIN_NAMES[chainId]}`, - - ...(warnings.length > 0 ? warnings : []), - - "────────────────", - "💫 Ready to execute? Type 'execute' to continue", - ] - .filter(Boolean) - .join("\n"); - - callback({ - text: formattedResponse, - }); - return true; - } catch (error) { - elizaLogger.error("Error getting quote:", error); - if (callback) { - callback({ - text: `Error getting quote: ${error.message}`, - content: { error: error.message || String(error) }, - }); - } - return false; - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Get me a quote for 500 USDC to WETH on Optimism", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll fetch a firm quote for swapping 500 USDC to WETH on Optimism.", - action: "GET_QUOTE_0X", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Quote for 2.5 WETH to USDT on Arbitrum please", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll get you a firm quote for swapping 2.5 WETH to USDT on Arbitrum.", - action: "GET_QUOTE_0X", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "quote 100 MATIC to USDC on Polygon", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll fetch a firm quote for swapping 100 MATIC to USDC on Polygon.", - action: "GET_QUOTE_0X", - }, - }, - ], - ], -}; - -// const formatTime = (time: string) => { -// const expirationDate = new Date(parseInt(time) * 1000); - -// // Format: "Mar 15, 2:30 PM" -// const formattedTime = expirationDate.toLocaleString(undefined, { -// month: "short", -// day: "numeric", -// hour: "numeric", -// minute: "2-digit", -// hour12: true, -// }); - -// return `${formattedTime}`; -// }; - -export const retrieveLatestPriceInquiry = async ( - runtime: IAgentRuntime, - message: Memory -): Promise => { - const memoryManager = new MemoryManager({ - runtime, - tableName: ZX_MEMORY.price.tableName, - }); - - try { - const memories = await memoryManager.getMemories({ - roomId: message.roomId, - count: 1, - start: 0, - end: Date.now(), - }); - - if (memories?.[0]) { - return JSON.parse(memories[0].content.text) as PriceInquiry; - } - return null; - } catch (error) { - elizaLogger.error("Failed to retrieve price inquiry:", error.message); - return null; - } -}; - -export const storeQuoteToMemory = async ( - runtime: IAgentRuntime, - message: Memory, - quote: Quote -) => { - const memory: Memory = { - roomId: message.roomId, - userId: message.userId, - agentId: runtime.agentId, - content: { - text: JSON.stringify(quote), - type: ZX_MEMORY.quote.type, - }, - }; - - const memoryManager = new MemoryManager({ - runtime, - tableName: ZX_MEMORY.quote.tableName, - }); - - await memoryManager.createMemory(memory); -}; - -/** - * @returns example: - * 🛣️ Route: - * WETH → DAI → LINK - * • WETH → DAI: 100% via Uniswap_V3 - * • DAI → LINK: 14.99% via Uniswap_V3, 85.01% via Uniswap_V3 - */ - -export const formatRouteInfo = (quote: GetQuoteResponse): string[] => { - if (!quote.route.tokens || !quote.route.fills) { - return []; - } - // Get unique route path - const routeTokens = quote.route.tokens; - const routePath = routeTokens.map((t) => t.symbol).join(" → "); - - // Group fills by token pairs - const fillsByPair = quote.route.fills.reduce((acc, fill) => { - const key = `${fill.from}-${fill.to}`; - if (!acc[key]) acc[key] = []; - acc[key].push(fill); - return acc; - }, {} as Record); - - // Format each pair's route details - const routeDetails = Object.entries(fillsByPair).map(([pair, fills]) => { - const [fromAddr, toAddr] = pair.split("-"); - const from = routeTokens.find( - (t) => t.address.toLowerCase() === fromAddr.toLowerCase() - )?.symbol; - const to = routeTokens.find( - (t) => t.address.toLowerCase() === toAddr.toLowerCase() - )?.symbol; - - if (fills.length === 1) { - return ` • ${from} → ${to}: ${ - Number(fills[0].proportionBps) / 100 - }% via ${fills[0].source}`; - } - return [ - ` • ${from} → ${to}:`, - ...fills.map( - (f) => `${Number(f.proportionBps) / 100}% via ${f.source}` - ), - ].join(", "); - }); - - return ["🛣️ Route:", routePath, ...routeDetails]; -}; diff --git a/packages/plugin-0x/src/actions/swap.ts b/packages/plugin-0x/src/actions/swap.ts deleted file mode 100644 index 2183d8be3d9d1..0000000000000 --- a/packages/plugin-0x/src/actions/swap.ts +++ /dev/null @@ -1,187 +0,0 @@ -import { - type Action, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - MemoryManager, -} from "@elizaos/core"; -import { type Hex, numberToHex, concat } from "viem"; -import { CHAIN_EXPLORERS, ZX_MEMORY } from "../constants"; -import { getWalletClient } from "../hooks.ts/useGetWalletClient"; -import type { Quote } from "../types"; - -export const swap: Action = { - name: "EXECUTE_SWAP_0X", - similes: [ - "SWAP_TOKENS_0X", - "TOKEN_SWAP_0X", - "TRADE_TOKENS_0X", - "EXCHANGE_TOKENS_0X", - ], - suppressInitialMessage: true, - description: "Execute a token swap using 0x protocol", - validate: async (runtime: IAgentRuntime) => { - return ( - !!runtime.getSetting("ZERO_EX_API_KEY") && - !!runtime.getSetting("WALLET_PRIVATE_KEY") - ); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State, - _options: Record, - callback: HandlerCallback - ) => { - const latestQuote = await retrieveLatestQuote(runtime, message); - if (!latestQuote) { - callback({ - text: "Please provide me the details of the swap. E.g. convert 000.1 Weth to USDC on Ethereum chain", - }); - return; - } - - const { quote, chainId } = latestQuote; - - try { - const client = getWalletClient(chainId); // 1 for mainnet, or pass chainId - - // 1. Handle Permit2 signature - let signature: Hex | undefined; - if (quote.permit2?.eip712) { - signature = await client.signTypedData({ - account: client.account, - ...quote.permit2.eip712, - }); - - if (signature && quote.transaction?.data) { - const sigLengthHex = numberToHex(signature.length, { - size: 32, - }) as Hex; - quote.transaction.data = concat([ - quote.transaction.data as Hex, - sigLengthHex, - signature, - ]); - } - } - - const nonce = await client.getTransactionCount({ - address: (client.account as { address: `0x${string}` }).address, - }); - - const txHash = await client.sendTransaction({ - account: client.account, - chain: client.chain, - gas: quote?.transaction.gas - ? BigInt(quote?.transaction.gas) - : undefined, - to: quote?.transaction.to as `0x${string}`, - data: quote.transaction.data as `0x${string}`, - value: BigInt(quote.transaction.value), - gasPrice: quote?.transaction.gasPrice - ? BigInt(quote?.transaction.gasPrice) - : undefined, - nonce: nonce, - kzg: undefined, - }); - - // Wait for transaction confirmation - const receipt = await client.waitForTransactionReceipt({ - hash: txHash, - }); - - if (receipt.status === "success") { - callback({ - text: `✅ Swap executed successfully!\nView on Explorer: ${CHAIN_EXPLORERS[chainId]}/tx/${txHash}`, - content: { hash: txHash, status: "success" }, - }); - return true; - } - callback({ - text: `❌ Swap failed! Check transaction: ${CHAIN_EXPLORERS[chainId]}/tx/${txHash}`, - content: { hash: txHash, status: "failed" }, - }); - return false; - } catch (error) { - elizaLogger.error("Swap execution failed:", error); - callback({ - text: `❌ Failed to execute swap: ${error.message || error}`, - content: { error: error.message || String(error) }, - }); - return false; - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "I want to swap 1 ETH for USDC", - }, - }, - { - user: "{{agent}}", - content: { - text: "Let me get you a quote for that swap.", - action: "GET_INDICATE_PRICE_0X", - }, - }, - { - user: "{{user1}}", - content: { - text: "Get the quote for 1 ETH for USDC on Ethereum chain", - }, - }, - { - user: "{{agent}}", - content: { - text: "Let me get you the quotefor 1 ETH for USDC on Ethereum chain", - action: "GET_QUOTE_0X", - }, - }, - { - user: "{{user1}}", - content: { - text: "execute the swap", - }, - }, - { - user: "{{agent}}", - content: { - text: "Let me execute the swap for you.", - action: "EXECUTE_SWAP_0X", - }, - }, - ], - ], -}; - -export const retrieveLatestQuote = async ( - runtime: IAgentRuntime, - message: Memory -): Promise => { - const memoryManager = new MemoryManager({ - runtime, - tableName: ZX_MEMORY.quote.tableName, - }); - - try { - const memories = await memoryManager.getMemories({ - roomId: message.roomId, - count: 1, - start: 0, - end: Date.now(), - }); - - if (memories?.[0]) { - return JSON.parse(memories[0].content.text) as Quote; - } - return null; - } catch (error) { - elizaLogger.error(`Failed to retrieve quote: ${error.message}`); - return null; - } -}; diff --git a/packages/plugin-0x/src/constants.ts b/packages/plugin-0x/src/constants.ts deleted file mode 100644 index f161bbb07295b..0000000000000 --- a/packages/plugin-0x/src/constants.ts +++ /dev/null @@ -1,143 +0,0 @@ -import { Chains, type TokenMetadata } from "./types"; - -export const ZX_MEMORY = { - price: { - tableName: "0x_prices", - type: "price_inquiry", - }, - quote: { - tableName: "0x_quotes", - type: "quote", - }, -}; - -export const CHAIN_NAMES: Record = { - [Chains.ethereum]: "Ethereum", - [Chains.optimism]: "Optimism", - [Chains.bsc]: "BSC", - [Chains.polygon]: "Polygon", - [Chains.base]: "Base", - [Chains.arbitrum]: "Arbitrum", - [Chains.avalanche]: "Avalanche", - [Chains.linea]: "Linea", - [Chains.scroll]: "Scroll", - [Chains.blast]: "Blast", -} as const; - -export const CHAIN_EXPLORERS: Record = { - [Chains.ethereum]: "https://etherscan.io", - [Chains.optimism]: "https://optimistic.etherscan.io", - [Chains.bsc]: "https://bscscan.com", - [Chains.polygon]: "https://polygonscan.com", - [Chains.base]: "https://basescan.org", - [Chains.arbitrum]: "https://arbiscan.io", - [Chains.avalanche]: "https://snowtrace.io", - [Chains.linea]: "https://lineascan.build", - [Chains.scroll]: "https://scrollscan.com", - [Chains.blast]: "https://blastscan.io", -} as const; - -export const NATIVE_TOKEN_ADDRESS = - "0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE"; -export const NATIVE_TOKENS: Record = { - [Chains.ethereum]: { - chainId: Chains.ethereum, - name: "Ethereum", - symbol: "ETH", - decimals: 18, - address: NATIVE_TOKEN_ADDRESS, - type: "NATIVE", - logoURI: - "https://raw.githubusercontent.com/trustwallet/assets/master/blockchains/ethereum/info/logo.png", - }, - [Chains.optimism]: { - chainId: Chains.optimism, - name: "Ethereum", - symbol: "ETH", - decimals: 18, - address: NATIVE_TOKEN_ADDRESS, - type: "NATIVE", - logoURI: - "https://raw.githubusercontent.com/trustwallet/assets/master/blockchains/optimism/info/logo.png", - }, - [Chains.bsc]: { - chainId: Chains.bsc, - name: "BNB Chain", - symbol: "BNB", - decimals: 18, - address: NATIVE_TOKEN_ADDRESS, - type: "NATIVE", - logoURI: - "https://raw.githubusercontent.com/trustwallet/assets/master/blockchains/binance/info/logo.png", - }, - [Chains.polygon]: { - chainId: Chains.polygon, - name: "Polygon", - symbol: "MATIC", - decimals: 18, - address: NATIVE_TOKEN_ADDRESS, - type: "NATIVE", - logoURI: - "https://raw.githubusercontent.com/trustwallet/assets/master/blockchains/polygon/info/logo.png", - }, - [Chains.base]: { - chainId: Chains.base, - name: "Ethereum", - symbol: "ETH", - decimals: 18, - address: NATIVE_TOKEN_ADDRESS, - type: "NATIVE", - logoURI: - "https://raw.githubusercontent.com/trustwallet/assets/master/blockchains/base/info/logo.png", - }, - [Chains.arbitrum]: { - chainId: Chains.arbitrum, - name: "Ethereum", - symbol: "ETH", - decimals: 18, - address: NATIVE_TOKEN_ADDRESS, - type: "NATIVE", - logoURI: - "https://raw.githubusercontent.com/trustwallet/assets/master/blockchains/arbitrum/info/logo.png", - }, - [Chains.avalanche]: { - chainId: Chains.avalanche, - name: "Avalanche", - symbol: "AVAX", - decimals: 18, - address: NATIVE_TOKEN_ADDRESS, - type: "NATIVE", - logoURI: - "https://raw.githubusercontent.com/trustwallet/assets/master/blockchains/avalanchec/info/logo.png", - }, - [Chains.linea]: { - chainId: Chains.linea, - name: "Ethereum", - symbol: "ETH", - decimals: 18, - address: NATIVE_TOKEN_ADDRESS, - type: "NATIVE", - logoURI: - "https://raw.githubusercontent.com/trustwallet/assets/master/blockchains/linea/info/logo.png", - }, - [Chains.scroll]: { - chainId: Chains.scroll, - name: "Ethereum", - symbol: "ETH", - decimals: 18, - address: NATIVE_TOKEN_ADDRESS, - type: "NATIVE", - logoURI: - "https://raw.githubusercontent.com/trustwallet/assets/master/blockchains/scroll/info/logo.png", - }, - [Chains.blast]: { - chainId: Chains.blast, - name: "Ethereum", - symbol: "ETH", - decimals: 18, - address: NATIVE_TOKEN_ADDRESS, - type: "NATIVE", - logoURI: - "https://raw.githubusercontent.com/trustwallet/assets/master/blockchains/blast/info/logo.png", - }, -}; diff --git a/packages/plugin-0x/src/hooks.ts/useGetWalletClient.ts b/packages/plugin-0x/src/hooks.ts/useGetWalletClient.ts deleted file mode 100644 index 25516a3808dd1..0000000000000 --- a/packages/plugin-0x/src/hooks.ts/useGetWalletClient.ts +++ /dev/null @@ -1,106 +0,0 @@ -import { - createWalletClient, - http, - publicActions, - createTestClient, - type WalletClient, - type PublicClient, - walletActions, -} from "viem"; - -import { - arbitrum, - avalanche, - base, - blast, - bsc, - hardhat, - linea, - mainnet, - optimism, - polygon, - scroll, -} from "viem/chains"; -import { privateKeyToAccount } from "viem/accounts"; - -const CHAIN_CONFIG = { - 1: { - chain: mainnet, - rpcUrl: process.env.ETH_RPC_URL, - }, - 10: { - chain: optimism, - rpcUrl: process.env.OPTIMISM_RPC_URL, - }, - 56: { - chain: bsc, - rpcUrl: process.env.BSC_RPC_URL, - }, - 137: { - chain: polygon, - rpcUrl: process.env.POLYGON_RPC_URL, - }, - 8453: { - chain: base, - rpcUrl: process.env.BASE_RPC_URL, - }, - 42161: { - chain: arbitrum, - rpcUrl: process.env.ARBITRUM_RPC_URL, - }, - 43114: { - chain: avalanche, - rpcUrl: process.env.AVALANCHE_RPC_URL, - }, - 59144: { - chain: linea, - rpcUrl: process.env.LINEA_RPC_URL, - }, - 534352: { - chain: scroll, - rpcUrl: process.env.SCROLL_RPC_URL, - }, - 81457: { - chain: blast, - rpcUrl: process.env.BLAST_RPC_URL, - }, -} as const; - -export const getWalletClient = ( - chainId: number -): WalletClient & PublicClient => { - const rawPrivateKey = process.env.WALLET_PRIVATE_KEY; - if (!rawPrivateKey) { - throw new Error("Wallet private key is required"); - } - if (!/^(0x)?[0-9a-fA-F]{64}$/.test(rawPrivateKey)) { - throw new Error("Invalid private key format"); - } - const privateKey = rawPrivateKey.startsWith("0x") - ? (rawPrivateKey as `0x${string}`) - : (`0x${rawPrivateKey}` as `0x${string}`); - - const account = privateKeyToAccount(privateKey); - - if (process.env.NODE_ENV === "development") { - return createTestClient({ - chain: hardhat, - transport: http(), - mode: "hardhat", - account: privateKeyToAccount( - process.env.WALLET_PRIVATE_KEY as `0x${string}` - ), - }) - .extend(walletActions) - .extend(publicActions) as WalletClient & PublicClient; - } - - const config = CHAIN_CONFIG[chainId as keyof typeof CHAIN_CONFIG]; - if (!config) throw new Error(`Chain ID ${chainId} not supported by 0x`); - - return createWalletClient({ - chain: config.chain, - transport: http(config.rpcUrl), - account, - }).extend(publicActions) as WalletClient & PublicClient; -}; diff --git a/packages/plugin-0x/src/index.ts b/packages/plugin-0x/src/index.ts deleted file mode 100644 index a8fd147113744..0000000000000 --- a/packages/plugin-0x/src/index.ts +++ /dev/null @@ -1,18 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { getIndicativePrice } from "./actions/getIndicativePrice"; -import { getQuote } from "./actions/getQuote"; -import { swap } from "./actions/swap"; - -export const zxPlugin: Plugin = { - name: "0x", - description: "0x Plugin for Eliza", - actions: [ - getIndicativePrice, - getQuote, - swap, - ], - evaluators: [], - providers: [], -}; - -export default zxPlugin; diff --git a/packages/plugin-0x/src/templates.ts b/packages/plugin-0x/src/templates.ts deleted file mode 100644 index 980a92aaa87d7..0000000000000 --- a/packages/plugin-0x/src/templates.ts +++ /dev/null @@ -1,64 +0,0 @@ -export const getIndicativePriceTemplate = ` - You are helping users get indicative prices for token swaps across different chains. - - Extract the following information: - - sellToken: The token the user wants to sell (e.g., ETH, WETH, USDC) - - buyToken: The token the user wants to receive (e.g., USDC, WETH, USDT) - - sellAmount: The amount of tokens to sell (numeric value only) - - chain: The blockchain network for the swap (e.g., ethereum, optimism, arbitrum, base) - - Return in JSON format: - { - "sellTokenSymbol": "", - "buyTokenSymbol": "", - "sellAmount": "" - "chain": {{supportedChains}} - } - - Examples: - "What's the price of 2 ETH in USDC on Optimism?" - { - sellTokenSymbol: "ETH", - buyTokenSymbol: "USDC", - sellAmount: 2, - chain: "optimism" - } - - "I want to swap 1000 USDC to WETH on Base" - { - sellTokenSymbol: "USDC", - buyTokenSymbol: "WETH", - sellAmount: 1000, - chain: "base" - } - - Notes: - - If the chain is not specified, assume it's "ethereum". - - If you are unsure, just return null for the missing fields. - - Recent conversation: - {{recentMessages}} -`; - -export const getQuoteTemplate = `Look at the recent conversation and extract the quote details. - -Extract: -- Which token the user wants to sell (sellToken) -- Which token the user wants to buy (buyToken) -- How much they want to sell (sellAmount) If amount is not specified, return null for sellAmount - -For example: -"I want to convert 5 WETH to USDC" -> { "sellToken": "WETH", "buyToken": "USDC", "sellAmount": "5" } -"Convert 100 LINK to USDC" -> { "sellToken": "LINK", "buyToken": "USDC", "sellAmount": "100" } -"How much DAI can I get for 100 USDC?" -> { "sellToken": "USDC", "buyToken": "DAI", "sellAmount": "100" } -"WETH/USDT price?" -> { "sellToken": "WETH", "buyToken": "USDT", "sellAmount": null } - -Return in JSON format: -{ - "sellToken": "", - "buyToken": "", - "sellAmount": "" -} - -Recent conversation: -{{recentMessages}}`; diff --git a/packages/plugin-0x/src/types.ts b/packages/plugin-0x/src/types.ts deleted file mode 100644 index 20dd484c85388..0000000000000 --- a/packages/plugin-0x/src/types.ts +++ /dev/null @@ -1,225 +0,0 @@ -export enum Chains { - arbitrum = 42161, - avalanche = 43114, - base = 8453, - bsc = 56, - blast = 81457, - ethereum = 1, - linea = 59144, - optimism = 10, - polygon = 137, - scroll = 534352, -} - -export interface SwapRequestParams { - chainId: string; // e.g., "1" for Ethereum mainnet - sellToken: string; // token address to sell - buyToken: string; // token address to buy - sellAmount: string; // amount in wei - taker: string; // wallet address - slippagePercentage?: string; // optional, e.g., "0.01" -} - -export interface GetIndicativePriceResponse { - chainId: number; - price: string; - buyAmount: string; - buyToken: string; - sellAmount: string; - sellToken: string; - blockNumber: string; - estimatedPriceImpact: string; - estimatedGas: string; - totalNetworkFee: string; - route: { - tokens: Array<{ - address: string; - symbol: string; - name: string; - decimals: number; - }>; - fills: Array<{ - source: string; - proportionBps: string; - from: string; - to: string; - }>; - }; - fees: { - zeroExFee: { - amount: string; - token: string; - type: "volume"; - } | null; - integratorFee: { - amount: string; - token: string; - type: "volume"; - } | null; - gasFee: { - amount: string; - token: string; - type: "volume"; - } | null; - }; - issues?: { - balance?: { - token: string; - actual: string; - expected: string; - }; - allowance?: { - token: string; - actual: string; - expected: string; - }; - }; - permit2: { - type: "Permit2"; - hash: string; - eip712: { - types: { - PermitTransferFrom: Array<{ name: string; type: string }>; - TokenPermissions: Array<{ name: string; type: string }>; - EIP712Domain: Array<{ name: string; type: string }>; - }; - domain: { - name: string; - chainId: number; - verifyingContract: string; - }; - message: { - permitted: { - token: string; - amount: string; - }; - spender: string; - nonce: string; - deadline: string; - }; - primaryType: string; - }; - }; -} - -export interface GetQuoteResponse { - blockNumber: string; - buyAmount: string; - buyToken: string; - sellAmount: string; - sellToken: string; - minBuyAmount: string; - liquidityAvailable: boolean; - totalNetworkFee: string; - zid: string; - fees: { - zeroExFee: { - amount: string; - token: string; - type: string; - } | null; - integratorFee: { - amount: string; - token: string; - type: string; - } | null; - gasFee: { - amount: string; - token: string; - type: string; - } | null; - }; - issues: { - allowance: null; - balance: { - token: string; - actual: string; - expected: string; - } | null; - simulationIncomplete: boolean; - invalidSourcesPassed: string[]; - }; - permit2: { - type: "Permit2"; - hash: string; - eip712: { - types: Record; - domain: Record; - message: Record; - primaryType: string; - }; - }; - route: { - fills: Array<{ - from: string; - to: string; - source: string; - proportionBps: string; - }>; - tokens: Array<{ - address: string; - symbol: string; - }>; - }; - tokenMetadata: { - buyToken: { - buyTaxBps: string; - sellTaxBps: string; - }; - sellToken: { - buyTaxBps: string; - sellTaxBps: string; - }; - }; - transaction: { - to: string; - data: string; - gas: string; - gasPrice: string; - value: string; - }; -} - -export interface TokenMetadata { - chainId: number; - address: string; - name: string; - symbol: string; - decimals: number; - logoURI?: string; - type: string; -} - -export interface TrustWalletTokenMetadata { - address: string; - name: string; - symbol: string; - decimals: number; - logoURI?: string; - type: string; - pairs: string[]; -} - -export interface TrustWalletGithubJson { - name: string; - logoURI: string; - timestamp: string; - tokens: TrustWalletTokenMetadata[]; -} - -export interface PriceInquiry { - sellTokenObject: TokenMetadata; - buyTokenObject: TokenMetadata; - sellAmountBaseUnits: string; - chainId: number; - timestamp: string; -} - -export interface Quote { - sellTokenObject: TokenMetadata; - buyTokenObject: TokenMetadata; - sellAmountBaseUnits: string; - chainId: number; - quote: GetQuoteResponse; - timestamp: string; -} diff --git a/packages/plugin-0x/src/utils.ts b/packages/plugin-0x/src/utils.ts deleted file mode 100644 index 0cbd10c0ad21c..0000000000000 --- a/packages/plugin-0x/src/utils.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { formatUnits, Hash } from "viem"; -import { EVMTokenRegistry } from "./EVMtokenRegistry"; -import { IAgentRuntime } from "@elizaos/core"; - -/** - * Formats a token amount with its symbol - * @param amount The amount in base units (e.g., wei) - * @param address The token address - * @param chainId The chain ID (defaults to 1 for Ethereum mainnet) - * @returns Formatted string like "1.234567 USDC" - */ -export function formatTokenAmount( - amount: string, - address: string, - chainId: number = 1 -): string { - if (!amount) return "0"; - - const tokenRegistry = EVMTokenRegistry.getInstance(); - const token = tokenRegistry.getTokenByAddress(address, chainId); - - if (!token) throw new Error(`Token not found for address: ${address}`); - - const parsedAmount = formatUnits(BigInt(amount), token.decimals); - return `${Number(parsedAmount).toFixed(4)} ${token.symbol}`; -} diff --git a/packages/plugin-0x/tsconfig.json b/packages/plugin-0x/tsconfig.json deleted file mode 100644 index 74ac4ce5bb76a..0000000000000 --- a/packages/plugin-0x/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": ".", - "types": [ - "node" - ] - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/plugin-0x/tsup.config.ts b/packages/plugin-0x/tsup.config.ts deleted file mode 100644 index 61b3ab19b13b0..0000000000000 --- a/packages/plugin-0x/tsup.config.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: ["dotenv", "fs", "path", "https", "http"], -}); diff --git a/packages/plugin-0x/vitest.config.ts b/packages/plugin-0x/vitest.config.ts deleted file mode 100644 index dcc2f3dd98c53..0000000000000 --- a/packages/plugin-0x/vitest.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - // Enable Jest-like globals (describe, it, expect) - globals: true, - - // Environment setup - environment: 'node', - - // Test file patterns - include: ['__tests__/**/*.{test,spec}.{js,mjs,cjs,ts,mts,cts,jsx,tsx}'], - exclude: ['**/node_modules/**', '**/dist/**'], - - // TypeScript configuration - typecheck: { - tsconfig: './tsconfig.json', - include: ['**/*.{test,spec}.{ts,tsx}'], - }, - }, -}); diff --git a/packages/plugin-3d-generation/.npmignore b/packages/plugin-3d-generation/.npmignore deleted file mode 100644 index a9227d220f6d8..0000000000000 --- a/packages/plugin-3d-generation/.npmignore +++ /dev/null @@ -1,7 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts -!tsconfig.json \ No newline at end of file diff --git a/packages/plugin-3d-generation/README.md b/packages/plugin-3d-generation/README.md deleted file mode 100644 index 51e50349a74c7..0000000000000 --- a/packages/plugin-3d-generation/README.md +++ /dev/null @@ -1,221 +0,0 @@ -# @elizaos/plugin-3d-generation - -A plugin for generating 3D models using the FAL.ai API within the ElizaOS ecosystem. - -## Description - -The 3D Generation plugin enables AI-powered creation of 3D models through FAL.ai's services. It provides functionality to generate 3D models from text descriptions and save them locally. - -## Installation - -```bash -pnpm install @elizaos/plugin-3d-generation -``` - -## Configuration - -The plugin requires the following environment variable or runtime setting to be set: - -```typescript -FAL_API_KEY= -``` - -## Usage - -### Basic Integration - -```typescript -import { ThreeDGenerationPlugin } from "@elizaos/plugin-3d-generation"; -``` - -### Model Generation Examples - -```typescript -// The plugin responds to natural language commands like: - -"Generate a 3D object of a cat playing piano"; -"Create a 3D object of an anime character Goku"; -"Make a 3D model of [your description]"; -``` - -## API Reference - -### Actions - -#### GENERATE_3D - -Generates 3D models based on text descriptions. - -**Aliases:** - -- 3D_GENERATION -- 3D_GEN -- CREATE_3D -- MAKE_3D -- TEXT23D -- TEXT_TO_3D -- 3D_CREATE -- 3D_MAKE - -**Default Configuration:** - -```typescript -{ - geometry_file_format: "glb", // Available: glb, usdz, fbx, obj, stl - material: "PBR", // Available: PBR, Shaded - quality: "medium", // Available: extra-low, low, medium, high - tier: "Regular" // Available: Regular, Sketch -} -``` - -## Common Issues & Troubleshooting - -1. **Generation Failures** - - - Verify FAL API key is correctly set - - Ensure prompt is descriptive (minimum 3 characters) - - Check network connectivity to FAL.ai services - -2. **Storage Issues** - - Verify write permissions to content_cache directory - - Ensure sufficient disk space - - Check if content_cache directory exists - -## Security Best Practices - -1. **API Key Management** - - Store FAL API key securely using runtime settings or environment variables - - Never commit API keys to version control - - Monitor API usage - -## Development Guide - -### Setting Up Development Environment - -1. Clone the repository -2. Install dependencies: - -```bash -pnpm install -``` - -3. Build the plugin: - -```bash -pnpm run build -``` - -4. Run the plugin: - -```bash -pnpm run dev -``` - -## Future Enhancements - -1. **Advanced Generation Features** - - - Multi-object scene generation - - Texture customization options - - Animation support - - Material property controls - - Advanced lighting systems - - Physics-based rendering - -2. **Model Optimization** - - - Automatic mesh simplification - - LOD (Level of Detail) generation - - Texture compression - - File size optimization - - Performance profiling - - Mobile-friendly exports - -3. **Format Support** - - - Additional file format exports - - Custom format converters - - Batch format conversion - - Format-specific optimizations - - Metadata preservation - - Version control integration - -4. **AI Improvements** - - - Enhanced prompt understanding - - Style transfer capabilities - - Real-time generation - - Multi-model support - - Quality improvements - - Consistency controls - -5. **Scene Management** - - - Scene composition tools - - Environment management - - Asset library integration - - Scene presets - - Batch processing - - Scene version control - -6. **Developer Tools** - - - API expansion - - Testing framework - - Documentation generator - - Debug visualization - - Performance monitoring - - Integration templates - -7. **Rendering Features** - - - Real-time preview - - Custom shader support - - Post-processing effects - - Render queue management - - Batch rendering - - Cloud rendering options - -8. **Collaboration Features** - - Asset sharing - - Version control - - Team workspace - - Review system - - Access control - - Change tracking - -We welcome community feedback and contributions to help prioritize these enhancements. - -## Contributing - -Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information. - -## Credits - -This plugin integrates with and builds upon several key technologies: - -- [FAL.ai](https://fal.ai/): AI model deployment platform -- [Three.js](https://threejs.org/): 3D graphics library -- [glTF](https://www.khronos.org/gltf/): 3D file format standard -- [USD](https://graphics.pixar.com/usd/): Universal Scene Description -- [Blender](https://www.blender.org/): 3D creation suite - -Special thanks to: - -- The FAL.ai team for AI infrastructure -- The Three.js development community -- The Khronos Group for glTF standards -- The Pixar USD team -- The Blender Foundation -- The Eliza community for their contributions and feedback - -For more information about 3D generation capabilities: - -- [FAL.ai Documentation](https://fal.ai/docs) -- [Three.js Documentation](https://threejs.org/docs/) -- [glTF Specification](https://github.com/KhronosGroup/glTF) -- [USD Documentation](https://graphics.pixar.com/usd/docs/index.html) - -## License - -This plugin is part of the Eliza project. See the main project repository for license information. diff --git a/packages/plugin-3d-generation/__tests__/actions/generate3d.test.ts b/packages/plugin-3d-generation/__tests__/actions/generate3d.test.ts deleted file mode 100644 index a5e4f90a834e6..0000000000000 --- a/packages/plugin-3d-generation/__tests__/actions/generate3d.test.ts +++ /dev/null @@ -1,225 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { ThreeDGenerationPlugin } from '../../src'; -import type { Memory, State, IAgentRuntime, HandlerCallback } from '@elizaos/core'; -import { elizaLogger } from '@elizaos/core'; -import { fal } from '@fal-ai/client'; -import * as fs from 'fs'; -import * as path from 'path'; -import * as crypto from 'crypto'; - -// Mock external dependencies -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - log: vi.fn(), - error: vi.fn(), - }, -})); - -vi.mock('@fal-ai/client', () => ({ - fal: { - subscribe: vi.fn(), - }, -})); - -vi.mock('fs', () => ({ - existsSync: vi.fn(), - mkdirSync: vi.fn(), - writeFileSync: vi.fn(), -})); - -vi.mock('path', () => ({ - dirname: vi.fn().mockReturnValue('content_cache'), -})); - -vi.mock('crypto', () => ({ - randomUUID: vi.fn().mockReturnValue('db98fb20-1f1f-4017-8314-7cc61e66c4e6'), -})); - -// Get the ThreeDGeneration action from the plugin -const ThreeDGeneration = ThreeDGenerationPlugin.actions[0]; - -describe('ThreeDGeneration Action', () => { - const mockRuntime: Required> = { - getSetting: vi.fn(), - }; - - const mockMessage: Required> = { - id: 'test-message-id', - content: { - text: 'Generate a 3D model of a cute cat', - }, - }; - - const mockState: Required> = { - messages: [], - context: {}, - }; - - const mockCallback: HandlerCallback = vi.fn(); - - beforeEach(() => { - vi.clearAllMocks(); - global.fetch = vi.fn(); - mockRuntime.getSetting.mockReturnValue('test-fal-api-key'); - }); - - describe('validate', () => { - it('should validate successfully with API key', async () => { - const result = await ThreeDGeneration.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - expect(elizaLogger.log).toHaveBeenCalledWith('FAL_API_KEY present:', true); - }); - - it('should fail validation without API key', async () => { - mockRuntime.getSetting.mockReturnValue(undefined); - const result = await ThreeDGeneration.validate(mockRuntime, mockMessage); - expect(result).toBe(false); - expect(elizaLogger.log).toHaveBeenCalledWith('FAL_API_KEY present:', false); - }); - }); - - describe('handler', () => { - beforeEach(() => { - vi.mocked(fal.subscribe).mockResolvedValue({ - data: { - model_mesh: { - url: 'https://example.com/3d-model.glb', - file_name: 'model.glb', - }, - }, - }); - - vi.mocked(global.fetch).mockResolvedValue({ - arrayBuffer: vi.fn().mockResolvedValue(new ArrayBuffer(8)), - } as unknown as Response); - }); - - it('should handle successful 3D generation', async () => { - vi.mocked(fs.existsSync).mockReturnValue(false); - - await ThreeDGeneration.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - // Check initial message - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('I\'ll generate a 3D object based on your prompt'), - }); - - // Verify FAL API call - expect(fal.subscribe).toHaveBeenCalledWith(expect.any(String), { - input: expect.objectContaining({ - prompt: expect.stringContaining('cute cat'), - }), - logs: true, - onQueueUpdate: expect.any(Function), - }); - - // Verify file handling - expect(fs.mkdirSync).toHaveBeenCalledWith('content_cache', { recursive: true }); - expect(fs.writeFileSync).toHaveBeenCalled(); - - // Verify final callback - expect(mockCallback).toHaveBeenLastCalledWith( - expect.objectContaining({ - text: 'Here\'s your generated 3D object!', - attachments: [expect.objectContaining({ - url: 'https://example.com/3d-model.glb', - title: 'Generated 3D', - source: 'ThreeDGeneration', - description: expect.stringContaining('cute cat'), - text: expect.stringContaining('cute cat'), - })], - }), - ['content_cache/generated_3d_model.glb'] - ); - }); - - it('should handle empty or short prompts', async () => { - const shortMessage: Memory = { - id: 'test-message-id', - content: { - text: 'hi', - }, - }; - - await ThreeDGeneration.handler( - mockRuntime, - shortMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Could you please provide more details'), - }); - expect(fal.subscribe).not.toHaveBeenCalled(); - }); - - it('should handle FAL API errors', async () => { - vi.mocked(fal.subscribe).mockRejectedValue(new Error('API Error')); - - await ThreeDGeneration.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenLastCalledWith({ - text: expect.stringContaining('3D generation failed'), - error: true, - }); - }); - - it('should handle file system errors', async () => { - vi.mocked(fs.writeFileSync).mockImplementation(() => { - throw new Error('File system error'); - }); - - await ThreeDGeneration.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenLastCalledWith({ - text: expect.stringContaining('3D generation failed'), - error: true, - }); - }); - - it('should clean up prompt by removing mentions and commands', async () => { - const messageWithMentions: Memory = { - id: 'test-message-id', - content: { - text: '<@123456> generate 3D a cute cat render 3D', - }, - }; - - await ThreeDGeneration.handler( - mockRuntime, - messageWithMentions, - mockState, - {}, - mockCallback - ); - - expect(fal.subscribe).toHaveBeenCalledWith(expect.any(String), { - input: expect.objectContaining({ - prompt: 'a cute cat', - }), - logs: true, - onQueueUpdate: expect.any(Function), - }); - }); - }); -}); diff --git a/packages/plugin-3d-generation/biome.json b/packages/plugin-3d-generation/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-3d-generation/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-3d-generation/package.json b/packages/plugin-3d-generation/package.json deleted file mode 100644 index 86bcfc2c4782e..0000000000000 --- a/packages/plugin-3d-generation/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "@elizaos/plugin-3d-generation", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "tsup": "8.3.5", - "whatwg-url": "7.1.0" - }, - "devDependencies": { - "@biomejs/biome": "1.5.3", - "vitest": "^3.0.0" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "lint": "biome check src/", - "lint:fix": "biome check --apply src/", - "format": "biome format src/", - "format:fix": "biome format --write src/" - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-3d-generation/src/constants.ts b/packages/plugin-3d-generation/src/constants.ts deleted file mode 100644 index dbde2ab6efba9..0000000000000 --- a/packages/plugin-3d-generation/src/constants.ts +++ /dev/null @@ -1,4 +0,0 @@ -export const FAL_CONSTANTS = { - API_3D_ENDPOINT: "fal-ai/hyper3d/rodin", - API_KEY_SETTING: "FAL_API_KEY", // The setting name to fetch from runtime -}; diff --git a/packages/plugin-3d-generation/src/index.ts b/packages/plugin-3d-generation/src/index.ts deleted file mode 100644 index b6b516bee19a2..0000000000000 --- a/packages/plugin-3d-generation/src/index.ts +++ /dev/null @@ -1,199 +0,0 @@ -import { elizaLogger } from "@elizaos/core"; -import type { - Action, - HandlerCallback, - IAgentRuntime, - Memory, - Plugin, - State, -} from "@elizaos/core"; -import { fal } from "@fal-ai/client"; -import { FAL_CONSTANTS } from "./constants"; - -import * as fs from "node:fs"; -import { Buffer } from "node:buffer"; -import * as path from "node:path"; -import * as process from "node:process"; - -const generate3D = async (prompt: string, runtime: IAgentRuntime) => { - process.env["FAL_KEY"] = - FAL_CONSTANTS.API_KEY_SETTING || runtime.getSetting("FAL_API_KEY"); - - try { - elizaLogger.log("Starting 3D generation with prompt:", prompt); - - const response = await fal.subscribe(FAL_CONSTANTS.API_3D_ENDPOINT, { - input: { - prompt: prompt, - input_image_urls: [], - condition_mode: "concat", // fuse concat - geometry_file_format: "glb", // glb usdz fbx obj stl - material: "PBR", // PBR Shaded - quality: "medium", // extra-low, low, medium, high - tier: "Regular", // Regular, Sketch - }, - logs: true, - onQueueUpdate: (update) => { - if (update.status === "IN_PROGRESS") { - update.logs - .map((log) => log.message) - .forEach(elizaLogger.log); - } - }, - }); - - elizaLogger.log( - "Generation request successful, received response:", - response - ); - - return { - success: true, - url: response.data.model_mesh.url, - file_name: response.data.model_mesh.file_name, - }; - } catch (error) { - elizaLogger.error("3D generation error:", error); - return { - success: false, - error: error.message || "Unknown error occurred", - }; - } -}; - -const ThreeDGeneration: Action = { - name: "GENERATE_3D", - similes: [ - "3D_GENERATION", - "3D_GEN", - "CREATE_3D", - "MAKE_3D", - "TEXT23D", - "TEXT_TO_3D", - "3D_CREATE", - "3D_MAKE", - ], - description: "Generate a 3D object based on a text prompt", - validate: async (runtime: IAgentRuntime, _message: Memory) => { - elizaLogger.log("Validating 3D generation action"); - const FalApiKey = runtime.getSetting("FAL_API_KEY"); - elizaLogger.log("FAL_API_KEY present:", !!FalApiKey); - return !!FalApiKey; - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State, - _options: Record, - callback: HandlerCallback - ) => { - elizaLogger.log("3D generation request:", message); - - // Clean up the prompt by removing mentions and commands - const ThreeDPrompt = message.content.text - .replace(/<@\d+>/g, "") // Remove mentions - .replace(/generate 3D|create 3D|make 3D|render 3D/gi, "") // Remove commands - .trim(); - - if (!ThreeDPrompt || ThreeDPrompt.length < 3) { - callback({ - text: "Could you please provide more details about what kind of 3D object you'd like me to generate? For example: 'Generate a lovely cat'", - }); - return; - } - - elizaLogger.log("3D prompt:", ThreeDPrompt); - - callback({ - text: `I'll generate a 3D object based on your prompt: "${ThreeDPrompt}". This might take a few minutes...`, - }); - - try { - const result = await generate3D(ThreeDPrompt, runtime); - - if (result.success && result.url && result.file_name) { - // Download the 3D file - const response = await fetch(result.url); - const arrayBuffer = await response.arrayBuffer(); - const ThreeDFileName = `content_cache/generated_3d_${result.file_name}`; - - // ensure the directory is existed - const directoryPath = path.dirname(ThreeDFileName); - if (!fs.existsSync(directoryPath)) { - fs.mkdirSync(directoryPath, { recursive: true }); - } - - // Save 3D file - fs.writeFileSync(ThreeDFileName, Buffer.from(arrayBuffer)); - - callback( - { - text: "Here's your generated 3D object!", - attachments: [ - { - id: crypto.randomUUID(), - url: result.url, - title: "Generated 3D", - source: "ThreeDGeneration", - description: ThreeDPrompt, - text: ThreeDPrompt, - }, - ], - }, - [ThreeDFileName] - ); // Add the 3D file to the attachments - } else { - callback({ - text: `3D generation failed: ${result.error}`, - error: true, - }); - } - } catch (error) { - elizaLogger.error(`Failed to generate 3D. Error: ${error}`); - callback({ - text: `3D generation failed: ${error.message}`, - error: true, - }); - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Generate a 3D object of a cat playing piano", - }, - }, - { - user: "{{agentName}}", - content: { - text: "I'll create a 3D object of a cat playing piano for you", - action: "GENERATE_3D", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Can you make a 3D object of a anime character Goku?", - }, - }, - { - user: "{{agentName}}", - content: { - text: "I'll generate a 3D object of a anime character Goku for you", - action: "GENERATE_3D", - }, - }, - ], - ], -} as Action; - -export const ThreeDGenerationPlugin: Plugin = { - name: "3DGeneration", - description: "Generate 3D using Hyper 3D", - actions: [ThreeDGeneration], - evaluators: [], - providers: [], -}; diff --git a/packages/plugin-3d-generation/tsconfig.json b/packages/plugin-3d-generation/tsconfig.json deleted file mode 100644 index d5b54aefd5fea..0000000000000 --- a/packages/plugin-3d-generation/tsconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "module": "ESNext", - "moduleResolution": "Bundler", - "types": ["node"] - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/plugin-3d-generation/tsup.config.ts b/packages/plugin-3d-generation/tsup.config.ts deleted file mode 100644 index 8c78dce12bf69..0000000000000 --- a/packages/plugin-3d-generation/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], - external: [ - "dotenv", - "fs", - "path", - "process", - "@reflink/reflink", - "@node-llama-cpp", - "@fal-ai/client", - "https", - "http", - "agentkeepalive", - ], -}); diff --git a/packages/plugin-3d-generation/vitest.config.ts b/packages/plugin-3d-generation/vitest.config.ts deleted file mode 100644 index adbf725538008..0000000000000 --- a/packages/plugin-3d-generation/vitest.config.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - }, -}); diff --git a/packages/plugin-abstract/README.md b/packages/plugin-abstract/README.md deleted file mode 100644 index 865ec94ead263..0000000000000 --- a/packages/plugin-abstract/README.md +++ /dev/null @@ -1,175 +0,0 @@ -# @elizaos/plugin-abstract - -A plugin for interacting with the Abstract blockchain network within the ElizaOS ecosystem. - -## Description - -The Abstract plugin enables seamless token transfers on the Abstract testnet. It provides functionality to transfer both native ETH and ERC20 tokens using secure wallet operations. - -## Installation - -```bash -pnpm install @elizaos/plugin-abstract -``` - -## Configuration - -The plugin requires the following environment variables to be set: - -```typescript -ABSTRACT_ADDRESS= -ABSTRACT_PRIVATE_KEY= -``` - -## Usage - -### Basic Integration - -```typescript -import { abstractPlugin } from "@elizaos/plugin-abstract"; -``` - -### Transfer Examples - -```typescript -// The plugin responds to natural language commands like: - -"Send 100 USDC to 0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62"; -"Transfer 0.1 ETH to 0xbD8679cf79137042214fA4239b02F4022208EE82"; -"Pay 50 USDC on Abstract to [address]"; -``` - -## API Reference - -### Actions - -#### SEND_TOKEN - -Transfers tokens from the agent's wallet to another address. - -**Aliases:** - -- TRANSFER_TOKEN_ON_ABSTRACT -- TRANSFER_TOKENS_ON_ABSTRACT -- SEND_TOKENS_ON_ABSTRACT -- SEND_ETH_ON_ABSTRACT -- PAY_ON_ABSTRACT -- MOVE_TOKENS_ON_ABSTRACT -- MOVE_ETH_ON_ABSTRACT - -## Common Issues & Troubleshooting - -1. **Transaction Failures** - - - Verify wallet has sufficient balance - - Check recipient address format - - Ensure private key is correctly set - - Verify network connectivity - -2. **Configuration Issues** - - Verify all required environment variables are set - - Ensure private key format is correct - - Check wallet address format - -## Security Best Practices - -1. **Private Key Management** - - Store private key securely using environment variables - - Never commit private keys to version control - - Use separate wallets for development and production - - Monitor wallet activity regularly - -## Development Guide - -### Setting Up Development Environment - -1. Clone the repository -2. Install dependencies: - -```bash -pnpm install -``` - -3. Build the plugin: - -```bash -pnpm run build -``` - -4. Run the plugin: - -```bash -pnpm run dev -``` - -## Future Enhancements - -1. **Smart Account Features** - - - Multi-signature support - - Account recovery mechanisms - - Batch transaction processing - - Advanced permission management - - Account abstraction improvements - - Social recovery options - -2. **DEX Integration** - - - Advanced swap routing - - Liquidity pool management - - Yield farming automation - - Price impact analysis - - Slippage protection - - AMM optimization - -3. **Security Enhancements** - - - Transaction simulation - - Risk assessment tools - - Rate limiting controls - - Fraud detection system - - Emergency shutdown features - - Audit integration tools - -4. **Developer Tools** - - - Enhanced debugging capabilities - - Documentation generator - - CLI tool improvements - - Testing utilities - - Deployment automation - - Performance profiling - -5. **Analytics and Monitoring** - - - Transaction tracking dashboard - - Network statistics - - Performance metrics - - Gas usage optimization - - Custom reporting tools - - Real-time monitoring - -6. **Wallet Management** - - Multiple wallet support - - Hardware wallet integration - - Address book features - - Transaction history analysis - - Balance monitoring - - Token management tools - -We welcome community feedback and contributions to help prioritize these enhancements. - -## Contributing - -Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information. - -## Credits - -This plugin integrates with and builds upon several key technologies: - -- [Abstract](https://abs.xyz/): Consumer blockchain -- [viem](https://viem.sh/): Typescript web3 client - -## License - -This plugin is part of the Eliza project. See the main project repository for license information. diff --git a/packages/plugin-abstract/__tests__/deployTokenAction.test.ts b/packages/plugin-abstract/__tests__/deployTokenAction.test.ts deleted file mode 100644 index 2f2d2c8b9aa68..0000000000000 --- a/packages/plugin-abstract/__tests__/deployTokenAction.test.ts +++ /dev/null @@ -1,443 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { deployTokenAction } from '../src/actions/deployTokenAction'; -import { ModelClass, generateObject } from '@elizaos/core'; -import { parseEther } from 'viem'; -import { abstractTestnet } from 'viem/chains'; -import { useGetWalletClient } from '../src/hooks'; -import { validateAbstractConfig } from '../src/environment'; -import { abstractPublicClient } from '../src/utils/viemHelpers'; -import { createAbstractClient } from '@abstract-foundation/agw-client'; - -// Mock dependencies -vi.mock('@elizaos/core', () => { - const actual = vi.importActual('@elizaos/core'); - return { - ...actual, - ModelClass: { - SMALL: 'small' - }, - elizaLogger: { - log: vi.fn(), - error: vi.fn(), - success: vi.fn() - }, - composeContext: vi.fn().mockReturnValue('mocked-context'), - generateObject: vi.fn().mockResolvedValue({ - object: { - name: 'Test Token', - symbol: 'TEST', - initialSupply: '1000000', - useAGW: false - } - }), - stringToUuid: vi.fn().mockReturnValue('mocked-uuid') - }; -}); - -vi.mock('viem', () => ({ - parseEther: vi.fn().mockReturnValue(BigInt(1000000)) -})); - -vi.mock('@abstract-foundation/agw-client', () => ({ - createAbstractClient: vi.fn().mockResolvedValue({ - deployContract: vi.fn().mockResolvedValue('0xhash') - }) -})); - -vi.mock('../src/environment', () => ({ - validateAbstractConfig: vi.fn().mockResolvedValue(true) -})); - -vi.mock('../src/hooks', () => { - const deployContract = vi.fn(); - deployContract.mockResolvedValue('0xhash'); - return { - useGetAccount: vi.fn().mockReturnValue('0xaccount'), - useGetWalletClient: vi.fn().mockReturnValue({ - deployContract - }) - }; -}); - -vi.mock('../src/utils/viemHelpers', () => ({ - abstractPublicClient: { - waitForTransactionReceipt: vi.fn().mockResolvedValue({ - status: 'success', - contractAddress: '0xcontract' - }) - } -})); - -describe('deployTokenAction', () => { - const mockRuntime = { - agentId: 'test-agent', - composeState: vi.fn().mockResolvedValue({ - recentMessagesData: [ - { content: { text: 'previous message' } }, - { content: { text: 'Deploy a token named MyToken with symbol MTK and supply 1000000' } } - ], - currentMessage: 'Deploy a token named MyToken with symbol MTK and supply 1000000' - }), - updateRecentMessageState: vi.fn().mockImplementation((state) => ({ - ...state, - recentMessagesData: [ - { content: { text: 'previous message' } }, - { content: { text: 'Deploy a token named MyToken with symbol MTK and supply 1000000' } } - ], - currentMessage: 'Deploy a token named MyToken with symbol MTK and supply 1000000' - })), - messageManager: { - createMemory: vi.fn().mockResolvedValue(true) - } - }; - - const mockCallback = vi.fn(); - let mockDeployContract; - - beforeEach(() => { - vi.clearAllMocks(); - mockDeployContract = vi.mocked(useGetWalletClient()).deployContract; - }); - - describe('action properties', () => { - it('should have correct name and similes', () => { - expect(deployTokenAction.name).toBe('DEPLOY_TOKEN'); - expect(deployTokenAction.similes).toContain('CREATE_TOKEN'); - expect(deployTokenAction.similes).toContain('DEPLOY_NEW_TOKEN'); - expect(deployTokenAction.similes).toContain('CREATE_NEW_TOKEN'); - expect(deployTokenAction.similes).toContain('LAUNCH_TOKEN'); - }); - - it('should have a description', () => { - expect(deployTokenAction.description).toBe('Deploy a new ERC20 token contract'); - }); - }); - - describe('validation', () => { - it('should validate abstract config', async () => { - const result = await deployTokenAction.validate(mockRuntime); - expect(result).toBe(true); - }); - - it('should handle validation failure', async () => { - const mockValidateAbstractConfig = vi.mocked(validateAbstractConfig); - mockValidateAbstractConfig.mockRejectedValueOnce(new Error('Config validation failed')); - - await expect(deployTokenAction.validate(mockRuntime)).rejects.toThrow('Config validation failed'); - }); - }); - - describe('state management', () => { - it('should compose state if not provided', async () => { - await deployTokenAction.handler(mockRuntime, {}, undefined, {}, mockCallback); - expect(mockRuntime.composeState).toHaveBeenCalled(); - }); - - it('should update state if provided', async () => { - const mockState = { - recentMessagesData: [ - { content: { text: 'previous message' } }, - { content: { text: 'Deploy a token named MyToken with symbol MTK and supply 1000000' } } - ], - currentMessage: 'Deploy a token named MyToken with symbol MTK and supply 1000000' - }; - await deployTokenAction.handler(mockRuntime, {}, mockState, {}, mockCallback); - expect(mockRuntime.updateRecentMessageState).toHaveBeenCalledWith(mockState); - }); - }); - - describe('handler', () => { - it('should handle token deployment without AGW', async () => { - const result = await deployTokenAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(true); - expect(parseEther).toHaveBeenCalledWith('1000000'); - expect(mockDeployContract).toHaveBeenCalledWith({ - chain: abstractTestnet, - account: '0xaccount', - abi: expect.any(Array), - bytecode: expect.any(String), - args: ['Test Token', 'TEST', BigInt(1000000)], - kzg: undefined - }); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('deployed successfully'), - content: expect.objectContaining({ - contractAddress: '0xcontract', - tokenName: 'Test Token', - tokenSymbol: 'TEST', - hash: '0xhash' - }) - }); - expect(mockRuntime.messageManager.createMemory).toHaveBeenCalledWith({ - id: 'mocked-uuid', - userId: 'test-agent', - content: expect.objectContaining({ - text: expect.stringContaining('Token deployed'), - tokenAddress: '0xcontract', - name: 'Test Token', - symbol: 'TEST', - initialSupply: '1000000', - source: 'abstract_token_deployment' - }), - agentId: 'test-agent', - roomId: 'mocked-uuid', - createdAt: expect.any(Number) - }); - }); - - it('should handle token deployment with AGW', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - name: 'Test Token', - symbol: 'TEST', - initialSupply: '1000000', - useAGW: true - } - }); - - const result = await deployTokenAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(true); - expect(parseEther).toHaveBeenCalledWith('1000000'); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('deployed successfully'), - content: expect.objectContaining({ - contractAddress: '0xcontract', - tokenName: 'Test Token', - tokenSymbol: 'TEST', - hash: '0xhash' - }) - }); - }); - - describe('validation cases', () => { - it('should handle empty name', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - name: '', - symbol: 'TEST', - initialSupply: '1000000', - useAGW: false - } - }); - - const result = await deployTokenAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Unable to process'), - content: expect.objectContaining({ - error: 'Invalid deployment parameters' - }) - }); - }); - - it('should handle invalid symbol length', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - name: 'Test Token', - symbol: 'TOOLONG', - initialSupply: '1000000', - useAGW: false - } - }); - - const result = await deployTokenAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Unable to process'), - content: expect.objectContaining({ - error: 'Invalid deployment parameters' - }) - }); - }); - - it('should handle zero supply', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - name: 'Test Token', - symbol: 'TEST', - initialSupply: '0', - useAGW: false - } - }); - - const result = await deployTokenAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Unable to process'), - content: expect.objectContaining({ - error: 'Invalid deployment parameters' - }) - }); - }); - - it('should handle negative supply', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - name: 'Test Token', - symbol: 'TEST', - initialSupply: '-1000', - useAGW: false - } - }); - - const result = await deployTokenAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Unable to process'), - content: expect.objectContaining({ - error: 'Invalid deployment parameters' - }) - }); - }); - - it('should handle non-numeric supply', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - name: 'Test Token', - symbol: 'TEST', - initialSupply: 'not-a-number', - useAGW: false - } - }); - - const result = await deployTokenAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Unable to process'), - content: expect.objectContaining({ - error: 'Invalid deployment parameters' - }) - }); - }); - }); - - describe('error handling', () => { - it('should handle deployment errors', async () => { - mockDeployContract.mockRejectedValueOnce(new Error('Deployment failed')); - - const result = await deployTokenAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Error deploying token: Deployment failed'), - content: expect.objectContaining({ - error: 'Deployment failed' - }) - }); - }); - - it('should handle transaction receipt errors', async () => { - const mockWaitForTransactionReceipt = vi.mocked(abstractPublicClient.waitForTransactionReceipt); - mockWaitForTransactionReceipt.mockRejectedValueOnce(new Error('Transaction failed')); - - const result = await deployTokenAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Error deploying token: Transaction failed'), - content: expect.objectContaining({ - error: 'Transaction failed' - }) - }); - }); - - it('should handle AGW client creation errors', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - name: 'Test Token', - symbol: 'TEST', - initialSupply: '1000000', - useAGW: true - } - }); - - const mockCreateAbstractClient = vi.mocked(createAbstractClient); - mockCreateAbstractClient.mockRejectedValueOnce(new Error('AGW client creation failed')); - - const result = await deployTokenAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Error deploying token: AGW client creation failed'), - content: expect.objectContaining({ - error: 'AGW client creation failed' - }) - }); - }); - }); - }); -}); diff --git a/packages/plugin-abstract/__tests__/getBalanceAction.test.ts b/packages/plugin-abstract/__tests__/getBalanceAction.test.ts deleted file mode 100644 index 30cc068e0e2cb..0000000000000 --- a/packages/plugin-abstract/__tests__/getBalanceAction.test.ts +++ /dev/null @@ -1,373 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { getBalanceAction } from '../src/actions/getBalanceAction'; -import { ModelClass, generateObject } from '@elizaos/core'; -import { formatUnits } from 'viem'; -import { ETH_ADDRESS } from '../src/constants'; -import { useGetAccount } from '../src/hooks'; -import { resolveAddress, getTokenByName, abstractPublicClient } from '../src/utils/viemHelpers'; -import { validateAbstractConfig } from '../src/environment'; - -// Mock dependencies -vi.mock('@elizaos/core', () => { - const actual = vi.importActual('@elizaos/core'); - return { - ...actual, - ModelClass: { - SMALL: 'small' - }, - elizaLogger: { - log: vi.fn(), - error: vi.fn(), - success: vi.fn() - }, - composeContext: vi.fn().mockReturnValue('mocked-context'), - generateObject: vi.fn().mockResolvedValue({ - object: { - tokenAddress: '0xtoken', - walletAddress: '0xwallet', - tokenSymbol: 'TEST' - } - }), - stringToUuid: vi.fn().mockReturnValue('mocked-uuid') - }; -}); - -vi.mock('viem', () => ({ - formatUnits: vi.fn().mockReturnValue('1.0'), - isAddress: vi.fn().mockReturnValue(true), - erc20Abi: [ - { - name: 'balanceOf', - type: 'function', - inputs: [{ type: 'address' }], - outputs: [{ type: 'uint256' }] - }, - { - name: 'decimals', - type: 'function', - inputs: [], - outputs: [{ type: 'uint8' }] - }, - { - name: 'symbol', - type: 'function', - inputs: [], - outputs: [{ type: 'string' }] - } - ] -})); - -vi.mock('../src/environment', () => ({ - validateAbstractConfig: vi.fn().mockResolvedValue(true) -})); - -vi.mock('../src/hooks', () => ({ - useGetAccount: vi.fn().mockReturnValue({ - address: '0xaccount' - }) -})); - -vi.mock('../src/utils/viemHelpers', () => ({ - resolveAddress: vi.fn().mockResolvedValue('0xresolved'), - getTokenByName: vi.fn().mockReturnValue({ address: '0xtoken' }), - abstractPublicClient: { - getBalance: vi.fn().mockResolvedValue(BigInt(1000000000000000000)), - readContract: vi.fn().mockImplementation(async ({ functionName }) => { - switch (functionName) { - case 'balanceOf': - return BigInt(1000000); - case 'decimals': - return 18; - case 'symbol': - return 'TEST'; - default: - throw new Error('Unexpected function call'); - } - }) - } -})); - -describe('getBalanceAction', () => { - const mockRuntime = { - agentId: 'test-agent', - composeState: vi.fn().mockResolvedValue({ - recentMessagesData: [ - { content: { text: 'previous message' } }, - { content: { text: 'Check my ETH balance' } } - ], - currentMessage: 'Check my ETH balance' - }), - updateRecentMessageState: vi.fn().mockImplementation((state) => ({ - ...state, - recentMessagesData: [ - { content: { text: 'previous message' } }, - { content: { text: 'Check my ETH balance' } } - ], - currentMessage: 'Check my ETH balance' - })), - messageManager: { - createMemory: vi.fn().mockResolvedValue(true), - getMemoryById: vi.fn().mockResolvedValue({ - content: { - tokenAddress: '0xtoken' - } - }) - } - }; - - const mockCallback = vi.fn(); - - beforeEach(() => { - vi.clearAllMocks(); - }); - - describe('action properties', () => { - it('should have correct name and similes', () => { - expect(getBalanceAction.name).toBe('GET_BALANCE'); - expect(getBalanceAction.similes).toContain('CHECK_BALANCE'); - expect(getBalanceAction.similes).toContain('VIEW_BALANCE'); - expect(getBalanceAction.similes).toContain('SHOW_BALANCE'); - expect(getBalanceAction.similes).toContain('BALANCE_CHECK'); - expect(getBalanceAction.similes).toContain('TOKEN_BALANCE'); - }); - - it('should have a description', () => { - expect(getBalanceAction.description).toBe('Check token balance for a given address'); - }); - }); - - describe('validation', () => { - it('should validate abstract config', async () => { - const result = await getBalanceAction.validate(mockRuntime, {}); - expect(result).toBe(true); - }); - - it('should handle validation failure', async () => { - const mockValidateAbstractConfig = vi.mocked(validateAbstractConfig); - mockValidateAbstractConfig.mockRejectedValueOnce(new Error('Config validation failed')); - - await expect(getBalanceAction.validate(mockRuntime, {})).rejects.toThrow('Config validation failed'); - }); - }); - - describe('state management', () => { - it('should compose state if not provided', async () => { - await getBalanceAction.handler(mockRuntime, {}, undefined, {}, mockCallback); - expect(mockRuntime.composeState).toHaveBeenCalled(); - }); - - it('should update state if provided', async () => { - const mockState = { - recentMessagesData: [ - { content: { text: 'previous message' } }, - { content: { text: 'Check my ETH balance' } } - ], - currentMessage: 'Check my ETH balance' - }; - await getBalanceAction.handler(mockRuntime, {}, mockState, {}, mockCallback); - expect(mockRuntime.updateRecentMessageState).toHaveBeenCalledWith(mockState); - }); - }); - - describe('handler', () => { - describe('ETH balance checks', () => { - it('should handle ETH balance check with default address', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: ETH_ADDRESS, - walletAddress: null, - tokenSymbol: null - } - }); - - const result = await getBalanceAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(true); - expect(abstractPublicClient.getBalance).toHaveBeenCalledWith({ - address: '0xresolved' - }); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('1.0 ETH'), - content: expect.objectContaining({ - balance: '1.0', - symbol: 'ETH' - }) - }); - }); - - it('should handle ETH balance check with specific address', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: ETH_ADDRESS, - walletAddress: '0xspecific', - tokenSymbol: null - } - }); - - const result = await getBalanceAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(true); - expect(resolveAddress).toHaveBeenCalledWith('0xspecific'); - expect(abstractPublicClient.getBalance).toHaveBeenCalledWith({ - address: '0xresolved' - }); - }); - }); - - describe('token balance checks', () => { - it('should handle token balance check with address', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: '0xtoken', - walletAddress: null, - tokenSymbol: null - } - }); - - const result = await getBalanceAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(true); - expect(abstractPublicClient.readContract).toHaveBeenCalledWith( - expect.objectContaining({ - address: '0xtoken', - functionName: 'balanceOf', - args: ['0xresolved'] - }) - ); - }); - - it('should handle token balance check with symbol', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: null, - walletAddress: null, - tokenSymbol: 'TEST' - } - }); - - const result = await getBalanceAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(true); - expect(mockRuntime.messageManager.getMemoryById).toHaveBeenCalledWith('mocked-uuid'); - expect(abstractPublicClient.readContract).toHaveBeenCalledWith( - expect.objectContaining({ - address: '0xtoken', - functionName: 'balanceOf', - args: ['0xresolved'] - }) - ); - }); - }); - - describe('error handling', () => { - it('should handle invalid address', async () => { - const mockResolveAddress = vi.mocked(resolveAddress); - mockResolveAddress.mockResolvedValueOnce(null); - - const result = await getBalanceAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Invalid address or ENS name'), - content: expect.objectContaining({ - error: 'Invalid address or ENS name' - }) - }); - }); - - it('should handle balance check errors', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: ETH_ADDRESS, - walletAddress: '0xwallet', - tokenSymbol: null - } - }); - - const mockGetBalance = vi.mocked(abstractPublicClient.getBalance); - mockGetBalance.mockRejectedValueOnce(new Error('Balance check failed')); - - const result = await getBalanceAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Error checking balance: Balance check failed'), - content: expect.objectContaining({ - error: 'Balance check failed' - }) - }); - }); - - it('should handle token contract errors', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: '0xtoken', - walletAddress: null, - tokenSymbol: null - } - }); - - const mockReadContract = vi.mocked(abstractPublicClient.readContract); - mockReadContract.mockRejectedValueOnce(new Error('Contract read failed')); - - const result = await getBalanceAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Error checking balance: Contract read failed'), - content: expect.objectContaining({ - error: 'Contract read failed' - }) - }); - }); - }); - }); -}); diff --git a/packages/plugin-abstract/__tests__/transferAction.test.ts b/packages/plugin-abstract/__tests__/transferAction.test.ts deleted file mode 100644 index 02469b9f906f7..0000000000000 --- a/packages/plugin-abstract/__tests__/transferAction.test.ts +++ /dev/null @@ -1,498 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { transferAction } from '../src/actions/transferAction'; -import { ModelClass, generateObject } from '@elizaos/core'; -import { formatUnits, parseUnits, erc20Abi } from 'viem'; -import { ETH_ADDRESS } from '../src/constants'; -import { useGetAccount, useGetWalletClient } from '../src/hooks'; -import { resolveAddress, getTokenByName, abstractPublicClient } from '../src/utils/viemHelpers'; -import { createAbstractClient } from '@abstract-foundation/agw-client'; -import { validateAbstractConfig } from '../src/environment'; - -// Mock dependencies -vi.mock('@elizaos/core', () => { - const actual = vi.importActual('@elizaos/core'); - return { - ...actual, - ModelClass: { - SMALL: 'small' - }, - elizaLogger: { - log: vi.fn(), - error: vi.fn(), - success: vi.fn() - }, - composeContext: vi.fn().mockReturnValue('mocked-context'), - generateObject: vi.fn().mockResolvedValue({ - object: { - tokenAddress: '0xtoken', - recipient: '0xrecipient', - amount: '1.0', - useAGW: false, - tokenSymbol: 'TEST' - } - }), - stringToUuid: vi.fn().mockReturnValue('mocked-uuid') - }; -}); - -vi.mock('viem', () => ({ - formatUnits: vi.fn().mockReturnValue('1.0'), - parseUnits: vi.fn().mockReturnValue(BigInt(1000000000000000000)), - isAddress: vi.fn().mockReturnValue(true), - erc20Abi: [ - { - name: 'transfer', - type: 'function', - inputs: [ - { type: 'address' }, - { type: 'uint256' } - ], - outputs: [{ type: 'bool' }] - }, - { - name: 'decimals', - type: 'function', - inputs: [], - outputs: [{ type: 'uint8' }] - }, - { - name: 'symbol', - type: 'function', - inputs: [], - outputs: [{ type: 'string' }] - } - ] -})); - -vi.mock('@abstract-foundation/agw-client', () => ({ - createAbstractClient: vi.fn().mockResolvedValue({ - sendTransaction: vi.fn().mockResolvedValue('0xhash'), - writeContract: vi.fn().mockResolvedValue('0xhash') - }) -})); - -vi.mock('../src/environment', () => ({ - validateAbstractConfig: vi.fn().mockResolvedValue(true) -})); - -vi.mock('../src/hooks', () => { - const writeContract = vi.fn().mockResolvedValue('0xhash'); - const sendTransaction = vi.fn().mockResolvedValue('0xhash'); - return { - useGetAccount: vi.fn().mockReturnValue({ - address: '0xaccount' - }), - useGetWalletClient: vi.fn().mockReturnValue({ - writeContract, - sendTransaction - }) - }; -}); - -vi.mock('../src/utils/viemHelpers', () => ({ - resolveAddress: vi.fn().mockResolvedValue('0xresolved'), - getTokenByName: vi.fn().mockReturnValue({ address: '0xtoken' }), - abstractPublicClient: { - readContract: vi.fn().mockImplementation(async ({ functionName }) => { - switch (functionName) { - case 'symbol': - return 'TEST'; - case 'decimals': - return 18; - default: - throw new Error('Unexpected function call'); - } - }) - } -})); - -describe('transferAction', () => { - const mockRuntime = { - agentId: 'test-agent', - composeState: vi.fn().mockResolvedValue({ - recentMessagesData: [ - { content: { text: 'previous message' } }, - { content: { text: 'Send 1 ETH to 0xrecipient' } } - ], - currentMessage: 'Send 1 ETH to 0xrecipient' - }), - updateRecentMessageState: vi.fn().mockImplementation((state) => ({ - ...state, - recentMessagesData: [ - { content: { text: 'previous message' } }, - { content: { text: 'Send 1 ETH to 0xrecipient' } } - ], - currentMessage: 'Send 1 ETH to 0xrecipient' - })), - messageManager: { - getMemoryById: vi.fn().mockResolvedValue({ - content: { - tokenAddress: '0xtoken' - } - }) - } - }; - - const mockCallback = vi.fn(); - - beforeEach(() => { - vi.clearAllMocks(); - }); - - describe('action properties', () => { - it('should have correct name and similes', () => { - expect(transferAction.name).toBe('SEND_TOKEN'); - expect(transferAction.similes).toContain('TRANSFER_TOKEN_ON_ABSTRACT'); - expect(transferAction.similes).toContain('TRANSFER_TOKENS_ON_ABSTRACT'); - expect(transferAction.similes).toContain('SEND_TOKENS_ON_ABSTRACT'); - expect(transferAction.similes).toContain('SEND_ETH_ON_ABSTRACT'); - expect(transferAction.similes).toContain('PAY_ON_ABSTRACT'); - expect(transferAction.similes).toContain('MOVE_TOKENS_ON_ABSTRACT'); - expect(transferAction.similes).toContain('MOVE_ETH_ON_ABSTRACT'); - }); - - it('should have a description', () => { - expect(transferAction.description).toBe("Transfer tokens from the agent's wallet to another address"); - }); - }); - - describe('validation', () => { - it('should validate abstract config', async () => { - const result = await transferAction.validate(mockRuntime); - expect(result).toBe(true); - }); - - it('should handle validation failure', async () => { - const mockValidateAbstractConfig = vi.mocked(validateAbstractConfig); - mockValidateAbstractConfig.mockRejectedValueOnce(new Error('Config validation failed')); - - await expect(transferAction.validate(mockRuntime)).rejects.toThrow('Config validation failed'); - }); - }); - - describe('state management', () => { - it('should compose state if not provided', async () => { - await transferAction.handler(mockRuntime, {}, undefined, {}, mockCallback); - expect(mockRuntime.composeState).toHaveBeenCalled(); - }); - - it('should update state if provided', async () => { - const mockState = { - recentMessagesData: [ - { content: { text: 'previous message' } }, - { content: { text: 'Send 1 ETH to 0xrecipient' } } - ], - currentMessage: 'Send 1 ETH to 0xrecipient' - }; - await transferAction.handler(mockRuntime, {}, mockState, {}, mockCallback); - expect(mockRuntime.updateRecentMessageState).toHaveBeenCalledWith(mockState); - }); - }); - - describe('handler', () => { - describe('ETH transfers', () => { - it('should handle ETH transfer without AGW', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: ETH_ADDRESS, - recipient: '0xrecipient', - amount: '1.0', - useAGW: false, - tokenSymbol: null - } - }); - - const result = await transferAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(true); - const walletClient = useGetWalletClient(); - expect(walletClient.sendTransaction).toHaveBeenCalledWith({ - account: expect.any(Object), - chain: expect.any(Object), - to: '0xresolved', - value: BigInt(1000000000000000000), - kzg: undefined - }); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('1.0 ETH'), - content: expect.objectContaining({ - hash: '0xhash', - tokenAmount: '1.0', - symbol: 'ETH', - recipient: '0xresolved', - useAGW: false - }) - }); - }); - - it('should handle ETH transfer with AGW', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: ETH_ADDRESS, - recipient: '0xrecipient', - amount: '1.0', - useAGW: true, - tokenSymbol: null - } - }); - - const result = await transferAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(true); - const mockAbstractClient = await createAbstractClient({}); - expect(mockAbstractClient.sendTransaction).toHaveBeenCalledWith({ - chain: expect.any(Object), - to: '0xresolved', - value: BigInt(1000000000000000000), - kzg: undefined - }); - }); - }); - - describe('token transfers', () => { - it('should handle token transfer without AGW', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: '0xtoken', - recipient: '0xrecipient', - amount: '1.0', - useAGW: false, - tokenSymbol: null - } - }); - - const result = await transferAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(true); - const walletClient = useGetWalletClient(); - expect(walletClient.writeContract).toHaveBeenCalledWith({ - account: expect.any(Object), - chain: expect.any(Object), - address: '0xtoken', - abi: expect.any(Array), - functionName: 'transfer', - args: ['0xresolved', BigInt(1000000000000000000)] - }); - }); - - it('should handle token transfer with AGW', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: '0xtoken', - recipient: '0xrecipient', - amount: '1.0', - useAGW: true, - tokenSymbol: null - } - }); - - const result = await transferAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(true); - const mockAbstractClient = await createAbstractClient({}); - expect(mockAbstractClient.writeContract).toHaveBeenCalledWith({ - chain: expect.any(Object), - address: '0xtoken', - abi: expect.any(Array), - functionName: 'transfer', - args: ['0xresolved', BigInt(1000000000000000000)] - }); - }); - - it('should handle token transfer by symbol', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: null, - recipient: '0xrecipient', - amount: '1.0', - useAGW: false, - tokenSymbol: 'TEST' - } - }); - - const result = await transferAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(true); - expect(mockRuntime.messageManager.getMemoryById).toHaveBeenCalledWith('mocked-uuid'); - const walletClient = useGetWalletClient(); - expect(walletClient.writeContract).toHaveBeenCalledWith({ - account: expect.any(Object), - chain: expect.any(Object), - address: '0xtoken', - abi: expect.any(Array), - functionName: 'transfer', - args: ['0xresolved', BigInt(1000000000000000000)] - }); - }); - }); - - describe('error handling', () => { - it('should handle invalid recipient address', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: ETH_ADDRESS, - recipient: '0xinvalid', - amount: '1.0', - useAGW: false, - tokenSymbol: null - } - }); - - const mockResolveAddress = vi.mocked(resolveAddress); - mockResolveAddress.mockResolvedValueOnce(null); - - const result = await transferAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Unable to process transfer request. Did not extract valid parameters.', - content: expect.objectContaining({ - error: expect.stringContaining('Expected string, received null'), - recipient: null, - tokenAddress: ETH_ADDRESS, - useAGW: false, - amount: '1.0' - }) - }); - }); - - it('should handle transfer errors without AGW', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: ETH_ADDRESS, - recipient: '0xrecipient', - amount: '1.0', - useAGW: false, - tokenSymbol: null - } - }); - - const walletClient = useGetWalletClient(); - vi.mocked(walletClient.sendTransaction).mockRejectedValueOnce(new Error('Transfer failed')); - - const result = await transferAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Error transferring tokens: Transfer failed'), - content: expect.objectContaining({ - error: 'Transfer failed' - }) - }); - }); - - it('should handle transfer errors with AGW', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: ETH_ADDRESS, - recipient: '0xrecipient', - amount: '1.0', - useAGW: true, - tokenSymbol: null - } - }); - - const mockCreateAbstractClient = vi.mocked(createAbstractClient); - mockCreateAbstractClient.mockRejectedValueOnce(new Error('AGW client creation failed')); - - const result = await transferAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Error transferring tokens: AGW client creation failed'), - content: expect.objectContaining({ - error: 'AGW client creation failed' - }) - }); - }); - - it('should handle token contract errors', async () => { - const mockGenerateObject = vi.mocked(generateObject); - mockGenerateObject.mockResolvedValueOnce({ - object: { - tokenAddress: '0xtoken', - recipient: '0xrecipient', - amount: '1.0', - useAGW: false, - tokenSymbol: null - } - }); - - const mockReadContract = vi.mocked(abstractPublicClient.readContract); - mockReadContract.mockRejectedValueOnce(new Error('Contract read failed')); - - const result = await transferAction.handler( - mockRuntime, - {}, - undefined, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Error transferring tokens: Contract read failed'), - content: expect.objectContaining({ - error: 'Contract read failed' - }) - }); - }); - }); - }); -}); diff --git a/packages/plugin-abstract/biome.json b/packages/plugin-abstract/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-abstract/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-abstract/package.json b/packages/plugin-abstract/package.json deleted file mode 100644 index 7c97d1c7de46b..0000000000000 --- a/packages/plugin-abstract/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "@elizaos/plugin-abstract", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@abstract-foundation/agw-client": "1.0.1", - "@elizaos/core": "workspace:*", - "tsup": "^8.3.5", - "viem": "2.22.2" - }, - "scripts": { - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write .", - "build": "tsup --format esm --no-dts", - "dev": "tsup --format esm --no-dts --watch", - "test": "vitest run", - "test:watch": "vitest watch", - "test:coverage": "vitest run --coverage" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "tsup": "8.3.5", - "typescript": "4.9", - "vitest": "^3.0.0" - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-abstract/src/actions/deployTokenAction.ts b/packages/plugin-abstract/src/actions/deployTokenAction.ts deleted file mode 100644 index 83c30946a2620..0000000000000 --- a/packages/plugin-abstract/src/actions/deployTokenAction.ts +++ /dev/null @@ -1,269 +0,0 @@ -import type { Action } from "@elizaos/core"; -import { - type ActionExample, - type Content, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - elizaLogger, - composeContext, - generateObject, - stringToUuid, -} from "@elizaos/core"; -import { validateAbstractConfig } from "../environment"; -import { parseEther, type Hash } from "viem"; -import { abstractTestnet } from "viem/chains"; -import { - type AbstractClient, - createAbstractClient, -} from "@abstract-foundation/agw-client"; -import { z } from "zod"; -import { useGetAccount, useGetWalletClient } from "../hooks"; -import basicToken from "../constants/contracts/basicToken.json"; -import { abstractPublicClient } from "../utils/viemHelpers"; - -const DeploySchema = z.object({ - name: z.string(), - symbol: z.string(), - initialSupply: z.string(), - useAGW: z.boolean(), -}); - -const validatedSchema = z.object({ - name: z.string().min(1, "Name is required"), - symbol: z - .string() - .min(1, "Symbol is required") - .max(5, "Symbol must be 5 characters or less"), - initialSupply: z - .string() - .refine((val) => !Number.isNaN(Number(val)) && Number(val) > 0, { - message: "Initial supply must be a positive number", - }), - useAGW: z.boolean(), -}); - -export interface DeployContent extends Content { - name: string; - symbol: string; - initialSupply: string; - useAGW: boolean; -} - -const deployTemplate = `Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined. - -Example response: -\`\`\`json -{ - "name": "My Token", - "symbol": "MTK", - "initialSupply": "1000000", - "useAGW": true -} -\`\`\` - -User message: -"{{currentMessage}}" - -Given the message, extract the following information about the requested token deployment: -- Token name -- Token symbol (usually 3-4 characters) -- Initial supply amount -- Whether to use Abstract Global Wallet aka AGW - -If the user did not specify "global wallet", "AGW", "agw", or "abstract global wallet" in their message, set useAGW to false, otherwise set it to true. - -Respond with a JSON markdown block containing only the extracted values.`; - -export const deployTokenAction: Action = { - name: "DEPLOY_TOKEN", - similes: [ - "CREATE_TOKEN", - "DEPLOY_NEW_TOKEN", - "CREATE_NEW_TOKEN", - "LAUNCH_TOKEN", - ], - validate: async (runtime: IAgentRuntime) => { - await validateAbstractConfig(runtime); - return true; - }, - description: "Deploy a new ERC20 token contract", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback, - ): Promise => { - elizaLogger.log("Starting Abstract DEPLOY_TOKEN handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - currentState.currentMessage = `${currentState.recentMessagesData[1].content.text}`; - const deployContext = composeContext({ - state: currentState, - template: deployTemplate, - }); - - const content = ( - await generateObject({ - runtime, - context: deployContext, - modelClass: ModelClass.SMALL, - schema: DeploySchema, - }) - ).object as DeployContent; - - // Validate deployment content - const result = validatedSchema.safeParse(content); - if (!result.success) { - elizaLogger.error("Invalid content for DEPLOY_TOKEN action.", { - errors: result.error.errors, - }); - if (callback) { - callback({ - text: "Unable to process token deployment request. Invalid parameters provided.", - content: { error: "Invalid deployment parameters" }, - }); - } - return false; - } - - try { - const account = useGetAccount(runtime); - const supply = parseEther(content.initialSupply); - let hash: Hash; - - if (content.useAGW) { - const abstractClient = (await createAbstractClient({ - chain: abstractTestnet, - signer: account, - })) as any; // type being exported as never - - hash = await abstractClient.deployContract({ - abi: basicToken.abi, - bytecode: basicToken.bytecode, - args: [result.data.name, result.data.symbol, supply], - }); - } else { - const walletClient = useGetWalletClient(); - - hash = await walletClient.deployContract({ - chain: abstractTestnet, - account, - abi: basicToken.abi, - bytecode: basicToken.bytecode, - args: [result.data.name, result.data.symbol, supply], - kzg: undefined, - }); - } - - // Wait for transaction receipt - const receipt = await abstractPublicClient.waitForTransactionReceipt({ - hash, - }); - const contractAddress = receipt.contractAddress; - - elizaLogger.success( - `Token deployment completed! Contract address: ${contractAddress}. Transaction hash: ${hash}`, - ); - if (callback) { - callback({ - text: `Token "${result.data.name}" (${result.data.symbol}) deployed successfully! Contract address: ${contractAddress} and transaction hash: ${hash}`, - content: { - hash, - tokenName: result.data.name, - tokenSymbol: result.data.symbol, - contractAddress, - transactionHash: hash, - }, - }); - } - - const metadata = { - tokenAddress: contractAddress, - name: result.data.name, - symbol: result.data.symbol, - initialSupply: String(result.data.initialSupply), - }; - - await runtime.messageManager.createMemory({ - id: stringToUuid(`${result.data.symbol}-${runtime.agentId}`), - userId: runtime.agentId, - content: { - text: `Token deployed: ${result.data.name}, symbol: ${result.data.symbol} and contract address: ${contractAddress}`, - ...metadata, - source: "abstract_token_deployment", - }, - agentId: runtime.agentId, - roomId: stringToUuid(`tokens-${runtime.agentId}`), - createdAt: Date.now(), - }); - elizaLogger.success("memory saved for token deployment", metadata); - - return true; - } catch (error) { - elizaLogger.error("Error during token deployment:", error); - if (callback) { - callback({ - text: `Error deploying token: ${error.message}`, - content: { error: error.message }, - }); - } - return false; - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Deploy a new token called MyToken with symbol MTK and initial supply of 1000000", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll deploy your new token now.", - action: "DEPLOY_TOKEN", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully deployed MyToken (MTK) with 1000000 initial supply.\nContract address: 0xdde850f9257365fffffc11324726ebdcf5b90b01c6eec9b3e7ab3e81fde6f14b\nTransaction hash: 0xdde850f9257365fffffc11324726ebdcf5b90b01c6eec9b3e7ab3e81fde6f14b", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Create a new token using AGW with name TestCoin, symbol TEST, and 5000 supply", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll deploy your token using the Abstract Global Wallet.", - action: "DEPLOY_TOKEN", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully deployed TestCoin (TEST) with 5000 initial supply using AGW.\nContract address: 0xdde850f9257365fffffc11324726ebdcf5b90b01c6eec9b3e7ab3e81fde6f14b\nTransaction: 0x4fed598033f0added272c3ddefd4d83a521634a738474400b27378db462a76ec", - }, - }, - ], - ] as ActionExample[][], -}; diff --git a/packages/plugin-abstract/src/actions/getBalanceAction.ts b/packages/plugin-abstract/src/actions/getBalanceAction.ts deleted file mode 100644 index 602fe37567684..0000000000000 --- a/packages/plugin-abstract/src/actions/getBalanceAction.ts +++ /dev/null @@ -1,281 +0,0 @@ -import type { Action } from "@elizaos/core"; -import { - type ActionExample, - type Content, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - elizaLogger, - composeContext, - generateObject, - stringToUuid, -} from "@elizaos/core"; -import { validateAbstractConfig } from "../environment"; - -import { erc20Abi, formatUnits, isAddress } from "viem"; -import { z } from "zod"; -import { ETH_ADDRESS } from "../constants"; -import { useGetAccount } from "../hooks"; -import { - resolveAddress, - getTokenByName, - abstractPublicClient, -} from "../utils/viemHelpers"; - -const BalanceSchema = z.object({ - tokenAddress: z.string().optional().nullable(), - walletAddress: z.string().optional().nullable(), - tokenSymbol: z.string().optional().nullable(), -}); - -export interface BalanceContent extends Content { - tokenAddress?: string; - walletAddress?: string; - tokenSymbol?: string; -} - -const validatedSchema = z.object({ - tokenAddress: z - .string() - .refine(isAddress, { message: "Invalid token address" }), - walletAddress: z - .string() - .refine(isAddress, { message: "Invalid token address" }), -}); - -const balanceTemplate = `Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined. - - -Example response: -\`\`\`json -{ - "tokenAddress": "", - "walletAddress": "", - "tokenSymbol": "USDC" -} -\`\`\` - -User message: -"{{currentMessage}}" - -Given the message, extract the following information about the requested balance check: -- Token contract address (optional, if not specified set to null) -- Wallet address to check (optional, if not specified set to null) -- The symbol of the token to check (optional, if not specified set to null). Between 1 to 6 characters usually. - -Respond with a JSON markdown block containing only the extracted values.`; - -export const getBalanceAction: Action = { - name: "GET_BALANCE", - similes: [ - "CHECK_BALANCE", - "VIEW_BALANCE", - "SHOW_BALANCE", - "BALANCE_CHECK", - "TOKEN_BALANCE", - ], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateAbstractConfig(runtime); - return true; - }, - description: "Check token balance for a given address", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback, - ): Promise => { - elizaLogger.log("Starting Abstract GET_BALANCE handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Compose balance context - currentState.currentMessage = `${currentState.recentMessagesData[1].content.text}`; - const balanceContext = composeContext({ - state: currentState, - template: balanceTemplate, - }); - - // Generate balance content - const content = ( - await generateObject({ - runtime, - context: balanceContext, - modelClass: ModelClass.SMALL, - schema: BalanceSchema, - }) - ).object as BalanceContent; - - try { - const account = useGetAccount(runtime); - const addressToCheck = content.walletAddress || account.address; - - // Resolve address - const resolvedAddress = await resolveAddress(addressToCheck); - if (!resolvedAddress) { - throw new Error("Invalid address or ENS name"); - } - - let tokenAddress = content.tokenAddress; - - if (content.tokenSymbol) { - const tokenMemory = await runtime.messageManager.getMemoryById( - stringToUuid(`${content.tokenSymbol}-${runtime.agentId}`), - ); - - if (typeof tokenMemory?.content?.tokenAddress === "string") { - tokenAddress = tokenMemory.content.tokenAddress; - } - - if (!tokenAddress) { - tokenAddress = getTokenByName(content.tokenSymbol)?.address; - } - } - - const result = validatedSchema.safeParse({ - tokenAddress: tokenAddress || ETH_ADDRESS, - walletAddress: resolvedAddress, - }); - - // Validate transfer content - if (!result.success) { - elizaLogger.error("Invalid content for GET_BALANCE action."); - if (callback) { - callback({ - text: "Unable to process balance request. Invalid content provided.", - content: { error: "Invalid balance content" }, - }); - } - return false; - } - - let balance: bigint; - let symbol: string; - let decimals: number; - - // Query balance based on token type - if (result.data.tokenAddress === ETH_ADDRESS) { - balance = await abstractPublicClient.getBalance({ - address: resolvedAddress, - }); - symbol = "ETH"; - decimals = 18; - } else { - [balance, decimals, symbol] = await Promise.all([ - abstractPublicClient.readContract({ - address: result.data.tokenAddress, - abi: erc20Abi, - functionName: "balanceOf", - args: [resolvedAddress], - }), - abstractPublicClient.readContract({ - address: result.data.tokenAddress, - abi: erc20Abi, - functionName: "decimals", - }), - abstractPublicClient.readContract({ - address: result.data.tokenAddress, - abi: erc20Abi, - functionName: "symbol", - }), - ]); - } - - const formattedBalance = formatUnits(balance, decimals); - - elizaLogger.success(`Balance check completed for ${resolvedAddress}`); - if (callback) { - callback({ - text: `Balance for ${resolvedAddress}: ${formattedBalance} ${symbol}`, - content: { balance: formattedBalance, symbol: symbol }, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error checking balance:", error); - if (callback) { - callback({ - text: `Error checking balance: ${error.message}`, - content: { error: error.message }, - }); - } - return false; - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "What's my ETH balance?", - }, - }, - { - user: "{{agent}}", - content: { - text: "Let me check your ETH balance.", - action: "GET_BALANCE", - }, - }, - { - user: "{{agent}}", - content: { - text: "Your ETH balance is 1.5 ETH", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Check USDC balance for 0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll check the USDC balance for that address.", - action: "GET_BALANCE", - }, - }, - { - user: "{{agent}}", - content: { - text: "The USDC balance for 0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62 is 100 USDC", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Check balance for 0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62 with token 0xe4c7fbb0a626ed208021ccaba6be1566905e2dfc", - }, - }, - { - user: "{{agent}}", - content: { - text: "Let me check the balance for that address.", - action: "GET_BALANCE", - }, - }, - { - user: "{{agent}}", - content: { - text: "The balance for 0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62 with token 0xe4c7fbb0a626ed208021ccaba6be1566905e2dfc is 100", - }, - }, - ], - ] as ActionExample[][], -}; diff --git a/packages/plugin-abstract/src/actions/index.ts b/packages/plugin-abstract/src/actions/index.ts deleted file mode 100644 index 90317a57850a3..0000000000000 --- a/packages/plugin-abstract/src/actions/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -export * from "./transferAction"; -export * from "./getBalanceAction"; -export * from "./deployTokenAction"; diff --git a/packages/plugin-abstract/src/actions/transferAction.ts b/packages/plugin-abstract/src/actions/transferAction.ts deleted file mode 100644 index 4cc3f996cc32b..0000000000000 --- a/packages/plugin-abstract/src/actions/transferAction.ts +++ /dev/null @@ -1,426 +0,0 @@ -import type { Action } from "@elizaos/core"; -import { - type ActionExample, - type Content, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - elizaLogger, - composeContext, - generateObject, - stringToUuid, -} from "@elizaos/core"; -import { validateAbstractConfig } from "../environment"; - -import { erc20Abi, formatUnits, isAddress, parseUnits, type Hash } from "viem"; -import { abstractTestnet } from "viem/chains"; -import { createAbstractClient } from "@abstract-foundation/agw-client"; -import { z } from "zod"; -import { ETH_ADDRESS } from "../constants"; -import { useGetAccount, useGetWalletClient } from "../hooks"; -import { - resolveAddress, - abstractPublicClient, - getTokenByName, -} from "../utils/viemHelpers"; - -// Define types for Abstract client -interface AbstractTransactionRequest { - chain: typeof abstractTestnet; - to: string; - value: bigint; - kzg: undefined; -} - -interface AbstractContractRequest { - chain: typeof abstractTestnet; - address: string; - abi: typeof erc20Abi; - functionName: string; - args: [string, bigint]; -} - -interface AbstractClient { - sendTransaction: (request: AbstractTransactionRequest) => Promise; - writeContract: (request: AbstractContractRequest) => Promise; -} - -const TransferSchema = z.object({ - tokenAddress: z.string().optional().nullable(), - recipient: z.string(), - amount: z.string(), - useAGW: z.boolean(), - tokenSymbol: z.string().optional().nullable(), -}); - -const validatedTransferSchema = z.object({ - tokenAddress: z - .string() - .refine(isAddress, { message: "Invalid token address" }), - recipient: z - .string() - .refine(isAddress, { message: "Invalid recipient address" }), - amount: z.string(), - useAGW: z.boolean(), -}); - -export interface TransferContent extends Content { - tokenAddress: string; - recipient: string; - amount: string | number; - useAGW: boolean; - tokenSymbol?: string; -} - -const transferTemplate = `Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined. - -Example response: -\`\`\`json -{ - "tokenAddress": "", - "recipient": "", - "amount": "1000", - "useAGW": true, - "tokenSymbol": "USDC" -} -\`\`\` - -User message: -"{{currentMessage}}" - -Given the message, extract the following information about the requested token transfer: -- Token contract address -- Recipient wallet address -- Amount to transfer -- Whether to use Abstract Global Wallet aka AGW -- The symbol of the token that wants to be transferred. Between 1 to 6 characters usually. - -If the user did not specify "global wallet", "AGW", "agw", or "abstract global wallet" in their message, set useAGW to false, otherwise set it to true. -s -Respond with a JSON markdown block containing only the extracted values.`; - -export const transferAction: Action = { - - name: "SEND_TOKEN", - similes: [ - "TRANSFER_TOKEN_ON_ABSTRACT", - "TRANSFER_TOKENS_ON_ABSTRACT", - "SEND_TOKENS_ON_ABSTRACT", - "SEND_ETH_ON_ABSTRACT", - "PAY_ON_ABSTRACT", - "MOVE_TOKENS_ON_ABSTRACT", - "MOVE_ETH_ON_ABSTRACT", - ], - // eslint-disable-next-line - validate: async (runtime: IAgentRuntime) => { - await validateAbstractConfig(runtime); - return true; - }, - description: "Transfer tokens from the agent's wallet to another address", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback, - ): Promise => { - elizaLogger.log("Starting Abstract SEND_TOKEN handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Compose transfer context - currentState.currentMessage = `${currentState.recentMessagesData[1].content.text}`; - const transferContext = composeContext({ - state: currentState, - template: transferTemplate, - }); - - // Generate transfer content - const content = ( - await generateObject({ - runtime, - context: transferContext, - modelClass: ModelClass.SMALL, - schema: TransferSchema, - }) - ).object as TransferContent; - - let tokenAddress = content.tokenAddress; - - if (content.tokenSymbol) { - const tokenMemory = await runtime.messageManager.getMemoryById( - stringToUuid(`${content.tokenSymbol}-${runtime.agentId}`), - ); - - if (typeof tokenMemory?.content?.tokenAddress === "string") { - tokenAddress = tokenMemory.content.tokenAddress; - } - - if (!tokenAddress) { - tokenAddress = getTokenByName(content.tokenSymbol)?.address; - } - } - - const resolvedRecipient = await resolveAddress(content.recipient); - - const input = { - tokenAddress: tokenAddress, - recipient: resolvedRecipient, - amount: content.amount.toString(), - useAGW: content.useAGW, - }; - const result = validatedTransferSchema.safeParse(input); - - if (!result.success) { - elizaLogger.error( - "Invalid content for TRANSFER_TOKEN action.", - result.error.message, - ); - if (callback) { - callback({ - text: "Unable to process transfer request. Did not extract valid parameters.", - content: { error: result.error.message, ...input }, - }); - } - return false; - } - - if (!resolvedRecipient) { - throw new Error("Invalid recipient address or ENS name"); - } - - try { - const account = useGetAccount(runtime); - - let symbol = "ETH"; - let decimals = 18; - const isEthTransfer = result.data.tokenAddress === ETH_ADDRESS; - const { tokenAddress, recipient, amount, useAGW } = result.data; - - if (!isEthTransfer) { - [symbol, decimals] = await Promise.all([ - abstractPublicClient.readContract({ - address: tokenAddress, - abi: erc20Abi, - functionName: "symbol", - }), - abstractPublicClient.readContract({ - address: tokenAddress, - abi: erc20Abi, - functionName: "decimals", - }), - ]); - } - let hash: Hash; - const tokenAmount = parseUnits(amount.toString(), decimals); - - if (useAGW) { - const abstractClient = (await createAbstractClient({ - chain: abstractTestnet, - signer: account, - })) as AbstractClient; - - if (isEthTransfer) { - hash = await abstractClient.sendTransaction({ - chain: abstractTestnet, - to: recipient, - value: tokenAmount, - kzg: undefined, - }); - } else { - hash = await abstractClient.writeContract({ - chain: abstractTestnet, - address: tokenAddress, - abi: erc20Abi, - functionName: "transfer", - args: [recipient, tokenAmount], - }); - } - } else { - const walletClient = useGetWalletClient(); - if (isEthTransfer) { - hash = await walletClient.sendTransaction({ - account, - chain: abstractTestnet, - to: recipient, - value: tokenAmount, - kzg: undefined, - }); - } else { - hash = await walletClient.writeContract({ - account, - chain: abstractTestnet, - address: tokenAddress, - abi: erc20Abi, - functionName: "transfer", - args: [recipient, tokenAmount], - }); - } - } - - elizaLogger.success( - `Transfer completed successfully! Transaction hash: ${hash}`, - ); - if (callback) { - callback({ - text: `Transfer completed successfully! Succesfully sent ${formatUnits(tokenAmount, decimals)} ${symbol} to ${recipient} using ${useAGW ? "AGW" : "wallet client"}. Transaction hash: ${hash}`, - content: { - hash, - tokenAmount: formatUnits(tokenAmount, decimals), - symbol, - recipient, - useAGW, - }, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error during token transfer:", error); - if (callback) { - callback({ - text: `Error transferring tokens: ${error.message}`, - content: { error: error.message }, - }); - } - return false; - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Send 0.01 ETH to 0x114B242D931B47D5cDcEe7AF065856f70ee278C4", - }, - }, - { - user: "{{agent}}", - content: { - text: "Sure, I'll send 0.01 ETH to that address now.", - action: "SEND_TOKEN", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully sent 0.01 ETH to 0x114B242D931B47D5cDcEe7AF065856f70ee278C4\nTransaction: 0xdde850f9257365fffffc11324726ebdcf5b90b01c6eec9b3e7ab3e81fde6f14b", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Send 0.01 ETH to 0x114B242D931B47D5cDcEe7AF065856f70ee278C4 using your abstract global wallet", - }, - }, - { - user: "{{agent}}", - content: { - text: "Sure, I'll send 0.01 ETH to that address now using my AGW.", - action: "SEND_TOKEN", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully sent 0.01 ETH to 0x114B242D931B47D5cDcEe7AF065856f70ee278C4\nTransaction: 0xdde850f9257365fffffc11324726ebdcf5b90b01c6eec9b3e7ab3e81fde6f14b using my AGW", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Send 0.01 ETH to alim.getclave.eth", - }, - }, - { - user: "{{agent}}", - content: { - text: "Sure, I'll send 0.01 ETH to alim.getclave.eth now.", - action: "SEND_TOKEN", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully sent 0.01 ETH to alim.getclave.eth\nTransaction: 0xdde850f9257365fffffc11324726ebdcf5b90b01c6eec9b3e7ab3e81fde6f14b", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Send 100 USDC to 0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62", - }, - }, - { - user: "{{agent}}", - content: { - text: "Sure, I'll send 100 USDC to that address now.", - action: "SEND_TOKEN", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully sent 100 USDC to 0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62\nTransaction: 0x4fed598033f0added272c3ddefd4d83a521634a738474400b27378db462a76ec", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Please send 0.1 ETH to 0xbD8679cf79137042214fA4239b02F4022208EE82", - }, - }, - { - user: "{{agent}}", - content: { - text: "Of course. Sending 0.1 ETH to that address now.", - action: "SEND_TOKEN", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully sent 0.1 ETH to 0xbD8679cf79137042214fA4239b02F4022208EE82\nTransaction: 0x0b9f23e69ea91ba98926744472717960cc7018d35bc3165bdba6ae41670da0f0", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Please send 1 MyToken to 0xbD8679cf79137042214fA4239b02F4022208EE82", - }, - }, - { - user: "{{agent}}", - content: { - text: "Of course. Sending 1 MyToken right away.", - action: "SEND_TOKEN", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully sent 1 MyToken to 0xbD8679cf79137042214fA4239b02F4022208EE82\nTransaction: 0x0b9f23e69ea91ba98926744472717960cc7018d35bc3165bdba6ae41670da0f0", - }, - }, - ], - ] as ActionExample[][], -}; diff --git a/packages/plugin-abstract/src/constants/contracts/basicToken.json b/packages/plugin-abstract/src/constants/contracts/basicToken.json deleted file mode 100644 index c36c2b8982d6d..0000000000000 --- a/packages/plugin-abstract/src/constants/contracts/basicToken.json +++ /dev/null @@ -1,339 +0,0 @@ -{ - "_format": "hh-sol-artifact-1", - "contractName": "BasicToken", - "sourceName": "contracts/BasicToken.sol", - "abi": [ - { - "inputs": [ - { - "internalType": "string", - "name": "name", - "type": "string" - }, - { - "internalType": "string", - "name": "symbol", - "type": "string" - }, - { - "internalType": "uint256", - "name": "initialSupply", - "type": "uint256" - } - ], - "stateMutability": "nonpayable", - "type": "constructor" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "spender", - "type": "address" - }, - { - "internalType": "uint256", - "name": "allowance", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "needed", - "type": "uint256" - } - ], - "name": "ERC20InsufficientAllowance", - "type": "error" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "sender", - "type": "address" - }, - { - "internalType": "uint256", - "name": "balance", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "needed", - "type": "uint256" - } - ], - "name": "ERC20InsufficientBalance", - "type": "error" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "approver", - "type": "address" - } - ], - "name": "ERC20InvalidApprover", - "type": "error" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "receiver", - "type": "address" - } - ], - "name": "ERC20InvalidReceiver", - "type": "error" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "sender", - "type": "address" - } - ], - "name": "ERC20InvalidSender", - "type": "error" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "spender", - "type": "address" - } - ], - "name": "ERC20InvalidSpender", - "type": "error" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "owner", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "spender", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "Approval", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "from", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "Transfer", - "type": "event" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "owner", - "type": "address" - }, - { - "internalType": "address", - "name": "spender", - "type": "address" - } - ], - "name": "allowance", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "spender", - "type": "address" - }, - { - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "approve", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "account", - "type": "address" - } - ], - "name": "balanceOf", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "decimals", - "outputs": [ - { - "internalType": "uint8", - "name": "", - "type": "uint8" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "name", - "outputs": [ - { - "internalType": "string", - "name": "", - "type": "string" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "symbol", - "outputs": [ - { - "internalType": "string", - "name": "", - "type": "string" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "transfer", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "from", - "type": "address" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "transferFrom", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "nonpayable", - "type": "function" - } - ], - "bytecode": "0x608060405234801561001057600080fd5b506040516117bf3803806117bf833981810160405281019061003291906104c6565b828281600390816100439190610768565b5080600490816100539190610768565b505050610066338261006e60201b60201c565b50505061095a565b600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff16036100e05760006040517fec442f050000000000000000000000000000000000000000000000000000000081526004016100d7919061087b565b60405180910390fd5b6100f2600083836100f660201b60201c565b5050565b600073ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff160361014857806002600082825461013c91906108c5565b9250508190555061021b565b60008060008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020549050818110156101d4578381836040517fe450d38c0000000000000000000000000000000000000000000000000000000081526004016101cb93929190610908565b60405180910390fd5b8181036000808673ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002081905550505b600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff160361026457806002600082825403925050819055506102b1565b806000808473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020600082825401925050819055505b8173ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef8360405161030e919061093f565b60405180910390a3505050565b6000604051905090565b600080fd5b600080fd5b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b61038282610339565b810181811067ffffffffffffffff821117156103a1576103a061034a565b5b80604052505050565b60006103b461031b565b90506103c08282610379565b919050565b600067ffffffffffffffff8211156103e0576103df61034a565b5b6103e982610339565b9050602081019050919050565b60005b838110156104145780820151818401526020810190506103f9565b60008484015250505050565b600061043361042e846103c5565b6103aa565b90508281526020810184848401111561044f5761044e610334565b5b61045a8482856103f6565b509392505050565b600082601f8301126104775761047661032f565b5b8151610487848260208601610420565b91505092915050565b6000819050919050565b6104a381610490565b81146104ae57600080fd5b50565b6000815190506104c08161049a565b92915050565b6000806000606084860312156104df576104de610325565b5b600084015167ffffffffffffffff8111156104fd576104fc61032a565b5b61050986828701610462565b935050602084015167ffffffffffffffff81111561052a5761052961032a565b5b61053686828701610462565b9250506040610547868287016104b1565b9150509250925092565b600081519050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b600060028204905060018216806105a357607f821691505b6020821081036105b6576105b561055c565b5b50919050565b60008190508160005260206000209050919050565b60006020601f8301049050919050565b600082821b905092915050565b60006008830261061e7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff826105e1565b61062886836105e1565b95508019841693508086168417925050509392505050565b6000819050919050565b600061066561066061065b84610490565b610640565b610490565b9050919050565b6000819050919050565b61067f8361064a565b61069361068b8261066c565b8484546105ee565b825550505050565b600090565b6106a861069b565b6106b3818484610676565b505050565b5b818110156106d7576106cc6000826106a0565b6001810190506106b9565b5050565b601f82111561071c576106ed816105bc565b6106f6846105d1565b81016020851015610705578190505b610719610711856105d1565b8301826106b8565b50505b505050565b600082821c905092915050565b600061073f60001984600802610721565b1980831691505092915050565b6000610758838361072e565b9150826002028217905092915050565b61077182610551565b67ffffffffffffffff81111561078a5761078961034a565b5b610794825461058b565b61079f8282856106db565b600060209050601f8311600181146107d257600084156107c0578287015190505b6107ca858261074c565b865550610832565b601f1984166107e0866105bc565b60005b82811015610808578489015182556001820191506020850194506020810190506107e3565b868310156108255784890151610821601f89168261072e565b8355505b6001600288020188555050505b505050505050565b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b60006108658261083a565b9050919050565b6108758161085a565b82525050565b6000602082019050610890600083018461086c565b92915050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b60006108d082610490565b91506108db83610490565b92508282019050808211156108f3576108f2610896565b5b92915050565b61090281610490565b82525050565b600060608201905061091d600083018661086c565b61092a60208301856108f9565b61093760408301846108f9565b949350505050565b600060208201905061095460008301846108f9565b92915050565b610e56806109696000396000f3fe608060405234801561001057600080fd5b50600436106100935760003560e01c8063313ce56711610066578063313ce5671461013457806370a082311461015257806395d89b4114610182578063a9059cbb146101a0578063dd62ed3e146101d057610093565b806306fdde0314610098578063095ea7b3146100b657806318160ddd146100e657806323b872dd14610104575b600080fd5b6100a0610200565b6040516100ad9190610aaa565b60405180910390f35b6100d060048036038101906100cb9190610b65565b610292565b6040516100dd9190610bc0565b60405180910390f35b6100ee6102b5565b6040516100fb9190610bea565b60405180910390f35b61011e60048036038101906101199190610c05565b6102bf565b60405161012b9190610bc0565b60405180910390f35b61013c6102ee565b6040516101499190610c74565b60405180910390f35b61016c60048036038101906101679190610c8f565b6102f7565b6040516101799190610bea565b60405180910390f35b61018a61033f565b6040516101979190610aaa565b60405180910390f35b6101ba60048036038101906101b59190610b65565b6103d1565b6040516101c79190610bc0565b60405180910390f35b6101ea60048036038101906101e59190610cbc565b6103f4565b6040516101f79190610bea565b60405180910390f35b60606003805461020f90610d2b565b80601f016020809104026020016040519081016040528092919081815260200182805461023b90610d2b565b80156102885780601f1061025d57610100808354040283529160200191610288565b820191906000526020600020905b81548152906001019060200180831161026b57829003601f168201915b5050505050905090565b60008061029d61047b565b90506102aa818585610483565b600191505092915050565b6000600254905090565b6000806102ca61047b565b90506102d7858285610495565b6102e285858561052a565b60019150509392505050565b60006012905090565b60008060008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020549050919050565b60606004805461034e90610d2b565b80601f016020809104026020016040519081016040528092919081815260200182805461037a90610d2b565b80156103c75780601f1061039c576101008083540402835291602001916103c7565b820191906000526020600020905b8154815290600101906020018083116103aa57829003601f168201915b5050505050905090565b6000806103dc61047b565b90506103e981858561052a565b600191505092915050565b6000600160008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002054905092915050565b600033905090565b610490838383600161061e565b505050565b60006104a184846103f4565b90507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8110156105245781811015610514578281836040517ffb8f41b200000000000000000000000000000000000000000000000000000000815260040161050b93929190610d6b565b60405180910390fd5b6105238484848403600061061e565b5b50505050565b600073ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff160361059c5760006040517f96c6fd1e0000000000000000000000000000000000000000000000000000000081526004016105939190610da2565b60405180910390fd5b600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff160361060e5760006040517fec442f050000000000000000000000000000000000000000000000000000000081526004016106059190610da2565b60405180910390fd5b6106198383836107f5565b505050565b600073ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff16036106905760006040517fe602df050000000000000000000000000000000000000000000000000000000081526004016106879190610da2565b60405180910390fd5b600073ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff16036107025760006040517f94280d620000000000000000000000000000000000000000000000000000000081526004016106f99190610da2565b60405180910390fd5b81600160008673ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000208190555080156107ef578273ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff167f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925846040516107e69190610bea565b60405180910390a35b50505050565b600073ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff160361084757806002600082825461083b9190610dec565b9250508190555061091a565b60008060008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020549050818110156108d3578381836040517fe450d38c0000000000000000000000000000000000000000000000000000000081526004016108ca93929190610d6b565b60405180910390fd5b8181036000808673ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002081905550505b600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff160361096357806002600082825403925050819055506109b0565b806000808473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020600082825401925050819055505b8173ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef83604051610a0d9190610bea565b60405180910390a3505050565b600081519050919050565b600082825260208201905092915050565b60005b83811015610a54578082015181840152602081019050610a39565b60008484015250505050565b6000601f19601f8301169050919050565b6000610a7c82610a1a565b610a868185610a25565b9350610a96818560208601610a36565b610a9f81610a60565b840191505092915050565b60006020820190508181036000830152610ac48184610a71565b905092915050565b600080fd5b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b6000610afc82610ad1565b9050919050565b610b0c81610af1565b8114610b1757600080fd5b50565b600081359050610b2981610b03565b92915050565b6000819050919050565b610b4281610b2f565b8114610b4d57600080fd5b50565b600081359050610b5f81610b39565b92915050565b60008060408385031215610b7c57610b7b610acc565b5b6000610b8a85828601610b1a565b9250506020610b9b85828601610b50565b9150509250929050565b60008115159050919050565b610bba81610ba5565b82525050565b6000602082019050610bd56000830184610bb1565b92915050565b610be481610b2f565b82525050565b6000602082019050610bff6000830184610bdb565b92915050565b600080600060608486031215610c1e57610c1d610acc565b5b6000610c2c86828701610b1a565b9350506020610c3d86828701610b1a565b9250506040610c4e86828701610b50565b9150509250925092565b600060ff82169050919050565b610c6e81610c58565b82525050565b6000602082019050610c896000830184610c65565b92915050565b600060208284031215610ca557610ca4610acc565b5b6000610cb384828501610b1a565b91505092915050565b60008060408385031215610cd357610cd2610acc565b5b6000610ce185828601610b1a565b9250506020610cf285828601610b1a565b9150509250929050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b60006002820490506001821680610d4357607f821691505b602082108103610d5657610d55610cfc565b5b50919050565b610d6581610af1565b82525050565b6000606082019050610d806000830186610d5c565b610d8d6020830185610bdb565b610d9a6040830184610bdb565b949350505050565b6000602082019050610db76000830184610d5c565b92915050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b6000610df782610b2f565b9150610e0283610b2f565b9250828201905080821115610e1a57610e19610dbd565b5b9291505056fea2646970667358221220f413ecdb821df363531c0a0bcc98dfeb9baf7e41031710ae13ffb1596f19db2b64736f6c634300081c0033", - "linkReferences": {}, - "deployedLinkReferences": {} -} \ No newline at end of file diff --git a/packages/plugin-abstract/src/constants/index.ts b/packages/plugin-abstract/src/constants/index.ts deleted file mode 100644 index cd3b1267dc254..0000000000000 --- a/packages/plugin-abstract/src/constants/index.ts +++ /dev/null @@ -1 +0,0 @@ -export const ETH_ADDRESS = "0x000000000000000000000000000000000000800A"; diff --git a/packages/plugin-abstract/src/environment.ts b/packages/plugin-abstract/src/environment.ts deleted file mode 100644 index 502872ef0166c..0000000000000 --- a/packages/plugin-abstract/src/environment.ts +++ /dev/null @@ -1,44 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { isAddress } from "viem"; -import { z } from "zod"; - -export const abstractEnvSchema = z.object({ - ABSTRACT_ADDRESS: z - .string() - .min(1, "Abstract address is required") - .refine((address) => isAddress(address, { strict: false }), { - message: "Abstract address must be a valid address", - }), - ABSTRACT_PRIVATE_KEY: z - .string() - .min(1, "Abstract private key is required") - .refine((key) => /^[a-fA-F0-9]{64}$/.test(key), { - message: - "Abstract private key must be a 64-character hexadecimal string (32 bytes) without the '0x' prefix", - }), -}); - -export type AbstractConfig = z.infer; - -export async function validateAbstractConfig( - runtime: IAgentRuntime, -): Promise { - try { - const config = { - ABSTRACT_ADDRESS: runtime.getSetting("ABSTRACT_ADDRESS"), - ABSTRACT_PRIVATE_KEY: runtime.getSetting("ABSTRACT_PRIVATE_KEY"), - }; - - return abstractEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `Abstract configuration validation failed:\n${errorMessages}`, - ); - } - throw error; - } -} diff --git a/packages/plugin-abstract/src/hooks/index.ts b/packages/plugin-abstract/src/hooks/index.ts deleted file mode 100644 index b77aa9b1c6c47..0000000000000 --- a/packages/plugin-abstract/src/hooks/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from "./useGetAccount"; -export * from "./useGetWalletClient"; diff --git a/packages/plugin-abstract/src/hooks/useGetAccount.ts b/packages/plugin-abstract/src/hooks/useGetAccount.ts deleted file mode 100644 index 5700c3f848c68..0000000000000 --- a/packages/plugin-abstract/src/hooks/useGetAccount.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import type { PrivateKeyAccount } from "viem/accounts"; -import { privateKeyToAccount } from "viem/accounts"; - -export const useGetAccount = (runtime: IAgentRuntime): PrivateKeyAccount => { - const PRIVATE_KEY = runtime.getSetting("ABSTRACT_PRIVATE_KEY"); - if (!PRIVATE_KEY) { - throw new Error("ABSTRACT_PRIVATE_KEY is not set"); - } - return privateKeyToAccount(`0x${PRIVATE_KEY}`); -}; diff --git a/packages/plugin-abstract/src/hooks/useGetWalletClient.ts b/packages/plugin-abstract/src/hooks/useGetWalletClient.ts deleted file mode 100644 index 93c464737dda5..0000000000000 --- a/packages/plugin-abstract/src/hooks/useGetWalletClient.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { createWalletClient, http } from "viem"; -import { abstractTestnet } from "viem/chains"; -import { eip712WalletActions } from "viem/zksync"; - -export const useGetWalletClient = (): ReturnType => { - const client = createWalletClient({ - chain: abstractTestnet, - transport: http(), - }).extend(eip712WalletActions()); - - return client; -}; diff --git a/packages/plugin-abstract/src/index.ts b/packages/plugin-abstract/src/index.ts deleted file mode 100644 index 09076cc0ce8b0..0000000000000 --- a/packages/plugin-abstract/src/index.ts +++ /dev/null @@ -1,13 +0,0 @@ -import type { Plugin } from "@elizaos/core"; - -import { transferAction, getBalanceAction, deployTokenAction } from "./actions"; - -export const abstractPlugin: Plugin = { - name: "abstract", - description: "Abstract Plugin for Eliza", - actions: [transferAction, getBalanceAction, deployTokenAction], - evaluators: [], - providers: [], -}; - -export default abstractPlugin; diff --git a/packages/plugin-abstract/src/utils/index.ts b/packages/plugin-abstract/src/utils/index.ts deleted file mode 100644 index 70b7085604757..0000000000000 --- a/packages/plugin-abstract/src/utils/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./viemHelpers"; diff --git a/packages/plugin-abstract/src/utils/viemHelpers.ts b/packages/plugin-abstract/src/utils/viemHelpers.ts deleted file mode 100644 index c1c7044e169d7..0000000000000 --- a/packages/plugin-abstract/src/utils/viemHelpers.ts +++ /dev/null @@ -1,87 +0,0 @@ -import { - type Address, - createPublicClient, - getAddress, - http, - isAddress, - type PublicClient, -} from "viem"; -import { abstractTestnet, mainnet } from "viem/chains"; -import { normalize } from "viem/ens"; -import { elizaLogger } from "@elizaos/core"; -import { ETH_ADDRESS } from "../constants"; - -import { - type Account, - type Client, - createClient, - createWalletClient, - type Transport, -} from "viem"; -import { toAccount } from "viem/accounts"; -import type { ChainEIP712 } from "viem/zksync"; - -import { getSmartAccountAddressFromInitialSigner } from "./utils.js"; -import { - type AbstractWalletActions, - globalWalletActions, -} from "./walletActions.js"; - -// Shared clients -export const ethereumClient = createPublicClient({ - chain: mainnet, - transport: http(), -}); - -export const abstractPublicClient = createPublicClient({ - chain: abstractTestnet, - transport: http(), -}); - -// Helper to resolve ENS names -export async function resolveAddress( - addressOrEns: string, -): Promise
    { - if (isAddress(addressOrEns)) { - return getAddress(addressOrEns); - } - - let address: string; - try { - const name = normalize(addressOrEns.trim()); - const resolved = await ethereumClient.getEnsAddress({ name }); - if (resolved) { - address = resolved; - elizaLogger.log(`Resolved ${name} to ${resolved}`); - } - } catch (error) { - elizaLogger.error("Error resolving ENS name:", error); - } - - return address ? getAddress(address) : null; -} - -const tokens = [ - { - address: ETH_ADDRESS, - symbol: "ETH", - decimals: 18, - }, - { - address: "0xe4c7fbb0a626ed208021ccaba6be1566905e2dfc", - symbol: "USDC", - decimals: 6, - }, -]; - -export function getTokenByName(name: string) { - const token = tokens.find( - (token) => token.symbol.toLowerCase() === name.toLowerCase(), - ); - - if (!token) { - throw new Error(`Token ${name} not found`); - } - - return token; -} diff --git a/packages/plugin-abstract/tsconfig.json b/packages/plugin-abstract/tsconfig.json deleted file mode 100644 index 005fbac9d3634..0000000000000 --- a/packages/plugin-abstract/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/plugin-abstract/tsup.config.ts b/packages/plugin-abstract/tsup.config.ts deleted file mode 100644 index e42bf4efeae22..0000000000000 --- a/packages/plugin-abstract/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-abstract/vitest.config.ts b/packages/plugin-abstract/vitest.config.ts deleted file mode 100644 index 81433e47fe0a7..0000000000000 --- a/packages/plugin-abstract/vitest.config.ts +++ /dev/null @@ -1,14 +0,0 @@ -/// -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - coverage: { - reporter: ['text', 'json', 'html'], - include: ['src/**/*.ts'], - exclude: ['**/*.test.ts', '**/*.d.ts'] - } - } -}); diff --git a/packages/plugin-agentkit/README.md b/packages/plugin-agentkit/README.md deleted file mode 100644 index f5a2580832c14..0000000000000 --- a/packages/plugin-agentkit/README.md +++ /dev/null @@ -1,123 +0,0 @@ -# @elizaos/plugin-agentkit - -AgentKit plugin for Eliza that enables interaction with CDP AgentKit tools for NFT and token management. - -## Setup - -1. Install dependencies: - -```bash -pnpm install -``` - -2. Configure environment variables: - -```env -CDP_API_KEY_NAME=your_key_name -CDP_API_KEY_PRIVATE_KEY=your_private_key -CDP_AGENT_KIT_NETWORK=base-sepolia # Optional: Defaults to base-sepolia -``` - -3. Add the plugin to your character configuration: - -```json -{ - "plugins": ["@elizaos/plugin-agentkit"], - "settings": { - "secrets": { - "CDP_API_KEY_NAME": "your_key_name", - "CDP_API_KEY_PRIVATE_KEY": "your_private_key" - } - } -} -``` - -## Available Tools - -The plugin provides access to the following CDP AgentKit tools: - -- `GET_WALLET_DETAILS`: Get wallet information -- `DEPLOY_NFT`: Deploy a new NFT collection -- `DEPLOY_TOKEN`: Deploy a new token -- `GET_BALANCE`: Check token or NFT balance -- `MINT_NFT`: Mint NFTs from a collection -- `REGISTER_BASENAME`: Register a basename for NFTs -- `REQUEST_FAUCET_FUNDS`: Request testnet funds -- `TRADE`: Execute trades -- `TRANSFER`: Transfer tokens or NFTs -- `WOW_BUY_TOKEN`: Buy WOW tokens -- `WOW_SELL_TOKEN`: Sell WOW tokens -- `WOW_CREATE_TOKEN`: Create new WOW tokens - -## Usage Examples - -1. Get wallet details: - -``` -Can you show me my wallet details? -``` - -2. Deploy an NFT collection: - -``` -Deploy a new NFT collection called "Music NFTs" with symbol "MUSIC" -``` - -3. Create a token: - -``` -Create a new WOW token called "Artist Token" with symbol "ART" -``` - -4. Check balance: - -``` -What's my current balance? -``` - -## Development - -1. Build the plugin: - -```bash -pnpm build -``` - -2. Run in development mode: - -```bash -pnpm dev -``` - -## Dependencies - -- @elizaos/core -- @coinbase/cdp-agentkit-core -- @coinbase/cdp-langchain -- @langchain/core - -## Network Support - -The plugin supports the following networks: - -- Base Sepolia (default) -- Base Mainnet - -Configure the network using the `CDP_AGENT_KIT_NETWORK` environment variable. - -## Troubleshooting - -1. If tools are not being triggered: - - - Verify CDP API key configuration - - Check network settings - - Ensure character configuration includes the plugin - -2. Common errors: - - "Cannot find package": Make sure dependencies are installed - - "API key not found": Check environment variables - - "Network error": Verify network configuration - -## License - -MIT diff --git a/packages/plugin-agentkit/__tests__/provider.test.ts b/packages/plugin-agentkit/__tests__/provider.test.ts deleted file mode 100644 index 7763ea2797708..0000000000000 --- a/packages/plugin-agentkit/__tests__/provider.test.ts +++ /dev/null @@ -1,120 +0,0 @@ -import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; -import { getClient, walletProvider } from '../src/provider'; -import { CdpAgentkit } from '@coinbase/cdp-agentkit-core'; -import * as fs from 'fs'; - -// Mock dependencies -vi.mock('@coinbase/cdp-agentkit-core', () => ({ - CdpAgentkit: { - configureWithWallet: vi.fn().mockImplementation(async (config) => ({ - exportWallet: vi.fn().mockResolvedValue('mocked-wallet-data'), - wallet: { - addresses: [{ id: '0x123...abc' }] - } - })) - } -})); - -vi.mock('fs', () => ({ - existsSync: vi.fn(), - readFileSync: vi.fn(), - writeFileSync: vi.fn() -})); - -describe('AgentKit Provider', () => { - const mockRuntime = { - name: 'test-runtime', - memory: new Map(), - getMemory: vi.fn(), - setMemory: vi.fn(), - clearMemory: vi.fn() - }; - - beforeEach(() => { - vi.clearAllMocks(); - process.env.CDP_AGENT_KIT_NETWORK = 'base-sepolia'; - }); - - afterEach(() => { - delete process.env.CDP_AGENT_KIT_NETWORK; - }); - - describe('getClient', () => { - it('should create new wallet when no existing wallet data', async () => { - vi.mocked(fs.existsSync).mockReturnValue(false); - - const client = await getClient(); - - expect(CdpAgentkit.configureWithWallet).toHaveBeenCalledWith({ - networkId: 'base-sepolia' - }); - expect(fs.writeFileSync).toHaveBeenCalledWith( - 'wallet_data.txt', - 'mocked-wallet-data' - ); - expect(client).toBeDefined(); - }); - - it('should use existing wallet data when available', async () => { - vi.mocked(fs.existsSync).mockReturnValue(true); - vi.mocked(fs.readFileSync).mockReturnValue('existing-wallet-data'); - - const client = await getClient(); - - expect(CdpAgentkit.configureWithWallet).toHaveBeenCalledWith({ - cdpWalletData: 'existing-wallet-data', - networkId: 'base-sepolia' - }); - expect(fs.writeFileSync).toHaveBeenCalledWith( - 'wallet_data.txt', - 'mocked-wallet-data' - ); - expect(client).toBeDefined(); - }); - - it('should handle file read errors gracefully', async () => { - vi.mocked(fs.existsSync).mockReturnValue(true); - vi.mocked(fs.readFileSync).mockImplementation(() => { - throw new Error('File read error'); - }); - - const client = await getClient(); - - expect(CdpAgentkit.configureWithWallet).toHaveBeenCalledWith({ - networkId: 'base-sepolia' - }); - expect(fs.writeFileSync).toHaveBeenCalledWith( - 'wallet_data.txt', - 'mocked-wallet-data' - ); - expect(client).toBeDefined(); - }); - - it('should use custom network from environment variable', async () => { - process.env.CDP_AGENT_KIT_NETWORK = 'custom-network'; - vi.mocked(fs.existsSync).mockReturnValue(false); - - await getClient(); - - expect(CdpAgentkit.configureWithWallet).toHaveBeenCalledWith({ - networkId: 'custom-network' - }); - }); - }); - - describe('walletProvider', () => { - it('should return wallet address', async () => { - const result = await walletProvider.get(mockRuntime); - expect(result).toBe('AgentKit Wallet Address: 0x123...abc'); - }); - - it('should handle errors and return null', async () => { - vi.mocked(CdpAgentkit.configureWithWallet).mockRejectedValueOnce( - new Error('Configuration failed') - ); - - const result = await walletProvider.get(mockRuntime); - expect(result).toBeNull(); - }); - }); -}); diff --git a/packages/plugin-agentkit/biome.json b/packages/plugin-agentkit/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-agentkit/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-agentkit/package.json b/packages/plugin-agentkit/package.json deleted file mode 100644 index 041c37f853ee5..0000000000000 --- a/packages/plugin-agentkit/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "@elizaos/plugin-agentkit", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "@coinbase/cdp-agentkit-core": "^0.0.10", - "@coinbase/cdp-langchain": "^0.0.11", - "@langchain/core": "^0.3.27", - "tsup": "8.3.5" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "vitest": "^3.0.0" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "test:watch": "vitest watch", - "test:coverage": "vitest run --coverage", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - } -} diff --git a/packages/plugin-agentkit/src/actions.ts b/packages/plugin-agentkit/src/actions.ts deleted file mode 100644 index b08c65f927e7a..0000000000000 --- a/packages/plugin-agentkit/src/actions.ts +++ /dev/null @@ -1,179 +0,0 @@ -import { - type Action, - generateText, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - composeContext, - generateObject, -} from "@elizaos/core"; -import type { CdpAgentkit } from "@coinbase/cdp-agentkit-core"; -import { CdpToolkit, type Tool } from "@coinbase/cdp-langchain"; - -type GetAgentKitActionsParams = { - getClient: () => Promise; - config?: { - networkId?: string; - }; -}; - -/** - * Get all AgentKit actions - */ -export async function getAgentKitActions({ - getClient, -}: GetAgentKitActionsParams): Promise { - const agentkit = await getClient(); - const cdpToolkit = new CdpToolkit(agentkit); - const tools = cdpToolkit.getTools(); - const actions = tools.map((tool: Tool) => ({ - name: tool.name.toUpperCase(), - description: tool.description, - similes: [], - validate: async () => true, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State | undefined, - _options?: Record, - callback?: HandlerCallback - ): Promise => { - try { - const client = await getClient(); - let currentState = - state ?? (await runtime.composeState(message)); - currentState = await runtime.updateRecentMessageState( - currentState - ); - - const parameterContext = composeParameterContext( - tool, - currentState - ); - const parameters = await generateParameters( - runtime, - parameterContext, - tool - ); - - const result = await executeToolAction( - tool, - parameters, - client - ); - - const responseContext = composeResponseContext( - tool, - result, - currentState - ); - const response = await generateResponse( - runtime, - responseContext - ); - - callback?.({ text: response, content: result }); - return true; - } catch (error) { - const errorMessage = - error instanceof Error ? error.message : String(error); - callback?.({ - text: `Error executing action ${tool.name}: ${errorMessage}`, - content: { error: errorMessage }, - }); - return false; - } - }, - examples: [], - })); - return actions; -} - -async function executeToolAction( - tool: Tool, - parameters: unknown, - client: CdpAgentkit -): Promise { - const toolkit = new CdpToolkit(client); - const tools = toolkit.getTools(); - const selectedTool = tools.find((t) => t.name === tool.name); - - if (!selectedTool) { - throw new Error(`Tool ${tool.name} not found`); - } - - return await selectedTool.call(parameters); -} - -function composeParameterContext(tool: Tool, state: State): string { - const contextTemplate = `{{recentMessages}} - -Given the recent messages, extract the following information for the action "${tool.name}": -${tool.description} -`; - return composeContext({ state, template: contextTemplate }); -} - -async function generateParameters( - runtime: IAgentRuntime, - context: string, - tool: Tool -): Promise { - const { object } = await generateObject({ - runtime, - context, - modelClass: ModelClass.LARGE, - schema: tool.schema, - }); - - return object; -} - -function composeResponseContext( - tool: Tool, - result: unknown, - state: State -): string { - const responseTemplate = ` -# Action Examples -{{actionExamples}} - -# Knowledge -{{knowledge}} - -# Task: Generate dialog and actions for the character {{agentName}}. -About {{agentName}}: -{{bio}} -{{lore}} - -{{providers}} - -{{attachments}} - -# Capabilities -Note that {{agentName}} is capable of reading/seeing/hearing various forms of media, including images, videos, audio, plaintext and PDFs. Recent attachments have been included above under the "Attachments" section. - -The action "${tool.name}" was executed successfully. -Here is the result: -${JSON.stringify(result)} - -{{actions}} - -Respond to the message knowing that the action was successful and these were the previous messages: -{{recentMessages}} -`; - return composeContext({ state, template: responseTemplate }); -} - -async function generateResponse( - runtime: IAgentRuntime, - context: string -): Promise { - return generateText({ - runtime, - context, - modelClass: ModelClass.LARGE, - }); -} diff --git a/packages/plugin-agentkit/src/index.ts b/packages/plugin-agentkit/src/index.ts deleted file mode 100644 index d9e9fd3f7a1ed..0000000000000 --- a/packages/plugin-agentkit/src/index.ts +++ /dev/null @@ -1,44 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { walletProvider, getClient } from "./provider"; -import { getAgentKitActions } from "./actions"; - -// Initial banner -console.log("\n┌════════════════════════════════════════┐"); -console.log("│ AGENTKIT PLUGIN │"); -console.log("├────────────────────────────────────────┤"); -console.log("│ Initializing AgentKit Plugin... │"); -console.log("│ Version: 0.0.1 │"); -console.log("└════════════════════════════════════════┘"); - -const initializeActions = async () => { - try { - // Validate environment variables - const apiKeyName = process.env.CDP_API_KEY_NAME; - const apiKeyPrivateKey = process.env.CDP_API_KEY_PRIVATE_KEY; - - if (!apiKeyName || !apiKeyPrivateKey) { - console.warn("⚠️ Missing CDP API credentials - AgentKit actions will not be available"); - return []; - } - - const actions = await getAgentKitActions({ - getClient, - }); - console.log("✔ AgentKit actions initialized successfully."); - return actions; - } catch (error) { - console.error("❌ Failed to initialize AgentKit actions:", error); - return []; // Return empty array instead of failing - } -}; - -export const agentKitPlugin: Plugin = { - name: "[AgentKit] Integration", - description: "AgentKit integration plugin", - providers: [walletProvider], - evaluators: [], - services: [], - actions: await initializeActions(), -}; - -export default agentKitPlugin; diff --git a/packages/plugin-agentkit/src/provider.ts b/packages/plugin-agentkit/src/provider.ts deleted file mode 100644 index 038a9c0143d87..0000000000000 --- a/packages/plugin-agentkit/src/provider.ts +++ /dev/null @@ -1,60 +0,0 @@ -import type { Provider, IAgentRuntime } from "@elizaos/core"; -import { CdpAgentkit } from "@coinbase/cdp-agentkit-core"; -import * as fs from "node:fs"; - -const WALLET_DATA_FILE = "wallet_data.txt"; - -export async function getClient(): Promise { - // Validate required environment variables first - const apiKeyName = process.env.CDP_API_KEY_NAME; - const apiKeyPrivateKey = process.env.CDP_API_KEY_PRIVATE_KEY; - - if (!apiKeyName || !apiKeyPrivateKey) { - throw new Error("Missing required CDP API credentials. Please set CDP_API_KEY_NAME and CDP_API_KEY_PRIVATE_KEY environment variables."); - } - - let walletDataStr: string | null = null; - - // Read existing wallet data if available - if (fs.existsSync(WALLET_DATA_FILE)) { - try { - walletDataStr = fs.readFileSync(WALLET_DATA_FILE, "utf8"); - } catch (error) { - console.error("Error reading wallet data:", error); - // Continue without wallet data - } - } - - // Configure CDP AgentKit - const config = { - cdpWalletData: walletDataStr || undefined, - networkId: process.env.CDP_AGENT_KIT_NETWORK || "base-sepolia", - apiKeyName: apiKeyName, - apiKeyPrivateKey: apiKeyPrivateKey - }; - - try { - const agentkit = await CdpAgentkit.configureWithWallet(config); - // Save wallet data - const exportedWallet = await agentkit.exportWallet(); - fs.writeFileSync(WALLET_DATA_FILE, exportedWallet); - return agentkit; - } catch (error) { - console.error("Failed to initialize CDP AgentKit:", error); - throw new Error(`Failed to initialize CDP AgentKit: ${error.message || 'Unknown error'}`); - } -} - -export const walletProvider: Provider = { - async get(_runtime: IAgentRuntime): Promise { - try { - const client = await getClient(); - // Access wallet addresses using type assertion based on the known structure - const address = (client as unknown as { wallet: { addresses: Array<{ id: string }> } }).wallet.addresses[0].id; - return `AgentKit Wallet Address: ${address}`; - } catch (error) { - console.error("Error in AgentKit provider:", error); - return `Error initializing AgentKit wallet: ${error.message}`; - } - }, -}; diff --git a/packages/plugin-agentkit/tsconfig.json b/packages/plugin-agentkit/tsconfig.json deleted file mode 100644 index f642a90aee14f..0000000000000 --- a/packages/plugin-agentkit/tsconfig.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "./src", - "declaration": true - }, - "include": ["src"] -} diff --git a/packages/plugin-agentkit/tsup.config.ts b/packages/plugin-agentkit/tsup.config.ts deleted file mode 100644 index a68ccd636adf1..0000000000000 --- a/packages/plugin-agentkit/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "viem", - "@lifi/sdk", - ], -}); diff --git a/packages/plugin-agentkit/vitest.config.ts b/packages/plugin-agentkit/vitest.config.ts deleted file mode 100644 index 81433e47fe0a7..0000000000000 --- a/packages/plugin-agentkit/vitest.config.ts +++ /dev/null @@ -1,14 +0,0 @@ -/// -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - coverage: { - reporter: ['text', 'json', 'html'], - include: ['src/**/*.ts'], - exclude: ['**/*.test.ts', '**/*.d.ts'] - } - } -}); diff --git a/packages/plugin-akash/.npmignore b/packages/plugin-akash/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-akash/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-akash/assets/akash.jpg b/packages/plugin-akash/assets/akash.jpg deleted file mode 100644 index dd08e0e57057f..0000000000000 Binary files a/packages/plugin-akash/assets/akash.jpg and /dev/null differ diff --git a/packages/plugin-akash/biome.json b/packages/plugin-akash/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-akash/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-akash/jest.config.js b/packages/plugin-akash/jest.config.js deleted file mode 100644 index a8331cee2ff07..0000000000000 --- a/packages/plugin-akash/jest.config.js +++ /dev/null @@ -1,31 +0,0 @@ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - roots: ['/test'], - testMatch: [ - "**/__tests__/**/*.+(ts|tsx|js)", - "**/?(*.)+(spec|test).+(ts|tsx|js)" - ], - transform: { - "^.+\\.(ts|tsx)$": "ts-jest" - }, - moduleNameMapper: { - '^@/(.*)$': '/src/$1' - }, - setupFilesAfterEnv: ['/test/setup/jest.setup.ts'], - globals: { - 'ts-jest': { - tsconfig: 'tsconfig.json' - } - }, - testTimeout: 30000, - verbose: true, - collectCoverage: true, - coverageDirectory: "coverage", - coverageReporters: ["text", "lcov"], - coveragePathIgnorePatterns: [ - "/node_modules/", - "/test/fixtures/", - "/test/setup/" - ] -}; \ No newline at end of file diff --git a/packages/plugin-akash/package.json b/packages/plugin-akash/package.json deleted file mode 100644 index b6aade4973dfb..0000000000000 --- a/packages/plugin-akash/package.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "name": "@elizaos/plugin-akash", - "version": "0.25.6-alpha.1", - "description": "Akash Network Plugin for Eliza", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "type": "module", - "scripts": { - "build": "tsup", - "dev": "tsup --watch", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write .", - "test": "vitest", - "test:watch": "vitest watch", - "test:coverage": "vitest run --coverage", - "test:ui": "vitest --ui" - }, - "dependencies": { - "@akashnetwork/akash-api": "^1.4.0", - "@akashnetwork/akashjs": "0.10.1", - "@cosmjs/proto-signing": "^0.31.3", - "@cosmjs/stargate": "0.31.3", - "@elizaos/core": "workspace:*", - "@types/js-yaml": "^4.0.9", - "axios": "^1.7.9", - "dotenv": "^16.4.1", - "jsrsasign": "^11.1.0", - "node-fetch": "^2.7.0", - "chalk": "^5.3.0", - "cli-table3": "^0.6.3", - "ora": "^8.0.1" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "@types/dotenv": "^8.2.0", - "@types/jest": "^29.5.11", - "@types/js-yaml": "^4.0.9", - "@types/node": "^20.10.5", - "@vitest/coverage-v8": "^0.34.6", - "@vitest/ui": "^0.34.6", - "tsup": "^8.0.1", - "typescript": "^5.3.3", - "vite": "^5.0.10", - "vite-tsconfig-paths": "^4.2.2", - "vitest": "^3.0.0" - }, - "peerDependencies": { - "@elizaos/core": "workspace:*" - } -} diff --git a/packages/plugin-akash/readme.md b/packages/plugin-akash/readme.md deleted file mode 100644 index 081f353f26b3a..0000000000000 --- a/packages/plugin-akash/readme.md +++ /dev/null @@ -1,133 +0,0 @@ -# Akash Network Plugin for Eliza - -A powerful plugin for interacting with the Akash Network, enabling deployment management and cloud compute operations through Eliza. - -## Table of Contents -- [Installation](#installation) -- [Configuration](#configuration) -- [Directory Structure](#directory-structure) -- [Available Actions](#available-actions) - -## Installation - -```bash -pnpm add @elizaos/plugin-akash -``` - -## Configuration - -### Environment Variables -Create a `.env` file in your project root with the following configuration: - -```env -# Network Configuration -AKASH_ENV=mainnet -AKASH_NET=https://raw.githubusercontent.com/ovrclk/net/master/mainnet -RPC_ENDPOINT=https://rpc.akashnet.net:443 - -# Transaction Settings -AKASH_GAS_PRICES=0.025uakt -AKASH_GAS_ADJUSTMENT=1.5 -AKASH_KEYRING_BACKEND=os -AKASH_FROM=default -AKASH_FEES=20000uakt - -# Authentication -AKASH_MNEMONIC=your_12_word_mnemonic_here - -# Manifest Settings -AKASH_MANIFEST_MODE=auto # Options: auto, validate_only -AKASH_MANIFEST_VALIDATION_LEVEL=strict # Options: strict, basic, none -AKASH_MANIFEST_PATH=/path/to/manifests # Optional: Path to save generated manifests - -# Deployment Settings -AKASH_DEPOSIT=5000000uakt # Default deployment deposit -AKASH_SDL=deployment.yml # Default SDL file name -``` - -**Important Notes:** -- `AKASH_MNEMONIC`: Your 12-word wallet mnemonic phrase (required) -- `AKASH_MANIFEST_MODE`: Controls manifest generation behavior -- `AKASH_MANIFEST_VALIDATION_LEVEL`: Sets SDL validation strictness -- `AKASH_DEPOSIT`: Default deposit amount for deployments - -⚠️ Never commit your `.env` file with real credentials to version control! - - -#### SDL (Stack Definition Language) -``` -src/sdl/example.sdl.yml -``` -Place your SDL configuration files here. The plugin looks for SDL files in this directory by default. - -#### Certificates -``` -src/.certificates/ -``` -SSL certificates for secure provider communication are stored here. - -## Available Actions - -| Action | Description | Parameters | -|---------------------|------------------------------------------------|---------------------------------------------| -| CREATE_DEPLOYMENT | Create a new deployment | `sdl`, `sdlFile`, `deposit` | -| CLOSE_DEPLOYMENT | Close an existing deployment | `dseq`, `owner` | -| GET_PROVIDER_INFO | Get provider information | `provider` | -| GET_DEPLOYMENT_STATUS| Check deployment status | `dseq`, `owner` | -| GET_GPU_PRICING | Get GPU pricing comparison | `cpu`, `memory`, `storage` | -| GET_MANIFEST | Generate deployment manifest | `sdl`, `sdlFile` | -| GET_PROVIDERS_LIST | List available providers | `filter: { active, hasGPU, region }` | - - -Each action returns a structured response with: -```typescript -{ - text: string; // Human-readable response - content: { - success: boolean; // Operation success status - data?: any; // Action-specific data - error?: { // Present only on failure - code: string; - message: string; - }; - metadata: { // Operation metadata - timestamp: string; - source: string; - action: string; - version: string; - actionId: string; - } - } -} -``` - -## Error Handling - -The plugin includes comprehensive error handling with specific error codes: - -- `VALIDATION_SDL_FAILED`: SDL validation errors -- `WALLET_NOT_INITIALIZED`: Wallet setup issues -- `DEPLOYMENT_CREATION_FAILED`: Deployment failures -- `API_REQUEST_FAILED`: Network/API issues -- `MANIFEST_PARSING_FAILED`: Manifest generation errors -- `PROVIDER_FILTER_ERROR`: Provider filtering issues - -## Development - -### Running Tests -```bash -pnpm test -``` - -### Building -```bash -pnpm run build -``` - -## License - -This project is licensed under the MIT License - see the LICENSE file for details. - -## Support - -For support and questions, please open an issue in the repository or contact the maintainers. diff --git a/packages/plugin-akash/src/actions/closeDeployment.ts b/packages/plugin-akash/src/actions/closeDeployment.ts deleted file mode 100644 index 1b6b9a9fc6d6e..0000000000000 --- a/packages/plugin-akash/src/actions/closeDeployment.ts +++ /dev/null @@ -1,521 +0,0 @@ -import { type Action, elizaLogger } from "@elizaos/core"; -import type { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -import { DirectSecp256k1HdWallet, Registry } from "@cosmjs/proto-signing"; -import { SigningStargateClient } from "@cosmjs/stargate"; -import { getAkashTypeRegistry, getTypeUrl } from "@akashnetwork/akashjs/build/stargate"; -import { MsgCloseDeployment } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; -import { validateAkashConfig } from "../environment"; -import { fetchDeployments } from "./getDeploymentApi"; -import { AkashError, AkashErrorCode } from "../error/error"; -// import { getCertificatePath } from "../utils/paths"; -import { isPluginLoaded } from "../runtime_inspect"; - -interface CloseDeploymentContent extends Content { - dseq?: string; - closeAll?: boolean; -} - -// Certificate file path -// const CERTIFICATE_PATH = getCertificatePath(import.meta.url); - -// Initialize wallet and client -async function initializeClient(runtime: IAgentRuntime) { - elizaLogger.info("=== Initializing Client for Deployment Closure ==="); - const config = await validateAkashConfig(runtime); - - if (!config.AKASH_MNEMONIC) { - throw new AkashError( - "AKASH_MNEMONIC is required for closing deployments", - AkashErrorCode.WALLET_NOT_INITIALIZED - ); - } - - elizaLogger.debug("Initializing wallet", { - rpcEndpoint: config.RPC_ENDPOINT, - chainId: config.AKASH_CHAIN_ID, - version: config.AKASH_VERSION, - hasMnemonic: !!config.AKASH_MNEMONIC - }); - - const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { - prefix: "akash" - }); - - const [account] = await wallet.getAccounts(); - elizaLogger.debug("Wallet initialized successfully", { - address: account.address, - prefix: "akash" - }); - - // Initialize registry and client - const myRegistry = new Registry(getAkashTypeRegistry()); - const client = await SigningStargateClient.connectWithSigner( - config.AKASH_NODE || "https://rpc.akash.forbole.com:443", - wallet, - { registry: myRegistry } - ); - - elizaLogger.info("Client initialization complete", { - nodeUrl: config.AKASH_NODE || "https://rpc.akash.forbole.com:443", - address: account.address - }); - - return { client, account, wallet }; -} - -// Verify deployment status before closing -async function verifyDeploymentStatus(runtime: IAgentRuntime, dseq: string): Promise { - elizaLogger.info("Verifying deployment status", { dseq }); - - try { - const deployments = await fetchDeployments(runtime, undefined, 0, 100); - const deployment = deployments.results.find(d => d.dseq === dseq); - - if (!deployment) { - throw new AkashError( - `Deployment not found with DSEQ: ${dseq}`, - AkashErrorCode.DEPLOYMENT_NOT_FOUND - ); - } - - if (deployment.status.toLowerCase() !== 'active') { - throw new AkashError( - `Deployment ${dseq} is not active (current status: ${deployment.status})`, - AkashErrorCode.DEPLOYMENT_CLOSE_FAILED - ); - } - - return true; - } catch (error) { - if (error instanceof AkashError) { - throw error; - } - throw new AkashError( - `Failed to verify deployment status: ${error instanceof Error ? error.message : String(error)}`, - AkashErrorCode.DEPLOYMENT_NOT_FOUND - ); - } -} - -// Close a single deployment by DSEQ -async function closeSingleDeployment( - runtime: IAgentRuntime, - dseq: string -): Promise { - elizaLogger.info("Closing single deployment", { dseq }); - - try { - // Verify deployment exists and is active - await verifyDeploymentStatus(runtime, dseq); - - const { client, account } = await initializeClient(runtime); - - // Create close deployment message - const message = MsgCloseDeployment.fromPartial({ - id: { - dseq: dseq, - owner: account.address - } - }); - - const msgAny = { - typeUrl: getTypeUrl(MsgCloseDeployment), - value: message - }; - - // Set fee - const fee = { - amount: [{ denom: "uakt", amount: "20000" }], - gas: "800000" - }; - - // Send transaction - elizaLogger.info("Sending close deployment transaction", { dseq }); - const result = await client.signAndBroadcast( - account.address, - [msgAny], - fee, - `close deployment ${dseq}` - ); - - if (result.code !== 0) { - throw new AkashError( - `Transaction failed: ${result.rawLog}`, - AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, - { rawLog: result.rawLog } - ); - } - - elizaLogger.info("Deployment closed successfully", { - dseq, - transactionHash: result.transactionHash - }); - - return true; - } catch (error) { - elizaLogger.error("Failed to close deployment", { - dseq, - error: error instanceof Error ? error.message : String(error), - code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, - stack: error instanceof Error ? error.stack : undefined - }); - throw error; - } -} - -// Close all active deployments -async function closeAllDeployments( - runtime: IAgentRuntime -): Promise<{ success: string[], failed: string[] }> { - elizaLogger.info("Closing all active deployments"); - - try { - // Fetch active deployments - const deployments = await fetchDeployments(runtime, undefined, 0, 100); - const activeDeployments = deployments.results.filter(d => - d.status.toLowerCase() === 'active' - ); - - if (activeDeployments.length === 0) { - elizaLogger.info("No active deployments found to close"); - return { success: [], failed: [] }; - } - - elizaLogger.info("Found active deployments to close", { - count: activeDeployments.length, - dseqs: activeDeployments.map(d => d.dseq) - }); - - // Close each deployment - const results = { success: [] as string[], failed: [] as string[] }; - for (const deployment of activeDeployments) { - try { - await closeSingleDeployment(runtime, deployment.dseq); - results.success.push(deployment.dseq); - } catch (error) { - elizaLogger.error("Failed to close deployment", { - dseq: deployment.dseq, - error: error instanceof Error ? error.message : String(error), - code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED - }); - results.failed.push(deployment.dseq); - } - } - - elizaLogger.info("Finished closing deployments", results); - return results; - } catch (error) { - elizaLogger.error("Failed to close deployments", { - error: error instanceof Error ? error.message : String(error), - code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, - stack: error instanceof Error ? error.stack : undefined - }); - throw error; - } -} - -export const closeDeploymentAction: Action = { - name: "CLOSE_DEPLOYMENT", - similes: ["CLOSE_AKASH_DEPLOYMENT", "STOP_DEPLOYMENT", "TERMINATE_DEPLOYMENT"], - description: "Close an active deployment on the Akash Network", - examples: [[ - { - user: "user", - content: { - text: "Close deployment with DSEQ 123456", - dseq: "123456" - } as CloseDeploymentContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Closing deployment with DSEQ 123456..." - } as CloseDeploymentContent - } as ActionExample - ], [ - { - user: "user", - content: { - text: "Close all active deployments", - closeAll: true - } as CloseDeploymentContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Closing all active deployments..." - } as CloseDeploymentContent - } as ActionExample - ]], - - validate: async (runtime: IAgentRuntime, message: Memory): Promise => { - elizaLogger.debug("=== Starting Close Deployment Validation ==="); - elizaLogger.debug("Validating close deployment request", { message }); - - // Check if plugin is properly loaded - if (!isPluginLoaded(runtime, "akash")) { - elizaLogger.error("Akash plugin not properly loaded during validation"); - return false; - } - - try { - const params = message.content as Partial; - const config = await validateAkashConfig(runtime); - elizaLogger.debug("Validating parameters", { params }); - - // If no parameters provided, use environment defaults - if (!params.dseq && !params.closeAll) { - if (config.AKASH_CLOSE_DEP === "closeAll") { - params.closeAll = true; - } else if (config.AKASH_CLOSE_DEP === "dseq" && config.AKASH_CLOSE_DSEQ) { - params.dseq = config.AKASH_CLOSE_DSEQ; - } else { - throw new AkashError( - "Either dseq or closeAll parameter is required", - AkashErrorCode.VALIDATION_PARAMETER_MISSING, - { parameters: ["dseq", "closeAll"] } - ); - } - } - - if (params.dseq && params.closeAll) { - throw new AkashError( - "Cannot specify both dseq and closeAll parameters", - AkashErrorCode.VALIDATION_PARAMETER_INVALID, - { parameters: ["dseq", "closeAll"] } - ); - } - - if (params.dseq && !/^\d+$/.test(params.dseq)) { - throw new AkashError( - "DSEQ must be a numeric string", - AkashErrorCode.VALIDATION_PARAMETER_INVALID, - { parameter: "dseq", value: params.dseq } - ); - } - - elizaLogger.debug("Validation completed successfully"); - return true; - } catch (error) { - elizaLogger.error("Close deployment validation failed", { - error: error instanceof AkashError ? { - code: error.code, - message: error.message, - details: error.details - } : String(error) - }); - return false; - } - }, - - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State | undefined, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - const actionId = Date.now().toString(); - elizaLogger.info("=== Starting Close Deployment Request ===", { - actionId, - messageId: message.id, - userId: message.userId - }); - - try { - const config = await validateAkashConfig(runtime); - const params = message.content as Partial; - - // If no parameters provided, use environment defaults - if (!params.dseq && !params.closeAll) { - if (config.AKASH_CLOSE_DEP === "closeAll") { - params.closeAll = true; - } else if (config.AKASH_CLOSE_DEP === "dseq" && config.AKASH_CLOSE_DSEQ) { - params.dseq = config.AKASH_CLOSE_DSEQ; - } else { - if (callback) { - elizaLogger.info("=== Preparing error callback response ===", { - actionId, - hasCallback: true, - errorType: 'AkashError' - }); - - const errorResponse = { - text: "Either DSEQ or closeAll parameter is required", - content: { - success: false, - error: { - code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, - message: "Either dseq or closeAll parameter is required" - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'closeDeployment', - version: '1.0.0', - actionId - } - } - }; - - callback(errorResponse); - } - return false; - } - } - - if (params.closeAll) { - const results = await closeAllDeployments(runtime); - - if (callback) { - elizaLogger.info("=== Preparing callback response for bulk closure ===", { - hasCallback: true, - actionId, - successCount: results.success.length, - failedCount: results.failed.length - }); - - const callbackResponse = { - text: `Deployment Closure Results:\n\nSuccessfully closed: ${results.success.length} deployments${ - results.success.length > 0 ? `\nDSEQs: ${results.success.join(', ')}` : '' - }${ - results.failed.length > 0 ? `\n\nFailed to close: ${results.failed.length} deployments\nDSEQs: ${results.failed.join(', ')}` : '' - }`, - content: { - success: results.failed.length === 0, - data: { - successful: results.success, - failed: results.failed, - totalClosed: results.success.length, - totalFailed: results.failed.length - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'closeDeployment', - version: '1.0.0', - actionId - } - } - }; - - elizaLogger.info("=== Executing callback with response ===", { - actionId, - responseText: callbackResponse.text, - hasContent: !!callbackResponse.content, - contentKeys: Object.keys(callbackResponse.content), - metadata: callbackResponse.content.metadata - }); - - callback(callbackResponse); - - elizaLogger.info("=== Callback executed successfully ===", { - actionId, - timestamp: new Date().toISOString() - }); - } - return results.failed.length === 0; - - } else if (params.dseq) { - const success = await closeSingleDeployment(runtime, params.dseq); - - if (callback) { - elizaLogger.info("=== Preparing callback response for single closure ===", { - hasCallback: true, - actionId, - dseq: params.dseq, - success - }); - - const callbackResponse = { - text: success ? - `Successfully closed deployment DSEQ: ${params.dseq}` : - `Failed to close deployment DSEQ: ${params.dseq}`, - content: { - success, - data: { - dseq: params.dseq - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'closeDeployment', - version: '1.0.0', - actionId - } - } - }; - - elizaLogger.info("=== Executing callback with response ===", { - actionId, - responseText: callbackResponse.text, - hasContent: !!callbackResponse.content, - contentKeys: Object.keys(callbackResponse.content), - metadata: callbackResponse.content.metadata - }); - - callback(callbackResponse); - - elizaLogger.info("=== Callback executed successfully ===", { - actionId, - timestamp: new Date().toISOString() - }); - } - return success; - } - - return false; - - } catch (error) { - elizaLogger.error("Close deployment request failed", { - error: error instanceof Error ? error.message : String(error), - code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, - actionId - }); - - if (callback) { - elizaLogger.info("=== Preparing error callback response ===", { - actionId, - hasCallback: true, - errorType: error instanceof AkashError ? 'AkashError' : 'Error' - }); - - const errorResponse = { - text: `Failed to close deployment: ${error instanceof Error ? error.message : String(error)}`, - content: { - success: false, - error: { - code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, - message: error instanceof Error ? error.message : String(error) - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'closeDeployment', - version: '1.0.0', - actionId - } - } - }; - - elizaLogger.info("=== Executing error callback ===", { - actionId, - responseText: errorResponse.text, - hasContent: !!errorResponse.content, - contentKeys: Object.keys(errorResponse.content) - }); - - callback(errorResponse); - - elizaLogger.info("=== Error callback executed ===", { - actionId, - timestamp: new Date().toISOString() - }); - } - - return false; - } - } -}; - -export default closeDeploymentAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/createCertificate.ts b/packages/plugin-akash/src/actions/createCertificate.ts deleted file mode 100644 index 801d0e9863e7d..0000000000000 --- a/packages/plugin-akash/src/actions/createCertificate.ts +++ /dev/null @@ -1,456 +0,0 @@ -import { type Action, elizaLogger } from "@elizaos/core"; -import type { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; -import { SigningStargateClient } from "@cosmjs/stargate"; -import * as cert from "@akashnetwork/akashjs/build/certificates"; -import { certificateManager } from "@akashnetwork/akashjs/build/certificates/certificate-manager"; -import type { CertificatePem } from "@akashnetwork/akashjs/build/certificates/certificate-manager/CertificateManager"; -import { getAkashTypeRegistry } from "@akashnetwork/akashjs/build/stargate"; -import { validateAkashConfig } from "../environment"; -import { AkashError, AkashErrorCode, withRetry } from "../error/error"; -import * as fs from 'node:fs'; -import * as path from 'node:path'; -import { Registry } from "@cosmjs/proto-signing"; -import type { SigningStargateClient as AkashSigningStargateClient } from "@akashnetwork/akashjs/node_modules/@cosmjs/stargate"; -import { getCertificatePath } from "../utils/paths"; - -interface CreateCertificateContent extends Content { - overwrite?: boolean; -} - -// Certificate file path -const CERTIFICATE_PATH = getCertificatePath(import.meta.url); - -// Save certificate to file -async function saveCertificate(certificate: CertificatePem): Promise { - elizaLogger.debug("Saving certificate to file", { path: CERTIFICATE_PATH }); - try { - // Ensure directory exists - const dir = path.dirname(CERTIFICATE_PATH); - if (!fs.existsSync(dir)) { - fs.mkdirSync(dir, { recursive: true }); - } - const json = JSON.stringify(certificate); - fs.writeFileSync(CERTIFICATE_PATH, json); - elizaLogger.debug("Certificate saved successfully"); - } catch (error) { - elizaLogger.error("Failed to save certificate", { - error: error instanceof Error ? error.message : String(error), - path: CERTIFICATE_PATH - }); - throw new AkashError( - "Failed to save certificate", - AkashErrorCode.FILE_WRITE_ERROR, - { path: CERTIFICATE_PATH, error } - ); - } -} - -// Load certificate from file -function loadCertificate(): CertificatePem { - elizaLogger.debug("Loading certificate from file", { path: CERTIFICATE_PATH }); - try { - if (!fs.existsSync(CERTIFICATE_PATH)) { - throw new AkashError( - "Certificate file not found", - AkashErrorCode.CERTIFICATE_NOT_FOUND, - { path: CERTIFICATE_PATH } - ); - } - const json = fs.readFileSync(CERTIFICATE_PATH, "utf8"); - const certificate = JSON.parse(json); - elizaLogger.debug("Certificate loaded successfully", { - hasCert: !!certificate.cert, - hasPrivateKey: !!certificate.privateKey, - hasPublicKey: !!certificate.publicKey - }); - return certificate; - } catch (error) { - elizaLogger.error("Failed to load certificate", { - error: error instanceof Error ? error.message : String(error), - path: CERTIFICATE_PATH - }); - if (error instanceof AkashError) { - throw error; - } - throw new AkashError( - "Failed to load certificate", - AkashErrorCode.FILE_READ_ERROR, - { path: CERTIFICATE_PATH, error } - ); - } -} - -// Initialize wallet with proper error handling -async function initializeWallet(mnemonic: string): Promise { - elizaLogger.debug("=== Initializing Wallet ===", { - mnemonicLength: mnemonic.split(' ').length, - hasMnemonic: !!mnemonic, - mnemonicFirstWord: mnemonic.split(' ')[0] - }); - - // Validate mnemonic format - const words = mnemonic.trim().split(/\s+/); - if (words.length !== 12 && words.length !== 24) { - const error = `Invalid mnemonic length: got ${words.length} words, expected 12 or 24 words`; - elizaLogger.error("Mnemonic validation failed", { - error, - wordCount: words.length, - expectedCounts: [12, 24], - mnemonicPreview: words.slice(0, 3).join(' ') + '...' - }); - throw new AkashError( - error, - AkashErrorCode.WALLET_INITIALIZATION_FAILED, - { - wordCount: words.length, - expectedCounts: [12, 24] - } - ); - } - - try { - elizaLogger.debug("Creating wallet with mnemonic", { - wordCount: words.length, - mnemonicPreview: words.slice(0, 3).join(' ') + '...' - }); - - const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic, { - prefix: "akash" - }); - const accounts = await wallet.getAccounts(); - - elizaLogger.debug("Wallet initialized successfully", { - accountCount: accounts.length, - firstAccountAddress: accounts[0]?.address, - addressPrefix: accounts[0]?.address?.substring(0, 6) - }); - - if (!accounts.length) { - throw new AkashError( - "No accounts found in wallet", - AkashErrorCode.WALLET_INITIALIZATION_FAILED - ); - } - - return wallet; - } catch (error) { - elizaLogger.error("Wallet initialization failed", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - mnemonicLength: words.length, - mnemonicPreview: words.slice(0, 3).join(' ') + '...' - }); - - if (error instanceof AkashError) { - throw error; - } - - throw new AkashError( - `Failed to initialize wallet: ${error instanceof Error ? error.message : String(error)}`, - AkashErrorCode.WALLET_INITIALIZATION_FAILED, - { - mnemonicLength: words.length, - error: error instanceof Error ? error.message : String(error) - } - ); - } -} - -// Setup client with proper error handling and fallback RPC endpoints -async function setupClient(wallet: DirectSecp256k1HdWallet, rpcEndpoint: string): Promise { - // Try alternative RPC endpoints if the main one fails - const rpcEndpoints = [ - rpcEndpoint, - "https://rpc.akashnet.net:443", - "https://akash-rpc.polkachu.com:443", - "https://akash-rpc.europlots.com:443" - ]; - - elizaLogger.info("=== Setting up Stargate Client ===", { - primaryRpcEndpoint: rpcEndpoint, - allEndpoints: rpcEndpoints, - walletType: wallet.constructor.name - }); - - let lastError: Error | undefined; - for (const endpoint of rpcEndpoints) { - try { - elizaLogger.debug("Attempting to connect to RPC endpoint", { - endpoint, - attempt: rpcEndpoints.indexOf(endpoint) + 1, - totalEndpoints: rpcEndpoints.length - }); - - const registry = new Registry(getAkashTypeRegistry()); - elizaLogger.debug("Registry created for endpoint", { - endpoint, - registryType: registry.constructor.name - }); - - const client = await SigningStargateClient.connectWithSigner( - endpoint, - wallet, - { registry } - ); - - elizaLogger.debug("Client setup completed successfully", { - endpoint, - clientType: client.constructor.name - }); - - return client; - } catch (error) { - lastError = error as Error; - elizaLogger.warn("Failed to connect to RPC endpoint", { - endpoint, - error: error instanceof Error ? error.message : String(error), - remainingEndpoints: rpcEndpoints.slice(rpcEndpoints.indexOf(endpoint) + 1).length - }); - } - } - - throw new AkashError( - `Failed to connect to any RPC endpoint: ${lastError?.message}`, - AkashErrorCode.CLIENT_SETUP_FAILED, - { lastError } - ); -} - -export const createCertificateAction: Action = { - name: "CREATE_CERTIFICATE", - similes: ["GENERATE_CERTIFICATE", "SETUP_CERTIFICATE", "INIT_CERTIFICATE"], - description: "Create or load Akash certificate for provider interactions", - examples: [[ - { - user: "user", - content: { - text: "Create a new certificate", - overwrite: true - } as CreateCertificateContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Creating new certificate..." - } as CreateCertificateContent - } as ActionExample - ]], - - validate: async (runtime: IAgentRuntime, message: Memory): Promise => { - elizaLogger.debug("=== Starting Certificate Validation ==="); - try { - const params = message.content as Partial; - - // Validate Akash configuration - await validateAkashConfig(runtime); - - // If overwrite is specified, it must be a boolean - if (params.overwrite !== undefined && typeof params.overwrite !== 'boolean') { - throw new AkashError( - "Overwrite parameter must be a boolean", - AkashErrorCode.VALIDATION_PARAMETER_INVALID, - { parameter: "overwrite", value: params.overwrite } - ); - } - - elizaLogger.debug("Certificate validation completed successfully"); - return true; - } catch (error) { - elizaLogger.error("Certificate validation failed", { - error: error instanceof AkashError ? { - code: error.code, - message: error.message, - details: error.details - } : String(error) - }); - return false; - } - }, - - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State | undefined, - options: { callback?: HandlerCallback } = {} - ): Promise => { - const actionId = Date.now().toString(); - elizaLogger.info("=== Starting Certificate Creation/Loading ===", { actionId }); - - try { - // First validate the parameters - if (!await createCertificateAction.validate(runtime, message)) { - const error = new AkashError( - "Invalid parameters provided", - AkashErrorCode.VALIDATION_PARAMETER_INVALID - ); - if (options.callback) { - options.callback({ - text: `Failed to validate parameters: ${error.message}`, - error: error.message, - content: { - success: false, - error: { - code: error.code, - message: error.message - } - } - }); - } - return false; - } - - const params = message.content as Partial; - const config = await validateAkashConfig(runtime); - - try { - // Check if certificate exists and overwrite is not true - if (fs.existsSync(CERTIFICATE_PATH) && !params.overwrite) { - elizaLogger.info("Loading existing certificate"); - const certificate = loadCertificate(); - - if (options.callback) { - options.callback({ - text: "Loaded existing certificate", - content: { - success: true, - certificate: { - hasCert: !!certificate.cert, - hasPrivateKey: !!certificate.privateKey, - hasPublicKey: !!certificate.publicKey - } - } - }); - } - return true; - } - - // Initialize wallet - elizaLogger.info("Initializing wallet for certificate creation"); - const wallet = await initializeWallet(config.AKASH_MNEMONIC); - const accounts = await wallet.getAccounts(); - const address = accounts[0].address; - elizaLogger.debug("Wallet initialized", { - address, - accountCount: accounts.length - }); - - // Setup client - elizaLogger.debug("Setting up Stargate client"); - const client = await setupClient(wallet, config.RPC_ENDPOINT); - elizaLogger.debug("Client setup completed"); - - // Generate new certificate - elizaLogger.info("Generating new certificate"); - const certificate = certificateManager.generatePEM(address); - elizaLogger.debug("Certificate generated", { - hasCert: !!certificate.cert, - hasPrivateKey: !!certificate.privateKey, - hasPublicKey: !!certificate.publicKey - }); - - // Broadcast certificate - elizaLogger.info("Broadcasting certificate to network"); - const result = await withRetry(async () => { - return await cert.broadcastCertificate( - certificate, - address, - client as unknown as AkashSigningStargateClient - ); - }); - - if (result.code !== 0) { - throw new AkashError( - `Could not create certificate: ${result.rawLog}`, - AkashErrorCode.CERTIFICATE_CREATION_FAILED, - { rawLog: result.rawLog } - ); - } - - elizaLogger.info("Certificate broadcast successful", { - code: result.code, - txHash: result.transactionHash, - height: result.height, - gasUsed: result.gasUsed - }); - - // Save certificate - await saveCertificate(certificate); - elizaLogger.info("Certificate saved to file", { path: CERTIFICATE_PATH }); - - if (options.callback) { - options.callback({ - text: "Certificate created and saved successfully", - content: { - success: true, - certificate: { - hasCert: !!certificate.cert, - hasPrivateKey: !!certificate.privateKey, - hasPublicKey: !!certificate.publicKey - }, - transaction: { - hash: result.transactionHash, - height: result.height, - gasUsed: result.gasUsed - } - } - }); - } - - return true; - } catch (error) { - elizaLogger.error("Failed to create/load certificate", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined - }); - - if (options.callback) { - options.callback({ - text: `Failed to create/load certificate: ${error instanceof Error ? error.message : String(error)}`, - error: error instanceof Error ? error.message : String(error), - content: { - success: false, - error: error instanceof AkashError ? { - code: error.code, - message: error.message, - details: error.details - } : { - code: AkashErrorCode.CERTIFICATE_CREATION_FAILED, - message: String(error) - } - } - }); - } - return false; - } - } catch (error) { - elizaLogger.error("Certificate operation failed", { - error: error instanceof Error ? error.message : String(error), - code: error instanceof AkashError ? error.code : AkashErrorCode.CERTIFICATE_CREATION_FAILED, - actionId - }); - - if (options.callback) { - options.callback({ - text: `Certificate operation failed: ${error instanceof Error ? error.message : String(error)}`, - error: error instanceof Error ? error.message : String(error), - content: { - success: false, - error: error instanceof AkashError ? { - code: error.code, - message: error.message, - details: error.details - } : { - code: AkashErrorCode.CERTIFICATE_CREATION_FAILED, - message: String(error) - } - } - }); - } - - return false; - } - } -}; - -export default createCertificateAction; diff --git a/packages/plugin-akash/src/actions/createDeployment.ts b/packages/plugin-akash/src/actions/createDeployment.ts deleted file mode 100644 index d21cecd416caf..0000000000000 --- a/packages/plugin-akash/src/actions/createDeployment.ts +++ /dev/null @@ -1,1471 +0,0 @@ -import { type Action, elizaLogger } from "@elizaos/core"; -import type { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -import { MsgCreateDeployment } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; -import { QueryClientImpl as QueryProviderClient, QueryProviderRequest } from "@akashnetwork/akash-api/akash/provider/v1beta3"; -import { QueryBidsRequest, QueryClientImpl as QueryMarketClient, MsgCreateLease, BidID } from "@akashnetwork/akash-api/akash/market/v1beta4"; -import * as cert from "@akashnetwork/akashjs/build/certificates"; -import { getRpc } from "@akashnetwork/akashjs/build/rpc"; -import { SDL } from "@akashnetwork/akashjs/build/sdl"; -import { getAkashTypeRegistry } from "@akashnetwork/akashjs/build/stargate"; -import type { CertificatePem } from "@akashnetwork/akashjs/build/certificates/certificate-manager/CertificateManager"; -import { certificateManager } from "@akashnetwork/akashjs/build/certificates/certificate-manager"; -import { DirectSecp256k1HdWallet, Registry } from "@cosmjs/proto-signing"; -import { SigningStargateClient } from "@cosmjs/stargate"; -import { validateAkashConfig } from "../environment"; -import { AkashError, AkashErrorCode, withRetry } from "../error/error"; -import * as fs from 'node:fs'; -import * as path from 'node:path'; -import { getCertificatePath, getDefaultSDLPath } from "../utils/paths"; -// import { fileURLToPath } from 'url'; -import { inspectRuntime, isPluginLoaded } from "../runtime_inspect"; -import https from 'node:https'; -import axios from 'axios'; - -interface CreateDeploymentContent extends Content { - sdl?: string; - sdlFile?: string; - deposit?: string; -} - -// Certificate file path -const CERTIFICATE_PATH = getCertificatePath(import.meta.url); - -// Save certificate to file -function saveCertificate(certificate: CertificatePem) { - elizaLogger.debug("Saving certificate to file", { path: CERTIFICATE_PATH }); - try { - // Ensure directory exists - const dir = path.dirname(CERTIFICATE_PATH); - if (!fs.existsSync(dir)) { - fs.mkdirSync(dir, { recursive: true }); - } - const json = JSON.stringify(certificate); - fs.writeFileSync(CERTIFICATE_PATH, json); - elizaLogger.debug("Certificate saved successfully"); - } catch (error) { - elizaLogger.error("Failed to save certificate", { - error: error instanceof Error ? error.message : String(error), - path: CERTIFICATE_PATH - }); - throw error; - } -} - -// Load certificate from file -function loadCertificate(path: string): CertificatePem { - elizaLogger.debug("Loading certificate from file", { path }); - try { - const json = fs.readFileSync(path, "utf8"); - const certificate = JSON.parse(json); - elizaLogger.debug("Certificate loaded successfully", { - hasCert: !!certificate.cert, - hasPrivateKey: !!certificate.privateKey, - hasPublicKey: !!certificate.publicKey - }); - return certificate; - } catch (error) { - elizaLogger.error("Failed to load certificate", { - error: error instanceof Error ? error.message : String(error), - path - }); - throw error; - } -} - -const DEFAULT_SDL_PATH = (() => { - const currentFileUrl = import.meta.url; - // elizaLogger.info("=== Starting SDL Path Resolution in createDeployment ===", { - // currentFileUrl, - // cwd: process.cwd(), - // importMetaUrl: import.meta.url - // }); - - // Use the utility function from paths.ts instead of manual resolution - const sdlPath = getDefaultSDLPath(currentFileUrl); - - // Only log if file doesn't exist - if (!fs.existsSync(sdlPath)) { - elizaLogger.warn("Default SDL path not found", { - sdlPath, - exists: false - }); - } - - return sdlPath; -})(); - -const validateDeposit = (deposit: string): boolean => { - const pattern = /^\d+uakt$/; - return pattern.test(deposit); -}; - -const loadSDLFromFile = (filePath: string): string => { - // elizaLogger.info("=== Loading SDL File ===", { - // requestedPath: filePath, - // resolvedPath: path.resolve(filePath), - // defaultSdlPath: DEFAULT_SDL_PATH, - // cwd: process.cwd(), - // exists: fs.existsSync(filePath), - // defaultExists: fs.existsSync(DEFAULT_SDL_PATH) - // }); - - try { - // If path doesn't contain plugin-akash and it's not the default path, adjust it - if (!filePath.includes('plugin-akash') && filePath !== DEFAULT_SDL_PATH) { - const adjustedPath = path.join(path.dirname(DEFAULT_SDL_PATH), path.basename(filePath)); - // elizaLogger.info("Adjusting SDL path", { - // originalPath: filePath, - // adjustedPath, - // exists: fs.existsSync(adjustedPath), - // dirExists: fs.existsSync(path.dirname(adjustedPath)), - // dirContents: fs.existsSync(path.dirname(adjustedPath)) ? fs.readdirSync(path.dirname(adjustedPath)) : [] - // }); - filePath = adjustedPath; - } - - // Try multiple possible locations - const possiblePaths = [ - filePath, - path.join(process.cwd(), filePath), - path.join(process.cwd(), 'packages', 'plugin-akash', filePath), - path.join(process.cwd(), 'packages', 'plugin-akash', 'src', filePath), - path.join(path.dirname(DEFAULT_SDL_PATH), filePath) - ]; - - // elizaLogger.info("Attempting to load SDL from possible paths", { - // possiblePaths, - // existsMap: possiblePaths.map(p => ({ path: p, exists: fs.existsSync(p) })) - // }); - - for (const tryPath of possiblePaths) { - if (fs.existsSync(tryPath)) { - const content = fs.readFileSync(tryPath, "utf8"); - elizaLogger.info("SDL file loaded successfully from", { - path: tryPath - }); - return content; - } - } - - // If we get here, none of the paths worked - throw new Error(`SDL file not found in any of the possible locations`); - } catch (error) { - elizaLogger.error("Failed to read SDL file", { - filePath, - error: error instanceof Error ? error.message : String(error) - }); - throw new AkashError( - `Failed to read SDL file: ${error instanceof Error ? error.message : String(error)}`, - AkashErrorCode.VALIDATION_SDL_FAILED, - { filePath } - ); - } -}; - -// Preserved for future use -/* const formatErrorMessage = (error: unknown): string => { - if (error instanceof AkashError) { - if (error.code === AkashErrorCode.WALLET_NOT_INITIALIZED) { - return "Akash wallet not initialized"; - } - if (error.code === AkashErrorCode.DEPLOYMENT_CREATION_FAILED) { - return `Transaction failed: ${error.details?.rawLog || 'Unknown error'}`; - } - if (error.code === AkashErrorCode.MANIFEST_PARSING_FAILED) { - return "Failed to parse SDL"; - } - if (error.code === AkashErrorCode.VALIDATION_PARAMETER_MISSING) { - return `${error.message}`; - } - if (error.code === AkashErrorCode.VALIDATION_SDL_FAILED) { - return `Failed to parse SDL: ${error.details?.error || error.message}`; - } - if (error.code === AkashErrorCode.VALIDATION_PARAMETER_INVALID) { - return `Invalid deposit format. Must be in format: uakt`; - } - return error.message; - } - - const message = error instanceof Error ? error.message : String(error); - if (message.toLowerCase().includes("insufficient funds")) { - return "Insufficient funds"; - } - if (message.toLowerCase().includes("invalid deposit")) { - return "Invalid deposit amount"; - } - if (message.toLowerCase().includes("cannot read properties")) { - return "Failed to parse SDL: Invalid format"; - } - return message; -}; */ - -async function initializeWallet(mnemonic: string) { - elizaLogger.debug("=== Initializing Wallet ===", { - mnemonicLength: mnemonic.split(' ').length, - hasMnemonic: !!mnemonic, - mnemonicFirstWord: mnemonic.split(' ')[0] - }); - - // Validate mnemonic format - const words = mnemonic.trim().split(/\s+/); - if (words.length !== 12 && words.length !== 24) { - const error = `Invalid mnemonic length: got ${words.length} words, expected 12 or 24 words`; - elizaLogger.error("Mnemonic validation failed", { - error, - wordCount: words.length, - expectedCounts: [12, 24], - mnemonicPreview: words.slice(0, 3).join(' ') + '...' - }); - throw new AkashError( - error, - AkashErrorCode.WALLET_INITIALIZATION_FAILED, - { - wordCount: words.length, - expectedCounts: [12, 24] - } - ); - } - - try { - elizaLogger.debug("Creating wallet with mnemonic", { - wordCount: words.length, - mnemonicPreview: words.slice(0, 3).join(' ') + '...' - }); - - const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic, { - prefix: "akash" - }); - const accounts = await wallet.getAccounts(); - - elizaLogger.debug("Wallet initialized successfully", { - accountCount: accounts.length, - firstAccountAddress: accounts[0]?.address, - addressPrefix: accounts[0]?.address?.substring(0, 6) - }); - - if (!accounts.length) { - throw new AkashError( - "No accounts found in wallet", - AkashErrorCode.WALLET_INITIALIZATION_FAILED - ); - } - - return wallet; - } catch (error) { - elizaLogger.error("Wallet initialization failed", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - mnemonicLength: words.length, - mnemonicPreview: words.slice(0, 3).join(' ') + '...' - }); - - // Provide more specific error messages - const errorMessage = error instanceof Error ? error.message : String(error); - if (errorMessage.includes("Invalid mnemonic")) { - throw new AkashError( - "Invalid mnemonic format: The mnemonic phrase contains invalid words or is malformed", - AkashErrorCode.WALLET_INITIALIZATION_FAILED, - { - mnemonicLength: words.length, - error: errorMessage - } - ); - } - - throw new AkashError( - `Failed to initialize wallet: ${errorMessage}`, - AkashErrorCode.WALLET_INITIALIZATION_FAILED, - { - mnemonicLength: words.length, - error: errorMessage - } - ); - } -} - -async function setupClient(wallet: DirectSecp256k1HdWallet, rpcEndpoint: string) { - // Try alternative RPC endpoints if the main one fails - const rpcEndpoints = [ - "https://akash-rpc.europlots.com:443", // New endpoint first - rpcEndpoint, - "https://rpc.akashnet.net:443", - "https://rpc.akash.forbole.com:443", - "https://rpc-akash.ecostake.com:443", - "https://akash-rpc.polkachu.com:443", - "https://akash.c29r3.xyz:443/rpc" - ]; - - elizaLogger.info("=== Setting up Stargate Client ===", { - primaryRpcEndpoint: rpcEndpoint, - allEndpoints: rpcEndpoints, - walletType: wallet.constructor.name, - preferredEndpoint: rpcEndpoints[0] - }); - - let lastError: Error | undefined; - for (const endpoint of rpcEndpoints) { - try { - elizaLogger.debug("Attempting to connect to RPC endpoint", { - endpoint, - attempt: rpcEndpoints.indexOf(endpoint) + 1, - totalEndpoints: rpcEndpoints.length - }); - - const registry = new Registry(getAkashTypeRegistry()); - elizaLogger.debug("Registry created for endpoint", { - endpoint, - registryType: registry.constructor.name - }); - - const client = await SigningStargateClient.connectWithSigner( - endpoint, - wallet, - { registry } - ); - - // Check if client is connected by attempting to get the height - try { - const height = await client.getHeight(); - elizaLogger.info("Stargate client setup successful", { - endpoint, - height, - clientType: client.constructor.name, - attempt: rpcEndpoints.indexOf(endpoint) + 1 - }); - return client; - } catch (heightError) { - elizaLogger.error("Failed to get chain height", { - endpoint, - attempt: rpcEndpoints.indexOf(endpoint) + 1, - error: heightError instanceof Error ? heightError.message : String(heightError) - }); - lastError = heightError instanceof Error ? heightError : new Error(String(heightError)); - continue; - } - } catch (error) { - elizaLogger.error("Failed to connect to RPC endpoint", { - endpoint, - attempt: rpcEndpoints.indexOf(endpoint) + 1, - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined - }); - lastError = error instanceof Error ? error : new Error(String(error)); - continue; - } - } - - // If we get here, all endpoints failed - elizaLogger.error("All RPC endpoints failed", { - endpoints: rpcEndpoints, - lastError: lastError?.message, - totalAttempts: rpcEndpoints.length - }); - throw new AkashError( - `Failed to setup client: ${lastError?.message}`, - AkashErrorCode.CLIENT_SETUP_FAILED, - { rpcEndpoint: rpcEndpoints.join(", ") } - ); -} - -async function fetchBid(dseq: number, owner: string, rpcEndpoint: string) { - elizaLogger.info("=== Starting Bid Fetch Process ===", { - dseq, - owner, - ownerPrefix: owner.substring(0, 6) - }); - - const maxRetries = 3; - let lastError: Error | undefined; - - for (let retry = 0; retry < maxRetries; retry++) { - try { - elizaLogger.debug("Connecting to RPC for bid fetch", { - rpcEndpoint, - attempt: retry + 1, - maxRetries - }); - - const rpc = await getRpc(rpcEndpoint); - elizaLogger.debug("RPC connection established", { - rpcType: rpc.constructor.name, - attempt: retry + 1 - }); - - const client = new QueryMarketClient(rpc); - const request = QueryBidsRequest.fromPartial({ - filters: { - owner: owner, - dseq: dseq - } - }); - - const startTime = Date.now(); - const timeout = 1000 * 60 * 5; // 5 minutes timeout - elizaLogger.debug("Starting bid polling loop", { - timeout: "5 minutes", - pollInterval: "5 seconds", - attempt: retry + 1 - }); - - while (Date.now() - startTime < timeout) { - const elapsedTime = Math.round((Date.now() - startTime) / 1000); - elizaLogger.debug("Polling for bids", { - dseq, - owner: owner.substring(0, 6), - elapsedSeconds: elapsedTime, - remainingSeconds: Math.round(timeout/1000 - elapsedTime), - attempt: retry + 1 - }); - - try { - await new Promise(resolve => setTimeout(resolve, 5000)); - const bids = await client.Bids(request); - - if (bids.bids.length > 0 && bids.bids[0].bid !== undefined) { - elizaLogger.info("Bid found successfully", { - dseq, - owner: owner.substring(0, 6), - bidCount: bids.bids.length, - elapsedSeconds: elapsedTime, - attempt: retry + 1 - }); - elizaLogger.debug("Bid details", { - bid: bids.bids[0].bid, - provider: bids.bids[0].bid?.bidId?.provider - }); - return bids.bids[0].bid; - } - } catch (pollError) { - // Log but continue polling if it's a temporary error - elizaLogger.warn("Temporary error during bid polling", { - error: pollError instanceof Error ? pollError.message : String(pollError), - dseq, - attempt: retry + 1, - willRetry: true - }); - continue; - } - } - - elizaLogger.error("Bid fetch timeout", { - dseq, - owner: owner.substring(0, 6), - timeout: "5 minutes", - attempt: retry + 1 - }); - throw new AkashError( - `Could not fetch bid for deployment ${dseq}. Timeout reached.`, - AkashErrorCode.BID_FETCH_TIMEOUT, - { dseq, owner } - ); - } catch (error) { - lastError = error instanceof Error ? error : new Error(String(error)); - elizaLogger.error("Error during bid fetch", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - dseq, - owner: owner.substring(0, 6), - attempt: retry + 1, - hasMoreRetries: retry < maxRetries - 1 - }); - - if (retry < maxRetries - 1) { - // Wait before retrying (exponential backoff) - const delay = Math.pow(2, retry) * 1000; - elizaLogger.info("Retrying bid fetch after delay", { - delay, - nextAttempt: retry + 2, - maxRetries - }); - await new Promise(resolve => setTimeout(resolve, delay)); - continue; - } - } - } - - // If we get here, all retries failed - elizaLogger.error("All bid fetch attempts failed", { - dseq, - owner: owner.substring(0, 6), - attempts: maxRetries, - finalError: lastError?.message - }); - throw lastError || new Error("Failed to fetch bid after all retries"); -} - -async function createLease(deployment: any, wallet: DirectSecp256k1HdWallet, client: SigningStargateClient, rpcEndpoint: string): Promise { - const { dseq, owner } = deployment.id; - elizaLogger.info("Starting lease creation", { dseq, owner }); - - try { - elizaLogger.debug("Fetching bid for lease creation"); - const bid = await fetchBid(dseq, owner, rpcEndpoint); - const accounts = await wallet.getAccounts(); - - if (bid.bidId === undefined) { - elizaLogger.error("Invalid bid - missing bidId", { dseq, owner }); - throw new AkashError("Bid ID is undefined", AkashErrorCode.INVALID_BID); - } - - elizaLogger.debug("Creating lease message", { - dseq, - owner, - bidId: bid.bidId - }); - - const lease = { - bidId: bid.bidId - }; - - const fee = { - amount: [{ denom: "uakt", amount: "50000" }], - gas: "2000000" - }; - - const msg = { - typeUrl: `/${MsgCreateLease.$type}`, - value: MsgCreateLease.fromPartial(lease) - }; - - elizaLogger.info("Broadcasting lease creation transaction"); - const tx = await client.signAndBroadcast(accounts[0].address, [msg], fee, "create lease"); - - if (tx.code !== 0) { - elizaLogger.error("Lease creation failed", { - dseq, - owner, - code: tx.code, - rawLog: tx.rawLog - }); - throw new AkashError( - `Could not create lease: ${tx.rawLog}`, - AkashErrorCode.LEASE_CREATION_FAILED, - { rawLog: tx.rawLog } - ); - } - - elizaLogger.info("Lease created successfully", { - dseq, - owner, - txHash: tx.transactionHash - }); - - return { - id: BidID.toJSON(bid.bidId) - }; - } catch (error) { - elizaLogger.error("Error during lease creation", { - error, - dseq, - owner - }); - throw error; - } -} - -interface LeaseStatus { - services: Record; -} - -async function queryLeaseStatus(lease: any, providerUri: string, certificate: CertificatePem): Promise { - const id = lease.id; - elizaLogger.info("Querying lease status", { - dseq: id?.dseq, - gseq: id?.gseq, - oseq: id?.oseq, - providerUri - }); - - if (id === undefined) { - elizaLogger.error("Invalid lease - missing ID"); - throw new AkashError("Lease ID is undefined", AkashErrorCode.INVALID_LEASE); - } - - const leasePath = `/lease/${id.dseq}/${id.gseq}/${id.oseq}/status`; - elizaLogger.debug("Setting up request", { - providerUri, - leasePath, - hasCert: !!certificate.cert, - hasKey: !!certificate.privateKey - }); - - const MAX_RETRIES = 3; - const INITIAL_RETRY_DELAY = 3000; - let retryCount = 0; - - while (retryCount < MAX_RETRIES) { - try { - const url = new URL(providerUri); - const fullUrl = `${url.protocol}//${url.hostname}${url.port ? ':' + url.port : ''}${leasePath}`; - - elizaLogger.debug("Making request", { - url: fullUrl, - method: 'GET', - hasCertificate: !!certificate, - retryCount - }); - - const agent = new https.Agent({ - cert: certificate.cert, - key: certificate.privateKey, - rejectUnauthorized: false, - keepAlive: false, - timeout: 10000 - }); - - try { - const response = await fetch(fullUrl, { - method: 'GET', - headers: { - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - // @ts-expect-error - TypeScript's fetch types don't include Node's agent support, but it exists at runtime - agent, - signal: AbortSignal.timeout(10000) - }); - - if (response.status !== 200) { - elizaLogger.warn("Non-OK response from lease status query", { - statusCode: response.status, - statusText: response.statusText, - dseq: id.dseq, - url: fullUrl, - retryCount - }); - - if (response.status === 404) { - elizaLogger.debug("Deployment not ready yet (404)", { - dseq: id.dseq, - retryCount - }); - return undefined; - } - throw new Error(`Could not query lease status: ${response.status}`); - } - - const data = await response.json() as LeaseStatus; - elizaLogger.debug("Lease status received", { - dseq: id.dseq, - dataLength: JSON.stringify(data).length, - hasServices: !!data.services, - serviceCount: Object.keys(data.services || {}).length - }); - return data; - } finally { - agent.destroy(); - } - } catch (error) { - elizaLogger.warn("Error during lease status query", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - dseq: id.dseq, - providerUri, - retryCount - }); - - if (retryCount < MAX_RETRIES - 1) { - const delay = INITIAL_RETRY_DELAY * Math.pow(2, retryCount); - elizaLogger.debug("Retrying after error", { - delay, - nextRetry: retryCount + 1, - maxRetries: MAX_RETRIES - }); - await new Promise(r => setTimeout(r, delay)); - retryCount++; - continue; - } - - // On final retry, if it's a network error or 404, return undefined - if (error instanceof Error && - ((error as any).code === 'ECONNABORTED' || - (error as any).code === 'ETIMEDOUT' || - ((error as any).response && (error as any).response.status === 404))) { - elizaLogger.info("Returning undefined after max retries", { - dseq: id.dseq, - error: error.message - }); - return undefined; - } - - throw error; - } - } - - elizaLogger.info("Max retries reached, returning undefined", { - dseq: id.dseq, - maxRetries: MAX_RETRIES - }); - return undefined; -} - -async function sendManifest(sdl: SDL, lease: any, certificate: CertificatePem, rpcEndpoint: string) { - elizaLogger.info("Starting manifest send process"); - if (lease.id === undefined) { - elizaLogger.error("Invalid lease - missing ID"); - throw new AkashError("Lease ID is undefined", AkashErrorCode.INVALID_LEASE); - } - - try { - const { dseq, provider } = lease.id; - elizaLogger.debug("Getting provider info", { provider }); - - const rpc = await getRpc(rpcEndpoint); - const client = new QueryProviderClient(rpc); - const request = QueryProviderRequest.fromPartial({ - owner: provider - }); - - const tx = await client.Provider(request); - - if (tx.provider === undefined) { - elizaLogger.error("Provider not found", { provider }); - throw new AkashError( - `Could not find provider ${provider}`, - AkashErrorCode.PROVIDER_NOT_FOUND - ); - } - - const providerInfo = tx.provider; - elizaLogger.debug("Provider info retrieved", { - provider, - hostUri: providerInfo.hostUri - }); - - const manifest = sdl.manifestSortedJSON(); - const path = `/deployment/${dseq}/manifest`; - - elizaLogger.info("Sending manifest to provider", { - dseq, - provider, - manifestLength: manifest.length - }); - - const uri = new URL(providerInfo.hostUri); - - const httpsAgent = new https.Agent({ - cert: certificate.cert, - key: certificate.privateKey, - rejectUnauthorized: false, - keepAlive: false, - timeout: 10000 - }); - - try { - const fullUrl = `${uri.protocol}//${uri.hostname}${uri.port ? ':' + uri.port : ''}${path}`; - elizaLogger.debug("Making manifest request", { - url: fullUrl, - method: 'PUT', - manifestLength: manifest.length - }); - - const response = await axios.put(fullUrl, manifest, { - headers: { - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - httpsAgent, - timeout: 10000, - validateStatus: null // Don't throw on any status code - }); - - if (response.status !== 200) { - elizaLogger.error("Failed to send manifest", { - statusCode: response.status, - statusText: response.statusText, - dseq - }); - throw new Error(`Failed to send manifest: ${response.status} ${response.statusText}`); - } - - elizaLogger.info("Manifest sent successfully", { dseq }); - } finally { - httpsAgent.destroy(); - } - - // Wait for deployment to start - elizaLogger.info("Waiting for deployment to start", { dseq }); - const startTime = Date.now(); - const timeout = 1000 * 60 * 10; // 10 minutes timeout - let consecutiveErrors = 0; - const MAX_CONSECUTIVE_ERRORS = 5; - - while (Date.now() - startTime < timeout) { - const elapsedTime = Math.round((Date.now() - startTime) / 1000); - elizaLogger.debug("Checking deployment status", { - dseq, - elapsedTime: `${elapsedTime}s`, - remainingTime: `${Math.round(timeout/1000 - elapsedTime)}s`, - consecutiveErrors - }); - - try { - const status = await queryLeaseStatus(lease, providerInfo.hostUri, certificate); - - if (status === undefined) { - consecutiveErrors++; - elizaLogger.debug("Status check returned undefined", { - dseq, - consecutiveErrors, - maxConsecutiveErrors: MAX_CONSECUTIVE_ERRORS - }); - - if (consecutiveErrors >= MAX_CONSECUTIVE_ERRORS) { - elizaLogger.warn("Too many consecutive undefined status responses", { - dseq, - consecutiveErrors - }); - // Don't throw, just continue waiting - consecutiveErrors = 0; - } - - await new Promise(resolve => setTimeout(resolve, 3000)); - continue; - } - - // Reset error counter on successful status check - consecutiveErrors = 0; - - for (const [name, service] of Object.entries<{ uris?: string[] }>(status.services)) { - if (service.uris) { - const rawUrl = service.uris[0]; - // Ensure URL has protocol - const serviceUrl = rawUrl.startsWith('http') ? rawUrl : `http://${rawUrl}`; - elizaLogger.info("Service is available", { - name, - rawUrl, - serviceUrl, - dseq - }); - return serviceUrl; - } - } - } catch (error) { - consecutiveErrors++; - const errorMessage = error instanceof Error ? error.message : String(error); - elizaLogger.warn("Error checking deployment status", { - error: errorMessage, - dseq, - consecutiveErrors, - maxConsecutiveErrors: MAX_CONSECUTIVE_ERRORS - }); - - if (consecutiveErrors >= MAX_CONSECUTIVE_ERRORS) { - elizaLogger.error("Too many consecutive errors checking deployment status", { - dseq, - consecutiveErrors, - error: errorMessage - }); - throw new AkashError( - "Too many consecutive errors checking deployment status", - AkashErrorCode.DEPLOYMENT_START_TIMEOUT, - { dseq, error: errorMessage } - ); - } - } - - await new Promise(resolve => setTimeout(resolve, 3000)); - } - - elizaLogger.error("Deployment start timeout", { - dseq, - timeout: "10 minutes" - }); - throw new AkashError( - "Could not start deployment. Timeout reached.", - AkashErrorCode.DEPLOYMENT_START_TIMEOUT - ); - } catch (error) { - elizaLogger.error("Error during manifest send process", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - dseq: lease.id.dseq - }); - throw error; - } -} - -async function loadOrCreateCertificate(wallet: DirectSecp256k1HdWallet, client: SigningStargateClient): Promise { - elizaLogger.info("=== Starting Certificate Creation/Loading Process ==="); - try { - const accounts = await wallet.getAccounts(); - const address = accounts[0].address; - elizaLogger.debug("Got wallet address for certificate", { - address, - addressLength: address.length, - addressPrefix: address.substring(0, 6) - }); - - // Check if certificate exists - if (fs.existsSync(CERTIFICATE_PATH)) { - elizaLogger.info("Found existing certificate file", { path: CERTIFICATE_PATH }); - const cert = loadCertificate(CERTIFICATE_PATH); - elizaLogger.debug("Loaded existing certificate", { - hasCert: !!cert.cert, - hasPrivateKey: !!cert.privateKey, - hasPublicKey: !!cert.publicKey, - certLength: cert.cert?.length, - privateKeyLength: cert.privateKey?.length, - publicKeyLength: cert.publicKey?.length - }); - return cert; - } - - // Create new certificate exactly like the example - elizaLogger.info("No existing certificate found, creating new one", { address }); - const certificate = certificateManager.generatePEM(address); - elizaLogger.debug("Certificate generated", { - hasCert: !!certificate.cert, - hasPrivateKey: !!certificate.privateKey, - hasPublicKey: !!certificate.publicKey, - certLength: certificate.cert?.length, - privateKeyLength: certificate.privateKey?.length, - publicKeyLength: certificate.publicKey?.length - }); - - // Broadcast certificate - elizaLogger.info("Broadcasting certificate to network", { - address, - certLength: certificate.cert?.length, - publicKeyLength: certificate.publicKey?.length - }); - - const result = await cert.broadcastCertificate( - certificate, - address, - client as any - ).catch(error => { - elizaLogger.error("Certificate broadcast failed", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - address, - certLength: certificate.cert?.length - }); - throw error; - }); - - if (result.code !== 0) { - const error = `Could not create certificate: ${result.rawLog}`; - elizaLogger.error("Certificate broadcast returned error code", { - code: result.code, - rawLog: result.rawLog, - address, - txHash: result.transactionHash - }); - throw new AkashError( - error, - AkashErrorCode.CERTIFICATE_CREATION_FAILED, - { rawLog: result.rawLog } - ); - } - - elizaLogger.info("Certificate broadcast successful", { - code: result.code, - txHash: result.transactionHash, - height: result.height, - gasUsed: result.gasUsed - }); - - // Save certificate - saveCertificate(certificate); - elizaLogger.info("Certificate saved to file", { path: CERTIFICATE_PATH }); - - elizaLogger.info("Certificate process completed successfully", { - hasCert: !!certificate.cert, - hasPrivateKey: !!certificate.privateKey, - hasPublicKey: !!certificate.publicKey, - path: CERTIFICATE_PATH - }); - - return certificate; - } catch (error) { - elizaLogger.error("Certificate creation/broadcast process failed", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - path: CERTIFICATE_PATH - }); - throw error; - } -} - -async function parseSDL(sdlContent: string): Promise { - try { - // Clean up SDL content by taking only the part after the YAML document separator - const yamlSeparatorIndex = sdlContent.indexOf('---'); - if (yamlSeparatorIndex === -1) { - throw new Error("No YAML document separator (---) found in SDL"); - } - - // Extract only the actual YAML content - const cleanSDL = sdlContent.substring(yamlSeparatorIndex); - - elizaLogger.info("Starting SDL parsing process", { - originalLength: sdlContent.length, - cleanLength: cleanSDL.length, - yamlSeparatorIndex, - cleanContent: cleanSDL.substring(0, 200) + '...', - firstLine: cleanSDL.split('\n')[0], - lastLine: cleanSDL.split('\n').slice(-1)[0], - lineCount: cleanSDL.split('\n').length, - hasVersion: cleanSDL.includes('version: "2.0"'), - hasServices: cleanSDL.includes('services:'), - hasProfiles: cleanSDL.includes('profiles:'), - hasDeployment: cleanSDL.includes('deployment:'), - charCodes: cleanSDL.substring(0, 50).split('').map(c => c.charCodeAt(0)) - }); - - // Try to parse SDL with clean content - exactly like the example - const parsedSDL = SDL.fromString(cleanSDL, "beta3"); - elizaLogger.debug("Initial SDL parsing successful", { - hasVersion: !!parsedSDL.version, - hasServices: !!parsedSDL.services, - hasProfiles: !!parsedSDL.profiles, - hasDeployment: !!parsedSDL.deployments, - serviceCount: Object.keys(parsedSDL.services || {}).length, - profileCount: Object.keys(parsedSDL.profiles || {}).length - }); - - // Get groups and version like the example - const groups = parsedSDL.groups(); - const version = await parsedSDL.manifestVersion(); - - elizaLogger.info("SDL validation completed", { - groupCount: groups.length, - version, - groups: JSON.stringify(groups) - }); - - return parsedSDL; - } catch (error) { - elizaLogger.error("Failed to parse SDL", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - sdlContent: sdlContent.substring(0, 200) + '...', - sdlLength: sdlContent.length - }); - throw error; - } -} - -export const createDeploymentAction: Action = { - name: "CREATE_DEPLOYMENT", - similes: ["DEPLOY", "START_DEPLOYMENT", "LAUNCH"], - description: "Create a new deployment on Akash Network", - examples: [[ - { - user: "user", - content: { - text: "Deploy SDL on Akash Network", - sdl: "version: \"2.0\"\n\nservices:\n web:\n image: nginx\n expose:\n - port: 80\n as: 80\n to:\n - global: true" - } as CreateDeploymentContent - } as ActionExample - ]], - - validate: async (runtime: IAgentRuntime, message: Memory): Promise => { - elizaLogger.debug("=== Starting Deployment Validation ==="); - elizaLogger.debug("Validating deployment request", { message }); - - // Check if plugin is properly loaded - if (!isPluginLoaded(runtime, "akash")) { - elizaLogger.error("Akash plugin not properly loaded during validation"); - return false; - } - - try { - const params = message.content as Partial; - elizaLogger.debug("Checking SDL content", { params }); - - // Get SDL content either from direct string, specified file, or default file - let sdlContent: string; - if (params.sdl) { - sdlContent = params.sdl; - } else if (params.sdlFile) { - sdlContent = loadSDLFromFile(params.sdlFile); - } else { - sdlContent = loadSDLFromFile(DEFAULT_SDL_PATH); - } - - if (params.deposit && !validateDeposit(params.deposit)) { - throw new AkashError( - "Invalid deposit format", - AkashErrorCode.VALIDATION_PARAMETER_INVALID, - { parameter: "deposit", value: params.deposit } - ); - } - - elizaLogger.debug("Validating SDL format"); - try { - // Clean up SDL content by taking only the part after the YAML document separator - const yamlSeparatorIndex = sdlContent.indexOf('---'); - if (yamlSeparatorIndex === -1) { - throw new Error("No YAML document separator (---) found in SDL"); - } - - // Extract only the actual YAML content - const cleanSDL = sdlContent.substring(yamlSeparatorIndex); - - // Use exact same approach as example for validation - const sdl = SDL.fromString(cleanSDL, "beta3"); - await sdl.manifestVersion(); // Verify we can get the version - elizaLogger.debug("SDL format validation successful", { - groups: sdl.groups(), - groupCount: sdl.groups().length - }); - } catch (sdlError) { - elizaLogger.error("SDL format validation failed", { error: sdlError }); - throw new AkashError( - `Invalid SDL format: ${sdlError instanceof Error ? sdlError.message : String(sdlError)}`, - AkashErrorCode.VALIDATION_SDL_FAILED, - { sdl: sdlContent } - ); - } - - elizaLogger.debug("Validation completed successfully"); - return true; - } catch (error) { - elizaLogger.error("Deployment validation failed", { - error: error instanceof AkashError ? { - category: error.category, - code: error.code, - message: error.message, - details: error.details - } : String(error) - }); - return false; - } - }, - - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State | undefined, - _options: { [key: string]: unknown; } = {}, - callback?: HandlerCallback - ): Promise => { - const actionId = Date.now().toString(); - elizaLogger.info("=== Starting Deployment Creation ===", { - actionId, - messageId: message.id, - userId: message.userId - }); - - // Inspect runtime to verify plugin and action registration - inspectRuntime(runtime); - - try { - elizaLogger.debug("=== Validating Akash Configuration ==="); - const config = await validateAkashConfig(runtime); - elizaLogger.debug("Configuration validated successfully", { - rpcEndpoint: config.RPC_ENDPOINT, - chainId: config.AKASH_CHAIN_ID, - version: config.AKASH_VERSION, - hasMnemonic: !!config.AKASH_MNEMONIC - }); - - const params = message.content as CreateDeploymentContent; - elizaLogger.debug("=== Processing Deployment Parameters ===", { - hasSDL: !!params.sdl, - hasSDLFile: !!params.sdlFile, - hasDeposit: !!params.deposit - }); - - // Get SDL content either from direct string, specified file, or default file - let sdlContent: string; - let sdlSource: string; - if (params.sdl) { - sdlContent = params.sdl; - sdlSource = 'direct'; - } else if (params.sdlFile) { - sdlContent = loadSDLFromFile(params.sdlFile); - sdlSource = 'file'; - } else { - sdlContent = loadSDLFromFile(DEFAULT_SDL_PATH); - sdlSource = 'default'; - } - elizaLogger.debug("SDL content loaded", { - source: sdlSource, - contentLength: sdlContent.length - }); - - if (params.deposit && !validateDeposit(params.deposit)) { - elizaLogger.error("Invalid deposit format", { - deposit: params.deposit - }); - throw new AkashError( - "Invalid deposit format", - AkashErrorCode.VALIDATION_PARAMETER_INVALID, - { parameter: "deposit", value: params.deposit } - ); - } - - // Initialize wallet from mnemonic - elizaLogger.info("=== Initializing Wallet and Client ==="); - const wallet = await initializeWallet(config.AKASH_MNEMONIC); - const accounts = await wallet.getAccounts(); - const address = accounts[0].address; - elizaLogger.debug("Wallet initialized", { - address, - accountCount: accounts.length - }); - - // Setup client - elizaLogger.debug("Setting up Stargate client"); - const client = await setupClient(wallet, config.RPC_ENDPOINT); - elizaLogger.debug("Client setup completed", { - rpcEndpoint: config.RPC_ENDPOINT - }); - - // Load or create certificate - elizaLogger.info("=== Setting up Certificate ==="); - const certificate = await loadOrCreateCertificate(wallet, client); - elizaLogger.debug("Certificate setup completed", { - hasCert: !!certificate.cert, - hasPrivateKey: !!certificate.privateKey, - hasPublicKey: !!certificate.publicKey - }); - - // Parse SDL - elizaLogger.info("=== Parsing SDL Configuration ==="); - let sdl: SDL; - try { - sdl = await parseSDL(sdlContent); - elizaLogger.debug("SDL parsed successfully", { - groupCount: sdl.groups().length, - groups: sdl.groups(), - version: await sdl.manifestVersion() - }); - } catch (sdlError) { - elizaLogger.error("SDL parsing failed", { - error: sdlError instanceof Error ? sdlError.message : String(sdlError), - sdlContent - }); - throw new AkashError( - `SDL parsing failed: ${sdlError instanceof Error ? sdlError.message : String(sdlError)}`, - AkashErrorCode.MANIFEST_PARSING_FAILED, - { - sdl: sdlContent, - actionId - } - ); - } - - elizaLogger.info("=== Creating Deployment Message ==="); - const blockHeight = await client.getHeight(); - elizaLogger.debug("Current block height", { blockHeight }); - - const deployment = { - id: { - owner: address, - dseq: blockHeight - }, - groups: sdl.groups(), - deposit: { - denom: "uakt", - amount: params.deposit?.replace("uakt", "") || config.AKASH_DEPOSIT.replace("uakt", "") - }, - version: await sdl.manifestVersion(), - depositor: address - }; - - elizaLogger.debug("Deployment object created", { - owner: deployment.id.owner, - dseq: deployment.id.dseq, - groupCount: deployment.groups.length, - groups: deployment.groups, - deposit: deployment.deposit, - version: deployment.version - }); - - const msg = { - typeUrl: "/akash.deployment.v1beta3.MsgCreateDeployment", - value: MsgCreateDeployment.fromPartial(deployment) - }; - - // Broadcast transaction with retry for network issues - elizaLogger.info("=== Broadcasting Deployment Transaction ===", { - owner: address, - dseq: blockHeight, - deposit: params.deposit || config.AKASH_DEPOSIT, - groups: deployment.groups - }); - - const result = await withRetry(async () => { - elizaLogger.debug("Attempting to sign and broadcast transaction", { - attempt: 'current', - fees: config.AKASH_DEPOSIT, - gas: "800000", - groups: deployment.groups - }); - - const txResult = await client.signAndBroadcast( - address, - [msg], - { - amount: [{ denom: "uakt", amount: config.AKASH_DEPOSIT.replace("uakt", "") }], - gas: "800000", - } - ); - - elizaLogger.debug("Transaction broadcast result", { - code: txResult.code, - height: txResult.height, - transactionHash: txResult.transactionHash, - gasUsed: txResult.gasUsed, - gasWanted: txResult.gasWanted, - rawLog: txResult.rawLog - }); - - if (txResult.code !== 0) { - elizaLogger.error("Transaction failed", { - code: txResult.code, - rawLog: txResult.rawLog, - groups: deployment.groups - }); - throw new AkashError( - `Transaction failed: ${txResult.rawLog}`, - AkashErrorCode.DEPLOYMENT_CREATION_FAILED, - { - rawLog: txResult.rawLog, - dseq: blockHeight, - owner: address, - actionId, - groups: deployment.groups - } - ); - } - - return txResult; - }); - - elizaLogger.info("=== Deployment Created Successfully ===", { - txHash: result.transactionHash, - owner: address, - dseq: blockHeight, - actionId, - height: result.height, - gasUsed: result.gasUsed - }); - - // Create lease - elizaLogger.debug("=== Creating Lease ==="); - const lease = await createLease(deployment, wallet, client, config.RPC_ENDPOINT); - elizaLogger.debug("Lease created", { - leaseId: lease.id, - dseq: deployment.id.dseq - }); - - // Send manifest - elizaLogger.debug("=== Sending Manifest ==="); - const serviceUrl = await sendManifest(sdl, lease, certificate, config.RPC_ENDPOINT); - elizaLogger.debug("Manifest sent successfully", { - serviceUrl - }); - - if (callback) { - elizaLogger.info("=== Preparing callback response for deployment creation ===", { - hasCallback: true, - actionId, - dseq: String(blockHeight) - }); - - const callbackResponse = { - text: `Deployment created and started successfully\nDSEQ: ${blockHeight}\nOwner: ${address}\nTx Hash: ${result.transactionHash}\nService URL: ${serviceUrl}`, - content: { - success: true, - data: { - txHash: result.transactionHash, - owner: address, - dseq: String(blockHeight), - serviceUrl - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'createDeployment', - version: '1.0.0', - actionId - } - } - }; - - elizaLogger.info("=== Executing callback with response ===", { - actionId, - responseText: callbackResponse.text, - hasContent: !!callbackResponse.content, - contentKeys: Object.keys(callbackResponse.content), - metadata: callbackResponse.content.metadata - }); - - callback(callbackResponse); - - elizaLogger.info("=== Callback executed successfully ===", { - actionId, - timestamp: new Date().toISOString() - }); - } - - elizaLogger.info("=== Deployment Process Completed Successfully ===", { - actionId, - txHash: result.transactionHash, - dseq: blockHeight - }); - - return true; - } catch (error) { - elizaLogger.error("=== Deployment Creation Failed ===", { - error: error instanceof AkashError ? { - category: error.category, - code: error.code, - message: error.message, - details: error.details - } : String(error), - actionId, - stack: error instanceof Error ? error.stack : undefined - }); - - if (callback) { - elizaLogger.info("=== Preparing error callback response ===", { - actionId, - hasCallback: true, - errorType: error instanceof AkashError ? 'AkashError' : 'Error' - }); - - const errorResponse = { - text: "Failed to create deployment", - content: { - success: false, - error: { - code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CREATION_FAILED, - message: error instanceof Error ? error.message : String(error) - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'createDeployment', - version: '1.0.0', - actionId - } - } - }; - - elizaLogger.info("=== Executing error callback ===", { - actionId, - responseText: errorResponse.text, - hasContent: !!errorResponse.content, - contentKeys: Object.keys(errorResponse.content) - }); - - callback(errorResponse); - - elizaLogger.info("=== Error callback executed successfully ===", { - actionId, - timestamp: new Date().toISOString() - }); - } - - return false; - } - }, -}; - -export default createDeploymentAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/estimateGas.ts b/packages/plugin-akash/src/actions/estimateGas.ts deleted file mode 100644 index 192684151b55d..0000000000000 --- a/packages/plugin-akash/src/actions/estimateGas.ts +++ /dev/null @@ -1,356 +0,0 @@ -import { type Action, elizaLogger } from "@elizaos/core"; -import type { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -import { DirectSecp256k1HdWallet, Registry, type EncodeObject } from "@cosmjs/proto-signing"; -import { SigningStargateClient } from "@cosmjs/stargate"; -import { MsgCloseDeployment } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; -import { getAkashTypeRegistry, getTypeUrl } from "@akashnetwork/akashjs/build/stargate"; -import { validateAkashConfig } from "../environment"; -import { AkashError, AkashErrorCode } from "../error/error"; -import { encodingForModel } from "js-tiktoken"; - -/* -interface AkashMessage { - typeUrl: string; - value: { - id?: { - owner: string; - dseq: string; - }; - [key: string]: unknown; - }; -} -*/ - -interface EstimateGasContent extends Content { - text: string; - dseq?: string; - operation: "close" | "create" | "update"; - message?: EncodeObject; -} - -function getTotalTokensFromString(str: string): number { - try { - const encoding = encodingForModel("gpt-3.5-turbo"); - return encoding.encode(str).length; - } catch (error) { - elizaLogger.warn("Failed to count tokens", { error }); - return 0; - } -} - -export const estimateGas: Action = { - name: "ESTIMATE_GAS", - similes: ["CALCULATE_GAS", "GET_GAS_ESTIMATE", "CHECK_GAS"], - description: "Estimate gas for a transaction on Akash Network", - examples: [[ - { - user: "user", - content: { - text: "Can you estimate gas for closing deployment with DSEQ 123456?", - operation: "close" - } as EstimateGasContent - } as ActionExample - ]], - - validate: async (runtime: IAgentRuntime, message: Memory): Promise => { - elizaLogger.debug("Validating gas estimation request", { message }); - try { - const params = message.content as Partial; - const config = await validateAkashConfig(runtime); - - // Extract DSEQ from text if present - if (params.text && !params.dseq) { - const dseqMatch = params.text.match(/dseq\s*(?::|=|\s)\s*(\d+)/i) || - params.text.match(/deployment\s+(?:number|sequence|#)?\s*(\d+)/i) || - params.text.match(/(\d{6,})/); // Matches standalone numbers of 6+ digits - if (dseqMatch) { - params.dseq = dseqMatch[1]; - elizaLogger.debug("Extracted DSEQ from text", { - text: params.text, - extractedDseq: params.dseq - }); - } - } - - // If no operation provided, check environment configuration - if (!params.operation) { - if (config.AKASH_GAS_OPERATION) { - params.operation = config.AKASH_GAS_OPERATION as "close" | "create" | "update"; - elizaLogger.info("Using operation from environment", { operation: params.operation }); - } else { - throw new AkashError( - "Operation type is required (close, create, or update)", - AkashErrorCode.VALIDATION_PARAMETER_MISSING, - { parameter: "operation" } - ); - } - } - - // For close operations, check DSEQ from various sources - if (params.operation === "close") { - if (!params.dseq) { - if (config.AKASH_GAS_DSEQ) { - params.dseq = config.AKASH_GAS_DSEQ; - elizaLogger.info("Using DSEQ from environment", { dseq: params.dseq }); - } else { - throw new AkashError( - "Deployment sequence (dseq) is required for close operation", - AkashErrorCode.VALIDATION_PARAMETER_MISSING, - { parameter: "dseq" } - ); - } - } - } - - // For create/update operations, check message - if ((params.operation === "create" || params.operation === "update") && !params.message) { - throw new AkashError( - "Message is required for create/update operations", - AkashErrorCode.VALIDATION_PARAMETER_MISSING, - { parameter: "message" } - ); - } - - return true; - } catch (error) { - elizaLogger.error("Gas estimation validation failed", { - error: error instanceof AkashError ? { - code: error.code, - message: error.message, - details: error.details - } : String(error) - }); - return false; - } - }, - - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State | undefined, - options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - const actionId = Date.now().toString(); - elizaLogger.info("Starting gas estimation", { actionId }); - - elizaLogger.debug("=== Handler Parameters ===", { - hasRuntime: !!runtime, - hasMessage: !!message, - hasState: !!state, - hasOptions: !!options, - hasCallback: !!callback, - actionId - }); - - try { - const config = await validateAkashConfig(runtime); - const params = message.content as Partial; - - // Initialize wallet and get address - const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { prefix: "akash" }); - const [account] = await wallet.getAccounts(); - - // Initialize client with Akash registry - const myRegistry = new Registry(getAkashTypeRegistry()); - const client = await SigningStargateClient.connectWithSigner( - config.RPC_ENDPOINT, - wallet, - { registry: myRegistry } - ); - - let msg: EncodeObject; - switch (params.operation) { - case "close": - msg = { - typeUrl: getTypeUrl(MsgCloseDeployment), - value: MsgCloseDeployment.fromPartial({ - id: { - owner: account.address, - dseq: params.dseq - } - }) - }; - break; - case "create": - case "update": - if (!params.message) { - if (callback) { - callback({ - text: `Message is required for ${params.operation} operations.`, - content: { - success: false, - error: { - code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, - message: "Missing message", - help: `Please provide a message object for the ${params.operation} operation.` - } - } - }); - } - return false; - } - msg = params.message; - break; - default: - if (callback) { - callback({ - text: `Invalid operation type: ${params.operation}. Must be one of: close, create, or update.`, - content: { - success: false, - error: { - code: AkashErrorCode.VALIDATION_PARAMETER_INVALID, - message: "Invalid operation", - help: "Specify a valid operation type: 'close', 'create', or 'update'." - } - } - }); - } - return false; - } - - // Estimate gas - elizaLogger.info("Estimating gas for operation", { - operation: params.operation, - dseq: params.dseq, - owner: account.address - }); - - const gasEstimate = await client.simulate( - account.address, - [msg], - `Estimate gas for ${params.operation} operation` - ); - - elizaLogger.info("Gas estimation completed", { - gasEstimate, - operation: params.operation, - dseq: params.dseq, - owner: account.address, - actionId - }); - - if (callback) { - elizaLogger.info("=== Preparing callback response for gas estimation ===", { - hasCallback: true, - actionId, - operation: params.operation, - dseq: params.dseq - }); - - const operationText = params.operation === "close" ? `closing deployment ${params.dseq}` : params.operation; - const estimateData = { - gasEstimate, - operation: params.operation, - dseq: params.dseq, - owner: account.address, - message: msg - }; - - let responseText = `I've estimated the gas for ${operationText}:\n`; - responseText += `• Gas Required: ${gasEstimate} units\n`; - responseText += `• Operation: ${params.operation}\n`; - if (params.dseq) { - responseText += `• DSEQ: ${params.dseq}\n`; - } - responseText += `• Owner: ${account.address}`; - - const response = { - text: responseText, - content: { - success: true, - data: estimateData, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'estimateGas', - version: '1.0.0', - actionId, - tokenCount: getTotalTokensFromString(responseText) - } - } - }; - - elizaLogger.info("=== Executing callback with response ===", { - actionId, - responseText: response.text, - hasContent: !!response.content, - contentKeys: Object.keys(response.content), - metadata: response.content.metadata - }); - - callback(response); - - elizaLogger.info("=== Callback executed successfully ===", { - actionId, - timestamp: new Date().toISOString() - }); - } else { - elizaLogger.warn("=== No callback provided for gas estimation ===", { - actionId, - operation: params.operation, - dseq: params.dseq - }); - } - - return true; - } catch (error) { - elizaLogger.error("Gas estimation failed", { - error: error instanceof Error ? error.message : String(error), - actionId - }); - - if (callback) { - elizaLogger.info("=== Preparing error callback response ===", { - actionId, - hasCallback: true, - errorType: error instanceof AkashError ? 'AkashError' : 'Error' - }); - - const errorResponse = { - code: error instanceof AkashError ? error.code : AkashErrorCode.API_ERROR, - message: error instanceof Error ? error.message : String(error), - details: error instanceof AkashError ? error.details : undefined - }; - - const response = { - text: `Failed to estimate gas: ${errorResponse.message}`, - content: { - success: false, - error: errorResponse, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'estimateGas', - version: '1.0.0', - actionId - } - } - }; - - elizaLogger.info("=== Executing error callback ===", { - actionId, - errorResponse, - hasContent: !!response.content, - contentKeys: Object.keys(response.content) - }); - - callback(response); - - elizaLogger.info("=== Error callback executed ===", { - actionId, - timestamp: new Date().toISOString() - }); - } else { - elizaLogger.warn("=== No callback provided for error handling ===", { - actionId, - errorMessage: error instanceof Error ? error.message : String(error) - }); - } - - return false; - } - } -}; - -export default estimateGas; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/getDeploymentApi.ts b/packages/plugin-akash/src/actions/getDeploymentApi.ts deleted file mode 100644 index fc46ac9ed30cc..0000000000000 --- a/packages/plugin-akash/src/actions/getDeploymentApi.ts +++ /dev/null @@ -1,499 +0,0 @@ -import { type Action, elizaLogger } from "@elizaos/core"; -import type { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; -import { validateAkashConfig } from "../environment"; -import { AkashError, AkashErrorCode } from "../error/error"; -import * as fs from 'node:fs'; -import * as path from 'node:path'; -import { getDeploymentsPath } from "../utils/paths"; - -export interface DeploymentInfo { - owner: string; - dseq: string; - status: string; - createdHeight: number; - cpuUnits: number; - gpuUnits: number; - memoryQuantity: number; - storageQuantity: number; -} - -export interface DeploymentListResponse { - count: number; - results: DeploymentInfo[]; -} - -interface GetDeploymentsContent extends Content { - status?: 'active' | 'closed'; - skip?: number; - limit?: number; -} - -async function sleep(ms: number) { - return new Promise(resolve => setTimeout(resolve, ms)); -} - -async function fetchWithRetry(url: string, options: RequestInit, retries = 3, delay = 1000): Promise { - for (let i = 0; i < retries; i++) { - try { - const response = await fetch(url, options); - if (response.ok) { - return response; - } - - const error = await response.text(); - elizaLogger.warn(`API request failed (attempt ${i + 1}/${retries})`, { - status: response.status, - error - }); - - if (i < retries - 1) { - await sleep(delay * (2 ** i)); // Exponential backoff - continue; - } - - throw new AkashError( - `API request failed after ${retries} attempts: ${response.status} - ${error}`, - AkashErrorCode.API_ERROR - ); - } catch (error) { - if (i === retries - 1) { - throw error; - } - elizaLogger.warn(`API request error (attempt ${i + 1}/${retries})`, { - error: error instanceof Error ? error.message : String(error) - }); - await sleep(delay * (2 ** i)); - } - } - throw new AkashError( - `Failed to fetch after ${retries} retries`, - AkashErrorCode.API_ERROR - ); -} - -export async function initializeWallet(runtime: IAgentRuntime): Promise<{wallet: DirectSecp256k1HdWallet | null, address: string}> { - try { - // Validate configuration and get mnemonic - const config = await validateAkashConfig(runtime); - - elizaLogger.info("Initializing wallet with config", { - hasMnemonic: !!config.AKASH_MNEMONIC, - hasWalletAddress: !!config.AKASH_WALLET_ADDRESS - }); - - // First try to get the wallet address directly - if (config.AKASH_WALLET_ADDRESS) { - elizaLogger.info("Using provided wallet address", { - address: config.AKASH_WALLET_ADDRESS - }); - return { - wallet: null, - address: config.AKASH_WALLET_ADDRESS - }; - } - - // If no wallet address, create wallet from mnemonic - if (!config.AKASH_MNEMONIC) { - throw new AkashError( - "Neither AKASH_WALLET_ADDRESS nor AKASH_MNEMONIC provided", - AkashErrorCode.WALLET_NOT_INITIALIZED - ); - } - - try { - elizaLogger.info("Creating wallet from mnemonic"); - const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { - prefix: "akash" - }); - - // Get account address - const accounts = await wallet.getAccounts(); - const address = accounts[0].address; - - elizaLogger.info("Wallet initialized from mnemonic", { - address, - accountCount: accounts.length - }); - - return { wallet, address }; - } catch (error) { - throw new AkashError( - `Failed to initialize wallet: ${error instanceof Error ? error.message : String(error)}`, - AkashErrorCode.WALLET_NOT_INITIALIZED, - { originalError: error instanceof Error ? error.message : String(error) } - ); - } - } catch (error) { - // Ensure all errors are properly wrapped as AkashError - if (error instanceof AkashError) { - throw error; - } - throw new AkashError( - `Failed to initialize wallet: ${error instanceof Error ? error.message : String(error)}`, - AkashErrorCode.WALLET_NOT_INITIALIZED, - { originalError: error instanceof Error ? error.message : String(error) } - ); - } -} - -export async function fetchDeployments( - runtime: IAgentRuntime, - status?: 'active' | 'closed', - skip = 0, - limit = 10 -): Promise { - elizaLogger.info("Initializing deployment fetch", { - status: status || 'all', - skip, - limit - }); - - try { - // Initialize wallet and get address - const { address } = await initializeWallet(runtime); - - if (!address) { - throw new AkashError( - "Failed to get wallet address", - AkashErrorCode.WALLET_NOT_INITIALIZED - ); - } - - elizaLogger.info("Fetching deployments from API", { - address, - status: status || 'all', - skip, - limit - }); - - // Map status for API compatibility - const apiStatus = status; - - // Don't include status in URL if not specified - const params = new URLSearchParams(); - if (apiStatus) { - params.append('status', apiStatus); - } - params.append('reverseSorting', 'true'); - const url = `https://console-api.akash.network/v1/addresses/${address}/deployments/${skip}/${limit}?${params.toString()}`; - elizaLogger.debug("Making API request", { url }); - - const response = await fetchWithRetry(url, { - headers: { - 'accept': 'application/json' - } - }); - - const data = await response.json() as DeploymentListResponse; - elizaLogger.info("Deployments fetched successfully", { - count: data.count, - resultCount: data.results.length, - status: status || 'all' - }); - - // Keep status as-is from API - data.results = data.results.map(deployment => ({ - ...deployment, - status: deployment.status.toLowerCase() - })); - - // Save deployments to files, organized by their actual status - const deploymentDir = getDeploymentsPath(import.meta.url); - elizaLogger.info("Using deployments directory", { deploymentDir }); - - // Create base deployments directory if it doesn't exist - if (!fs.existsSync(deploymentDir)) { - elizaLogger.info("Creating deployments directory", { deploymentDir }); - fs.mkdirSync(deploymentDir, { recursive: true }); - } - - // Group deployments by status - const deploymentsByStatus = data.results.reduce((acc, deployment) => { - const status = deployment.status.toLowerCase(); - if (!acc[status]) { - acc[status] = []; - } - acc[status].push(deployment); - return acc; - }, {} as Record); - - // Save deployments by status - for (const [status, deployments] of Object.entries(deploymentsByStatus)) { - const statusDir = path.join(deploymentDir, status); - elizaLogger.info("Processing status directory", { statusDir, status, deploymentCount: deployments.length }); - - // Ensure status directory exists - if (!fs.existsSync(statusDir)) { - elizaLogger.info("Creating status directory", { statusDir }); - fs.mkdirSync(statusDir, { recursive: true }); - } - - // Save all deployments for this status in parallel - await Promise.all(deployments.map(async (deployment) => { - const filePath = path.join(statusDir, `${deployment.dseq}.json`); - elizaLogger.debug("Saving deployment file", { filePath, dseq: deployment.dseq }); - await saveDeploymentInfo(deployment, filePath); - })); - } - - return data; - } catch (error) { - elizaLogger.error("Failed to fetch deployments", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined - }); - throw error; - } -} - -export async function saveDeploymentInfo(deploymentInfo: DeploymentInfo, filePath: string): Promise { - elizaLogger.info("Saving deployment info", { - dseq: deploymentInfo.dseq, - owner: deploymentInfo.owner, - filePath - }); - - try { - // Ensure directory exists - const dir = path.dirname(filePath); - if (!fs.existsSync(dir)) { - fs.mkdirSync(dir, { recursive: true }); - } - - // Save deployment info - fs.writeFileSync(filePath, JSON.stringify(deploymentInfo, null, 2), 'utf8'); - elizaLogger.debug("Deployment info saved successfully"); - } catch (error) { - elizaLogger.error("Failed to save deployment info", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - filePath - }); - throw error; - } -} - -export async function loadDeploymentInfo(filePath: string): Promise { - elizaLogger.info("Loading deployment info", { filePath }); - - try { - if (!fs.existsSync(filePath)) { - throw new AkashError( - `Deployment info file not found: ${filePath}`, - AkashErrorCode.FILE_NOT_FOUND - ); - } - - const data = fs.readFileSync(filePath, 'utf8'); - const deploymentInfo = JSON.parse(data) as DeploymentInfo; - elizaLogger.debug("Deployment info loaded successfully", { - dseq: deploymentInfo.dseq, - owner: deploymentInfo.owner - }); - - return deploymentInfo; - } catch (error) { - elizaLogger.error("Failed to load deployment info", { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - filePath - }); - throw error; - } -} - -export const getDeploymentApiAction: Action = { - name: "GET_DEPLOYMENTS", - similes: ["LIST_DEPLOYMENTS", "FETCH_DEPLOYMENTS", "SHOW_DEPLOYMENTS"], - description: "Fetch deployments from Akash Network", - examples: [[ - { - user: "user", - content: { - text: "Get all deployments", - } as GetDeploymentsContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Fetching all deployments..." - } as GetDeploymentsContent - } as ActionExample - ], [ - { - user: "user", - content: { - text: "Get active deployments", - status: "active" - } as GetDeploymentsContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Fetching active deployments..." - } as GetDeploymentsContent - } as ActionExample - ]], - - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - elizaLogger.debug("Validating get deployments request", { message }); - try { - const params = message.content as Partial; - - if (params.status && !['active', 'closed'].includes(params.status)) { - throw new AkashError( - "Status must be either 'active' or 'closed'", - AkashErrorCode.VALIDATION_PARAMETER_INVALID, - { parameter: "status", value: params.status } - ); - } - - if (params.skip !== undefined && (typeof params.skip !== 'number' || params.skip < 0)) { - throw new AkashError( - "Skip must be a non-negative number", - AkashErrorCode.VALIDATION_PARAMETER_INVALID, - { parameter: "skip", value: params.skip } - ); - } - - if (params.limit !== undefined && (typeof params.limit !== 'number' || params.limit <= 0)) { - throw new AkashError( - "Limit must be a positive number", - AkashErrorCode.VALIDATION_PARAMETER_INVALID, - { parameter: "limit", value: params.limit } - ); - } - - return true; - } catch (error) { - elizaLogger.error("Get deployments validation failed", { - error: error instanceof AkashError ? { - code: error.code, - message: error.message, - details: error.details - } : String(error) - }); - return false; - } - }, - - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State | undefined, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - const actionId = Date.now().toString(); - elizaLogger.info("Starting deployment API request", { actionId }); - - try { - // const config = await validateAkashConfig(runtime); - const params = message.content as Partial; - - // Fetch deployments - const deployments = await fetchDeployments( - runtime, - params.status, - params.skip, - params.limit - ); - - if (callback) { - elizaLogger.info("=== Preparing callback response for deployments ===", { - hasCallback: true, - actionId, - deploymentCount: deployments.count - }); - - const callbackResponse = { - text: `Found ${deployments.count} deployment${deployments.count !== 1 ? 's' : ''}${params.status ? ` with status: ${params.status}` : ''}\n\nDeployments:\n${deployments.results.map(dep => - `- DSEQ: ${dep.dseq}\n Status: ${dep.status}\n CPU: ${dep.cpuUnits} units\n Memory: ${dep.memoryQuantity} units\n Storage: ${dep.storageQuantity} units` - ).join('\n\n')}`, - content: { - success: true, - data: { - deployments: deployments.results, - total: deployments.count, - status: params.status || 'all' - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getDeploymentApi', - version: '1.0.0', - actionId - } - } - }; - - elizaLogger.info("=== Executing callback with response ===", { - actionId, - responseText: callbackResponse.text, - hasContent: !!callbackResponse.content, - contentKeys: Object.keys(callbackResponse.content), - metadata: callbackResponse.content.metadata - }); - - callback(callbackResponse); - - elizaLogger.info("=== Callback executed successfully ===", { - actionId, - timestamp: new Date().toISOString() - }); - } - - return true; - } catch (error) { - elizaLogger.error("Get deployments request failed", { - error: error instanceof Error ? error.message : String(error), - actionId - }); - - if (callback) { - elizaLogger.info("=== Preparing error callback response ===", { - actionId, - hasCallback: true, - errorType: error instanceof AkashError ? 'AkashError' : 'Error' - }); - - const errorResponse = { - text: `Failed to get deployments: ${error instanceof Error ? error.message : String(error)}`, - content: { - success: false, - error: { - code: error instanceof AkashError ? error.code : AkashErrorCode.API_ERROR, - message: error instanceof Error ? error.message : String(error) - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getDeploymentApi', - version: '1.0.0', - actionId - } - } - }; - - elizaLogger.info("=== Executing error callback ===", { - actionId, - responseText: errorResponse.text, - hasContent: !!errorResponse.content, - contentKeys: Object.keys(errorResponse.content) - }); - - callback(errorResponse); - - elizaLogger.info("=== Error callback executed ===", { - actionId, - timestamp: new Date().toISOString() - }); - } - - return false; - } - } -}; - -export default getDeploymentApiAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/getDeploymentStatus.ts b/packages/plugin-akash/src/actions/getDeploymentStatus.ts deleted file mode 100644 index 2dfd2abae5afd..0000000000000 --- a/packages/plugin-akash/src/actions/getDeploymentStatus.ts +++ /dev/null @@ -1,493 +0,0 @@ -import { type Action, elizaLogger } from "@elizaos/core"; -import type { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; -import { QueryDeploymentRequest, QueryClientImpl as DeploymentQueryClient } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; -import { getRpc } from "@akashnetwork/akashjs/build/rpc"; -import { validateAkashConfig } from "../environment"; -import { AkashError, AkashErrorCode } from "../error/error"; - -interface GetDeploymentStatusContent extends Content { - text: string; - dseq?: string; -} - -interface DeploymentGroup { - groupId?: { - owner: string; - dseq: string; - gseq: number; - }; - state: string; - resources: Array<{ - resources: { - cpu: { - units: { - val: string; - }; - }; - memory: { - quantity: { - val: string; - }; - }; - storage: Array<{ - quantity: { - val: string; - }; - }>; - }; - count: number; - price: { - denom: string; - amount: string; - }; - }>; -} - -interface DeploymentResponse { - deploymentId?: { - owner: string; - dseq: string; - }; - state: string; - version: string; - createdAt: string; - escrowAccount?: { - balance?: { - denom: string; - amount: string; - }; - }; - groups?: DeploymentGroup[]; -} - -enum DeploymentState { - UNKNOWN = 0, - ACTIVE = 1, - CLOSED = 2, - INSUFFICIENT_FUNDS = 3, -} - -export const getDeploymentStatusAction: Action = { - name: "GET_DEPLOYMENT_STATUS", - similes: ["CHECK_DEPLOYMENT", "DEPLOYMENT_STATUS", "DEPLOYMENT_STATE", "CHECK DSEQ"], - description: "Get the current status of a deployment on Akash Network", - examples: [[ - { - user: "user", - content: { - text: "Can you check the deployment status of the DSEQ 123456?", - } as GetDeploymentStatusContent - } as ActionExample - ]], - - validate: async (runtime: IAgentRuntime, message: Memory): Promise => { - elizaLogger.debug("Validating get deployment status request", { message }); - try { - const params = message.content as Partial; - const config = await validateAkashConfig(runtime); - - // Extract DSEQ from text if present - if (params.text && !params.dseq) { - // Pattern to match DSEQ followed by numbers - const dseqMatch = params.text.match(/(?:DSEQ|dseq)\s*(\d+)/i); - if (dseqMatch) { - params.dseq = dseqMatch[1]; - elizaLogger.debug("Extracted DSEQ from text", { - text: params.text, - extractedDseq: params.dseq - }); - } - } - - // If no dseq provided, check environment configuration - if (!params.dseq) { - if (config.AKASH_DEP_STATUS === "dseq" && config.AKASH_DEP_DSEQ) { - params.dseq = config.AKASH_DEP_DSEQ; - } else if (config.AKASH_DEP_STATUS === "param_passed") { - elizaLogger.info("DSEQ parameter is required when AKASH_DEP_STATUS is set to param_passed", { - current_status: config.AKASH_DEP_STATUS - }); - return true; // Allow validation to pass, we'll handle the missing parameter in the handler - } else { - elizaLogger.info("No DSEQ provided and no valid environment configuration found", { - dep_status: config.AKASH_DEP_STATUS, - dep_dseq: config.AKASH_DEP_DSEQ - }); - return true; // Allow validation to pass, we'll handle the missing configuration in the handler - } - } - - // If dseq is provided, validate its format - if (params.dseq && !/^\d+$/.test(params.dseq)) { - throw new AkashError( - "Invalid DSEQ format. Must be a numeric string", - AkashErrorCode.VALIDATION_PARAMETER_INVALID, - { parameter: "dseq", value: params.dseq } - ); - } - - return true; - } catch (error) { - elizaLogger.error("Get deployment status validation failed", { - error: error instanceof AkashError ? { - code: error.code, - message: error.message, - details: error.details - } : String(error) - }); - return false; - } - }, - - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State | undefined, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - const actionId = Date.now().toString(); - elizaLogger.info("Starting deployment status request", { actionId }); - - try { - const config = await validateAkashConfig(runtime); - const params = message.content as Partial; - let dseqSource = "parameter"; // Track where the DSEQ came from - - // Handle missing dseq parameter based on environment configuration - if (!params.dseq) { - if (config.AKASH_DEP_STATUS === "dseq") { - if (config.AKASH_DEP_DSEQ) { - params.dseq = config.AKASH_DEP_DSEQ; - dseqSource = "environment"; - } else { - if (callback) { - callback({ - text: "AKASH_DEP_DSEQ is not set in your environment. Please set a valid deployment sequence number.", - content: { - success: false, - error: { - code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, - message: "Missing AKASH_DEP_DSEQ", - help: "When AKASH_DEP_STATUS is set to 'dseq', you must also set AKASH_DEP_DSEQ in your .env file." - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getDeploymentStatus', - version: '1.0.0', - actionId - } - } - }); - } - return false; - } - } else if (config.AKASH_DEP_STATUS === "param_passed") { - if (callback) { - callback({ - text: "DSEQ parameter is required. Please provide a deployment sequence number.", - content: { - success: false, - error: { - code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, - message: "Missing required parameter: dseq", - help: "You need to provide a deployment sequence number (dseq) to check its status." - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getDeploymentStatus', - version: '1.0.0', - actionId - } - } - }); - } - return false; - } else { - if (callback) { - callback({ - text: "No deployment configuration found. Please set AKASH_DEP_STATUS and AKASH_DEP_DSEQ in your environment or provide a dseq parameter.", - content: { - success: false, - error: { - code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, - message: "Missing configuration", - help: "Set AKASH_DEP_STATUS='dseq' and AKASH_DEP_DSEQ in your .env file, or set AKASH_DEP_STATUS='param_passed' and provide dseq parameter in your request." - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getDeploymentStatus', - version: '1.0.0', - actionId - } - } - }); - } - return false; - } - } - - // Initialize wallet from mnemonic - const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { prefix: "akash" }); - const [account] = await wallet.getAccounts(); - - // Initialize query client - const queryClient = new DeploymentQueryClient(await getRpc(config.RPC_ENDPOINT)); - - // Query deployment - elizaLogger.info("Querying deployment status", { - dseq: params.dseq, - owner: account.address - }); - - try { - const request = QueryDeploymentRequest.fromPartial({ - id: { - owner: account.address, - dseq: params.dseq - } - }); - - const response = await queryClient.Deployment(request); - - if (!response.deployment) { - // Different messages based on DSEQ source - if (dseqSource === "environment") { - if (callback) { - callback({ - text: "The deployment sequence number in your environment configuration was not found. Please check AKASH_DEP_DSEQ value.", - content: { - success: false, - error: { - code: AkashErrorCode.DEPLOYMENT_NOT_FOUND, - message: "Invalid AKASH_DEP_DSEQ", - help: "Update AKASH_DEP_DSEQ in your .env file with a valid deployment sequence number, or switch to AKASH_DEP_STATUS='param_passed' to provide DSEQ as a parameter.", - current_dseq: params.dseq - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getDeploymentStatus', - version: '1.0.0', - actionId - } - } - }); - } - } else { - throw new AkashError( - "Deployment not found", - AkashErrorCode.DEPLOYMENT_NOT_FOUND, - { - dseq: params.dseq, - owner: account.address, - actionId - } - ); - } - return false; - } - - // Format deployment status - const deployment = response.deployment as unknown as DeploymentResponse; - const status = { - owner: deployment.deploymentId?.owner, - dseq: deployment.deploymentId?.dseq, - state: deployment.state, - version: deployment.version, - createdAt: deployment.createdAt, - balance: deployment.escrowAccount?.balance, - groups: deployment.groups?.map((group: DeploymentGroup) => ({ - groupId: group.groupId, - state: group.state, - resources: group.resources - })) - }; - - elizaLogger.info("Deployment status retrieved successfully", { - dseq: params.dseq, - state: status.state, - owner: status.owner, - actionId - }); - - if (callback) { - // Convert numeric state to readable string - const stateString = DeploymentState[status.state as keyof typeof DeploymentState] || 'UNKNOWN'; - - const formattedBalance = deployment.escrowAccount?.balance - ? `${deployment.escrowAccount.balance.amount}${deployment.escrowAccount.balance.denom}` - : 'No balance information'; - - elizaLogger.info("=== Preparing callback response for deployment status ===", { - hasCallback: true, - actionId, - dseq: params.dseq - }); - - const callbackResponse = { - text: `Deployment ${params.dseq} Status:\nState: ${stateString}\nBalance: ${formattedBalance}\nCreated At: ${status.createdAt}`, - content: { - success: true, - data: { - deployment: status, - queryResponse: response.deployment - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getDeploymentStatus', - version: '1.0.0', - actionId - } - } - }; - - elizaLogger.info("=== Executing callback with response ===", { - actionId, - responseText: callbackResponse.text, - hasContent: !!callbackResponse.content, - contentKeys: Object.keys(callbackResponse.content), - metadata: callbackResponse.content.metadata - }); - - callback(callbackResponse); - - elizaLogger.info("=== Callback executed successfully ===", { - actionId, - timestamp: new Date().toISOString() - }); - } - - return true; - } catch (queryError) { - // Handle query errors differently based on DSEQ source - if (dseqSource === "environment") { - elizaLogger.warn("Failed to query deployment from environment configuration", { - dseq: params.dseq, - error: queryError instanceof Error ? queryError.message : String(queryError) - }); - if (callback) { - callback({ - text: "Could not find deployment with the configured DSEQ. Please check your environment settings.", - content: { - success: false, - error: { - code: AkashErrorCode.API_ERROR, - message: "Invalid AKASH_DEP_DSEQ configuration", - help: "Verify that AKASH_DEP_DSEQ contains a valid deployment sequence number, or switch to AKASH_DEP_STATUS='param_passed' to provide DSEQ as a parameter.", - current_dseq: params.dseq - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getDeploymentStatus', - version: '1.0.0', - actionId - } - } - }); - } - } else { - elizaLogger.error("Failed to query deployment", { - error: queryError instanceof Error ? queryError.message : String(queryError), - actionId - }); - if (callback) { - elizaLogger.info("=== Preparing error callback response ===", { - actionId, - hasCallback: true, - errorType: queryError instanceof AkashError ? 'AkashError' : 'Error' - }); - - const errorResponse = { - text: `Failed to get deployment status: ${queryError instanceof Error ? queryError.message : String(queryError)}`, - content: { - success: false, - error: { - code: queryError instanceof AkashError ? queryError.code : AkashErrorCode.API_ERROR, - message: queryError instanceof Error ? queryError.message : String(queryError) - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getDeploymentStatus', - version: '1.0.0', - actionId - } - } - }; - - elizaLogger.info("=== Executing error callback ===", { - actionId, - responseText: errorResponse.text, - hasContent: !!errorResponse.content, - contentKeys: Object.keys(errorResponse.content) - }); - - callback(errorResponse); - - elizaLogger.info("=== Error callback executed ===", { - actionId, - timestamp: new Date().toISOString() - }); - } - } - return false; - } - } catch (error) { - elizaLogger.error("Get deployment status request failed", { - error: error instanceof Error ? error.message : String(error), - actionId - }); - - if (callback) { - elizaLogger.info("=== Preparing error callback response ===", { - actionId, - hasCallback: true, - errorType: error instanceof AkashError ? 'AkashError' : 'Error' - }); - - const errorResponse = { - text: `Failed to get deployment status: ${error instanceof Error ? error.message : String(error)}`, - content: { - success: false, - error: { - code: error instanceof AkashError ? error.code : AkashErrorCode.API_ERROR, - message: error instanceof Error ? error.message : String(error) - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getDeploymentStatus', - version: '1.0.0', - actionId - } - } - }; - - elizaLogger.info("=== Executing error callback ===", { - actionId, - responseText: errorResponse.text, - hasContent: !!errorResponse.content, - contentKeys: Object.keys(errorResponse.content) - }); - - callback(errorResponse); - - elizaLogger.info("=== Error callback executed ===", { - actionId, - timestamp: new Date().toISOString() - }); - } - - return false; - } - } -}; - -export default getDeploymentStatusAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/getGPUPricing.ts b/packages/plugin-akash/src/actions/getGPUPricing.ts deleted file mode 100644 index 395fa5796e230..0000000000000 --- a/packages/plugin-akash/src/actions/getGPUPricing.ts +++ /dev/null @@ -1,225 +0,0 @@ -import { type Action, elizaLogger } from "@elizaos/core"; -import type { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -import { getConfig } from "../environment"; - -interface GetGPUPricingContent extends Content { - cpu?: number; // CPU units in millicores (e.g., 1000 = 1 CPU) - memory?: number; // Memory in bytes (e.g., 1000000000 = 1GB) - storage?: number; // Storage in bytes (e.g., 1000000000 = 1GB) -} - -interface PricingResponse { - spec: { - cpu: number; - memory: number; - storage: number; - }; - akash: number; - aws: number; - gcp: number; - azure: number; -} - -// Get configuration with defaults -const config = getConfig(process.env.AKASH_ENV); -const PRICING_API_URL = config.AKASH_PRICING_API_URL; -const DEFAULT_CPU = Number.parseInt(config.AKASH_DEFAULT_CPU || "1000"); -const DEFAULT_MEMORY = Number.parseInt(config.AKASH_DEFAULT_MEMORY || "1000000000"); -const DEFAULT_STORAGE = Number.parseInt(config.AKASH_DEFAULT_STORAGE || "1000000000"); - -// Custom error class for GPU pricing errors -class GPUPricingError extends Error { - constructor(message: string, public code: string) { - super(message); - this.name = 'GPUPricingError'; - } -} - -export const getGPUPricingAction: Action = { - name: "GET_GPU_PRICING", - similes: ["GET_PRICING", "COMPARE_PRICES", "CHECK_PRICING"], - description: "Get GPU pricing comparison between Akash and major cloud providers", - examples: [[ - { - user: "user", - content: { - text: "Get GPU pricing for 2 CPUs, 2GB memory, and 10GB storage", - cpu: 2000, - memory: 2000000000, - storage: 10000000000 - } as GetGPUPricingContent - } as ActionExample - ], [ - { - user: "user", - content: { - text: "Compare GPU prices across providers" - } as GetGPUPricingContent - } as ActionExample - ]], - - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - elizaLogger.debug("Validating GPU pricing request", { message }); - try { - const params = message.content as Partial; - - // Validate CPU if provided - if (params.cpu !== undefined && (Number.isNaN(params.cpu) || params.cpu <= 0)) { - throw new GPUPricingError("CPU units must be a positive number", "INVALID_CPU"); - } - - // Validate memory if provided - if (params.memory !== undefined && (Number.isNaN(params.memory) || params.memory <= 0)) { - throw new GPUPricingError("Memory must be a positive number", "INVALID_MEMORY"); - } - - // Validate storage if provided - if (params.storage !== undefined && (Number.isNaN(params.storage) || params.storage <= 0)) { - throw new GPUPricingError("Storage must be a positive number", "INVALID_STORAGE"); - } - - return true; - } catch (error) { - elizaLogger.error("GPU pricing validation failed", { - error: error instanceof GPUPricingError ? { - code: error.code, - message: error.message - } : String(error) - }); - return false; - } - }, - - handler: async ( - _runtime: IAgentRuntime, - message: Memory, - _state: State | undefined, - _options: { [key: string]: unknown; } = {}, - callback?: HandlerCallback - ): Promise => { - const actionId = Date.now().toString(); - elizaLogger.info("Starting GPU pricing request", { actionId }); - - try { - const params = message.content as GetGPUPricingContent; - - // Use provided values or defaults - const requestBody = { - cpu: params.cpu || DEFAULT_CPU, - memory: params.memory || DEFAULT_MEMORY, - storage: params.storage || DEFAULT_STORAGE - }; - - elizaLogger.info("Fetching pricing information", { - specs: requestBody, - apiUrl: PRICING_API_URL - }); - - // Make API request using fetch - const response = await fetch(PRICING_API_URL, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - body: JSON.stringify(requestBody) - }); - - if (!response.ok) { - throw new GPUPricingError( - `API request failed with status ${response.status}: ${response.statusText}`, - "API_ERROR" - ); - } - - const data = await response.json() as PricingResponse; - - // Calculate savings percentages - const savings = { - vs_aws: ((data.aws - data.akash) / data.aws * 100).toFixed(2), - vs_gcp: ((data.gcp - data.akash) / data.gcp * 100).toFixed(2), - vs_azure: ((data.azure - data.akash) / data.azure * 100).toFixed(2) - }; - - elizaLogger.info("Pricing information retrieved successfully", { - specs: data.spec, - pricing: { - akash: data.akash, - aws: data.aws, - gcp: data.gcp, - azure: data.azure - }, - savings - }); - - if (callback) { - const callbackResponse = { - text: `GPU Pricing Comparison\nAkash: $${data.akash}\nAWS: $${data.aws} (${savings.vs_aws}% savings)\nGCP: $${data.gcp} (${savings.vs_gcp}% savings)\nAzure: $${data.azure} (${savings.vs_azure}% savings)`, - content: { - success: true, - data: { - specs: { - cpu: data.spec.cpu, - memory: data.spec.memory, - storage: data.spec.storage - }, - pricing: { - akash: data.akash, - aws: data.aws, - gcp: data.gcp, - azure: data.azure - }, - savings: { - vs_aws: `${savings.vs_aws}%`, - vs_gcp: `${savings.vs_gcp}%`, - vs_azure: `${savings.vs_azure}%` - } - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getGPUPricing', - version: '1.0.0', - actionId - } - } - }; - - callback(callbackResponse); - } - - return true; - } catch (error) { - elizaLogger.error("GPU pricing request failed", { - error: error instanceof Error ? error.message : String(error), - actionId - }); - - if (callback) { - const errorResponse = { - text: "Failed to get GPU pricing information", - content: { - success: false, - error: { - code: error instanceof GPUPricingError ? error.code : 'UNKNOWN_ERROR', - message: error instanceof Error ? error.message : String(error) - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getGPUPricing', - version: '1.0.0', - actionId - } - } - }; - - callback(errorResponse); - } - - return false; - } - } -}; - -export default getGPUPricingAction; diff --git a/packages/plugin-akash/src/actions/getManifest.ts b/packages/plugin-akash/src/actions/getManifest.ts deleted file mode 100644 index 5760617d2629a..0000000000000 --- a/packages/plugin-akash/src/actions/getManifest.ts +++ /dev/null @@ -1,361 +0,0 @@ -import { type Action, elizaLogger } from "@elizaos/core"; -import type { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -import { SDL } from "@akashnetwork/akashjs/build/sdl"; -import { validateAkashConfig } from "../environment"; -import { AkashError, AkashErrorCode } from "../error/error"; -import * as fs from 'node:fs'; -import * as path from 'node:path'; -import yaml from 'js-yaml'; -// import { getAkashTypeRegistry } from "@akashnetwork/akashjs/build/stargate"; -import { getDefaultSDLPath } from "../utils/paths"; - -interface GetManifestContent extends Content { - sdl?: string; - sdlFile?: string; -} - -// elizaLogger.info("Default SDL path initialized", { DEFAULT_SDL_PATH }); -// elizaLogger.info("Loading SDL from file", { filePath }); -// elizaLogger.info("Resolved SDL file path", { resolvedPath }); -// elizaLogger.error("SDL file not found", { resolvedPath }); -// elizaLogger.info("SDL file loaded successfully", { content }); -// elizaLogger.error("Failed to read SDL file", { error }); -// elizaLogger.error("SDL validation failed", { error }); -// elizaLogger.info("Using provided SDL content"); -// elizaLogger.info("Loading SDL from file", { path: params.sdlFile }); -// elizaLogger.info("Loading default SDL", { path: DEFAULT_SDL_PATH }); -// elizaLogger.debug("Parsing SDL content and generating manifest"); - -const DEFAULT_SDL_PATH = (() => { - const currentFileUrl = import.meta.url; - const sdlPath = getDefaultSDLPath(currentFileUrl); - - // Only log if file doesn't exist - if (!fs.existsSync(sdlPath)) { - elizaLogger.warn("Default SDL path not found", { - sdlPath, - exists: false - }); - } - - return sdlPath; -})(); - -const loadSDLFromFile = (filePath: string): string => { - try { - // If path doesn't contain plugin-akash and it's not the default path, adjust it - if (!filePath.includes('plugin-akash') && filePath !== DEFAULT_SDL_PATH) { - const adjustedPath = path.join(path.dirname(DEFAULT_SDL_PATH), path.basename(filePath)); - filePath = adjustedPath; - } - - // Try multiple possible locations - const possiblePaths = [ - filePath, - path.join(process.cwd(), filePath), - path.join(process.cwd(), 'packages', 'plugin-akash', filePath), - path.join(process.cwd(), 'packages', 'plugin-akash', 'src', filePath), - path.join(path.dirname(DEFAULT_SDL_PATH), filePath) - ]; - - for (const tryPath of possiblePaths) { - if (fs.existsSync(tryPath)) { - const content = fs.readFileSync(tryPath, "utf8"); - elizaLogger.info("SDL file loaded successfully from", { - path: tryPath - }); - return content; - } - } - - // If we get here, none of the paths worked - throw new AkashError( - 'SDL file not found in any of the possible locations', - AkashErrorCode.VALIDATION_SDL_FAILED, - { - filePath, - triedPaths: possiblePaths - } - ); - } catch (error) { - elizaLogger.error("Failed to read SDL file", { - filePath, - error: error instanceof Error ? error.message : String(error) - }); - throw new AkashError( - `Failed to read SDL file: ${error instanceof Error ? error.message : String(error)}`, - AkashErrorCode.VALIDATION_SDL_FAILED, - { filePath } - ); - } -}; - -const validateSDL = (sdlContent: string, validationLevel = "strict"): boolean => { - try { - // First try to parse as YAML - const parsed = yaml.load(sdlContent); - if (!parsed || typeof parsed !== 'object') { - throw new Error('Invalid SDL format: not a valid YAML object'); - } - - if (validationLevel === "none") { - // elizaLogger.debug("Skipping SDL validation (validation level: none)"); - return true; - } - - // Required sections based on validation level - const requiredSections = ['version', 'services']; - const sectionsToCheck = validationLevel === "strict" ? - [...requiredSections, 'profiles', 'deployment'] : - requiredSections; - - for (const section of sectionsToCheck) { - if (!(section in parsed)) { - throw new Error(`Invalid SDL format: missing required section '${section}'`); - } - } - - // elizaLogger.debug("SDL validation successful", { - // validationLevel, - // checkedSections: sectionsToCheck - // }); - return true; - } catch (error) { - elizaLogger.error("SDL validation failed", { - error: error instanceof Error ? error.message : String(error), - validationLevel - }); - return false; - } -}; - -export const getManifestAction: Action = { - name: "GET_MANIFEST", - similes: ["LOAD_MANIFEST", "READ_MANIFEST", "PARSE_MANIFEST"], - description: "Load and validate SDL to generate a manifest for Akash deployments", - examples: [[ - { - user: "user", - content: { - text: "Get manifest from SDL file", - sdlFile: "deployment.yml" - } as GetManifestContent - } as ActionExample - ]], - - validate: async (runtime: IAgentRuntime, message: Memory): Promise => { - elizaLogger.debug("Validating manifest request", { message }); - try { - const params = message.content as Partial; - const config = await validateAkashConfig(runtime); - - // Either SDL content or file path must be provided - if (!params.sdl && !params.sdlFile && !config.AKASH_SDL) { - throw new AkashError( - "Either SDL content, file path, or AKASH_SDL environment variable must be provided", - AkashErrorCode.VALIDATION_PARAMETER_MISSING, - { parameters: ["sdl", "sdlFile", "AKASH_SDL"] } - ); - } - - // If SDL content is provided, validate it - if (params.sdl) { - const validationLevel = config.AKASH_MANIFEST_VALIDATION_LEVEL || "strict"; - if (!validateSDL(params.sdl, validationLevel)) { - throw new AkashError( - "Invalid SDL format", - AkashErrorCode.VALIDATION_SDL_FAILED - ); - } - } - - return true; - } catch (error) { - elizaLogger.error("Manifest validation failed", { - error: error instanceof AkashError ? { - code: error.code, - message: error.message, - details: error.details - } : String(error) - }); - return false; - } - }, - - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State | undefined, - _options: { [key: string]: unknown; } = {}, - callback?: HandlerCallback - ): Promise => { - const actionId = Date.now().toString(); - elizaLogger.info("Starting manifest operation", { actionId }); - - try { - const config = await validateAkashConfig(runtime); - const params = message.content as Partial; - - let sdlContent: string; - try { - // Load SDL content based on priority: params.sdl > params.sdlFile > config.AKASH_SDL - if (params.sdl) { - sdlContent = params.sdl; - elizaLogger.info("Using provided SDL content"); - } else if (params.sdlFile) { - sdlContent = loadSDLFromFile(params.sdlFile); - elizaLogger.info("Loaded SDL from file", { path: params.sdlFile }); - } else { - const sdlPath = config.AKASH_SDL || DEFAULT_SDL_PATH; - sdlContent = loadSDLFromFile(sdlPath); - elizaLogger.info("Using SDL from environment", { path: sdlPath }); - } - - // Validate based on environment settings - const validationLevel = config.AKASH_MANIFEST_VALIDATION_LEVEL || "strict"; - const isValid = validateSDL(sdlContent, validationLevel); - - if (!isValid) { - throw new AkashError( - "SDL validation failed", - AkashErrorCode.VALIDATION_SDL_FAILED - ); - } - - // Check manifest mode - const manifestMode = config.AKASH_MANIFEST_MODE || "auto"; - if (manifestMode === "validate_only") { - elizaLogger.info("Validation successful (validate_only mode)"); - if (callback) { - const callbackResponse = { - text: "SDL validation successful", - content: { - success: true, - data: { - validationLevel, - mode: manifestMode - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getManifest', - version: '1.0.0', - actionId - } - } - }; - callback(callbackResponse); - } - return true; - } - - // Generate manifest - const sdl = new SDL(yaml.load(sdlContent) as any); - const manifest = sdl.manifest(); - - // Save manifest if path is specified - if (config.AKASH_MANIFEST_PATH) { - const manifestPath = path.join( - config.AKASH_MANIFEST_PATH, - `manifest-${Date.now()}.yaml` - ); - fs.writeFileSync(manifestPath, yaml.dump(manifest), 'utf8'); - elizaLogger.info("Manifest saved", { path: manifestPath }); - } - - if (callback) { - const callbackResponse = { - text: "Manifest generated successfully", - content: { - success: true, - data: { - manifest, - settings: { - mode: manifestMode, - validationLevel, - outputPath: config.AKASH_MANIFEST_PATH - } - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getManifest', - version: '1.0.0', - actionId - } - } - }; - callback(callbackResponse); - } - - return true; - } catch (error) { - const formattedError = error instanceof Error ? error.message : String(error); - elizaLogger.error("Manifest operation failed", { - error: formattedError, - settings: { - mode: config.AKASH_MANIFEST_MODE || "auto", - validationLevel: config.AKASH_MANIFEST_VALIDATION_LEVEL || "strict", - outputPath: config.AKASH_MANIFEST_PATH - } - }); - - if (callback) { - const errorResponse = { - text: "Failed to process manifest", - content: { - success: false, - error: error instanceof AkashError ? { - code: error.code, - message: error.message, - details: error.details - } : { - code: AkashErrorCode.MANIFEST_PARSING_FAILED, - message: formattedError - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getManifest', - version: '1.0.0', - actionId - } - } - }; - callback(errorResponse); - } - return false; - } - } catch (error) { - elizaLogger.error("Manifest operation failed", { - error: error instanceof Error ? error.message : String(error), - actionId - }); - - if (callback) { - const errorResponse = { - text: "Manifest operation failed", - content: { - success: false, - error: { - code: AkashErrorCode.MANIFEST_PARSING_FAILED, - message: error instanceof Error ? error.message : String(error) - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getManifest', - version: '1.0.0', - actionId - } - } - }; - callback(errorResponse); - } - - return false; - } - } -}; - -export default getManifestAction; diff --git a/packages/plugin-akash/src/actions/getProviderInfo.ts b/packages/plugin-akash/src/actions/getProviderInfo.ts deleted file mode 100644 index 1848fa73fa7d7..0000000000000 --- a/packages/plugin-akash/src/actions/getProviderInfo.ts +++ /dev/null @@ -1,369 +0,0 @@ -import { type Action, elizaLogger } from "@elizaos/core"; -import type { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -import { QueryProviderRequest, QueryClientImpl as ProviderQueryClient } from "@akashnetwork/akash-api/akash/provider/v1beta3"; -import { getRpc } from "@akashnetwork/akashjs/build/rpc"; -import { AkashError, AkashErrorCode } from "../error/error"; -import { validateAkashConfig } from "../environment"; - -interface GetProviderInfoContent extends Content { - text: string; - provider?: string; -} - -interface ProviderResponse { - provider?: { - owner: string; - hostUri: string; - attributes: Array<{ - key: string; - value: string; - }>; - info?: { - email: string; - website: string; - capabilities: string[]; - }; - status?: ProviderStatus; - }; -} - -interface ProviderStatus { - cluster?: { - nodes: Array<{ - name: string; - capacity: { - cpu: string; - memory: string; - storage: string; - }; - allocatable: { - cpu: string; - memory: string; - storage: string; - }; - }>; - }; - leases?: { - active: number; - pending: number; - available: number; - }; -} - -const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms)); - -export const getProviderInfoAction: Action = { - name: "GET_PROVIDER_INFO", - similes: ["CHECK_PROVIDER", "PROVIDER_INFO", "PROVIDER_STATUS", "CHECK PROVIDER"], - description: "Get detailed information about a provider on Akash Network", - examples: [[ - { - user: "user", - content: { - text: "Can you check the provider info for akash1ccktptfkvdc67msasmesuy5m7gpc76z75kukpz?", - } as GetProviderInfoContent - } as ActionExample - ]], - - validate: async (runtime: IAgentRuntime, message: Memory): Promise => { - elizaLogger.debug("Validating get provider info request", { message }); - try { - const params = message.content as Partial; - const config = await validateAkashConfig(runtime); - - // Extract provider address from text if present - if (params.text && !params.provider) { - // Pattern to match akash1 followed by address characters - const providerMatch = params.text.match(/akash1[a-zA-Z0-9]{38}/); - if (providerMatch) { - params.provider = providerMatch[0]; - elizaLogger.debug("Extracted provider address from text", { - text: params.text, - extractedProvider: params.provider - }); - } - } - - // If still no provider specified, use environment default - if (!params.provider && config.AKASH_PROVIDER_INFO) { - params.provider = config.AKASH_PROVIDER_INFO; - } - - if (!params.provider) { - throw new AkashError( - "Provider address is required", - AkashErrorCode.VALIDATION_PARAMETER_MISSING, - { parameter: "provider" } - ); - } - - // Validate provider address format - if (!params.provider.startsWith("akash1")) { - throw new AkashError( - "Invalid provider address format. Must start with 'akash1'", - AkashErrorCode.VALIDATION_PARAMETER_INVALID, - { parameter: "provider", value: params.provider } - ); - } - - return true; - } catch (error) { - elizaLogger.error("Get provider info validation failed", { - error: error instanceof AkashError ? { - code: error.code, - message: error.message, - details: error.details - } : String(error) - }); - return false; - } - }, - - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State | undefined, - options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - const actionId = Date.now().toString(); - elizaLogger.info("Starting provider info request", { actionId }); - - elizaLogger.debug("=== Handler Parameters ===", { - hasRuntime: !!runtime, - hasMessage: !!message, - hasState: !!state, - hasOptions: !!options, - hasCallback: !!callback, - actionId - }); - - try { - const config = await validateAkashConfig(runtime); - const params = message.content as Partial; - - // If no provider specified, use environment default - if (!params.provider && config.AKASH_PROVIDER_INFO) { - params.provider = config.AKASH_PROVIDER_INFO; - } - - if (!params.provider) { - throw new AkashError( - "Provider address is required", - AkashErrorCode.VALIDATION_PARAMETER_MISSING, - { parameter: "provider" } - ); - } - - // Query provider information - elizaLogger.info("Querying provider information", { - provider: params.provider, - actionId - }); - - const queryClient = new ProviderQueryClient(await getRpc(config.RPC_ENDPOINT)); - const request = QueryProviderRequest.fromPartial({ - owner: params.provider - }); - - try { - const response = await queryClient.Provider(request) as ProviderResponse; - - if (!response.provider) { - throw new AkashError( - "Failed to query provider: Provider not found", - AkashErrorCode.PROVIDER_NOT_FOUND, - { - provider: params.provider, - actionId - } - ); - } - - // Add a delay before querying status - await sleep(2000); // 2 second delay - - // Query provider status from their API - elizaLogger.info("Querying provider status", { - hostUri: response.provider.hostUri, - actionId - }); - - const hostUri = response.provider.hostUri.replace(/^https?:\/\//, ''); - elizaLogger.debug("Making provider status request", { url: `https://${hostUri}/status` }); - - try { - const statusResponse = await fetch(`https://${hostUri}/status`, { - headers: { - 'Accept': 'application/json' - }, - signal: AbortSignal.timeout(5000) - }); - - if (!statusResponse.ok) { - elizaLogger.debug("Provider status not available", { - status: statusResponse.status, - provider: params.provider, - hostUri: response.provider.hostUri, - actionId - }); - } else { - const statusData = await statusResponse.json(); - response.provider.status = statusData; - } - } catch (statusError) { - elizaLogger.debug("Provider status fetch failed", { - error: statusError instanceof Error ? statusError.message : String(statusError), - provider: params.provider, - hostUri: response.provider.hostUri, - actionId - }); - } - - // Format provider information - const info = { - owner: response.provider.owner, - hostUri: response.provider.hostUri, - attributes: response.provider.attributes, - info: response.provider.info, - status: response.provider.status ? { - nodes: response.provider.status.cluster?.nodes.map(node => ({ - name: node.name, - capacity: node.capacity, - allocatable: node.allocatable - })), - leases: response.provider.status.leases - } : undefined - }; - - elizaLogger.info("Provider information retrieved successfully", { - provider: params.provider, - hostUri: response.provider.hostUri, - hasStatus: !!response.provider.status, - actionId - }); - - if (callback) { - elizaLogger.info("=== Preparing callback response for provider info ===", { - hasCallback: true, - actionId, - provider: params.provider - }); - - const callbackResponse = { - text: `Provider ${params.provider} information:\nHost URI: ${info.hostUri}\nOwner: ${info.owner}${info.info ? `\nEmail: ${info.info.email}\nWebsite: ${info.info.website}` : ''}\nAttributes: ${info.attributes.map(attr => `${attr.key}: ${attr.value}`).join(', ')}`, - content: { - success: true, - data: { - provider: info, - queryResponse: response.provider - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getProviderInfo', - version: '1.0.0', - actionId - } - } - }; - - elizaLogger.info("=== Executing callback with response ===", { - actionId, - responseText: callbackResponse.text, - hasContent: !!callbackResponse.content, - contentKeys: Object.keys(callbackResponse.content), - metadata: callbackResponse.content.metadata - }); - - callback(callbackResponse); - - elizaLogger.info("=== Callback executed successfully ===", { - actionId, - timestamp: new Date().toISOString() - }); - } - - return true; - } catch (queryError) { - // Handle specific error cases - const errorMessage = queryError instanceof Error ? queryError.message : String(queryError); - - if (errorMessage.toLowerCase().includes("invalid address")) { - throw new AkashError( - "Failed to query provider: Invalid address format", - AkashErrorCode.VALIDATION_PARAMETER_INVALID, - { - provider: params.provider, - error: errorMessage, - actionId - } - ); - } - - // For all other query errors, treat as provider not found - throw new AkashError( - "Failed to query provider: Provider not found or not accessible", - AkashErrorCode.PROVIDER_NOT_FOUND, - { - provider: params.provider, - error: errorMessage, - actionId - } - ); - } - } catch (error) { - elizaLogger.error("Get provider info request failed", { - error: error instanceof Error ? error.message : String(error), - actionId - }); - - if (callback) { - elizaLogger.info("=== Preparing error callback response ===", { - actionId, - hasCallback: true, - errorType: error instanceof AkashError ? 'AkashError' : 'Error' - }); - - const errorResponse = { - code: error instanceof AkashError ? error.code : AkashErrorCode.API_ERROR, - message: error instanceof Error ? error.message : String(error), - details: error instanceof AkashError ? error.details : undefined - }; - - const response = { - text: `Failed to get provider information: ${errorResponse.message}`, - content: { - success: false, - error: errorResponse, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getProviderInfo', - version: '1.0.0', - actionId - } - } - }; - - elizaLogger.info("=== Executing error callback ===", { - actionId, - errorResponse, - hasContent: !!response.content, - contentKeys: Object.keys(response.content) - }); - - callback(response); - - elizaLogger.info("=== Error callback executed ===", { - actionId, - timestamp: new Date().toISOString() - }); - } - - return false; - } - } -}; - -export default getProviderInfoAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/getProvidersList.ts b/packages/plugin-akash/src/actions/getProvidersList.ts deleted file mode 100644 index e8449b82e5524..0000000000000 --- a/packages/plugin-akash/src/actions/getProvidersList.ts +++ /dev/null @@ -1,333 +0,0 @@ -import { type Action, elizaLogger } from "@elizaos/core"; -import type { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -import { AkashError, AkashErrorCode } from "../error/error"; -import { validateAkashConfig } from "../environment"; - -interface GetProvidersListContent extends Content { - filter?: { - active?: boolean; - hasGPU?: boolean; - region?: string; - }; -} - -interface ProviderAttributes { - key: string; - value: string; -} - -interface ProviderInfo { - owner: string; - hostUri: string; - attributes: ProviderAttributes[]; - active: boolean; - uptime: number; - leaseCount: number; - info?: { - email?: string; - website?: string; - capabilities?: string[]; - }; - status?: { - available: boolean; - error?: string; - lastCheckTime: string; - resources?: { - cpu: { - total: number; - available: number; - }; - memory: { - total: number; - available: number; - }; - storage: { - total: number; - available: number; - }; - }; - }; -} - -const API_BASE_URL = "https://console-api.akash.network/v1"; - -async function fetchProviders(): Promise { - try { - const response = await fetch(`${API_BASE_URL}/providers`, { - headers: { - 'Accept': 'application/json' - } - }); - - if (!response.ok) { - throw new AkashError( - "Failed to fetch providers list: Invalid response from API", - AkashErrorCode.API_RESPONSE_INVALID, - { - status: response.status, - statusText: response.statusText - } - ); - } - - const data = await response.json(); - return data; - } catch (error) { - if (error instanceof AkashError) { - throw error; - } - throw new AkashError( - `Failed to fetch providers list: ${error instanceof Error ? error.message : String(error)}`, - AkashErrorCode.API_REQUEST_FAILED, - { - error: error instanceof Error ? error.message : String(error) - } - ); - } -} - -function filterProviders(providers: ProviderInfo[], filter?: GetProvidersListContent['filter']): ProviderInfo[] { - if (!filter) return providers; - - try { - let filtered = [...providers]; - - if (filter.active !== undefined) { - filtered = filtered.filter(p => { - const isActive = p.active && p.status?.available !== false; - return isActive === filter.active; - }); - } - - if (filter.hasGPU) { - filtered = filtered.filter(p => - p.attributes.some(attr => - attr.key.toLowerCase().includes('gpu') && - attr.value.toLowerCase() !== 'false' && - attr.value !== '0' - ) - ); - } - - if (filter.region) { - const regionFilter = filter.region.toLowerCase(); - filtered = filtered.filter(p => - p.attributes.some(attr => - attr.key.toLowerCase() === 'region' && - attr.value.toLowerCase().includes(regionFilter) - ) - ); - } - - return filtered; - } catch (error) { - throw new AkashError( - "Failed to apply provider filters", - AkashErrorCode.PROVIDER_FILTER_ERROR, - { filter, error: error instanceof Error ? error.message : String(error) } - ); - } -} - -export const getProvidersListAction: Action = { - name: "GET_PROVIDERS_LIST", - similes: ["LIST_PROVIDERS", "FETCH_PROVIDERS", "GET_ALL_PROVIDERS"], - description: "Get a list of all available providers on the Akash Network with their details and status", - examples: [[ - { - user: "user", - content: { - text: "Get a list of all active providers" - } as GetProvidersListContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Fetching list of active Akash providers...", - filter: { - active: true - } - } as GetProvidersListContent - } as ActionExample - ], [ - { - user: "user", - content: { - text: "Show me all GPU providers in the US region", - filter: { - hasGPU: true, - region: "us" - } - } as GetProvidersListContent - } as ActionExample - ]], - - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - elizaLogger.debug("Validating get providers list request", { message }); - try { - const params = message.content as Partial; - - // Validate filter parameters if provided - if (params.filter) { - if (params.filter.region && typeof params.filter.region !== 'string') { - throw new AkashError( - "Region filter must be a string", - AkashErrorCode.VALIDATION_PARAMETER_INVALID, - { parameter: "filter.region" } - ); - } - - if (params.filter.active !== undefined && typeof params.filter.active !== 'boolean') { - throw new AkashError( - "Active filter must be a boolean", - AkashErrorCode.VALIDATION_PARAMETER_INVALID, - { parameter: "filter.active" } - ); - } - - if (params.filter.hasGPU !== undefined && typeof params.filter.hasGPU !== 'boolean') { - throw new AkashError( - "HasGPU filter must be a boolean", - AkashErrorCode.VALIDATION_PARAMETER_INVALID, - { parameter: "filter.hasGPU" } - ); - } - } - - return true; - } catch (error) { - elizaLogger.error("Get providers list validation failed", { - error: error instanceof AkashError ? { - code: error.code, - message: error.message, - details: error.details - } : String(error) - }); - return false; - } - }, - - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State | undefined, - _options: { [key: string]: unknown; } = {}, - callback?: HandlerCallback - ): Promise => { - const actionId = Date.now().toString(); - elizaLogger.info("Starting providers list request", { actionId }); - - try { - await validateAkashConfig(runtime); - const params = message.content as GetProvidersListContent; - - elizaLogger.info("Fetching providers list", { - filter: params.filter, - actionId - }); - - // Fetch providers - const allProviders = await fetchProviders(); - - // Apply filters - const filteredProviders = filterProviders(allProviders, params.filter); - - elizaLogger.info("Providers list retrieved successfully", { - totalProviders: allProviders.length, - filteredProviders: filteredProviders.length, - filter: params.filter, - actionId - }); - - if (callback) { - const callbackResponse = { - text: `Retrieved ${filteredProviders.length} providers${params.filter ? ' (filtered)' : ''} from total ${allProviders.length}`, - content: { - success: true, - data: { - summary: { - total: allProviders.length, - filtered: filteredProviders.length, - activeCount: filteredProviders.filter(p => p.active && p.status?.available !== false).length, - gpuCount: filteredProviders.filter(p => - p.attributes.some(attr => - attr.key.toLowerCase().includes('gpu') && - attr.value.toLowerCase() !== 'false' && - attr.value !== '0' - ) - ).length - }, - providers: filteredProviders.map(p => ({ - owner: p.owner, - hostUri: p.hostUri, - active: p.active && p.status?.available !== false, - uptime: p.uptime, - leaseCount: p.leaseCount, - attributes: p.attributes, - info: { - ...p.info, - capabilities: p.info?.capabilities || [], - region: p.attributes.find(a => a.key.toLowerCase() === 'region')?.value || 'unknown' - }, - resources: p.status?.resources || { - cpu: { total: 0, available: 0 }, - memory: { total: 0, available: 0 }, - storage: { total: 0, available: 0 } - }, - status: { - available: p.status?.available || false, - lastCheckTime: p.status?.lastCheckTime || new Date().toISOString(), - error: p.status?.error - } - })) - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getProvidersList', - version: '1.0.0', - actionId, - filters: params.filter || {} - } - } - }; - - callback(callbackResponse); - } - - return true; - } catch (error) { - elizaLogger.error("Get providers list request failed", { - error: error instanceof Error ? error.message : String(error), - code: error instanceof AkashError ? error.code : undefined, - actionId - }); - - if (callback) { - const errorResponse = { - text: "Failed to get providers list", - content: { - success: false, - error: { - code: error instanceof AkashError ? error.code : AkashErrorCode.API_REQUEST_FAILED, - message: error instanceof Error ? error.message : String(error) - }, - metadata: { - timestamp: new Date().toISOString(), - source: 'akash-plugin', - action: 'getProvidersList', - version: '1.0.0', - actionId - } - } - }; - - callback(errorResponse); - } - - return false; - } - } -}; -export default getProvidersListAction; - diff --git a/packages/plugin-akash/src/environment.ts b/packages/plugin-akash/src/environment.ts deleted file mode 100644 index 652caf22c27f6..0000000000000 --- a/packages/plugin-akash/src/environment.ts +++ /dev/null @@ -1,259 +0,0 @@ -import { type IAgentRuntime, elizaLogger } from "@elizaos/core"; -import { z } from "zod"; - -// Add ENV variable at the top -let ENV = "mainnet"; - -// Log environment information -elizaLogger.info("Environment sources", { - shellVars: Object.keys(process.env).filter(key => key.startsWith('AKASH_')), -}); - -export const akashEnvSchema = z.object({ - AKASH_MNEMONIC: z.string() - .min(1, "Wallet mnemonic is required") - .refine( - (mnemonic) => { - const words = mnemonic.trim().split(/\s+/); - return words.length === 12 || words.length === 24; - }, - { - message: "Mnemonic must be 12 or 24 words", - path: ["AKASH_MNEMONIC"] - } - ), - AKASH_WALLET_ADDRESS: z.string() - .min(1, "Wallet address is required") - .regex(/^akash[a-zA-Z0-9]{39}$/, "Invalid Akash wallet address format") - .optional(), - AKASH_NET: z.string().min(1, "Network configuration URL is required"), - AKASH_VERSION: z.string().min(1, "Akash version is required"), - AKASH_CHAIN_ID: z.string().min(1, "Chain ID is required"), - AKASH_NODE: z.string().min(1, "Node URL is required"), - RPC_ENDPOINT: z.string().min(1, "RPC endpoint is required"), - AKASH_GAS_PRICES: z.string().min(1, "Gas prices are required"), - AKASH_GAS_ADJUSTMENT: z.string().min(1, "Gas adjustment is required"), - AKASH_KEYRING_BACKEND: z.string().min(1, "Keyring backend is required"), - AKASH_FROM: z.string().min(1, "Key name is required"), - AKASH_FEES: z.string().min(1, "Transaction fees are required"), - AKASH_DEPOSIT: z.string().min(1, "Deposit is required be careful with the value not too low generally around 500000uakt"), - AKASH_PRICING_API_URL: z.string().optional(), - AKASH_DEFAULT_CPU: z.string().optional(), - AKASH_DEFAULT_MEMORY: z.string().optional(), - AKASH_DEFAULT_STORAGE: z.string().optional(), - AKASH_SDL: z.string().optional(), - AKASH_CLOSE_DEP: z.string().optional(), - AKASH_CLOSE_DSEQ: z.string().optional(), - AKASH_PROVIDER_INFO: z.string().optional(), - AKASH_DEP_STATUS: z.string().optional(), - AKASH_DEP_DSEQ: z.string().optional(), - AKASH_GAS_OPERATION: z.string().optional(), - AKASH_GAS_DSEQ: z.string().optional(), - // Manifest Configuration - AKASH_MANIFEST_MODE: z.string() - .optional() - .refine( - (mode) => !mode || ["auto", "manual", "validate_only"].includes(mode), - { - message: "AKASH_MANIFEST_MODE must be one of: auto, manual, validate_only" - } - ), - AKASH_MANIFEST_PATH: z.string() - .optional(), - AKASH_MANIFEST_VALIDATION_LEVEL: z.string() - .optional() - .refine( - (level) => !level || ["strict", "lenient", "none"].includes(level), - { - message: "AKASH_MANIFEST_VALIDATION_LEVEL must be one of: strict, lenient, none" - } - ), -}); - -export type AkashConfig = z.infer; - -export function getConfig( - env: string | undefined | null = ENV || - process.env.AKASH_ENV -) { - ENV = env || "mainnet"; - switch (env) { - case "mainnet": - return { - AKASH_NET: "https://raw.githubusercontent.com/ovrclk/net/master/mainnet", - RPC_ENDPOINT: "https://rpc.akashnet.net:443", - AKASH_GAS_PRICES: "0.025uakt", - AKASH_GAS_ADJUSTMENT: "1.5", - AKASH_KEYRING_BACKEND: "os", - AKASH_FROM: "default", - AKASH_FEES: "20000uakt", - AKASH_WALLET_ADDRESS: process.env.AKASH_WALLET_ADDRESS || "", - AKASH_PRICING_API_URL: process.env.AKASH_PRICING_API_URL || "https://console-api.akash.network/v1/pricing", - AKASH_DEFAULT_CPU: process.env.AKASH_DEFAULT_CPU || "1000", - AKASH_DEFAULT_MEMORY: process.env.AKASH_DEFAULT_MEMORY || "1000000000", - AKASH_DEFAULT_STORAGE: process.env.AKASH_DEFAULT_STORAGE || "1000000000", - AKASH_SDL: process.env.AKASH_SDL || "example.sdl.yml", - AKASH_CLOSE_DEP: process.env.AKASH_CLOSE_DEP || "closeAll", - AKASH_CLOSE_DSEQ: process.env.AKASH_CLOSE_DSEQ || "", - AKASH_PROVIDER_INFO: process.env.AKASH_PROVIDER_INFO || "", - AKASH_DEP_STATUS: process.env.AKASH_DEP_STATUS || "param_passed", - AKASH_DEP_DSEQ: process.env.AKASH_DEP_DSEQ || "", - AKASH_GAS_OPERATION: process.env.AKASH_GAS_OPERATION || "close", - AKASH_GAS_DSEQ: process.env.AKASH_GAS_DSEQ || "", - AKASH_MANIFEST_MODE: process.env.AKASH_MANIFEST_MODE || "auto", - AKASH_MANIFEST_PATH: process.env.AKASH_MANIFEST_PATH || "", - AKASH_MANIFEST_VALIDATION_LEVEL: process.env.AKASH_MANIFEST_VALIDATION_LEVEL || "strict", - AKASH_DEPOSIT: process.env.AKASH_DEPOSIT || "500000uakt" - }; - case "testnet": - return { - AKASH_NET: "https://raw.githubusercontent.com/ovrclk/net/master/testnet", - RPC_ENDPOINT: "https://rpc.sandbox-01.aksh.pw", - AKASH_GAS_PRICES: "0.025uakt", - AKASH_GAS_ADJUSTMENT: "1.5", - AKASH_KEYRING_BACKEND: "test", - AKASH_FROM: "default", - AKASH_FEES: "20000uakt", - AKASH_WALLET_ADDRESS: process.env.AKASH_WALLET_ADDRESS || "", - AKASH_PRICING_API_URL: process.env.AKASH_PRICING_API_URL || "https://console-api.akash.network/v1/pricing", - AKASH_DEFAULT_CPU: process.env.AKASH_DEFAULT_CPU || "1000", - AKASH_DEFAULT_MEMORY: process.env.AKASH_DEFAULT_MEMORY || "1000000000", - AKASH_DEFAULT_STORAGE: process.env.AKASH_DEFAULT_STORAGE || "1000000000", - AKASH_SDL: process.env.AKASH_SDL || "example.sdl.yml", - AKASH_CLOSE_DEP: process.env.AKASH_CLOSE_DEP || "closeAll", - AKASH_CLOSE_DSEQ: process.env.AKASH_CLOSE_DSEQ || "", - AKASH_PROVIDER_INFO: process.env.AKASH_PROVIDER_INFO || "", - AKASH_DEP_STATUS: process.env.AKASH_DEP_STATUS || "param_passed", - AKASH_DEP_DSEQ: process.env.AKASH_DEP_DSEQ || "", - AKASH_GAS_OPERATION: process.env.AKASH_GAS_OPERATION || "close", - AKASH_GAS_DSEQ: process.env.AKASH_GAS_DSEQ || "", - AKASH_MANIFEST_MODE: process.env.AKASH_MANIFEST_MODE || "auto", - AKASH_MANIFEST_PATH: process.env.AKASH_MANIFEST_PATH || "", - AKASH_MANIFEST_VALIDATION_LEVEL: process.env.AKASH_MANIFEST_VALIDATION_LEVEL || "strict", - AKASH_DEPOSIT: process.env.AKASH_DEPOSIT || "500000uakt" - }; - default: - return { - AKASH_NET: "https://raw.githubusercontent.com/ovrclk/net/master/mainnet", - RPC_ENDPOINT: "https://rpc.akashnet.net:443", - AKASH_GAS_PRICES: "0.025uakt", - AKASH_GAS_ADJUSTMENT: "1.5", - AKASH_KEYRING_BACKEND: "os", - AKASH_FROM: "default", - AKASH_FEES: "20000uakt", - AKASH_WALLET_ADDRESS: process.env.AKASH_WALLET_ADDRESS || "", - AKASH_PRICING_API_URL: process.env.AKASH_PRICING_API_URL || "https://console-api.akash.network/v1/pricing", - AKASH_DEFAULT_CPU: process.env.AKASH_DEFAULT_CPU || "1000", - AKASH_DEFAULT_MEMORY: process.env.AKASH_DEFAULT_MEMORY || "1000000000", - AKASH_DEFAULT_STORAGE: process.env.AKASH_DEFAULT_STORAGE || "1000000000", - AKASH_SDL: process.env.AKASH_SDL || "example.sdl.yml", - AKASH_CLOSE_DEP: process.env.AKASH_CLOSE_DEP || "closeAll", - AKASH_CLOSE_DSEQ: process.env.AKASH_CLOSE_DSEQ || "", - AKASH_PROVIDER_INFO: process.env.AKASH_PROVIDER_INFO || "", - AKASH_DEP_STATUS: process.env.AKASH_DEP_STATUS || "param_passed", - AKASH_DEP_DSEQ: process.env.AKASH_DEP_DSEQ || "", - AKASH_GAS_OPERATION: process.env.AKASH_GAS_OPERATION || "close", - AKASH_GAS_DSEQ: process.env.AKASH_GAS_DSEQ || "", - AKASH_MANIFEST_MODE: process.env.AKASH_MANIFEST_MODE || "auto", - AKASH_MANIFEST_PATH: process.env.AKASH_MANIFEST_PATH || "", - AKASH_MANIFEST_VALIDATION_LEVEL: process.env.AKASH_MANIFEST_VALIDATION_LEVEL || "strict", - AKASH_DEPOSIT: process.env.AKASH_DEPOSIT || "500000uakt" - }; - } -} - -export async function validateAkashConfig( - runtime: IAgentRuntime -): Promise { - try { - // Log environment information - // elizaLogger.info("Environment configuration details", { - // shellMnemonic: process.env.AKASH_MNEMONIC, - // runtimeMnemonic: runtime.getSetting("AKASH_MNEMONIC"), - // envVars: { - // fromShell: Object.keys(process.env).filter(key => key.startsWith('AKASH_')), - // fromRuntime: Object.keys(runtime) - // .filter(key => typeof runtime.getSetting === 'function' && runtime.getSetting(key)) - // .filter(key => key.startsWith('AKASH_')) - // } - // }); - - const envConfig = getConfig( - runtime.getSetting("AKASH_ENV") ?? undefined - ); - - // Fetch dynamic values from the network configuration - const akashNet = process.env.AKASH_NET || runtime.getSetting("AKASH_NET") || envConfig.AKASH_NET; - const version = await fetch(`${akashNet}/version.txt`).then(res => res.text()); - const chainId = await fetch(`${akashNet}/chain-id.txt`).then(res => res.text()); - const node = await fetch(`${akashNet}/rpc-nodes.txt`).then(res => res.text().then(text => text.split('\n')[0])); - - // Prioritize shell environment variables over runtime settings - const mnemonic = process.env.AKASH_MNEMONIC || runtime.getSetting("AKASH_MNEMONIC"); - - // elizaLogger.debug("SDL configuration", { - // fromShell: process.env.AKASH_SDL, - // fromRuntime: runtime.getSetting("AKASH_SDL"), - // fromConfig: envConfig.AKASH_SDL - // }); - - if (!mnemonic) { - throw new Error( - "AKASH_MNEMONIC not found in environment variables or runtime settings.\n" + - "Please ensure AKASH_MNEMONIC is set in your shell environment or runtime settings" - ); - } - - // Clean the mnemonic string - handle quotes and whitespace - const cleanMnemonic = mnemonic - .trim() - .replace(/^["']|["']$/g, '') // Remove surrounding quotes - .replace(/\n/g, ' ') - .replace(/\r/g, ' ') - .replace(/\s+/g, ' '); - - const mnemonicWords = cleanMnemonic.split(' ').filter(word => word.length > 0); - - if (mnemonicWords.length !== 12 && mnemonicWords.length !== 24) { - throw new Error( - `Invalid AKASH_MNEMONIC length: got ${mnemonicWords.length} words, expected 12 or 24 words.\n` + - `Words found: ${mnemonicWords.join(', ')}` - ); - } - - const config = { - AKASH_MNEMONIC: cleanMnemonic, - AKASH_NET: akashNet, - AKASH_VERSION: version, - AKASH_CHAIN_ID: chainId, - AKASH_NODE: node, - RPC_ENDPOINT: process.env.RPC_ENDPOINT || runtime.getSetting("RPC_ENDPOINT") || envConfig.RPC_ENDPOINT, - AKASH_GAS_PRICES: process.env.AKASH_GAS_PRICES || runtime.getSetting("AKASH_GAS_PRICES") || envConfig.AKASH_GAS_PRICES, - AKASH_GAS_ADJUSTMENT: process.env.AKASH_GAS_ADJUSTMENT || runtime.getSetting("AKASH_GAS_ADJUSTMENT") || envConfig.AKASH_GAS_ADJUSTMENT, - AKASH_KEYRING_BACKEND: process.env.AKASH_KEYRING_BACKEND || runtime.getSetting("AKASH_KEYRING_BACKEND") || envConfig.AKASH_KEYRING_BACKEND, - AKASH_FROM: process.env.AKASH_FROM || runtime.getSetting("AKASH_FROM") || envConfig.AKASH_FROM, - AKASH_FEES: process.env.AKASH_FEES || runtime.getSetting("AKASH_FEES") || envConfig.AKASH_FEES, - AKASH_PRICING_API_URL: process.env.AKASH_PRICING_API_URL || runtime.getSetting("AKASH_PRICING_API_URL") || envConfig.AKASH_PRICING_API_URL, - AKASH_DEFAULT_CPU: process.env.AKASH_DEFAULT_CPU || runtime.getSetting("AKASH_DEFAULT_CPU") || envConfig.AKASH_DEFAULT_CPU, - AKASH_DEFAULT_MEMORY: process.env.AKASH_DEFAULT_MEMORY || runtime.getSetting("AKASH_DEFAULT_MEMORY") || envConfig.AKASH_DEFAULT_MEMORY, - AKASH_DEFAULT_STORAGE: process.env.AKASH_DEFAULT_STORAGE || runtime.getSetting("AKASH_DEFAULT_STORAGE") || envConfig.AKASH_DEFAULT_STORAGE, - AKASH_SDL: process.env.AKASH_SDL || runtime.getSetting("AKASH_SDL") || envConfig.AKASH_SDL, - AKASH_CLOSE_DEP: process.env.AKASH_CLOSE_DEP || runtime.getSetting("AKASH_CLOSE_DEP") || envConfig.AKASH_CLOSE_DEP, - AKASH_CLOSE_DSEQ: process.env.AKASH_CLOSE_DSEQ || runtime.getSetting("AKASH_CLOSE_DSEQ") || envConfig.AKASH_CLOSE_DSEQ, - AKASH_PROVIDER_INFO: process.env.AKASH_PROVIDER_INFO || runtime.getSetting("AKASH_PROVIDER_INFO") || envConfig.AKASH_PROVIDER_INFO, - AKASH_DEP_STATUS: process.env.AKASH_DEP_STATUS || runtime.getSetting("AKASH_DEP_STATUS") || envConfig.AKASH_DEP_STATUS, - AKASH_DEP_DSEQ: process.env.AKASH_DEP_DSEQ || runtime.getSetting("AKASH_DEP_DSEQ") || envConfig.AKASH_DEP_DSEQ, - AKASH_GAS_OPERATION: process.env.AKASH_GAS_OPERATION || runtime.getSetting("AKASH_GAS_OPERATION") || envConfig.AKASH_GAS_OPERATION, - AKASH_GAS_DSEQ: process.env.AKASH_GAS_DSEQ || runtime.getSetting("AKASH_GAS_DSEQ") || envConfig.AKASH_GAS_DSEQ, - AKASH_MANIFEST_MODE: process.env.AKASH_MANIFEST_MODE || runtime.getSetting("AKASH_MANIFEST_MODE") || envConfig.AKASH_MANIFEST_MODE, - AKASH_MANIFEST_PATH: process.env.AKASH_MANIFEST_PATH || runtime.getSetting("AKASH_MANIFEST_PATH") || envConfig.AKASH_MANIFEST_PATH, - AKASH_MANIFEST_VALIDATION_LEVEL: process.env.AKASH_MANIFEST_VALIDATION_LEVEL || runtime.getSetting("AKASH_MANIFEST_VALIDATION_LEVEL") || envConfig.AKASH_MANIFEST_VALIDATION_LEVEL, - AKASH_DEPOSIT: process.env.AKASH_DEPOSIT || runtime.getSetting("AKASH_DEPOSIT") || envConfig.AKASH_DEPOSIT - }; - - return akashEnvSchema.parse(config); - } catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error); - throw new Error(`Failed to validate Akash configuration: ${errorMessage}`); - } -} diff --git a/packages/plugin-akash/src/error/error.ts b/packages/plugin-akash/src/error/error.ts deleted file mode 100644 index 9b6adeed193f9..0000000000000 --- a/packages/plugin-akash/src/error/error.ts +++ /dev/null @@ -1,124 +0,0 @@ -export enum AkashErrorCategory { - WALLET = 'WALLET', - DEPLOYMENT = 'DEPLOYMENT', - LEASE = 'LEASE', - PROVIDER = 'PROVIDER', - MANIFEST = 'MANIFEST', - NETWORK = 'NETWORK', - TRANSACTION = 'TRANSACTION', - VALIDATION = 'VALIDATION', - SDK = 'SDK', - API = 'API', - FILE = 'FILE' -} - -export enum AkashErrorCode { - // Wallet Errors (1000-1999) - WALLET_NOT_INITIALIZED = 1000, - WALLET_CONNECTION_FAILED = 1001, - WALLET_INSUFFICIENT_FUNDS = 1002, - WALLET_UNAUTHORIZED = 1003, - WALLET_SIGNATURE_FAILED = 1004, - WALLET_MESSAGE_INVALID = 1005, - WALLET_INITIALIZATION_FAILED = "WALLET_INITIALIZATION_FAILED", - CLIENT_SETUP_FAILED = "CLIENT_SETUP_FAILED", - - // Certificate Errors (1500-1599) - CERTIFICATE_CREATION_FAILED = 1500, - CERTIFICATE_BROADCAST_FAILED = 1501, - CERTIFICATE_NOT_FOUND = 1502, - - // Deployment Errors (2000-2999) - DEPLOYMENT_NOT_FOUND = 2000, - DEPLOYMENT_CREATION_FAILED = 2001, - DEPLOYMENT_UPDATE_FAILED = 2002, - DEPLOYMENT_CLOSE_FAILED = 2003, - DEPLOYMENT_START_TIMEOUT = 2004, - - // Lease Errors (3000-3999) - LEASE_NOT_FOUND = 3000, - LEASE_CREATION_FAILED = 3001, - LEASE_CLOSE_FAILED = 3002, - LEASE_INVALID_STATE = 3003, - LEASE_BID_NOT_FOUND = 3004, - LEASE_QUERY_FAILED = 3005, - LEASE_STATUS_ERROR = 3006, - LEASE_VALIDATION_FAILED = 3007, - INVALID_LEASE = 3008, - - // Provider Errors (4000-4999) - PROVIDER_NOT_FOUND = 4000, - PROVIDER_UNREACHABLE = 4001, - PROVIDER_RESPONSE_ERROR = 4002, - PROVIDER_LIST_ERROR = 4003, - PROVIDER_FILTER_ERROR = 4004, - - // Manifest Errors (5000-5999) - MANIFEST_INVALID = 5000, - MANIFEST_PARSING_FAILED = 5001, - MANIFEST_DEPLOYMENT_FAILED = 5002, - MANIFEST_VALIDATION_FAILED = 5003, - - // Bid Errors (6000-6999) - BID_FETCH_TIMEOUT = 6000, - INVALID_BID = 6001, - - // SDL Errors (7000-7999) - SDL_PARSING_FAILED = 7000, - - // Validation Errors (8000-8999) - VALIDATION_PARAMETER_MISSING = 8000, - VALIDATION_PARAMETER_INVALID = 8001, - VALIDATION_STATE_INVALID = 8002, - VALIDATION_SDL_FAILED = 8003, - VALIDATION_CONFIG_INVALID = 8004, - - // Generic Errors (9000-9999) - INSUFFICIENT_FUNDS = 9000, - - // API Errors (10000-10999) - API_ERROR = 10000, - API_RESPONSE_INVALID = 10001, - API_REQUEST_FAILED = 10002, - API_TIMEOUT = 10003, - - // File System Errors (11000-11999) - FILE_NOT_FOUND = 11000, - FILE_READ_ERROR = 11001, - FILE_WRITE_ERROR = 11002, - FILE_PERMISSION_ERROR = 11003, - - // Network Errors (12000-12999) - RPC_CONNECTION_FAILED = 12000 -} - -export class AkashError extends Error { - constructor( - message: string, - public code: AkashErrorCode, - public details?: Record, - public category = "akash" - ) { - super(message); - this.name = "AkashError"; - } -} - -export async function withRetry( - fn: () => Promise, - maxRetries = 3, - delay = 1000 -): Promise { - let lastError: Error | undefined; - for (let i = 0; i < maxRetries; i++) { - try { - return await fn(); - } catch (error) { - lastError = error as Error; - if (i < maxRetries - 1) { - await new Promise(resolve => setTimeout(resolve, delay * (2 ** i))); - } - } - } - throw lastError; -} diff --git a/packages/plugin-akash/src/index.ts b/packages/plugin-akash/src/index.ts deleted file mode 100644 index 5ff80a4aca33b..0000000000000 --- a/packages/plugin-akash/src/index.ts +++ /dev/null @@ -1,109 +0,0 @@ -import type { Plugin} from "@elizaos/core"; -import chalk from 'chalk'; -import Table from 'cli-table3'; -import ora from 'ora'; -import { getConfig } from "./environment"; -import { createDeploymentAction } from "./actions/createDeployment"; -import { closeDeploymentAction } from "./actions/closeDeployment"; -import { getProviderInfoAction } from "./actions/getProviderInfo"; -import { getDeploymentStatusAction } from "./actions/getDeploymentStatus"; -import { estimateGas } from "./actions/estimateGas"; -import { getDeploymentApiAction } from "./actions/getDeploymentApi"; -import { getGPUPricingAction } from "./actions/getGPUPricing"; -import { getManifestAction } from "./actions/getManifest"; -import { getProvidersListAction } from "./actions/getProvidersList"; - - -// Start the loader -const spinner = ora({ - text: chalk.cyan('Initializing Akash Network Plugin...'), - spinner: 'dots12', - color: 'cyan' -}).start(); - -const actions = [ - createDeploymentAction, - closeDeploymentAction, - getProviderInfoAction, - getDeploymentStatusAction, - estimateGas, - getDeploymentApiAction, - getGPUPricingAction, - getManifestAction, - getProvidersListAction, -]; - -const AKASH_SPASH = getConfig().AKASH_WALLET_ADDRESS; - -// Initial banner -// Only show splash screen if AKASH_SPASH is true -if (AKASH_SPASH) { - // Initial banner with chalk styling - console.log(`\n${chalk.cyan('┌────────────────────────────────────────┐')}`); - console.log(chalk.cyan('│') + chalk.yellow.bold(' AKASH NETWORK PLUGIN ') + chalk.cyan(' │')); - console.log(chalk.cyan('├────────────────────────────────────────┤')); - console.log(chalk.cyan('│') + chalk.white(' Initializing Akash Network Plugin... ') + chalk.cyan('│')); - console.log(chalk.cyan('│') + chalk.white(' Version: 0.1.1 ') + chalk.cyan('│')); - console.log(chalk.cyan('└────────────────────────────────────────┘')); - - // Stop the loader - spinner.succeed(chalk.green('Akash Network Plugin initialized successfully!')); - - // Create a beautiful table for actions - const actionTable = new Table({ - head: [ - chalk.cyan('Action'), - chalk.cyan('H'), - chalk.cyan('V'), - chalk.cyan('E'), - chalk.cyan('Similes') - ], - style: { - head: [], - border: ['cyan'] - } - }); - - // Format and add action information - for (const action of actions) { - actionTable.push([ - chalk.white(action.name), - typeof action.handler === 'function' ? chalk.green('✓') : chalk.red('✗'), - typeof action.validate === 'function' ? chalk.green('✓') : chalk.red('✗'), - action.examples?.length > 0 ? chalk.green('✓') : chalk.red('✗'), - chalk.gray(action.similes?.join(', ') || 'none') - ]); - } - - // Display the action table - console.log(`\n${actionTable.toString()}`); - - // Plugin status with a nice table - const statusTable = new Table({ - style: { - border: ['cyan'] - } - }); - - statusTable.push( - [chalk.cyan('Plugin Status')], - [chalk.white('Name : ') + chalk.yellow('plugin-akash')], - [chalk.white('Actions : ') + chalk.green(actions.length.toString())], - [chalk.white('Status : ') + chalk.green('Loaded & Ready')] - ); - - console.log(`\n${statusTable.toString()}\n`); - } else { - // Stop the loader silently if splash is disabled - spinner.stop(); - } - - const akashPlugin: Plugin = { - name: "plugin-akash", - description: "Akash Network Plugin for deploying and managing cloud compute", - actions: actions, - evaluators: [] - }; - -export { akashPlugin }; -export default akashPlugin; diff --git a/packages/plugin-akash/src/providers/wallet.ts b/packages/plugin-akash/src/providers/wallet.ts deleted file mode 100644 index d944ebe462747..0000000000000 --- a/packages/plugin-akash/src/providers/wallet.ts +++ /dev/null @@ -1,108 +0,0 @@ -import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; -import { SigningStargateClient } from "@cosmjs/stargate"; -import { elizaLogger, type IAgentRuntime, type Memory } from "@elizaos/core"; -// import { IAgentRuntime, Memory } from "@elizaos/core/src/types"; -import { validateAkashConfig } from "../environment"; -import { getAkashTypeRegistry } from "@akashnetwork/akashjs/build/stargate"; -import { - type AkashProvider, - type AkashWalletState, - AkashError, - AKASH_ERROR_CODES, -} from "../types"; - -// Use a proper UUID for the wallet room -const WALLET_ROOM_ID = "00000000-0000-0000-0000-000000000001"; - -export const walletProvider: AkashProvider = { - type: "AKASH_WALLET", - version: "1.0.0", - name: "wallet", - description: "Akash wallet provider", - - initialize: async (runtime: IAgentRuntime): Promise => { - elizaLogger.info("Initializing Akash wallet provider"); - try { - const mnemonic = runtime.getSetting("AKASH_MNEMONIC"); - if (!mnemonic) { - throw new Error("AKASH_MNEMONIC not found in environment variables"); - } - - const config = await validateAkashConfig(runtime); - - // Create wallet from mnemonic - const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { - prefix: "akash", - }); - - // Get the wallet address - const [account] = await wallet.getAccounts(); - const address = account.address; - - // Create signing client with registry - const client = await SigningStargateClient.connectWithSigner( - config.RPC_ENDPOINT, - wallet, - { registry: getAkashTypeRegistry() as any } - ); - - // Store wallet info in memory manager - const state: AkashWalletState = { - wallet, - client, - address, - }; - - // Create memory object - const memory: Memory = { - id: WALLET_ROOM_ID, - userId: runtime.agentId, - agentId: runtime.agentId, - roomId: WALLET_ROOM_ID, - content: { - type: "wallet_state", - text: `Akash wallet initialized with address: ${address}`, - data: state, - }, - createdAt: Date.now(), - }; - - await runtime.messageManager.createMemory(memory); - - elizaLogger.info("Akash wallet provider initialized successfully", { - address, - }); - } catch (error) { - elizaLogger.error("Failed to initialize Akash wallet provider", { - error: error instanceof Error ? error.message : String(error) - }); - throw error; - } - }, - - get: async (runtime: IAgentRuntime, _message?: Memory): Promise => { - const memories = await runtime.messageManager.getMemories({ - roomId: WALLET_ROOM_ID, - count: 1, - }); - - const state = memories[0]?.content?.data; - if (!state) { - throw new AkashError( - "Akash wallet not initialized", - AKASH_ERROR_CODES.WALLET_NOT_INITIALIZED - ); - } - return state as AkashWalletState; - }, - - validate: async (_runtime: IAgentRuntime, _message?: Memory): Promise => { - return true; - }, - - process: async (_runtime: IAgentRuntime, _message?: Memory): Promise => { - // No processing needed for wallet provider - } -}; - -export default walletProvider; diff --git a/packages/plugin-akash/src/runtime_inspect.ts b/packages/plugin-akash/src/runtime_inspect.ts deleted file mode 100644 index 985b6d0d466e7..0000000000000 --- a/packages/plugin-akash/src/runtime_inspect.ts +++ /dev/null @@ -1,90 +0,0 @@ -import { elizaLogger } from "@elizaos/core"; -import type { IAgentRuntime, Plugin, Action } from "@elizaos/core"; - -/** - * Utility to inspect runtime plugin loading - */ -export function inspectRuntime(runtime: IAgentRuntime) { - elizaLogger.info("=== Runtime Plugin Inspection ==="); - - // Check if runtime has plugins array - const hasPlugins = !!(runtime as any).plugins; - elizaLogger.info("Runtime plugins status:", { - hasPluginsArray: hasPlugins, - pluginCount: hasPlugins ? (runtime as any).plugins.length : 0 - }); - - // If plugins exist, check for our plugin - if (hasPlugins) { - const plugins = (runtime as any).plugins as Plugin[]; - const akashPlugin = plugins.find(p => p.name === "akash"); - - elizaLogger.info("Akash plugin status:", { - isLoaded: !!akashPlugin, - pluginDetails: akashPlugin ? { - name: akashPlugin.name, - actionCount: akashPlugin.actions?.length || 0, - actions: akashPlugin.actions?.map(a => a.name) || [] - } : null - }); - } - - // Check registered actions - const hasActions = !!(runtime as any).actions; - if (hasActions) { - const actions = (runtime as any).actions as Action[]; - const akashActions = actions.filter((action: Action) => - action.name === "CREATE_DEPLOYMENT" || - (action.similes || []).includes("CREATE_DEPLOYMENT") - ); - - elizaLogger.info("Akash actions status:", { - totalActions: actions.length, - akashActionsCount: akashActions.length, - akashActions: akashActions.map((action: Action) => ({ - name: action.name, - similes: action.similes - })) - }); - } -} - -/** - * Helper to check if a plugin is properly loaded - */ -export function isPluginLoaded(runtime: IAgentRuntime, pluginName: string): boolean { - // Check plugins array - const plugins = (runtime as any).plugins as Plugin[]; - if (!plugins) { - elizaLogger.warn('No plugins array found in runtime'); - return false; - } - - // Look for our plugin - const plugin = plugins.find(p => p.name === pluginName); - if (!plugin) { - elizaLogger.warn(`Plugin ${pluginName} not found in runtime plugins`); - return false; - } - - // Check if actions are registered - const actions = (runtime as any).actions as Action[]; - if (!actions || !actions.length) { - elizaLogger.warn('No actions found in runtime'); - return false; - } - - // Check if plugin's actions are registered - const pluginActions = plugin.actions || []; - const registeredActions = pluginActions.every(pluginAction => - actions.some((action: Action) => action.name === pluginAction.name) - ); - - if (!registeredActions) { - elizaLogger.warn(`Not all ${pluginName} actions are registered in runtime`); - return false; - } - - elizaLogger.info(`Plugin ${pluginName} is properly loaded and registered`); - return true; -} \ No newline at end of file diff --git a/packages/plugin-akash/src/sdl/example.sdl.yml b/packages/plugin-akash/src/sdl/example.sdl.yml deleted file mode 100644 index 6e6ac83688643..0000000000000 --- a/packages/plugin-akash/src/sdl/example.sdl.yml +++ /dev/null @@ -1,33 +0,0 @@ ---- -version: "2.0" -services: - web: - image: baktun/hello-akash-world:1.0.0 - expose: - - port: 3000 - as: 80 - to: - - global: true -profiles: - compute: - web: - resources: - cpu: - units: 0.5 - memory: - size: 512Mi - storage: - size: 512Mi - placement: - dcloud: - pricing: - web: - denom: uakt - amount: 20000 - - -deployment: - web: - dcloud: - profile: web - count: 1 diff --git a/packages/plugin-akash/src/types.ts b/packages/plugin-akash/src/types.ts deleted file mode 100644 index 9e79d59eb0158..0000000000000 --- a/packages/plugin-akash/src/types.ts +++ /dev/null @@ -1,167 +0,0 @@ -import type { DirectSecp256k1HdWallet} from "@cosmjs/proto-signing"; -import type { SigningStargateClient } from "@cosmjs/stargate"; -// import { Provider } from "@elizaos/core"; -import type { IAgentRuntime, Memory } from "@elizaos/core"; -import type { SDL } from "@akashnetwork/akashjs/build/sdl"; -import type { MsgCreateDeployment } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; -import type { MsgCreateLease} from "@akashnetwork/akash-api/akash/market/v1beta4"; - -// Core wallet state type -export interface AkashWalletState { - wallet: DirectSecp256k1HdWallet; - client: SigningStargateClient; - address: string; - certificate?: { - cert: string; - privateKey: string; - publicKey: string; - }; -} - -// Provider type extending core Provider -export interface AkashProvider { - type: string; - version: string; - name: string; - description: string; - initialize: (runtime: IAgentRuntime) => Promise; - get: (runtime: IAgentRuntime, message?: Memory) => Promise; - validate: (runtime: IAgentRuntime, message?: Memory) => Promise; - process: (runtime: IAgentRuntime, message?: Memory) => Promise; -} - -// Registry type for Akash -export type AkashRegistryTypes = [string, any][]; - -// Deployment related types -export interface AkashDeploymentId { - owner: string; - dseq: string; -} - -export interface AkashDeployment { - id: AkashDeploymentId; - sdl: SDL; - deposit: string; - msg?: MsgCreateDeployment; -} - -// Lease related types -export interface AkashLeaseId { - owner: string; - dseq: string; - provider: string; - gseq: number; - oseq: number; -} - -export interface AkashLease { - id: AkashLeaseId; - state?: string; - manifestData?: any; - msg?: MsgCreateLease; -} - -// Provider types -export interface AkashProviderInfo { - owner: string; - hostUri: string; - attributes: Array<{ - key: string; - value: string; - }>; -} - -// Bid types -export interface AkashBidId { - owner: string; - dseq: string; - gseq: number; - oseq: number; - provider: string; -} - -export interface AkashBid { - id: AkashBidId; - state: string; - price: { - denom: string; - amount: string; - }; -} - -// Error handling types -export enum AKASH_ERROR_CODES { - WALLET_NOT_INITIALIZED = "WALLET_NOT_INITIALIZED", - INVALID_MNEMONIC = "INVALID_MNEMONIC", - INVALID_ADDRESS = "INVALID_ADDRESS", - INSUFFICIENT_FUNDS = "INSUFFICIENT_FUNDS", - DEPLOYMENT_FAILED = "DEPLOYMENT_FAILED", - LEASE_FAILED = "LEASE_FAILED", - PROVIDER_NOT_FOUND = "PROVIDER_NOT_FOUND", - NETWORK_ERROR = "NETWORK_ERROR", - CERTIFICATE_ERROR = "CERTIFICATE_ERROR", - MANIFEST_ERROR = "MANIFEST_ERROR", - BID_ERROR = "BID_ERROR", - MANIFEST_FAILED = "MANIFEST_FAILED", - PROVIDER_ERROR = "PROVIDER_ERROR" -} - -export class AkashError extends Error { - constructor( - message: string, - public code: AKASH_ERROR_CODES, - public originalError?: Error - ) { - super(message); - this.name = "AkashError"; - } -} - -// Provider configuration -export interface AkashConfig { - AKASH_MNEMONIC: string; - RPC_ENDPOINT: string; - CHAIN_ID?: string; - GAS_PRICE?: string; - GAS_ADJUSTMENT?: number; - CERTIFICATE_PATH?: string; -} - -// Message types -export interface AkashMessage { - type: string; - value: any; -} - -// Response types -export interface AkashTxResponse { - code: number; - height: number; - txhash: string; - rawLog: string; - data?: string; - gasUsed: number; - gasWanted: number; -} - -// Provider state types -export interface AkashProviderState { - isInitialized: boolean; - lastSync: number; - balance?: string; - address?: string; - certificate?: { - cert: string; - privateKey: string; - publicKey: string; - }; -} - -// Memory room constants -export const AKASH_MEMORY_ROOMS = { - WALLET: "00000000-0000-0000-0000-000000000001", - DEPLOYMENT: "00000000-0000-0000-0000-000000000002", - LEASE: "00000000-0000-0000-0000-000000000003", - CERTIFICATE: "00000000-0000-0000-0000-000000000004" -} as const; diff --git a/packages/plugin-akash/src/utils/paths.ts b/packages/plugin-akash/src/utils/paths.ts deleted file mode 100644 index c74151b2f7a40..0000000000000 --- a/packages/plugin-akash/src/utils/paths.ts +++ /dev/null @@ -1,133 +0,0 @@ -import * as path from 'path'; -import { fileURLToPath } from 'url'; -import { elizaLogger } from "@elizaos/core"; -import { existsSync } from 'fs'; -import { getConfig } from '../environment'; - -export const getPluginRoot = (importMetaUrl: string) => { - // elizaLogger.info("=== Starting Plugin Root Resolution ===", { - // importMetaUrl, - // isFileProtocol: importMetaUrl.startsWith('file://'), - // urlSegments: importMetaUrl.split('/') - // }); - - const currentFileUrl = importMetaUrl; - const currentFilePath = fileURLToPath(currentFileUrl); - const currentDir = path.dirname(currentFilePath); - - // Find plugin-akash directory by walking up until we find it - let dir = currentDir; - while (dir && path.basename(dir) !== 'plugin-akash' && dir !== '/') { - dir = path.dirname(dir); - } - - if (!dir || dir === '/') { - elizaLogger.error("Could not find plugin-akash directory", { - currentFilePath, - currentDir, - searchPath: dir - }); - throw new Error("Could not find plugin-akash directory"); - } - - // elizaLogger.info("Plugin Root Path Details", { - // currentFilePath, - // currentDir, - // pluginRoot: dir, - // exists: existsSync(dir), - // parentDir: path.dirname(dir), - // parentExists: existsSync(path.dirname(dir)), - // parentContents: existsSync(path.dirname(dir)) ? fs.readdirSync(path.dirname(dir)) : [] - // }); - - return dir; -}; - -export const getSrcPath = (importMetaUrl: string) => { - // elizaLogger.info("=== Resolving Src Path ==="); - const pluginRoot = getPluginRoot(importMetaUrl); - const srcPath = path.join(pluginRoot, 'src'); - - // elizaLogger.info("Src Path Details", { - // pluginRoot, - // srcPath, - // exists: existsSync(srcPath), - // contents: existsSync(srcPath) ? fs.readdirSync(srcPath) : [], - // absolutePath: path.resolve(srcPath), - // relativeToCwd: path.relative(process.cwd(), srcPath) - // }); - - return srcPath; -}; - -export const getCertificatePath = (importMetaUrl: string) => { - const srcPath = getSrcPath(importMetaUrl); - const certPath = path.join(srcPath, '.certificates', 'cert.json'); - - // elizaLogger.debug("Certificate Path Resolution", { - // srcPath, - // certPath, - // exists: existsSync(certPath) - // }); - - return certPath; -}; - -export const getDefaultSDLPath = (importMetaUrl: string) => { - // elizaLogger.info("=== Resolving SDL Path ==="); - const pluginRoot = getPluginRoot(importMetaUrl); - const srcPath = getSrcPath(importMetaUrl); - const config = getConfig(process.env.AKASH_ENV); - const sdlFileName = config.AKASH_SDL; - const sdlPath = path.join(srcPath, 'sdl', sdlFileName); - // const sdlDir = path.dirname(sdlPath); - - // Only log if file doesn't exist as a warning - if (!existsSync(sdlPath)) { - // elizaLogger.warn("SDL file not found at expected path", { - // sdlPath, - // exists: false - // }); - } - - // Try to find SDL file in nearby directories - const searchPaths = [ - sdlPath, - path.join(srcPath, sdlFileName), - path.join(pluginRoot, sdlFileName), - path.join(pluginRoot, 'sdl', sdlFileName), - path.join(pluginRoot, 'src', 'sdl', sdlFileName) - ]; - - // Only log if we find the file - for (const searchPath of searchPaths) { - if (existsSync(searchPath)) { - // elizaLogger.info("Found SDL file at", { path: searchPath }); - return searchPath; - } - } - - return sdlPath; -}; - -// Helper function to ensure a path includes plugin-akash -export const ensurePluginPath = (filePath: string, importMetaUrl: string) => { - if (!filePath.includes('plugin-akash')) { - const srcPath = getSrcPath(importMetaUrl); - return path.join(srcPath, path.basename(filePath)); - } - return filePath; -}; - -export function getDeploymentsPath(importMetaUrl: string): string { - const srcPath = getSrcPath(importMetaUrl); - const deploymentsPath = path.join(srcPath, 'deployments'); - - // elizaLogger.debug("Deployments Path Resolution", { - // srcPath, - // deploymentsPath, - // exists: existsSync(deploymentsPath) - // }); - - return deploymentsPath; -} \ No newline at end of file diff --git a/packages/plugin-akash/tsconfig.json b/packages/plugin-akash/tsconfig.json deleted file mode 100644 index e535bee0d71a2..0000000000000 --- a/packages/plugin-akash/tsconfig.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "module": "ESNext", - "target": "ESNext", - "lib": [ - "ESNext", - "DOM" - ], - "moduleResolution": "Bundler", - "allowImportingTsExtensions": true, - "emitDeclarationOnly": true, - "isolatedModules": true, - "esModuleInterop": true, - "skipLibCheck": true, - "strict": true, - "declaration": true, - "sourceMap": true, - "types": [ - "vitest/globals", - "node", - "jest" - ], - "baseUrl": ".", - "preserveSymlinks": true - }, - "include": [ - "src/**/*", - "test/actions/getDeploymentApi.test.ts" - ], - "exclude": [ - "node_modules", - "dist", - "test", - "../../packages/core/**/*" - ] -} \ No newline at end of file diff --git a/packages/plugin-akash/tsup.config.ts b/packages/plugin-akash/tsup.config.ts deleted file mode 100644 index a2b714de91033..0000000000000 --- a/packages/plugin-akash/tsup.config.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - format: ["esm"], - dts: true, - splitting: false, - sourcemap: true, - clean: true, -}); diff --git a/packages/plugin-akash/vitest.config.ts b/packages/plugin-akash/vitest.config.ts deleted file mode 100644 index 2b76c168780b5..0000000000000 --- a/packages/plugin-akash/vitest.config.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { defineConfig } from 'vitest/config'; -import path from 'path'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - include: ['test/**/*.{test,spec}.{js,mjs,cjs,ts,mts,cts,jsx,tsx}'], - exclude: ['node_modules', 'dist', '.idea', '.git', '.cache'], - root: '.', - reporters: ['verbose'], - coverage: { - reporter: ['text', 'json', 'html'], - exclude: [ - 'node_modules/', - 'test/fixtures/', - 'test/setup/' - ] - }, - setupFiles: ['./test/setup/vitest.setup.ts'] - }, - resolve: { - alias: { - '@': path.resolve(__dirname, './src') - } - } -}); \ No newline at end of file diff --git a/packages/plugin-allora/.npmignore b/packages/plugin-allora/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-allora/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-allora/README.md b/packages/plugin-allora/README.md deleted file mode 100644 index 1912853add4da..0000000000000 --- a/packages/plugin-allora/README.md +++ /dev/null @@ -1,87 +0,0 @@ -# @elizaos/plugin-allora - -Seamlessly empowers Eliza agents with real-time, advanced, self-improving AI inferences from the Allora Network. - -## Installation - -```bash -pnpm add @elizaos/plugin-allora -``` - -## Configuration - -### Environment Variables - -```env -ALLORA_API_KEY=your_api_key # Required: Allora API key -ALLORA_CHAIN_SLUG=testnet # Optional: Defaults to testnet -``` - -### Character Configuration - -Add the plugin to your character's configuration: - -```typescript -import { alloraPlugin } from "@elizaos/plugin-allora"; - -const character = { - plugins: [alloraPlugin], - settings: { - secrets: { - ALLORA_API_KEY: "your_api_key" - } - } -}; -``` - -## Features - -- **Real-time Inference Access**: Get live predictions across various topics -- **Topic Management**: Automatic discovery and caching of available topics -- **Smart Caching**: 30-minute cache duration for optimal performance -- **Natural Language Interface**: Simple conversational commands for accessing predictions - -## Usage - -Users can request inferences using natural language: - -```plaintext -"What is the predicted ETH price in 5 minutes?" -"Can you check the current BTC prediction?" -``` - -Example Response: -```plaintext -"Inference provided by Allora Network on topic ETH 5min (Topic ID: 13): 3393.364326646801085508" -``` - -## API Reference - -### Actions - -- `GET_INFERENCE`: Retrieves predictions for a specific topic - - Aliases: `GET_ALLORA_INFERENCE`, `GET_TOPIC_INFERENCE`, `ALLORA_INFERENCE`, `TOPIC_INFERENCE` - - Automatically matches user requests to available topics - - Returns formatted inference results with topic details - -### Providers - -- `topicsProvider`: Manages topic information and caching - - Provides context about available Allora Network topics - - Implements 30-minute caching for optimization - - Returns formatted topic information including names, descriptions, and status - -## Troubleshooting - -### Common Issues - -1. "No active Allora Network topic matches your request" - - Verify that your requested topic exists and is active - - Check that the topic matches the timeframe of your request - -2. API Connection Issues - - Verify your ALLORA_API_KEY is correctly set - - Check network connectivity - - Ensure the API endpoint is accessible - -For detailed information and additional implementation examples, please refer to the [Allora-Eliza integration docs](https://docs.allora.network/marketplace/integrations/eliza-os/implementation). diff --git a/packages/plugin-allora/biome.json b/packages/plugin-allora/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-allora/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-allora/package.json b/packages/plugin-allora/package.json deleted file mode 100644 index 47e4df5023db8..0000000000000 --- a/packages/plugin-allora/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "@elizaos/plugin-allora", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "node-cache": "5.1.2", - "vitest": "2.1.9", - "@alloralabs/allora-sdk": "^0.1.0" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "tsup": "8.3.5", - "vitest": "2.1.8" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-allora/src/actions/getInference.ts b/packages/plugin-allora/src/actions/getInference.ts deleted file mode 100644 index 9b1c7acb8234c..0000000000000 --- a/packages/plugin-allora/src/actions/getInference.ts +++ /dev/null @@ -1,157 +0,0 @@ -import { - type ActionExample, - composeContext, - elizaLogger, - generateObject, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action, -} from "@elizaos/core"; -import { z } from "zod"; -import { topicsProvider } from "../providers/topics"; -import { getInferenceTemplate } from "../templates"; -import { AlloraAPIClient, type ChainSlug } from "@alloralabs/allora-sdk"; - -interface InferenceFields { - topicId: number | null; - topicName: string | null; -} - -export const getInferenceAction: Action = { - name: "GET_INFERENCE", - similes: [ - "GET_ALLORA_INFERENCE", - "GET_TOPIC_INFERENCE", - "ALLORA_INFERENCE", - "TOPIC_INFERENCE", - ], - validate: async (_runtime: IAgentRuntime, _message: Memory) => { - return true; - }, - description: "Get inference from Allora Network", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback: HandlerCallback - ): Promise => { - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Get Allora topics information from the provider - currentState.alloraTopics = await topicsProvider.get(runtime, message, currentState); - - // Compose context for extracting the inference fields - const inferenceTopicContext = composeContext({ - state: currentState, - template: getInferenceTemplate, - }); - - // Define the schema for extracting the inference fields - const schema = z.object({ - topicId: z.number().nullable(), - topicName: z.string().nullable(), - }); - - const results = await generateObject({ - runtime, - context: inferenceTopicContext, - modelClass: ModelClass.SMALL, - schema, - }); - const inferenceFields = results.object as InferenceFields; - - if (!inferenceFields.topicId || !inferenceFields.topicName) { - callback({ - text: "There is no active Allora Network topic that matches your request.", - }); - return false; - } - - elizaLogger.info( - `Retrieving inference for topic ID: ${inferenceFields.topicId}` - ); - - try { - // Get inference from Allora API - const alloraApiClient = new AlloraAPIClient({ - chainSlug: runtime.getSetting("ALLORA_CHAIN_SLUG") as ChainSlug, - apiKey: runtime.getSetting("ALLORA_API_KEY") as string, - }); - - const inferenceRes = await alloraApiClient.getInferenceByTopicID( - inferenceFields.topicId - ); - const inferenceValue = - inferenceRes.inference_data.network_inference_normalized; - - callback({ - text: `Inference provided by Allora Network on topic ${inferenceFields.topicName} (Topic ID: ${inferenceFields.topicId}): ${inferenceValue}`, - }); - return true; - } catch (error) { - const errorMessage = - error instanceof Error ? error.message : String(error); - const displayMessage = `There was an error fetching the inference from Allora Network: ${errorMessage}`; - - elizaLogger.error(displayMessage); - callback({ - text: displayMessage, - }); - return false; - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "What is the predicted ETH price in 5 minutes?", - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll get the inference now...", - action: "GET_INFERENCE", - }, - }, - { - user: "{{user2}}", - content: { - text: "Inference provided by Allora Network on topic ETH 5min (ID: 13): 3393.364326646801085508", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "What is the predicted price of gold in 24 hours?", - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll get the inference now...", - action: "GET_INFERENCE", - }, - }, - { - user: "{{user2}}", - content: { - text: "There is no active Allora Network topic that matches your request.", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-allora/src/index.ts b/packages/plugin-allora/src/index.ts deleted file mode 100644 index 1e5a13464cdb0..0000000000000 --- a/packages/plugin-allora/src/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { getInferenceAction } from "./actions/getInference.ts"; -import { topicsProvider } from "./providers/topics.ts"; - -export const alloraPlugin: Plugin = { - name: "Allora Network plugin", - description: "Allora Network plugin for Eliza", - actions: [getInferenceAction], - evaluators: [], - providers: [topicsProvider], -}; diff --git a/packages/plugin-allora/src/providers/topics.ts b/packages/plugin-allora/src/providers/topics.ts deleted file mode 100644 index 2288c3f22e13e..0000000000000 --- a/packages/plugin-allora/src/providers/topics.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { - elizaLogger, - type IAgentRuntime, - type Memory, - type Provider, - type State, -} from "@elizaos/core"; -import NodeCache from "node-cache"; -import { AlloraAPIClient, type AlloraTopic, type ChainSlug } from "@alloralabs/allora-sdk"; - -export class TopicsProvider implements Provider { - private cache: NodeCache; - - constructor() { - this.cache = new NodeCache({ stdTTL: 30 * 60 }); // Cache TTL set to 30 minutes - } - - async get( - runtime: IAgentRuntime, - _message: Memory, - _state?: State - ): Promise { - const alloraTopics = await this.getAlloraTopics(runtime); - - // Format the topics into a string to be added to the prompt context - let output = 'Allora Network Topics: \n'; - for (const topic of alloraTopics) { - output += `Topic Name: ${topic.topic_name}\n`; - output += `Topic Description: ${topic.description}\n`; - output += `Topic ID: ${topic.topic_id}\n`; - output += `Topic is Active: ${topic.is_active}\n`; - output += `Topic Updated At: ${topic.updated_at}\n`; - output += '\n'; - } - - return output; - } - - private async getAlloraTopics( - runtime: IAgentRuntime - ): Promise { - const cacheKey = "allora-topics"; - const cachedValue = this.cache.get(cacheKey); - - // If the topics are aready cached, return them - if (cachedValue) { - elizaLogger.info("Retrieving Allora topics from cache"); - return cachedValue; - } - - // If the topics are not cached, retrieve them from the Allora API - const alloraApiKey = runtime.getSetting("ALLORA_API_KEY"); - const alloraChainSlug = runtime.getSetting("ALLORA_CHAIN_SLUG"); - - const alloraApiClient = new AlloraAPIClient({ - chainSlug: alloraChainSlug as ChainSlug, - apiKey: alloraApiKey as string, - }); - const alloraTopics = await alloraApiClient.getAllTopics(); - - // Cache the retrieved topics - this.cache.set(cacheKey, alloraTopics); - - return alloraTopics; - } -} - -export const topicsProvider = new TopicsProvider(); diff --git a/packages/plugin-allora/src/templates/index.ts b/packages/plugin-allora/src/templates/index.ts deleted file mode 100644 index 9c713dbd87c96..0000000000000 --- a/packages/plugin-allora/src/templates/index.ts +++ /dev/null @@ -1,28 +0,0 @@ -export const getInferenceTemplate = `Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined. -Example response: -\`\`\`json -{ - "topicId": 1, - "topicName": "Topic Name", -} -\`\`\` - -Recent messages: -{{recentMessages}} - -Allora Network Topics: -{{alloraTopics}} - -Given the recent messages and the Allora Network Topics above, extract the following information about the requested: -- Topic ID of the topic that best matches the user's request. The topic should be active, otherwise return null. -- Topic Name of the topic that best matches the user's request. The topic should be active, otherwise return null. - -If the topic is not active or the inference timeframe is not matching the user's request, return null for both topicId and topicName. - -Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined. The result should be a valid JSON object with the following schema: -\`\`\`json -{ - "topicId": number | null, - "topicName": string | null, -} -\`\`\``; diff --git a/packages/plugin-allora/src/tests/topics.test.ts b/packages/plugin-allora/src/tests/topics.test.ts deleted file mode 100644 index 0c434ec786574..0000000000000 --- a/packages/plugin-allora/src/tests/topics.test.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { describe, it, expect, beforeEach, vi } from "vitest"; -import { TopicsProvider } from "../../src/providers/topics"; -import type { Memory, State } from "@elizaos/core"; - -describe("TopicsProvider", () => { - let topicsProvider: TopicsProvider; - let mockRuntime; - - beforeEach(() => { - topicsProvider = new TopicsProvider(); - mockRuntime = { - getSetting: vi.fn(), - }; - - mockRuntime.getSetting.mockImplementation((key: string) => { - const settings = { - ALLORA_API_KEY: "test-api-key", - ALLORA_CHAIN_SLUG: "testnet", - }; - return settings[key]; - }); - }); - - describe("Topics data integration", () => { - it("should format topics into expected string format", async () => { - const mockTopics = [ - { - topic_id: 1, - topic_name: "Test Topic", - description: "Test Description", - is_active: true, - updated_at: "2024-03-20T00:00:00Z", - }, - ]; - vi.spyOn( - topicsProvider as any, - "getAlloraTopics" - ).mockResolvedValue(mockTopics); - - const result = await topicsProvider.get( - mockRuntime, - {} as Memory, - {} as State - ); - - expect(result).toContain("Allora Network Topics:"); - expect(result).toContain(`Topic Name: ${mockTopics[0].topic_name}`); - expect(result).toContain( - `Topic Description: ${mockTopics[0].description}` - ); - expect(result).toContain(`Topic ID: ${mockTopics[0].topic_id}`); - expect(result).toContain( - `Topic is Active: ${mockTopics[0].is_active}` - ); - }); - }); -}); diff --git a/packages/plugin-allora/tsconfig.json b/packages/plugin-allora/tsconfig.json deleted file mode 100644 index 9bf8a22c5259c..0000000000000 --- a/packages/plugin-allora/tsconfig.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "types": [ - "node" - ], - "esModuleInterop": true, - "allowSyntheticDefaultImports": true - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/plugin-allora/tsup.config.ts b/packages/plugin-allora/tsup.config.ts deleted file mode 100644 index e42bf4efeae22..0000000000000 --- a/packages/plugin-allora/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-ankr/.npmignore b/packages/plugin-ankr/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-ankr/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-ankr/README.md b/packages/plugin-ankr/README.md deleted file mode 100644 index d4b4fa14b6f57..0000000000000 --- a/packages/plugin-ankr/README.md +++ /dev/null @@ -1,88 +0,0 @@ - -## Ankr Plugin Guide -![alt text](assets/ankr.jpg) - -
    -

    🔗 Blockchain Data Query Interface

    -
    - -### Available Actions - -The Ankr plugin provides comprehensive blockchain data querying capabilities through natural language prompts. Below are the supported actions and their usage: - -#### 1. Blockchain Information -```bash -# Get blockchain stats -Show me stats for [chain]eth[/chain] - -# Get top currencies -Show me the top currencies on [chain]eth[/chain] -``` - -#### 2. Wallet & Balance Queries -```bash -# Check wallet balance -Show me the balance for wallet [wallet]0x6B0031518934952C485d5a7E76f1729B50e67486[/wallet] on [chain]eth[/chain] - -# View wallet interactions -Show me interactions for the wallet [wallet]0x68b3465833fb72A70ecDF485E0e4C7bD8665Fc45[/wallet] -``` - -#### 3. NFT Operations -```bash -# Get NFT holders -Show me holders of NFT contract [contract]0x34d85c9cdeb23fa97cb08333b511ac86e1c4e258[/contract] token [token]112234[/token] on [chain]eth[/chain] - -# Get NFT metadata -Show me the metadata for NFT [token]1234[/token] at contract [contract]0xbc4ca0eda7647a8ab7c2061c2e118a18a936f13d[/contract] [chain]eth[/chain] - -# List NFTs by owner -Show me all NFTs owned by wallet [wallet]0x1234567890123456789012345678901234567890[/wallet] on [chain]eth[/chain] - -# View NFT transfers -Show me NFT transfers for contract [contract]0xd8da6bf26964af9d7eed9e03e53415d37aa96045[/contract] [chain]eth[/chain] [fromtimestamp]1655197483[/fromtimestamp][totimestamp]1671974699[/totimestamp] -``` - -#### 4. Token Operations -```bash -# Get token holders -Show me holders for contract [contract]0xf307910A4c7bbc79691fD374889b36d8531B08e3[/contract] on [chain]bsc[/chain] - -# Get token holder count -How many holders does [contract]0xdAC17F958D2ee523a2206206994597C13D831ec7[/contract] have? [chain]eth[/chain] - -# Check token price -What's the current price of [contract]0x8290333cef9e6d528dd5618fb97a76f268f3edd4[/contract] token [chain]eth[/chain] - -# View token transfers -Show me recent contract [contract]0xd8da6bf26964af9d7eed9e03e53415d37aa96045[/contract] transfers [chain]eth[/chain] from [fromtimestamp]1655197483[/fromtimestamp] to [totimestamp]1656061483[/totimestamp] -``` - -#### 5. Transaction Queries -```bash -# Get transactions by address -Show me the latest transactions for address [contract]0xd8da6bf26964af9d7eed9e03e53415d37aa96045[/contract] [chain]eth[/chain] - -# Get transaction details -Show me details for transaction [txHash]0x748eeb4a15ba05736a9397a07ca86f0184c0c1eca53fa901b28a412d1a3f211f[/txHash] [chain]eth[/chain] -``` - -### Tag Reference - -| Tag | Description | Example | -|-----|-------------|---------| -| `[chain]` | Blockchain identifier | eth, bsc | -| `[wallet]` | Wallet address | 0x1234... | -| `[contract]` | Contract address | 0xabcd... | -| `[token]` | Token ID | 1234 | -| `[txHash]` | Transaction hash | 0x748e... | -| `[fromtimestamp]` | Start timestamp | 1655197483 | -| `[totimestamp]` | End timestamp | 1656061483 | - -### Important Notes - -1. All addresses must be valid blockchain addresses (0x format) -2. Timestamps must be in Unix timestamp format -3. Chain names should be lowercase (eth, bsc, etc.) -4. Transaction hashes must be complete and valid -5. Include all required tags for each action typ \ No newline at end of file diff --git a/packages/plugin-ankr/biome.json b/packages/plugin-ankr/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-ankr/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-ankr/package.json b/packages/plugin-ankr/package.json deleted file mode 100644 index 3668bd245df20..0000000000000 --- a/packages/plugin-ankr/package.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "name": "@elizaos/plugin-ankr", - "version": "0.25.6-alpha.1", - "description": "ANKR Plugin for ElizaOS", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "type": "module", - "scripts": { - "build": "tsup", - "dev": "tsup --watch", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write .", - "test": "vitest", - "test:watch": "vitest watch", - "test:coverage": "vitest run --coverage", - "test:ui": "vitest --ui" - }, - "dependencies": { - "@elizaos/core": "workspace:*", - "axios": "^1.6.5", - "chalk": "^5.3.0", - "cli-table3": "^0.6.3", - "dotenv": "^16.4.1", - "ora": "^8.0.1", - "ssh2": "^1.15.0", - "@coinbase/coinbase-sdk": "^0.15.0", - "viem": "^2.0.0", - "decimal.js": "^10.4.3" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "@types/dotenv": "^8.2.0", - "@types/jest": "^29.5.11", - "@types/node": "^20.11.5", - "@types/ssh2": "^1.11.18", - "@typescript-eslint/eslint-plugin": "^6.19.0", - "@typescript-eslint/parser": "^6.19.0", - "@vitest/coverage-v8": "^1.2.1", - "@vitest/ui": "^0.34.6", - "tsup": "^8.0.1", - "typescript": "^5.3.3", - "vite": "^5.0.10", - "vite-tsconfig-paths": "^4.2.2", - "vitest": "^3.0.0" - }, - "peerDependencies": { - "@elizaos/core": "workspace:*" - }, - "engines": { - "node": ">=18.0.0" - } -} diff --git a/packages/plugin-ankr/src/actions/actionGetAccountBalance.ts b/packages/plugin-ankr/src/actions/actionGetAccountBalance.ts deleted file mode 100644 index 8c3d2cd56217d..0000000000000 --- a/packages/plugin-ankr/src/actions/actionGetAccountBalance.ts +++ /dev/null @@ -1,319 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { Action, elizaLogger } from "@elizaos/core"; -import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import axios from 'axios'; -import { getConfig, validateankrConfig, ANKR_ENDPOINTS } from '../environment'; -import { APIError, ConfigurationError, ValidationError } from '../error/base'; -import { parseAPIContent, validateRequiredFields } from '../validator/apiParseValidation'; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ -// Get configuration for granular logging -const config = getConfig(); -const GRANULAR_LOG = config.ANKR_GRANULAR_LOG; - -// Enhanced logging helper -const logGranular = (message: string, data?: unknown) => { - if (GRANULAR_LOG) { - elizaLogger.debug(`[GetAccountBalance] ${message}`, data); - console.log(`[GetAccountBalance] ${message}`, data ? JSON.stringify(data, null, 2) : ''); - } -}; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ - -interface GetAccountBalanceContent extends Content { - text: string; - filters?: { - blockchain?: string[]; - walletAddress?: string; - }; - success?: boolean; - data?: { - address: string; - balances: Array<{ - blockchain: string; - tokenName: string; - symbol: string; - balance: string; - balanceRawInteger: string; - balanceUsd: string; - tokenDecimals: number; - tokenType: string; - contractAddress?: string; - }>; - }; -} - -// Add interface for balance -interface TokenBalance { - blockchain: string; - contractAddress?: string; - tokenName: string; - tokenSymbol: string; - tokenDecimals: number; - tokenType: string; - holderAddress: string; - balance: string; - balanceRawInteger: string; - balanceUsd: string; - tokenPrice: string; -} - -// ------------------------------------------------------------------------------------------------ -// Core Action implementation -// ------------------------------------------------------------------------------------------------ -export const actionGetAccountBalance: Action = { - name: "GET_ACCOUNT_BALANCE_ANKR", - similes: ["CHECK_BALANCE", "SHOW_BALANCE", "VIEW_BALANCE", "GET_WALLET_BALANCE"], - description: "Retrieve account balance information across multiple blockchains.", - examples: [[ - { - user: "user", - content: { - text: "Show me the balance for wallet [wallet]0x1234567890123456789012345678901234567890[/wallet] on [chain]eth[/chain]", - filters: { - blockchain: ["eth"], - walletAddress: "0x1234567890123456789012345678901234567890" - } - } as GetAccountBalanceContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Here are the balances for wallet 0x1234...7890:\n\n" + - "1. ETH (Native)\n" + - " Balance: 1.5 ETH\n" + - " USD Value: $3,000.00\n\n" + - "2. USDC (ERC20)\n" + - " Balance: 1000 USDC\n" + - " Contract: 0xa0b8...c4d5\n" + - " USD Value: $1,000.00", - success: true, - data: { - address: "0x1234567890123456789012345678901234567890", - balances: [{ - blockchain: "eth", - tokenName: "Ethereum", - symbol: "ETH", - balance: "1.5", - balanceRawInteger: "1500000000000000000", - balanceUsd: "3000.00", - tokenDecimals: 18, - tokenType: "NATIVE" - }, { - blockchain: "eth", - tokenName: "USD Coin", - symbol: "USDC", - balance: "1000", - balanceRawInteger: "1000000000", - balanceUsd: "1000.00", - tokenDecimals: 6, - tokenType: "ERC20", - contractAddress: "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48" - }] - } - } as GetAccountBalanceContent - } as ActionExample - ]], - // ------------------------------------------------------------------------------------------------ - // Core Validation implementation - // ------------------------------------------------------------------------------------------------ - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - if (message.content?.type !== "GET_ACCOUNT_BALANCE_ANKR") { - return true; - } - - logGranular("Validating GET_ACCOUNT_BALANCE_ANKR action", { - content: message.content - }); - - try { - const content = message.content as GetAccountBalanceContent; - - if (!content.filters?.walletAddress) { - throw new ValidationError("Wallet address is required"); - } - - if (content.filters?.blockchain && !Array.isArray(content.filters.blockchain)) { - throw new ValidationError("Blockchain must be an array"); - } - - logGranular("Validation successful"); - return true; - } catch (error) { - logGranular("Validation failed", { error }); - if (error instanceof ValidationError) { - throw error; - } - throw new ValidationError(error instanceof Error ? error.message : "Unknown validation error"); - } - }, - // ------------------------------------------------------------------------------------------------ - // Core Handler implementation - // ------------------------------------------------------------------------------------------------ - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - logGranular("Executing GET_ACCOUNT_BALANCE_ANKR action"); - - try { - const messageContent = message.content as GetAccountBalanceContent; - console.log("Debug - Full message content:", { - fullContent: message.content, - rawText: messageContent?.text, - type: message.content?.type, - allKeys: Object.keys(message.content || {}) - }); - - console.log("Debug - Message content details:", { - hasText: !!messageContent?.text, - hasFilters: !!messageContent?.filters, - textContent: messageContent?.text, - contentType: typeof messageContent?.text - }); - - const config = await validateankrConfig(runtime); - console.log("Debug - Config validated:", { - hasWallet: !!config.ANKR_WALLET, - env: config.ANKR_ENV - }); - - const wallet = config.ANKR_WALLET; - if (!wallet) { - throw new ConfigurationError("ANKR_WALLET not found in environment variables"); - } - - const endpoint = `https://rpc.ankr.com/multichain/${wallet}`; - - // Parse the prompt using our API content parser - console.log("Debug - Raw prompt:", { - text: messageContent.text, - promptLength: messageContent.text?.length, - }); - - const parsedContent = parseAPIContent(messageContent.text); - console.log("Debug - Parsed API content:", { - hasWallet: !!parsedContent.wallet, - hasChain: !!parsedContent.chain, - wallet: parsedContent.wallet, - chain: parsedContent.chain, - matches: parsedContent.raw.matches - }); - - // Validate required fields - validateRequiredFields(parsedContent, ['wallet', 'chain']); - - // Prepare API request parameters - const requestParams = { - blockchain: [parsedContent.chain], - walletAddress: parsedContent.wallet - }; - - console.log("Debug - API request parameters:", { - params: requestParams, - endpoint: ANKR_ENDPOINTS.production.multichain - }); - - try { - const response = await axios.post( - endpoint, - { - jsonrpc: "2.0", - method: "ankr_getAccountBalance", - params: requestParams, - id: 1 - }, - { - headers: { - 'Content-Type': 'application/json' - } - } - ); - - logGranular("Received response from Ankr API", { - statusCode: response.status, - data: response.data - }); - - if (response.data.error) { - throw new APIError(`Ankr API error: ${response.data.error.message}`); - } - - const balances = response.data.result.assets; - const address = parsedContent.wallet; - - // Format the response text - let formattedText = `Here are the balances for wallet ${address?.slice(0, 6)}...${address?.slice(-4)}:\n\n`; - - // Use the interface instead of any - balances.forEach((balance: TokenBalance, index: number) => { - formattedText += `${index + 1}. ${balance.tokenName} (${balance.tokenType})\n`; - formattedText += ` Balance: ${balance.balance} ${balance.tokenSymbol}\n`; - if (balance.contractAddress) { - formattedText += ` Contract: ${balance.contractAddress.slice(0, 6)}...${balance.contractAddress.slice(-4)}\n`; - } - formattedText += ` USD Value: $${Number.parseFloat(balance.balanceUsd).toFixed(2)}\n\n`; - }); - - // ------------------------------------------------------------------------------------------------ - // Core Callback logic - // ------------------------------------------------------------------------------------------------ - if (callback) { - logGranular("Sending success callback with formatted text", { formattedText }); - callback({ - text: formattedText, - success: true, - data: { - address, - balances - } - } as GetAccountBalanceContent); - } - - return true; - - } catch (error) { - logGranular("API request failed", { error }); - if (axios.isAxiosError(error)) { - throw new APIError( - `Failed to fetch balance data: ${error.message}`, - error.response?.status - ); - } - throw new APIError("Failed to fetch balance data"); - } - - } catch (error) { - logGranular("Handler execution failed", { error }); - - if (callback) { - const errorMessage = error instanceof Error ? error.message : "Unknown error occurred"; - callback({ - text: `Error getting account balance: ${errorMessage}`, - success: false - } as GetAccountBalanceContent); - } - - if (error instanceof ConfigurationError || - error instanceof ValidationError || - error instanceof APIError) { - throw error; - } - - throw new APIError("Failed to execute GET_ACCOUNT_BALANCE_ANKR action"); - } - } -}; - -export default actionGetAccountBalance; diff --git a/packages/plugin-ankr/src/actions/actionGetBlockchainStats.ts b/packages/plugin-ankr/src/actions/actionGetBlockchainStats.ts deleted file mode 100644 index e58c030078296..0000000000000 --- a/packages/plugin-ankr/src/actions/actionGetBlockchainStats.ts +++ /dev/null @@ -1,293 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { Action, elizaLogger } from "@elizaos/core"; -import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import axios from 'axios'; -import { getConfig, validateankrConfig, ANKR_ENDPOINTS } from '../environment'; -import { APIError, ConfigurationError, ValidationError } from '../error/base'; -import { parseAPIContent, validateRequiredFields } from '../validator/apiParseValidation'; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ -// Get configuration for granular logging -const config = getConfig(); -const GRANULAR_LOG = config.ANKR_GRANULAR_LOG; - -// Enhanced logging helper -const logGranular = (message: string, data?: unknown) => { - if (GRANULAR_LOG) { - elizaLogger.debug(`[GetBlockchainStats] ${message}`, data); - console.log(`[GetBlockchainStats] ${message}`, data ? JSON.stringify(data, null, 2) : ''); - } -}; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ - -interface GetBlockchainStatsContent extends Content { - text: string; - filters?: { - blockchain?: string[]; - }; - success?: boolean; - data?: { - stats: Array<{ - blockchain: string; - latestBlock: number; - totalTransactions: string; - totalAccounts: string; - tps: number; - gasPrice: string; - marketCap: string; - totalValueLocked: string; - }>; - }; -} - -// Update the interface to match actual API response -interface AnkrBlockchainStats { - blockchain: string; - totalTransactionsCount: number; - totalEventsCount: number; - latestBlockNumber: number; - blockTimeMs: number; - nativeCoinUsdPrice: string; -} - -// ------------------------------------------------------------------------------------------------ -// Core Action implementation -// ------------------------------------------------------------------------------------------------ -export const actionGetBlockchainStats: Action = { - name: "GET_BLOCKCHAIN_STATS_ANKR", - similes: ["CHAIN_STATS", "BLOCKCHAIN_INFO", "NETWORK_STATS", "CHAIN_METRICS"], - description: "Retrieve statistical information about specified blockchain networks.", - examples: [[ - { - user: "user", - content: { - text: "Show me stats for [chain]eth[/chain] blockchain", - filters: { - blockchain: ["eth"] - } - } as GetBlockchainStatsContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Here are the current statistics for Ethereum:\n\n" + - "Latest Block: 19,234,567\n" + - "Total Transactions: 2.5B\n" + - "Active Accounts: 245M\n" + - "TPS: 15.5\n" + - "Gas Price: 25 Gwei\n" + - "Market Cap: $250B\n" + - "Total Value Locked: $45B", - success: true, - data: { - stats: [{ - blockchain: "eth", - latestBlock: 19234567, - totalTransactions: "2500000000", - totalAccounts: "245000000", - tps: 15.5, - gasPrice: "25000000000", - marketCap: "250000000000", - totalValueLocked: "45000000000" - }] - } - } as GetBlockchainStatsContent - } as ActionExample - ]], - // ------------------------------------------------------------------------------------------------ - // Core Validation implementation - // ------------------------------------------------------------------------------------------------ - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - if (message.content?.type !== "GET_BLOCKCHAIN_STATS_ANKR") { - return true; - } - - logGranular("Validating GET_BLOCKCHAIN_STATS_ANKR action", { - content: message.content - }); - - try { - const content = message.content as GetBlockchainStatsContent; - - if (content.filters?.blockchain && !Array.isArray(content.filters.blockchain)) { - throw new ValidationError("Blockchain must be an array"); - } - - logGranular("Validation successful"); - return true; - } catch (error) { - logGranular("Validation failed", { error }); - if (error instanceof ValidationError) { - throw error; - } - throw new ValidationError(error instanceof Error ? error.message : "Unknown validation error"); - } - }, - // ------------------------------------------------------------------------------------------------ - // Core Handler implementation - // ------------------------------------------------------------------------------------------------ - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - logGranular("Executing GET_BLOCKCHAIN_STATS_ANKR action"); - - try { - const messageContent = message.content as GetBlockchainStatsContent; - console.log("Debug - Full message content:", { - fullContent: message.content, - rawText: messageContent?.text, - type: message.content?.type, - allKeys: Object.keys(message.content || {}) - }); - - console.log("Debug - Message content details:", { - hasText: !!messageContent?.text, - hasFilters: !!messageContent?.filters, - textContent: messageContent?.text, - contentType: typeof messageContent?.text - }); - - const config = await validateankrConfig(runtime); - console.log("Debug - Config validated:", { - hasWallet: !!config.ANKR_WALLET, - env: config.ANKR_ENV - }); - - const wallet = config.ANKR_WALLET; - if (!wallet) { - throw new ConfigurationError("ANKR_WALLET not found in environment variables"); - } - - const endpoint = `https://rpc.ankr.com/multichain/${wallet}`; - - // Parse the prompt using our API content parser - console.log("Debug - Raw prompt:", { - text: messageContent.text, - promptLength: messageContent.text?.length, - }); - - const parsedContent = parseAPIContent(messageContent.text); - console.log("Debug - Parsed API content:", { - hasChain: !!parsedContent.chain, - chain: parsedContent.chain, - matches: parsedContent.raw.matches - }); - - // Validate required fields - validateRequiredFields(parsedContent, ['chain']); - - // Prepare API request parameters - const requestParams = { - blockchain: parsedContent.chain // Changed from array to string - }; - - console.log("Debug - API request parameters:", { - params: requestParams, - endpoint: ANKR_ENDPOINTS.production.multichain - }); - - try { - const response = await axios.post( - endpoint, - { - jsonrpc: "2.0", - method: "ankr_getBlockchainStats", - params: requestParams, - id: 1 - }, - { - headers: { - 'Content-Type': 'application/json' - } - } - ); - - logGranular("Received response from Ankr API", { - statusCode: response.status, - data: response.data - }); - - if (response.data.error) { - throw new APIError(`Ankr API error: ${response.data.error.message}`); - } - - const stats = response.data.result.stats; - - // Format the response text - let formattedText = ""; - for (const stat of stats) { - formattedText += `Statistics for ${stat.blockchain.toUpperCase()}:\n\n`; - formattedText += `Latest Block: ${stat.latestBlockNumber.toLocaleString()}\n`; - formattedText += `Total Transactions: ${(stat.totalTransactionsCount / 1e9).toFixed(1)}B\n`; - formattedText += `Total Events: ${(stat.totalEventsCount / 1e9).toFixed(1)}B\n`; - formattedText += `Block Time: ${(stat.blockTimeMs / 1000).toFixed(1)} seconds\n`; - formattedText += `Native Coin Price: $${Number(stat.nativeCoinUsdPrice).toFixed(2)}\n\n`; - } - - // Update callback data structure to match new format - if (callback) { - logGranular("Sending success callback with formatted text", { formattedText }); - callback({ - text: formattedText, - success: true, - data: { - stats: stats.map((stat: AnkrBlockchainStats) => ({ - blockchain: stat.blockchain, - latestBlock: stat.latestBlockNumber, - totalTransactions: stat.totalTransactionsCount.toString(), - totalEvents: stat.totalEventsCount.toString(), - blockTime: stat.blockTimeMs / 1000, - nativeCoinPrice: stat.nativeCoinUsdPrice - })) - } - } as GetBlockchainStatsContent); - } - - return true; - - } catch (error) { - logGranular("API request failed", { error }); - if (axios.isAxiosError(error)) { - throw new APIError( - `Failed to fetch blockchain stats: ${error.message}`, - error.response?.status - ); - } - throw new APIError("Failed to fetch blockchain stats"); - } - - } catch (error) { - logGranular("Handler execution failed", { error }); - - if (callback) { - const errorMessage = error instanceof Error ? error.message : "Unknown error occurred"; - callback({ - text: `Error getting blockchain stats: ${errorMessage}`, - success: false - } as GetBlockchainStatsContent); - } - - if (error instanceof ConfigurationError || - error instanceof ValidationError || - error instanceof APIError) { - throw error; - } - - throw new APIError("Failed to execute GET_BLOCKCHAIN_STATS_ANKR action"); - } - } -}; - -export default actionGetBlockchainStats; diff --git a/packages/plugin-ankr/src/actions/actionGetCurrencies.ts b/packages/plugin-ankr/src/actions/actionGetCurrencies.ts deleted file mode 100644 index f60b7528495f4..0000000000000 --- a/packages/plugin-ankr/src/actions/actionGetCurrencies.ts +++ /dev/null @@ -1,285 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { Action, elizaLogger } from "@elizaos/core"; -import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import axios from 'axios'; -import { getConfig, validateankrConfig, ANKR_ENDPOINTS } from '../environment'; -import { APIError, ConfigurationError, ValidationError } from '../error/base'; -import { parseAPIContent, validateRequiredFields } from '../validator/apiParseValidation'; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ -// Get configuration for granular logging -const config = getConfig(); -const GRANULAR_LOG = config.ANKR_GRANULAR_LOG; - -// Enhanced logging helper -const logGranular = (message: string, data?: unknown) => { - if (GRANULAR_LOG) { - elizaLogger.debug(`[GetCurrencies] ${message}`, data); - console.log(`[GetCurrencies] ${message}`, data ? JSON.stringify(data, null, 2) : ''); - } -}; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ - -interface GetCurrenciesContent extends Content { - text: string; - filters?: { - blockchain?: string; - pageSize?: number; - pageToken?: string; - }; - success?: boolean; - data?: { - currencies: Array<{ - blockchain: string; - address: string; - name: string; - symbol: string; - decimals: number; - thumbnail?: string; - }>; - syncStatus?: { - timestamp: number; - lag: string; - status: string; - }; - }; -} - -// ------------------------------------------------------------------------------------------------ -// Core Action implementation -// ------------------------------------------------------------------------------------------------ -export const actionGetCurrencies: Action = { - name: "GET_CURRENCIES_ANKR", - similes: ["LIST_CURRENCIES", "SHOW_CURRENCIES", "VIEW_CURRENCIES", "FETCH_CURRENCIES"], - description: "Retrieve information about currencies on specified blockchain networks.", - examples: [[ - { - user: "user", - content: { - text: "Show me the top currencies on [chain]eth[/chain]", - filters: { - blockchain: "eth", - pageSize: 5, - pageToken: "eyJsYXN0X2JhbGFuY2UiOiIyIn0=" - } - } as GetCurrenciesContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Here are the top currencies on Ethereum:\n\n" + - "1. Ethereum (ETH)\n" + - " Market Cap: $250B\n" + - " Holders: 2.5M\n" + - " Total Supply: 120.5M ETH\n\n" + - "2. USD Coin (USDC)\n" + - " Contract: 0xa0b8...c4d5\n" + - " Market Cap: $45B\n" + - " Holders: 1.2M\n" + - " Total Supply: 45B USDC", - success: true, - data: { - currencies: [ - { - blockchain: "eth", - address: "0x0000000000000000000000000000000000000000", - name: "Ethereum", - symbol: "ETH", - decimals: 18 - } - ] - } - } as GetCurrenciesContent - } as ActionExample - ]], - - // ------------------------------------------------------------------------------------------------ - // Core Validation implementation - // ------------------------------------------------------------------------------------------------ - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - if (message.content?.type !== "GET_CURRENCIES_ANKR") { - return true; - } - - logGranular("Validating GET_CURRENCIES_ANKR action", { - content: message.content - }); - - try { - const content = message.content as GetCurrenciesContent; - - if (!content.filters?.blockchain) { - throw new ValidationError("Blockchain is required"); - } - - logGranular("Validation successful"); - return true; - } catch (error) { - logGranular("Validation failed", { error }); - if (error instanceof ValidationError) { - throw error; - } - throw new ValidationError(error instanceof Error ? error.message : "Unknown validation error"); - } - }, - - // ------------------------------------------------------------------------------------------------ - // Core Handler implementation - // ------------------------------------------------------------------------------------------------ - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - logGranular("Executing GET_CURRENCIES_ANKR action"); - - try { - const messageContent = message.content as GetCurrenciesContent; - console.log("Debug - Full message content:", { - fullContent: message.content, - rawText: messageContent?.text, - type: message.content?.type, - allKeys: Object.keys(message.content || {}) - }); - - const config = await validateankrConfig(runtime); - console.log("Debug - Config validated:", { - hasWallet: !!config.ANKR_WALLET, - env: config.ANKR_ENV - }); - - const wallet = config.ANKR_WALLET; - if (!wallet) { - throw new ConfigurationError("ANKR_WALLET not found in environment variables"); - } - - const endpoint = `https://rpc.ankr.com/multichain/${wallet}`; - - // Parse the prompt using our API content parser - console.log("Debug - Raw prompt:", { - text: messageContent.text, - promptLength: messageContent.text?.length, - }); - - const parsedContent = parseAPIContent(messageContent.text); - console.log("Debug - Parsed API content:", { - hasChain: !!parsedContent.chain, - chain: parsedContent.chain, - matches: parsedContent.raw.matches - }); - - // Validate required fields - validateRequiredFields(parsedContent, ['chain']); - - // Prepare API request parameters - const requestParams = { - blockchain: parsedContent.chain, - pageSize: messageContent.filters?.pageSize ?? 5 - }; - - console.log("Debug - API request parameters:", { - params: requestParams, - endpoint: ANKR_ENDPOINTS.production.multichain - }); - - try { - const response = await axios.post( - endpoint, - { - jsonrpc: "2.0", - method: "ankr_getCurrencies", - params: requestParams, - id: 1 - }, - { - headers: { - 'Content-Type': 'application/json' - } - } - ); - - logGranular("Received response from Ankr API", { - statusCode: response.status, - data: response.data - }); - - if (response.data.error) { - throw new APIError(`Ankr API error: ${response.data.error.message}`); - } - - const currencies = response.data.result.currencies; - - // Format the response text - let formattedText = `Here are the top currencies from ${parsedContent.chain ? parsedContent.chain[0].toUpperCase() : 'Unknown Chain'}:\n\n`; - - let index = 0; - for (const currency of currencies) { - formattedText += [ - `${index + 1}. ${currency.name} (${currency.symbol})`, - currency.address ? ` Contract: ${currency.address.slice(0, 6)}...${currency.address.slice(-4)}` : '', - ` Decimals: ${currency.decimals}`, - currency.thumbnail ? ` Logo: ${currency.thumbnail}` : '', - '', - '' - ].filter(Boolean).join('\n'); - index++; - } - - if (callback) { - logGranular("Sending success callback with formatted text", { formattedText }); - callback({ - text: formattedText, - success: true, - data: { - currencies, - syncStatus: response.data.result.syncStatus - } - } as GetCurrenciesContent); - } - - return true; - - } catch (error) { - logGranular("API request failed", { error }); - if (axios.isAxiosError(error)) { - throw new APIError( - `Failed to fetch currencies data: ${error.message}`, - error.response?.status - ); - } - throw new APIError("Failed to fetch currencies data"); - } - - } catch (error) { - logGranular("Handler execution failed", { error }); - - if (callback) { - const errorMessage = error instanceof Error ? error.message : "Unknown error occurred"; - callback({ - text: `Error getting currencies: ${errorMessage}`, - success: false - } as GetCurrenciesContent); - } - - if (error instanceof ConfigurationError || - error instanceof ValidationError || - error instanceof APIError) { - throw error; - } - - throw new APIError("Failed to execute GET_CURRENCIES_ANKR action"); - } - } -}; - -export default actionGetCurrencies; diff --git a/packages/plugin-ankr/src/actions/actionGetInteractions.ts b/packages/plugin-ankr/src/actions/actionGetInteractions.ts deleted file mode 100644 index c7d100ba6abbf..0000000000000 --- a/packages/plugin-ankr/src/actions/actionGetInteractions.ts +++ /dev/null @@ -1,284 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { Action, elizaLogger } from "@elizaos/core"; -import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import axios from 'axios'; -import { getConfig, validateankrConfig, ANKR_ENDPOINTS } from '../environment'; -import { APIError, ConfigurationError, ValidationError } from '../error/base'; -import { parseAPIContent, validateRequiredFields } from '../validator/apiParseValidation'; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ -// Get configuration for granular logging -const config = getConfig(); -const GRANULAR_LOG = config.ANKR_GRANULAR_LOG; - -// Enhanced logging helper -const logGranular = (message: string, data?: unknown) => { - if (GRANULAR_LOG) { - elizaLogger.debug(`[GetInteractions] ${message}`, data); - console.log(`[GetInteractions] ${message}`, data ? JSON.stringify(data, null, 2) : ''); - } -}; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ - -interface GetInteractionsContent extends Content { - text: string; - filters?: { - blockchain?: string; // Changed from string[] to string - address?: string; // Changed from walletAddress - pageSize?: number; - pageToken?: string; - }; - success?: boolean; - data?: { - interactions: Array<{ - blockchain: string; - transactionHash: string; - blockNumber: number; - timestamp: string; - from: string; - to: string; - value: string; - gasPrice: string; - gasUsed: string; - methodName?: string; - logs: Array<{ - address: string; - topics: string[]; - data: string; - logIndex: number; - }>; - }>; - nextPageToken?: string; - }; -} - -interface AnkrAPIResponse { - blockchains: string[]; - syncStatus: { - timestamp: number; - lag: string; - status: string; - }; -} - -// ------------------------------------------------------------------------------------------------ -// Core Action implementation -// ------------------------------------------------------------------------------------------------ -export const actionGetInteractions: Action = { - name: "GET_INTERACTIONS_ANKR", - similes: ["FETCH_INTERACTIONS", "SHOW_INTERACTIONS", "VIEW_INTERACTIONS", "LIST_INTERACTIONS"], - description: "Retrieve interactions between wallets and smart contracts on specified blockchain networks.", - examples: [[ - { - user: "user", - content: { - text: "Show me interactions for the wallet [wallet]0x68b3465833fb72A70ecDF485E0e4C7bD8665Fc45[/wallet]", - filters: { - blockchain: "eth", // Changed from string[] to string - address: "0x68b3465833fb72A70ecDF485E0e4C7bD8665Fc45", - pageSize: 5, - pageToken: "eyJsYXN0X2Jsb2NrIjoiMTIzNDU2Nzg4IiwibGFzdF9pbnRlcmFjdGlvbl9pbmRleCI6IjEifQ==" - } - } as GetInteractionsContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Here are the recent interactions:\n\n" + - "1. Transfer (2024-03-15 14:30 UTC)\n" + - " From: 0xabc...def1\n" + - " To: 0x123...5678\n" + - " Value: 1.5 ETH\n" + - " Gas Used: 21,000\n" + - " Tx Hash: 0xdef...789\n\n" + - "2. Approve (2024-03-15 14:25 UTC)\n" + - " From: 0xabc...def1\n" + - " To: 0x123...5678\n" + - " Value: 0 ETH\n" + - " Gas Used: 45,000\n" + - " Tx Hash: 0x789...012", - success: true, - data: { - interactions: [{ - blockchain: "eth", - transactionHash: "0xdef...789", - blockNumber: 17000100, - timestamp: "2024-03-15T14:30:00Z", - from: "0xabcdef1234567890abcdef1234567890abcdef12", - to: "0x1234567890abcdef1234567890abcdef12345678", - value: "1500000000000000000", - gasPrice: "20000000000", - gasUsed: "21000", - methodName: "transfer", - logs: [{ - address: "0x1234567890abcdef1234567890abcdef12345678", - topics: ["0x000...123"], - data: "0x000...456", - logIndex: 0 - }] - }] - } - } as GetInteractionsContent - } as ActionExample - ]], - // ------------------------------------------------------------------------------------------------ - // Core Validation implementation - // ------------------------------------------------------------------------------------------------ - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - if (message.content?.type !== "GET_INTERACTIONS_ANKR") { - return true; - } - - logGranular("Validating GET_INTERACTIONS_ANKR action", { - content: message.content - }); - - try { - const content = message.content as GetInteractionsContent; - - // Only wallet address is required based on the API - if (!content.filters?.address) { - throw new ValidationError("Wallet address is required"); - } - - logGranular("Validation successful"); - return true; - } catch (error) { - logGranular("Validation failed", { error }); - if (error instanceof ValidationError) { - throw error; - } - throw new ValidationError(error instanceof Error ? error.message : "Unknown validation error"); - } - }, - - // ------------------------------------------------------------------------------------------------ - // Core Handler implementation - // ------------------------------------------------------------------------------------------------ - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - logGranular("Executing GET_INTERACTIONS_ANKR action"); - - try { - const messageContent = message.content as GetInteractionsContent; - const parsedContent = parseAPIContent(messageContent.text); - - // Validate required fields - validateRequiredFields(parsedContent, ['wallet']); - - const config = await validateankrConfig(runtime); - console.log("Debug - Config validated:", { - hasWallet: !!config.ANKR_WALLET, - env: config.ANKR_ENV - }); - - const wallet = config.ANKR_WALLET; - if (!wallet) { - throw new ConfigurationError("ANKR_WALLET not found in environment variables"); - } - - const endpoint = `https://rpc.ankr.com/multichain/${wallet}`; - - // Prepare API request parameters - const requestParams = { - blockchain: parsedContent.chain || "eth", - address: parsedContent.wallet, - pageSize: messageContent.filters?.pageSize ?? 5, - pageToken: messageContent.filters?.pageToken - }; - - try { - const response = await axios.post<{ - id: number; - jsonrpc: string; - result: AnkrAPIResponse; - }>( - endpoint, - { - jsonrpc: "2.0", - method: "ankr_getInteractions", - params: requestParams, - id: 1 - }, - { - headers: { - 'Content-Type': 'application/json' - } - } - ); - - logGranular("Received response from Ankr API", { - statusCode: response.status, - data: response.data - }); - - // Format the response text based on the sync status - const formattedText = `Blockchain Status Information: - -Available Blockchains: ${response.data.result.blockchains.join(', ')} -Sync Status: ${response.data.result.syncStatus.status} -Lag: ${response.data.result.syncStatus.lag}`; - - if (callback) { - callback({ - text: formattedText, - success: true, - data: { - interactions: [], - syncStatus: response.data.result.syncStatus, - availableBlockchains: response.data.result.blockchains - } - } as GetInteractionsContent); - } - - return true; - - } catch (error) { - logGranular("API request failed", { error }); - if (axios.isAxiosError(error)) { - throw new APIError( - `Failed to fetch interactions data: ${error.message}`, - error.response?.status - ); - } - throw new APIError("Failed to fetch interactions data"); - } - - } catch (error) { - logGranular("Handler execution failed", { error }); - - if (callback) { - const errorMessage = error instanceof Error ? error.message : "Unknown error occurred"; - callback({ - text: `Error getting interactions: ${errorMessage}`, - success: false - } as GetInteractionsContent); - } - - if (error instanceof ConfigurationError || - error instanceof ValidationError || - error instanceof APIError) { - throw error; - } - - throw new APIError("Failed to execute GET_INTERACTIONS_ANKR action"); - } - }, - - -}; - -export default actionGetInteractions; diff --git a/packages/plugin-ankr/src/actions/actionGetNFTHolders.ts b/packages/plugin-ankr/src/actions/actionGetNFTHolders.ts deleted file mode 100644 index b0fbe470071b2..0000000000000 --- a/packages/plugin-ankr/src/actions/actionGetNFTHolders.ts +++ /dev/null @@ -1,286 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { Action, elizaLogger } from "@elizaos/core"; -import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import axios from 'axios'; -import { getConfig, validateankrConfig, ANKR_ENDPOINTS } from '../environment'; -import { APIError, ConfigurationError, ValidationError } from '../error/base'; -import { parseAPIContent, validateRequiredFields } from '../validator/apiParseValidation'; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ -// Get configuration for granular logging -const config = getConfig(); -const GRANULAR_LOG = config.ANKR_GRANULAR_LOG; - -// Enhanced logging helper -const logGranular = (message: string, data?: unknown) => { - if (GRANULAR_LOG) { - elizaLogger.debug(`[GetNFTHolders] ${message}`, data); - console.log(`[GetNFTHolders] ${message}`, data ? JSON.stringify(data, null, 2) : ''); - } -}; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ - -interface GetNFTHoldersContent extends Content { - text: string; - filters?: { - blockchain?: string; // Changed from string[] to single string - contractAddress?: string; - pageSize?: number; - pageToken?: string; - }; - success?: boolean; - data?: { - holders: Array<{ - holderAddress: string; - balance: string; - balanceRawInteger: string; - }>; - nextPageToken?: string; - blockchain?: string; - contractAddress?: string; - tokenDecimals?: number; - holdersCount?: number; - syncStatus?: { - timestamp: number; - lag: string; - status: string; - }; - }; -} - -// ------------------------------------------------------------------------------------------------ -// Core Action implementation -// ------------------------------------------------------------------------------------------------ -export const actionGetNFTHolders: Action = { - name: "GET_NFT_HOLDERS_ANKR", - similes: ["FETCH_NFT_HOLDERS", "SHOW_NFT_HOLDERS", "VIEW_NFT_HOLDERS", "LIST_NFT_HOLDERS"], - description: "Retrieve holders of specific NFTs on specified blockchain networks.", - examples: [[ - { - user: "user", - content: { - text: "Show me holders of NFT contract [contract]0x34d85c9cdeb23fa97cb08333b511ac86e1c4e258[/contract] on [chain]bsc[/chain]", - filters: { - blockchain: "bsc", // Changed from string[] to string - contractAddress: "0x34d85c9cdeb23fa97cb08333b511ac86e1c4e258", - pageSize: 5 - } - } as GetNFTHoldersContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Here are the NFT holders:\n\n" + - "1. 0xabc...def1\n" + - " Balance: 1.5\n" + - " Raw Balance: 1500000000000000000\n\n" + - "2. 0xdef...789a\n" + - " Balance: 2.0\n" + - " Raw Balance: 2000000000000000000", - success: true, - data: { - holders: [{ - holderAddress: "0xabcdef1234567890abcdef1234567890abcdef12", - balance: "1.5", - balanceRawInteger: "1500000000000000000" - }], - blockchain: "bsc", - contractAddress: "0xf307910A4c7bbc79691fD374889b36d8531B08e3", - tokenDecimals: 18, - holdersCount: 1000, - syncStatus: { - timestamp: 1737769593, - lag: "-2m", - status: "synced" - } - } - } as GetNFTHoldersContent - } as ActionExample - ]], - // ------------------------------------------------------------------------------------------------ - // Core Validation implementation - // ------------------------------------------------------------------------------------------------ - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - if (message.content?.type !== "GET_NFT_HOLDERS_ANKR") { - return true; - } - - logGranular("Validating GET_NFT_HOLDERS_ANKR action", { - content: message.content - }); - - try { - const content = message.content as GetNFTHoldersContent; - - if (!content.filters?.contractAddress) { - throw new ValidationError("Contract address is required"); - } - - // Blockchain is optional, defaults to "eth" - if (content.filters?.blockchain && typeof content.filters.blockchain !== 'string') { - throw new ValidationError("Blockchain must be a string"); - } - - logGranular("Validation successful"); - return true; - } catch (error) { - logGranular("Validation failed", { error }); - if (error instanceof ValidationError) { - throw error; - } - throw new ValidationError(error instanceof Error ? error.message : "Unknown validation error"); - } - }, - // ------------------------------------------------------------------------------------------------ - // Core Handler implementation - // ------------------------------------------------------------------------------------------------ - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - logGranular("Executing GET_NFT_HOLDERS_ANKR action"); - - try { - const messageContent = message.content as GetNFTHoldersContent; - console.log("Debug - Full message content:", { - fullContent: message.content, - rawText: messageContent?.text, - type: message.content?.type, - allKeys: Object.keys(message.content || {}) - }); - - const config = await validateankrConfig(runtime); - console.log("Debug - Config validated:", { - hasWallet: !!config.ANKR_WALLET, - env: config.ANKR_ENV - }); - - const wallet = config.ANKR_WALLET; - if (!wallet) { - throw new ConfigurationError("ANKR_WALLET not found in environment variables"); - } - - const endpoint = `https://rpc.ankr.com/multichain/${wallet}`; - - // Parse the prompt using our API content parser - console.log("Debug - Raw prompt:", { - text: messageContent.text, - promptLength: messageContent.text?.length, - }); - - const parsedContent = parseAPIContent(messageContent.text); - console.log("Debug - Parsed API content:", { - hasContract: !!parsedContent.contract, - hasToken: !!parsedContent.token, - hasChain: !!parsedContent.chain, - contract: parsedContent.contract, - token: parsedContent.token, - chain: parsedContent.chain, - matches: parsedContent.raw.matches - }); - - // Validate required fields - validateRequiredFields(parsedContent, ['contract']); - - const requestParams = { - blockchain: parsedContent.chain, - contractAddress: parsedContent.contract, - pageSize: messageContent.filters?.pageSize || 10, - pageToken: messageContent.filters?.pageToken - }; - - console.log("Debug - API request parameters:", { - params: requestParams, - endpoint: endpoint - }); - - const response = await axios.post( - endpoint, - { - jsonrpc: "2.0", - method: "ankr_getNFTHolders", - params: requestParams, - id: 1 - }, - { - headers: { - 'Content-Type': 'application/json' - } - } - ); - - logGranular("Received response from Ankr API", { - statusCode: response.status, - data: response.data - }); - - const result = response.data.result; - const formattedText = - `NFT Holders: -Total Holders: ${result.holders.length} - -${result.holders.map((holderAddress: string, index: number) => - `${index + 1}. ${holderAddress}` -).join('\n')} - -${result.nextPageToken ? 'More holders available. Use the page token to see more.\n' : ''} -${result.syncStatus ? `Sync Status: -Last Update: ${new Date(result.syncStatus.timestamp * 1000).toLocaleString()} -Lag: ${result.syncStatus.lag} -Status: ${result.syncStatus.status}` : ''}`; - - logGranular("Formatted response text", { formattedText }); - - if (callback) { - logGranular("Sending success callback with formatted text"); - callback({ - text: formattedText, - success: true, - data: { - holders: result.holders.map((address: string) => ({ - holderAddress: address, - balance: "1", // Default values since not provided in response - balanceRawInteger: "1" - })), - nextPageToken: result.nextPageToken, - syncStatus: result.syncStatus - } - } as GetNFTHoldersContent); - } - - return true; - - } catch (error: unknown) { - logGranular("Handler execution failed", { error }); - - if (callback) { - const errorMessage = error instanceof Error ? error.message : "Unknown error occurred"; - callback({ - text: `Error getting NFT holders: ${errorMessage}`, - success: false - } as GetNFTHoldersContent); - } - - if (error instanceof ConfigurationError || - error instanceof ValidationError || - error instanceof APIError) { - throw error; - } - - throw new APIError("Failed to execute GET_NFT_HOLDERS_ANKR action"); - } - } -}; - -export default actionGetNFTHolders; diff --git a/packages/plugin-ankr/src/actions/actionGetNFTMetadata.ts b/packages/plugin-ankr/src/actions/actionGetNFTMetadata.ts deleted file mode 100644 index 02cb89c540ae2..0000000000000 --- a/packages/plugin-ankr/src/actions/actionGetNFTMetadata.ts +++ /dev/null @@ -1,301 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { Action, elizaLogger } from "@elizaos/core"; -import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import axios from 'axios'; -import { getConfig, validateankrConfig, ANKR_ENDPOINTS } from '../environment'; -import { APIError, ConfigurationError, ValidationError } from '../error/base'; -import { parseAPIContent, validateRequiredFields } from '../validator/apiParseValidation'; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ -// Get configuration for granular logging -const config = getConfig(); -const GRANULAR_LOG = config.ANKR_GRANULAR_LOG; - -// Enhanced logging helper -const logGranular = (message: string, data?: unknown) => { - if (GRANULAR_LOG) { - elizaLogger.debug(`[GetNFTMetadata] ${message}`, data); - console.log(`[GetNFTMetadata] ${message}`, data ? JSON.stringify(data, null, 2) : ''); - } -}; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ - -interface GetNFTMetadataContent extends Content { - text: string; - filters?: { - blockchain?: string; - contractAddress?: string; - tokenId?: string; - }; - success?: boolean; - data?: { - metadata: { - blockchain: string; - contractAddress: string; - contractType: string; - tokenId: string; - }; - attributes: { - contractType: string; - tokenUrl: string; - imageUrl: string; - name: string; - description: string; - traits: Array<{ - trait_type: string; - value: string; - }>; - }; - }; -} - -export const actionGetNFTMetadata: Action = { - name: "GET_NFT_METADATA_ANKR", - similes: ["GET_NFT_INFO", "SHOW_NFT_DETAILS", "VIEW_NFT", "NFT_METADATA"], - description: "Get detailed metadata for a specific NFT including traits, images, and contract information.", - examples: [[ - { - user: "user", - content: { - text: "Show me the metadata for NFT [token]1234[/token] at contract [contract]0xbc4ca0eda7647a8ab7c2061c2e118a18a936f13d[/contract] [chain]eth[/chain]", - filters: { - blockchain: "eth", - contractAddress: "0xbc4ca0eda7647a8ab7c2061c2e118a18a936f13d", - tokenId: "1234" - } - } as GetNFTMetadataContent - } as ActionExample, - { - user: "assistant", - content: { - text: "NFT Metadata for Bored Ape #1234:\n\n" + - "Collection: Bored Ape Yacht Club\n" + - "Contract: 0xbc4c...f13d (ERC721)\n\n" + - "Description: A unique Bored Ape NFT living on the Ethereum blockchain\n\n" + - "Traits:\n" + - "- Background: Blue\n" + - "- Fur: Dark Brown\n" + - "- Eyes: Bored\n" + - "- Mouth: Grin\n", - success: true, - data: { - metadata: { - blockchain: "eth", - contractAddress: "0xbc4ca0eda7647a8ab7c2061c2e118a18a936f13d", - contractType: "ERC721", - tokenId: "1234" - }, - attributes: { - contractType: "ERC721", - tokenUrl: "ipfs://QmeSjSinHpPnmXmspMjwiXyN6zS4E9zccariGR3jxcaWtq/1234", - imageUrl: "ipfs://QmRRPWG96cmgTn2qSzjwr2qvfNEuhunv6FNeMFGa9bx6mQ", - name: "Bored Ape #1234", - description: "A unique Bored Ape NFT living on the Ethereum blockchain", - traits: [ - { trait_type: "Background", value: "Blue" }, - { trait_type: "Fur", value: "Dark Brown" }, - { trait_type: "Eyes", value: "Bored" }, - { trait_type: "Mouth", value: "Grin" } - ] - } - } - } as GetNFTMetadataContent - } as ActionExample - ]], - // ------------------------------------------------------------------------------------------------ - // Core Validation implementation - // ------------------------------------------------------------------------------------------------ - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - if (message.content?.type !== "GET_NFT_METADATA_ANKR") { - return true; - } - - logGranular("Validating GET_NFT_METADATA_ANKR action", { - content: message.content - }); - - try { - const content = message.content as GetNFTMetadataContent; - - if (!content.filters?.blockchain || !content.filters?.contractAddress || !content.filters?.tokenId) { - throw new ValidationError("Blockchain, contract address, and token ID are required"); - } - - logGranular("Validation successful"); - return true; - } catch (error) { - logGranular("Validation failed", { error }); - if (error instanceof ValidationError) { - throw error; - } - throw new ValidationError(error instanceof Error ? error.message : "Unknown validation error"); - } - }, - // ------------------------------------------------------------------------------------------------ - // Core Handler implementation - // ------------------------------------------------------------------------------------------------ - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - logGranular("Executing GET_NFT_METADATA_ANKR action"); - - try { - const messageContent = message.content as GetNFTMetadataContent; - console.log("Debug - Full message content:", { - fullContent: message.content, - rawText: messageContent?.text, - type: message.content?.type, - allKeys: Object.keys(message.content || {}) - }); - - const config = await validateankrConfig(runtime); - console.log("Debug - Config validated:", { - hasWallet: !!config.ANKR_WALLET, - env: config.ANKR_ENV - }); - - const wallet = config.ANKR_WALLET; - if (!wallet) { - throw new ConfigurationError("ANKR_WALLET not found in environment variables"); - } - - const endpoint = `https://rpc.ankr.com/multichain/${wallet}`; - - // Parse the prompt using our API content parser - console.log("Debug - Raw prompt:", { - text: messageContent.text, - promptLength: messageContent.text?.length, - }); - - const parsedContent = parseAPIContent(messageContent.text); - console.log("Debug - Parsed API content:", { - hasContract: !!parsedContent.contract, - hasToken: !!parsedContent.token, - hasChain: !!parsedContent.chain, - contract: parsedContent.contract, - token: parsedContent.token, - chain: parsedContent.chain, - matches: parsedContent.raw.matches - }); - - // Validate required fields - validateRequiredFields(parsedContent, ['contract', 'token', 'chain']); - - // Prepare API request parameters - const requestParams = { - blockchain: parsedContent.chain, - contractAddress: parsedContent.contract, - tokenId: parsedContent.token - }; - - console.log("Debug - API request parameters:", { - params: requestParams, - endpoint: ANKR_ENDPOINTS.production.multichain - }); - - try { - const response = await axios.post( - endpoint, - { - jsonrpc: "2.0", - method: "ankr_getNFTMetadata", - params: requestParams, - id: 1 - }, - { - headers: { - 'Content-Type': 'application/json' - } - } - ); - - logGranular("Received response from Ankr API", { - statusCode: response.status, - data: response.data - }); - - if (response.data.error) { - throw new APIError(`Ankr API error: ${response.data.error.message}`); - } - - const nftData = response.data.result; - - // Format the response text - let formattedText = `NFT Metadata for ${nftData.attributes.name}:\n\n`; - formattedText += `Collection: ${nftData.attributes.name.split('#')[0].trim()}\n`; - formattedText += `Contract: ${nftData.metadata.contractAddress.slice(0, 6)}...${nftData.metadata.contractAddress.slice(-4)} (${nftData.metadata.contractType})\n\n`; - - if (nftData.attributes.description) { - formattedText += `Description: ${nftData.attributes.description}\n\n`; - } - - if (nftData.attributes.traits && nftData.attributes.traits.length > 0) { - formattedText += "Traits:\n"; - for (const trait of nftData.attributes.traits as { trait_type: string; value: string }[]) { - formattedText += `- ${trait.trait_type}: ${trait.value}\n`; - } - } - - if (nftData.attributes.imageUrl) { - formattedText += `\nImage URL: ${nftData.attributes.imageUrl}\n`; - } - - if (callback) { - logGranular("Sending success callback with formatted text", { formattedText }); - callback({ - text: formattedText, - success: true, - data: nftData - } as GetNFTMetadataContent); - } - - return true; - - } catch (error) { - logGranular("API request failed", { error }); - if (axios.isAxiosError(error)) { - throw new APIError( - `Failed to fetch NFT metadata: ${error.message}`, - error.response?.status - ); - } - throw new APIError("Failed to fetch NFT metadata"); - } - - } catch (error) { - logGranular("Handler execution failed", { error }); - - if (callback) { - const errorMessage = error instanceof Error ? error.message : "Unknown error occurred"; - callback({ - text: `Error getting NFT metadata: ${errorMessage}`, - success: false - } as GetNFTMetadataContent); - } - - if (error instanceof ConfigurationError || - error instanceof ValidationError || - error instanceof APIError) { - throw error; - } - - throw new APIError("Failed to execute GET_NFT_METADATA_ANKR action"); - } - }, - - -}; - -export default actionGetNFTMetadata; diff --git a/packages/plugin-ankr/src/actions/actionGetNFTTransfers.ts b/packages/plugin-ankr/src/actions/actionGetNFTTransfers.ts deleted file mode 100644 index 6afaa8e44cc37..0000000000000 --- a/packages/plugin-ankr/src/actions/actionGetNFTTransfers.ts +++ /dev/null @@ -1,346 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { Action, elizaLogger } from "@elizaos/core"; -import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import axios from 'axios'; -import { getConfig, validateankrConfig, ANKR_ENDPOINTS } from '../environment'; -import { APIError, ConfigurationError, ValidationError } from '../error/base'; -import { parseAPIContent, validateRequiredFields } from '../validator/apiParseValidation'; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ -// Get configuration for granular logging -const config = getConfig(); -const GRANULAR_LOG = config.ANKR_GRANULAR_LOG; - -// Enhanced logging helper -const logGranular = (message: string, data?: unknown) => { - if (GRANULAR_LOG) { - elizaLogger.debug(`[GetNFTTransfers] ${message}`, data); - console.log(`[GetNFTTransfers] ${message}`, data ? JSON.stringify(data, null, 2) : ''); - } -}; - -interface GetNFTTransfersContent extends Content { - text: string; - filters?: { - blockchain?: string; - contractAddress?: string; - fromTimestamp?: number; - toTimestamp?: number; - pageSize?: number; - pageToken?: string; - }; - success?: boolean; - data?: { - transfers: Array<{ - fromAddress: string; - toAddress: string; - contractAddress: string; - value: string; - valueRawInteger: string; - blockchain: string; - tokenName: string; - tokenSymbol: string; - tokenDecimals: number; - thumbnail: string; - transactionHash: string; - blockHeight: number; - timestamp: number; - }>; - syncStatus?: { - timestamp?: number; - lag?: string; - status?: string; - } | null; - }; -} - -// Add type definition for transfer -interface NFTTransfer { - tokenName: string; - tokenSymbol: string; - fromAddress: string; - toAddress: string; - value: string; - timestamp: number; - transactionHash: string; - thumbnail?: string; -} - -export const actionGetNFTTransfers: Action = { - name: "GET_NFT_TRANSFERS_ANKR", - similes: ["LIST_NFT_TRANSFERS", "SHOW_NFT_TRANSFERS", "VIEW_NFT_TRANSFERS", "GET_NFT_HISTORY"], - description: "Get NFT transfer history for a specific address or contract on eth.", - - - - // Fix the example data to match the interface - examples: [[ - { - user: "user", - content: { - text: "Show me NFT transfers for contract [contract]0x34d85c9cdeb23fa97cb08333b511ac86e1c4e258[/contract] [chain]eth[/chain] [fromtimestamp]1655197483[/fromtimestamp][totimestamp]1671974699[/totimestamp]", - filters: { - blockchain: "eth", - contractAddress: "0x34d85c9cdeb23fa97cb08333b511ac86e1c4e258", - pageSize: 5 - } - } as GetNFTTransfersContent - } as ActionExample, - { - user: "assistant", - content: { - text: "NFT Transfers:\n\n" + - "1. Transfer of Token #1234\n" + - " From: 0xabcd...ef01\n" + - " To: 0x9876...4321\n" + - " Time: 1/24/2024, 10:30:15 AM\n" + - " Token: CoolNFT #123\n\n" + - "2. Transfer of Token #456\n" + - " From: 0x9876...3210\n" + - " To: 0xfedc...ba98\n" + - " Time: 1/24/2024, 10:15:22 AM\n" + - " Token: CoolNFT #456\n", - success: true, - data: { - transfers: [ - { - fromAddress: "0xabcdef0123456789abcdef0123456789abcdef01", - toAddress: "0x9876543210fedcba9876543210fedcba98765432", - contractAddress: "0x34d85c9cdeb23fa97cb08333b511ac86e1c4e258", - value: "1", - valueRawInteger: "1", - blockchain: "eth", - tokenName: "CoolNFT", - tokenSymbol: "COOL", - tokenDecimals: 18, - thumbnail: "https://example.com/nft/123.png", - transactionHash: "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef", - blockHeight: 123456789, - timestamp: 1706093415 - }, - { - fromAddress: "0x9876543210987654321098765432109876543210", - toAddress: "0xfedcba9876543210fedcba9876543210fedcba98", - contractAddress: "0x34d85c9cdeb23fa97cb08333b511ac86e1c4e258", - value: "1", - valueRawInteger: "1", - blockchain: "eth", - tokenName: "CoolNFT", - tokenSymbol: "COOL", - tokenDecimals: 18, - thumbnail: "https://example.com/nft/456.png", - transactionHash: "0x9876543210fedcba9876543210fedcba9876543210fedcba9876543210fedcba", - blockHeight: 123456788, - timestamp: 1706092522 - } - ], - syncStatus: { - timestamp: 1706093415, - lag: "0s", - status: "synced" - } - } - } as GetNFTTransfersContent - } as ActionExample - ]], - // ------------------------------------------------------------------------------------------------ - // Core Validation implementation - // ------------------------------------------------------------------------------------------------ - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - if (message.content?.type !== "GET_NFT_TRANSFERS_ANKR") { - return true; - } - - logGranular("Validating GET_NFT_TRANSFERS_ANKR action", { - content: message.content - }); - - try { - const content = message.content as GetNFTTransfersContent; - - if (!content.filters?.blockchain || !content.filters?.contractAddress) { - throw new ValidationError("Blockchain and contract address are required"); - } - - logGranular("Validation successful"); - return true; - } catch (error) { - logGranular("Validation failed", { error }); - if (error instanceof ValidationError) { - throw error; - } - throw new ValidationError(error instanceof Error ? error.message : "Unknown validation error"); - } - }, - // ------------------------------------------------------------------------------------------------ - // Core Handler implementation - // ------------------------------------------------------------------------------------------------ - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - logGranular("Executing GET_NFT_TRANSFERS_ANKR action"); - - try { - const messageContent = message.content as GetNFTTransfersContent; - console.log("Debug - Full message content:", { - fullContent: message.content, - rawText: messageContent?.text, - type: message.content?.type, - allKeys: Object.keys(message.content || {}) - }); - - console.log("Debug - Message content details:", { - hasText: !!messageContent?.text, - hasFilters: !!messageContent?.filters, - textContent: messageContent?.text, - contentType: typeof messageContent?.text - }); - - const config = await validateankrConfig(runtime); - console.log("Debug - Config validated:", { - hasWallet: !!config.ANKR_WALLET, - env: config.ANKR_ENV - }); - - const wallet = config.ANKR_WALLET; - if (!wallet) { - throw new ConfigurationError("ANKR_WALLET not found in environment variables"); - } - - const endpoint = `https://rpc.ankr.com/multichain/${wallet}`; - - // Parse the prompt using our API content parser - console.log("Debug - Raw prompt:", { - text: messageContent.text, - promptLength: messageContent.text?.length, - }); - - const parsedContent = parseAPIContent(messageContent.text); - console.log("Debug - Parsed API content:", { - hasContract: !!parsedContent.contract, - hasChain: !!parsedContent.chain, - hasFromTimestamp: !!parsedContent.fromTimestamp, - hasToTimestamp: !!parsedContent.toTimestamp, - contract: parsedContent.contract, - chain: parsedContent.chain, - fromTimestamp: parsedContent.fromTimestamp, - toTimestamp: parsedContent.toTimestamp, - matches: parsedContent.raw.matches - }); - - // Validate required fields - validateRequiredFields(parsedContent, ['contract', 'chain', 'fromTimestamp', 'toTimestamp']); - - // Prepare API request parameters - const requestParams = { - address: parsedContent.contract, - blockchain: [parsedContent.chain], - fromTimestamp: parsedContent.fromTimestamp, - toTimestamp: parsedContent.toTimestamp - }; - - console.log("Debug - API request parameters:", { - params: requestParams, - endpoint: ANKR_ENDPOINTS.production.multichain - }); - - try { - const response = await axios.post( - endpoint, - { - jsonrpc: "2.0", - method: "ankr_getTokenTransfers", - params: requestParams, - id: 1 - }, - { - headers: { - 'Content-Type': 'application/json' - } - } - ); - - logGranular("Received response from Ankr API", { - statusCode: response.status, - data: response.data - }); - - if (response.data.error) { - throw new APIError(`Ankr API error: ${response.data.error.message}`); - } - - const transfers = response.data.result.transfers; - - // Format the response text - let formattedText = "Token Transfers:\n\n"; - transfers.forEach((transfer: NFTTransfer, index: number) => { - formattedText += `${index + 1}. Transfer of ${transfer.tokenName} (${transfer.tokenSymbol})\n`; - formattedText += ` From: ${transfer.fromAddress.slice(0, 6)}...${transfer.fromAddress.slice(-4)}\n`; - formattedText += ` To: ${transfer.toAddress.slice(0, 6)}...${transfer.toAddress.slice(-4)}\n`; - formattedText += ` Amount: ${transfer.value}\n`; - formattedText += ` Time: ${new Date(transfer.timestamp * 1000).toLocaleString()}\n`; - formattedText += ` Tx Hash: ${transfer.transactionHash}\n`; - if (transfer.thumbnail) { - formattedText += ` Token Icon: ${transfer.thumbnail}\n`; - } - formattedText += "\n"; - }); - - if (callback) { - logGranular("Sending success callback with formatted text", { formattedText }); - callback({ - text: formattedText, - success: true, - data: { - transfers, - syncStatus: response.data.result.syncStatus - } - } as GetNFTTransfersContent); - } - - return true; - - } catch (error) { - logGranular("API request failed", { error }); - if (axios.isAxiosError(error)) { - throw new APIError( - `Failed to fetch NFT transfers: ${error.message}`, - error.response?.status - ); - } - throw new APIError("Failed to fetch NFT transfers"); - } - - } catch (error) { - logGranular("Handler execution failed", { error }); - - if (callback) { - const errorMessage = error instanceof Error ? error.message : "Unknown error occurred"; - callback({ - text: `Error getting NFT transfers: ${errorMessage}`, - success: false - } as GetNFTTransfersContent); - } - - if (error instanceof ConfigurationError || - error instanceof ValidationError || - error instanceof APIError) { - throw error; - } - - throw new APIError("Failed to execute GET_NFT_TRANSFERS_ANKR action"); - } - } -}; - -export default actionGetNFTTransfers; diff --git a/packages/plugin-ankr/src/actions/actionGetNFTsByOwner.ts b/packages/plugin-ankr/src/actions/actionGetNFTsByOwner.ts deleted file mode 100644 index 13b8c1d139324..0000000000000 --- a/packages/plugin-ankr/src/actions/actionGetNFTsByOwner.ts +++ /dev/null @@ -1,322 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { Action, elizaLogger } from "@elizaos/core"; -import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import axios from 'axios'; -import { getConfig, validateankrConfig, ANKR_ENDPOINTS } from '../environment'; -import { APIError, ConfigurationError, ValidationError } from '../error/base'; -import { parseAPIContent, validateRequiredFields } from '../validator/apiParseValidation'; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ -// Get configuration for granular logging -const config = getConfig(); -const GRANULAR_LOG = config.ANKR_GRANULAR_LOG; - -// Enhanced logging helper -const logGranular = (message: string, data?: unknown) => { - if (GRANULAR_LOG) { - elizaLogger.debug(`[GetNFTsByOwner] ${message}`, data); - console.log(`[GetNFTsByOwner] ${message}`, data ? JSON.stringify(data, null, 2) : ''); - } -}; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ - -interface GetNFTsByOwnerContent extends Content { - text: string; - filters?: { - blockchain?: string[]; - walletAddress?: string; - pageSize?: number; - pageToken?: string; - }; - success?: boolean; - data?: { - owner: string; - assets: Array<{ - blockchain: string; - name: string; - tokenId: string; - tokenUrl: string; - imageUrl: string; - collectionName: string; - symbol: string; - contractType: string; - contractAddress: string; - quantity?: string; // Added for ERC1155 support - }>; - syncStatus?: { - timestamp?: number; - lag?: string; - status?: string; - } | null; - }; -} - -type NFTAsset = { - blockchain: string; - name: string; - tokenId: string; - tokenUrl: string; - imageUrl: string; - collectionName: string; - symbol: string; - contractType: string; - contractAddress: string; - quantity?: string; -}; - -// ------------------------------------------------------------------------------------------------ -// Core Action implementation -// ------------------------------------------------------------------------------------------------ -export const actionGetNFTsByOwner: Action = { - name: "GET_NFTS_BY_OWNER_ANKR", - similes: ["LIST_NFTS", "SHOW_NFTS", "VIEW_NFTS", "FETCH_NFTS", "GET_OWNED_NFTS"], - description: "Retrieve all NFTs owned by a specific wallet address across multiple blockchains with detailed metadata.", - examples: [[ - { - user: "user", - content: { - text: "Show me all NFTs owned by wallet [wallet]0x1234567890123456789012345678901234567890[/wallet] on [chain]eth[/chain]", - filters: { - blockchain: ["eth"], - walletAddress: "0x1234567890123456789012345678901234567890", - pageSize: 10 - } - } as GetNFTsByOwnerContent - } as ActionExample, - { - user: "assistant", - content: { - text: "NFTs owned by 0x1234567890123456789012345678901234567890:\n\n" + - "1. Bored Ape #1234\n" + - " Collection: Bored Ape Yacht Club\n" + - " Contract: 0xbc4c...f13d\n" + - " Token ID: 1234\n\n" + - "2. CryptoPunk #5678\n" + - " Collection: CryptoPunks\n" + - " Contract: 0x2505...42a2\n" + - " Token ID: 5678\n", - success: true, - data: { - owner: "0x1234567890123456789012345678901234567890", - assets: [ - { - blockchain: "eth", - name: "Bored Ape #1234", - tokenId: "1234", - tokenUrl: "ipfs://QmeSjSinHpPnmXmspMjwiXyN6zS4E9zccariGR3jxcaWtq/1234", - imageUrl: "ipfs://QmRRPWG96cmgTn2qSzjwr2qvfNEuhunv6FNeMFGa9bx6mQ", - collectionName: "Bored Ape Yacht Club", - symbol: "BAYC", - contractType: "ERC721", - contractAddress: "0xbc4ca0eda7647a8ab7c2061c2e118a18a936f13d" - }, - { - blockchain: "eth", - name: "CryptoPunk #5678", - tokenId: "5678", - tokenUrl: "https://cryptopunks.app/cryptopunks/details/5678", - imageUrl: "https://cryptopunks.app/cryptopunks/image/5678", - collectionName: "CryptoPunks", - symbol: "PUNK", - contractType: "ERC721", - contractAddress: "0x2505...42a2" - } - ] - } - } as GetNFTsByOwnerContent - } as ActionExample - ]], - // ------------------------------------------------------------------------------------------------ - // Core Validation implementation - // ------------------------------------------------------------------------------------------------ - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - if (message.content?.type !== "GET_NFTS_BY_OWNER_ANKR") { - return true; - } - - logGranular("Validating GET_NFTS_BY_OWNER_ANKR action", { - content: message.content - }); - - try { - const content = message.content as GetNFTsByOwnerContent; - - if (!content.filters?.blockchain || !content.filters?.walletAddress) { - throw new ValidationError("Blockchain and wallet address are required"); - } - - if (content.filters?.blockchain && !Array.isArray(content.filters.blockchain)) { - throw new ValidationError("Blockchain must be an array"); - } - - logGranular("Validation successful"); - return true; - } catch (error) { - logGranular("Validation failed", { error }); - if (error instanceof ValidationError) { - throw error; - } - throw new ValidationError(error instanceof Error ? error.message : "Unknown validation error"); - } - }, - // ------------------------------------------------------------------------------------------------ - // Core Handler implementation - // ------------------------------------------------------------------------------------------------ - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - logGranular("Executing GET_NFTS_BY_OWNER_ANKR action"); - - try { - const messageContent = message.content as GetNFTsByOwnerContent; - console.log("Debug - Full message content:", { - fullContent: message.content, - rawText: messageContent?.text, - type: message.content?.type - }); - - const config = await validateankrConfig(runtime); - console.log("Debug - Config validated:", { - hasWallet: !!config.ANKR_WALLET, - env: config.ANKR_ENV - }); - const wallet = config.ANKR_WALLET; - if (!wallet) { - throw new ConfigurationError("ANKR_WALLET not found in environment variables"); - } - - const endpoint = `https://rpc.ankr.com/multichain/${wallet}`; - - // Parse the prompt using our API content parser - const parsedContent = parseAPIContent(messageContent.text); - console.log("Debug - Parsed API content:", { - hasWallet: !!parsedContent.wallet, - hasChain: !!parsedContent.chain, - wallet: parsedContent.wallet, - chain: parsedContent.chain, - matches: parsedContent.raw.matches - }); - - // Validate required fields - validateRequiredFields(parsedContent, ['wallet', 'chain']); - - // Prepare API request parameters - const requestParams = { - blockchain: [parsedContent.chain], // API expects array - walletAddress: parsedContent.wallet, - pageSize: messageContent.filters?.pageSize ?? 10, - pageToken: messageContent.filters?.pageToken - }; - - console.log("Debug - API request parameters:", requestParams); - - try { - const response = await axios.post( - endpoint, - { - jsonrpc: "2.0", - method: "ankr_getNFTsByOwner", - params: requestParams, - id: 1 - }, - { - headers: { - 'Content-Type': 'application/json' - } - } - ); - - logGranular("Received response from Ankr API", { - statusCode: response.status, - data: response.data - }); - - if (response.data.error) { - throw new APIError(`Ankr API error: ${response.data.error.message}`); - } - - const { owner, assets, syncStatus } = response.data.result; - - // Format the response text - let formattedText = `NFTs owned by ${owner}:\n\n`; - - for (const [index, nft] of (assets as NFTAsset[]).entries()) { - formattedText += `${index + 1}. ${nft.name || 'Unnamed NFT'}\n`; - if (nft.collectionName) { - formattedText += ` Collection: ${nft.collectionName}\n`; - } - formattedText += ` Contract: ${nft.contractAddress.slice(0, 6)}...${nft.contractAddress.slice(-4)} (${nft.contractType})\n`; - formattedText += ` Token ID: ${nft.tokenId}\n`; - if (nft.quantity) { - formattedText += ` Quantity: ${nft.quantity}\n`; - } - if (nft.tokenUrl) { - formattedText += ` Metadata URL: ${nft.tokenUrl}\n`; - } - formattedText += '\n'; - } - - if (callback) { - logGranular("Sending success callback with formatted text", { formattedText }); - callback({ - text: formattedText, - success: true, - data: { - owner, - assets, - syncStatus - } - } as GetNFTsByOwnerContent); - } - - return true; - - } catch (error) { - logGranular("API request failed", { error }); - if (axios.isAxiosError(error)) { - throw new APIError( - `Failed to fetch NFTs data: ${error.message}`, - error.response?.status - ); - } - throw new APIError("Failed to fetch NFTs data"); - } - - } catch (error) { - logGranular("Handler execution failed", { error }); - - if (callback) { - const errorMessage = error instanceof Error ? error.message : "Unknown error occurred"; - callback({ - text: `Error getting NFTs: ${errorMessage}`, - success: false - } as GetNFTsByOwnerContent); - } - - if (error instanceof ConfigurationError || - error instanceof ValidationError || - error instanceof APIError) { - throw error; - } - - throw new APIError("Failed to execute GET_NFTS_BY_OWNER_ANKR action"); - } - }, - - -}; - -export default actionGetNFTsByOwner; - diff --git a/packages/plugin-ankr/src/actions/actionGetTokenHolders.ts b/packages/plugin-ankr/src/actions/actionGetTokenHolders.ts deleted file mode 100644 index 185b09aeb8e70..0000000000000 --- a/packages/plugin-ankr/src/actions/actionGetTokenHolders.ts +++ /dev/null @@ -1,297 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { Action, elizaLogger } from "@elizaos/core"; -import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import axios from 'axios'; -import { getConfig, validateankrConfig, ANKR_ENDPOINTS } from '../environment'; -import { APIError, ConfigurationError, ValidationError } from '../error/base'; -import { parseAPIContent, validateRequiredFields } from '../validator/apiParseValidation'; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ -// Get configuration for granular logging -const config = getConfig(); -const GRANULAR_LOG = config.ANKR_GRANULAR_LOG; - -// Enhanced logging helper -const logGranular = (message: string, data?: unknown) => { - if (GRANULAR_LOG) { - elizaLogger.debug(`[GetTokenHolders] ${message}`, data); - console.log(`[GetTokenHolders] ${message}`, data ? JSON.stringify(data, null, 2) : ''); - } -}; - -interface GetTokenHoldersContent extends Content { - text: string; - filters?: { - blockchain?: string; - contractAddress?: string; - pageSize?: number; - pageToken?: string; - }; - success?: boolean; - data?: { - nextPageToken: string; - blockchain: string; - contractAddress: string; - tokenDecimals: number; - holders: Array<{ - holderAddress: string; - balance: string; - balanceRawInteger: string; - }>; - holdersCount: number; - syncStatus: { - timestamp: number; - lag: string; - status: string; - }; - }; -} - -// Define holder type -type TokenHolder = { - holderAddress: string; - balance: string; - balanceRawInteger: string; -}; - -export const actionGetTokenHolders: Action = { - name: "GET_TOKEN_HOLDERS_ANKR", - similes: ["LIST_HOLDERS", "SHOW_HOLDERS", "TOKEN_HOLDERS", "FIND_HOLDERS"], - description: "Get a list of token holders for any ERC20 or ERC721 token contract.", - examples: [[ - { - user: "user", - content: { - text: "Show me holders for contract [contract]0xf307910A4c7bbc79691fD374889b36d8531B08e3[/contract] on [chain]bsc[/chain]", - } as GetTokenHoldersContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Token Holders on BSC:\n" + - "Total Holders: 1,234\n\n" + - "1. 0xabcd...ef01\n" + - " Balance: 1,000,000\n\n" + - "2. 0x1234...5678\n" + - " Balance: 500,000\n\n" + - "3. 0x9876...4321\n" + - " Balance: 250,000\n\n" + - "\nSync Status: completed (0s)", - success: true, - data: { - nextPageToken: "eyJsYXN0X2Jsb2NrIjoiMTIzNDU2Nzg4In0=", - blockchain: "bsc", - contractAddress: "0xf307910A4c7bbc79691fD374889b36d8531B08e3", - tokenDecimals: 18, - holders: [ - { - holderAddress: "0xabcdef0123456789abcdef0123456789abcdef01", - balance: "1000000", - balanceRawInteger: "1000000000000000000000000" - }, - { - holderAddress: "0x1234567890123456789012345678901234567890", - balance: "500000", - balanceRawInteger: "500000000000000000000000" - }, - { - holderAddress: "0x9876543210987654321098765432109876543210", - balance: "250000", - balanceRawInteger: "250000000000000000000000" - } - ], - holdersCount: 1234, - syncStatus: { - timestamp: 1706093415, - lag: "0s", - status: "completed" - } - } - } as GetTokenHoldersContent - } as ActionExample - ]], - - // ------------------------------------------------------------------------------------------------ - // Core Validation implementation - // ------------------------------------------------------------------------------------------------ - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - if (message.content?.type !== "GET_TOKEN_HOLDERS_ANKR") { - return true; - } - - logGranular("Validating GET_TOKEN_HOLDERS_ANKR action", { - content: message.content - }); - - try { - const content = message.content as GetTokenHoldersContent; - const parsedContent = parseAPIContent(content.text); - - if (!parsedContent.chain || !parsedContent.contract) { - throw new ValidationError("Blockchain and contract address are required"); - } - - logGranular("Validation successful"); - return true; - } catch (error) { - logGranular("Validation failed", { error }); - if (error instanceof ValidationError) { - throw error; - } - throw new ValidationError(error instanceof Error ? error.message : "Unknown validation error"); - } - }, - - // Fix the handler to use proper types - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - logGranular("Executing GET_TOKEN_HOLDERS_ANKR action"); - - try { - const messageContent = message.content as GetTokenHoldersContent; - console.log("Debug - Full message content:", { - fullContent: message.content, - rawText: messageContent?.text, - type: message.content?.type, - allKeys: Object.keys(message.content || {}) - }); - const config = await validateankrConfig(runtime); - console.log("Debug - Config validated:", { - hasWallet: !!config.ANKR_WALLET, - env: config.ANKR_ENV - }); - - const wallet = config.ANKR_WALLET; - if (!wallet) { - throw new ConfigurationError("ANKR_WALLET not found in environment variables"); - } - - const endpoint = `https://rpc.ankr.com/multichain/${wallet}`; - - // Parse the prompt using our API content parser - console.log("Debug - Raw prompt:", { - text: messageContent.text, - promptLength: messageContent.text?.length, - }); - // Parse the prompt using our API content parser - const parsedContent = parseAPIContent(messageContent.text); - console.log("Debug - Parsed API content:", { - hasContract: !!parsedContent.contract, - hasChain: !!parsedContent.chain, - contract: parsedContent.contract, - chain: parsedContent.chain, - matches: parsedContent.raw.matches - }); - - // Validate required fields - validateRequiredFields(parsedContent, ['contract', 'chain']); - - // Prepare API request parameters - const requestParams = { - blockchain: parsedContent.chain, - contractAddress: parsedContent.contract, - pageSize: 10 - }; - - try { - const response = await axios.post( - endpoint, - { - jsonrpc: "2.0", - method: "ankr_getTokenHolders", - params: requestParams, - id: 1 - }, - { - headers: { - 'Content-Type': 'application/json' - } - } - ); - - logGranular("Received response from Ankr API", { - statusCode: response.status, - data: response.data - }); - - if (response.data.error) { - throw new APIError(`Ankr API error: ${response.data.error.message}`); - } - - const result = response.data.result; - const holders = result.holders as Array<{ - holderAddress: string; - balance: string; - balanceRawInteger: string; - }>; - - // Format the response text - let formattedText = `Token Holders on ${parsedContent.chain?.toUpperCase() || 'UNKNOWN'}:\n`; - formattedText += `Total Holders: ${result.holdersCount.toLocaleString()}\n\n`; - - holders.forEach((holder: TokenHolder, index: number) => { - const balance = Number(holder.balance).toLocaleString(); - formattedText += `${index + 1}. ${holder.holderAddress.slice(0, 6)}...${holder.holderAddress.slice(-4)}\n`; - formattedText += ` Balance: ${balance}\n\n`; - }); - - if (result.syncStatus) { - formattedText += `\nSync Status: ${result.syncStatus.status} (${result.syncStatus.lag})\n`; - } - - if (callback) { - logGranular("Sending success callback with formatted text", { formattedText }); - callback({ - text: formattedText, - success: true, - data: result - } as GetTokenHoldersContent); - } - - return true; - - } catch (error) { - logGranular("API request failed", { error }); - if (axios.isAxiosError(error)) { - throw new APIError( - `Failed to fetch token holders: ${error.message}`, - error.response?.status - ); - } - throw new APIError("Failed to fetch token holders"); - } - - } catch (error) { - logGranular("Handler execution failed", { error }); - - if (callback) { - const errorMessage = error instanceof Error ? error.message : "Unknown error occurred"; - callback({ - text: `Error getting token holders: ${errorMessage}`, - success: false - } as GetTokenHoldersContent); - } - - if (error instanceof ConfigurationError || - error instanceof ValidationError || - error instanceof APIError) { - throw error; - } - - throw new APIError("Failed to execute GET_TOKEN_HOLDERS_ANKR action"); - } - } -}; - -export default actionGetTokenHolders; diff --git a/packages/plugin-ankr/src/actions/actionGetTokenHoldersCount.ts b/packages/plugin-ankr/src/actions/actionGetTokenHoldersCount.ts deleted file mode 100644 index b5b4f062b7544..0000000000000 --- a/packages/plugin-ankr/src/actions/actionGetTokenHoldersCount.ts +++ /dev/null @@ -1,286 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { Action, elizaLogger } from "@elizaos/core"; -import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import axios from 'axios'; -import { getConfig, validateankrConfig, ANKR_ENDPOINTS } from '../environment'; -import { APIError, ConfigurationError, ValidationError } from '../error/base'; -import { parseAPIContent, validateRequiredFields } from '../validator/apiParseValidation'; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ -// Get configuration for granular logging -const config = getConfig(); -const GRANULAR_LOG = config.ANKR_GRANULAR_LOG; - -// Enhanced logging helper -const logGranular = (message: string, data?: unknown) => { - if (GRANULAR_LOG) { - elizaLogger.debug(`[GetTokenHoldersCount] ${message}`, data); - console.log(`[GetTokenHoldersCount] ${message}`, data ? JSON.stringify(data, null, 2) : ''); - } -}; - -interface GetTokenHoldersCountContent extends Content { - text: string; - filters?: { - blockchain?: string; - contractAddress?: string; - }; - success?: boolean; - data?: { - blockchain: string; - contractAddress: string; - tokenDecimals: number; - holderCountHistory: Array<{ - holderCount: number; - totalAmount: string; - totalAmountRawInteger: string; - lastUpdatedAt: string; - }>; - latestHoldersCount: number; - syncStatus: { - timestamp: number; - lag: string; - status: string; - }; - }; -} - -export const actionGetTokenHoldersCount: Action = { - name: "GET_TOKEN_HOLDERS_COUNT_ANKR", - similes: ["COUNT_HOLDERS", "TOTAL_HOLDERS", "HOLDERS_COUNT", "NUMBER_OF_HOLDERS"], - description: "Get the total number of holders and historical data for a specific token.", - // Fix the example data to match the interface - examples: [[ - { - user: "user", - content: { - text: "How many holders does [contract]0xdAC17F958D2ee523a2206206994597C13D831ec7[/contract] have? [chain]eth[/chain]", - filters: { - blockchain: "eth", - contractAddress: "0xdAC17F958D2ee523a2206206994597C13D831ec7" - } - } as GetTokenHoldersCountContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Token Holders Count on ETH:\n\n" + - "Current Holders: 500,000\n\n" + - "Historical Data:\n" + - "1. 1/24/2024\n" + - " Holders: 500,000\n" + - " Total Amount: 1,000,000\n\n" + - "Sync Status: completed (0s)", - success: true, - data: { - blockchain: "eth", - contractAddress: "0xdAC17F958D2ee523a2206206994597C13D831ec7", - tokenDecimals: 18, - holderCountHistory: [ - { - holderCount: 500000, - totalAmount: "1000000", - totalAmountRawInteger: "1000000000000000000000000", - lastUpdatedAt: "2024-01-24T10:30:15Z" - } - ], - latestHoldersCount: 500000, - syncStatus: { - timestamp: 1706093415, - lag: "0s", - status: "completed" - } - } - } as GetTokenHoldersCountContent - } as ActionExample - ]], - // ------------------------------------------------------------------------------------------------ - // Core Validation implementation - // ------------------------------------------------------------------------------------------------ - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - if (message.content?.type !== "GET_TOKEN_HOLDERS_COUNT_ANKR") { - return true; - } - - logGranular("Validating GET_TOKEN_HOLDERS_COUNT_ANKR action", { - content: message.content - }); - - try { - const content = message.content as GetTokenHoldersCountContent; - const parsedContent = parseAPIContent(content.text); - - if (!parsedContent.chain || !parsedContent.contract) { - throw new ValidationError("Blockchain and contract address are required"); - } - - logGranular("Validation successful"); - return true; - } catch (error) { - logGranular("Validation failed", { error }); - if (error instanceof ValidationError) { - throw error; - } - throw new ValidationError(error instanceof Error ? error.message : "Unknown validation error"); - } - }, - // ------------------------------------------------------------------------------------------------ - // Core Handler implementation - // ------------------------------------------------------------------------------------------------ - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - logGranular("Executing GET_TOKEN_HOLDERS_COUNT_ANKR action"); - - try { - const messageContent = message.content as GetTokenHoldersCountContent; - console.log("Debug - Full message content:", { - fullContent: message.content, - rawText: messageContent?.text, - type: message.content?.type, - allKeys: Object.keys(message.content || {}) - }); - const config = await validateankrConfig(runtime); - console.log("Debug - Config validated:", { - hasWallet: !!config.ANKR_WALLET, - env: config.ANKR_ENV - }); - - const wallet = config.ANKR_WALLET; - if (!wallet) { - throw new ConfigurationError("ANKR_WALLET not found in environment variables"); - } - - const endpoint = `https://rpc.ankr.com/multichain/${wallet}`; - - // Parse the prompt using our API content parser - console.log("Debug - Raw prompt:", { - text: messageContent.text, - promptLength: messageContent.text?.length, - }); - // Parse the prompt using our API content parser - const parsedContent = parseAPIContent(messageContent.text); - console.log("Debug - Parsed API content:", { - hasContract: !!parsedContent.contract, - hasChain: !!parsedContent.chain, - contract: parsedContent.contract, - chain: parsedContent.chain, - matches: parsedContent.raw.matches - }); - - // Validate required fields - validateRequiredFields(parsedContent, ['contract', 'chain']); - - // Prepare API request parameters - const requestParams = { - blockchain: parsedContent.chain, - contractAddress: parsedContent.contract, - pageSize: 10 - }; - - try { - const response = await axios.post( - endpoint, - { - jsonrpc: "2.0", - method: "ankr_getTokenHoldersCount", - params: requestParams, - id: 1 - }, - { - headers: { - 'Content-Type': 'application/json' - } - } - ); - - logGranular("Received response from Ankr API", { - statusCode: response.status, - data: response.data - }); - - if (response.data.error) { - throw new APIError(`Ankr API error: ${response.data.error.message}`); - } - - const result = response.data.result; - - // Format the response text - let formattedText = `Token Holders Count on ${parsedContent.chain?.toUpperCase() || 'UNKNOWN'}:\n\n`; - formattedText += `Current Holders: ${result.latestHoldersCount.toLocaleString()}\n\n`; - formattedText += "Historical Data:\n"; - - result.holderCountHistory.forEach((history: { - holderCount: number; - totalAmount: string; - totalAmountRawInteger: string; - lastUpdatedAt: string; - }, index: number) => { - const date = new Date(history.lastUpdatedAt).toLocaleDateString(); - formattedText += ` -${index + 1}. ${date} - Holders: ${history.holderCount.toLocaleString()} - Total Amount: ${Number(history.totalAmount).toLocaleString()}`; - }); - - if (result.syncStatus) { - formattedText += ` - -Sync Status: ${result.syncStatus.status} (${result.syncStatus.lag})`; - } - - if (callback) { - logGranular("Sending success callback with formatted text", { formattedText }); - callback({ - text: formattedText, - success: true, - data: result - } as GetTokenHoldersCountContent); - } - - return true; - - } catch (error) { - logGranular("API request failed", { error }); - if (axios.isAxiosError(error)) { - throw new APIError( - `Failed to fetch token holders count: ${error.message}`, - error.response?.status - ); - } - throw new APIError("Failed to fetch token holders count"); - } - - } catch (error) { - logGranular("Handler execution failed", { error }); - - if (callback) { - const errorMessage = error instanceof Error ? error.message : "Unknown error occurred"; - callback({ - text: `Error getting token holders count: ${errorMessage}`, - success: false - } as GetTokenHoldersCountContent); - } - - if (error instanceof ConfigurationError || - error instanceof ValidationError || - error instanceof APIError) { - throw error; - } - - throw new APIError("Failed to execute GET_TOKEN_HOLDERS_COUNT_ANKR action"); - } - } -}; - -export default actionGetTokenHoldersCount; diff --git a/packages/plugin-ankr/src/actions/actionGetTokenPrice.ts b/packages/plugin-ankr/src/actions/actionGetTokenPrice.ts deleted file mode 100644 index f636c82184bc7..0000000000000 --- a/packages/plugin-ankr/src/actions/actionGetTokenPrice.ts +++ /dev/null @@ -1,236 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { Action, elizaLogger } from "@elizaos/core"; -import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import axios from 'axios'; -import { getConfig, validateankrConfig, ANKR_ENDPOINTS } from '../environment'; -import { APIError, ConfigurationError, ValidationError } from '../error/base'; -import { parseAPIContent, validateRequiredFields } from '../validator/apiParseValidation'; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ -// Get configuration for granular logging -const config = getConfig(); -const GRANULAR_LOG = config.ANKR_GRANULAR_LOG; - -// Enhanced logging helper -const logGranular = (message: string, data?: unknown) => { - if (GRANULAR_LOG) { - elizaLogger.debug(`[GetTokenPrice] ${message}`, data); - console.log(`[GetTokenPrice] ${message}`, data ? JSON.stringify(data, null, 2) : ''); - } -}; - -interface GetTokenPriceContent extends Content { - text: string; - filters?: { - blockchain?: string; - contractAddress?: string; - }; - success?: boolean; - data?: { - blockchain: string; - contractAddress: string; - usdPrice: string; - syncStatus: { - timestamp: number; - lag: string; - status: string; - }; - }; -} - -export const actionGetTokenPrice: Action = { - name: "GET_TOKEN_PRICE_ANKR", - similes: ["CHECK_PRICE", "TOKEN_PRICE", "CRYPTO_PRICE", "PRICE_CHECK"], - description: "Get the current USD price for any token on eth blockchain.", - - - - examples: [[ - { - user: "user", - content: { - text: "What's the current price of [contract]0x8290333cef9e6d528dd5618fb97a76f268f3edd4[/contract] token [chain]eth[/chain]", - filters: { - blockchain: "eth", - contractAddress: "0x8290333cef9e6d528dd5618fb97a76f268f3edd4" - } - } as GetTokenPriceContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Current token price on eth:\n\n" + - "Price: $0.03024 USD\n" + - "Contract: 0x8290...3edd4\n" + - "Sync Status: synced (lag: -8s)", - success: true, - data: { - blockchain: "eth", - contractAddress: "0x8290333cef9e6d528dd5618fb97a76f268f3edd4", - usdPrice: "0.030239944206509556547", - syncStatus: { - timestamp: 1737760907, - lag: "-8s", - status: "synced" - } - } - } as GetTokenPriceContent - } as ActionExample - ]], - // ------------------------------------------------------------------------------------------------ - // Core Validation implementation - // ------------------------------------------------------------------------------------------------ - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - if (message.content?.type !== "GET_TOKEN_PRICE_ANKR") { - return true; - } - - logGranular("Validating GET_TOKEN_PRICE_ANKR action", { - content: message.content - }); - - try { - const content = message.content as GetTokenPriceContent; - const parsedContent = parseAPIContent(content.text); - - if (!parsedContent.chain || !parsedContent.contract) { - throw new ValidationError("Blockchain and contract address are required"); - } - - logGranular("Validation successful"); - return true; - } catch (error) { - logGranular("Validation failed", { error }); - if (error instanceof ValidationError) { - throw error; - } - throw new ValidationError(error instanceof Error ? error.message : "Unknown validation error"); - } - }, - // ------------------------------------------------------------------------------------------------ - // Core Handler implementation - // ------------------------------------------------------------------------------------------------ - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - logGranular("Executing GET_TOKEN_PRICE_ANKR action"); - - try { - const messageContent = message.content as GetTokenPriceContent; - console.log("Debug - Full message content:", { - fullContent: message.content, - rawText: messageContent?.text, - type: message.content?.type, - allKeys: Object.keys(message.content || {}) - }); - const config = await validateankrConfig(runtime); - console.log("Debug - Config validated:", { - hasWallet: !!config.ANKR_WALLET, - env: config.ANKR_ENV - }); - - const wallet = config.ANKR_WALLET; - if (!wallet) { - throw new ConfigurationError("ANKR_WALLET not found in environment variables"); - } - - const endpoint = `https://rpc.ankr.com/multichain/${wallet}`; - - // Parse the prompt using our API content parser - console.log("Debug - Raw prompt:", { - text: messageContent.text, - promptLength: messageContent.text?.length, - }); - const parsedContent = parseAPIContent(messageContent.text); - validateRequiredFields(parsedContent, ['contract', 'chain']); - - try { - const response = await axios.post( - endpoint, - { - jsonrpc: "2.0", - method: "ankr_getTokenPrice", - params: { - blockchain: parsedContent.chain, - contractAddress: parsedContent.contract - }, - id: 1 - }, - { - headers: { - 'Content-Type': 'application/json' - } - } - ); - - logGranular("Received response from Ankr API", { - statusCode: response.status, - data: response.data - }); - - if (response.data.error) { - throw new APIError(`Ankr API error: ${response.data.error.message}`); - } - - const result = response.data.result; - const price = Number(result.usdPrice).toFixed(5); - - const formattedText = `Current token price on ${parsedContent.chain}:\n\n` + - `Price: $${price} USD\n` + - `Contract: ${result.contractAddress.slice(0, 6)}...${result.contractAddress.slice(-4)}\n` + - `Sync Status: ${result.syncStatus.status} (lag: ${result.syncStatus.lag})`; - - if (callback) { - callback({ - text: formattedText, - success: true, - data: result - } as GetTokenPriceContent); - } - - return true; - - } catch (error) { - logGranular("API request failed", { error }); - if (axios.isAxiosError(error)) { - throw new APIError( - `Failed to fetch token price: ${error.message}`, - error.response?.status - ); - } - throw new APIError("Failed to fetch token price"); - } - - } catch (error) { - logGranular("Handler execution failed", { error }); - - if (callback) { - const errorMessage = error instanceof Error ? error.message : "Unknown error occurred"; - callback({ - text: `Error getting token price: ${errorMessage}`, - success: false - } as GetTokenPriceContent); - } - - if (error instanceof ConfigurationError || - error instanceof ValidationError || - error instanceof APIError) { - throw error; - } - - throw new APIError("Failed to execute GET_TOKEN_PRICE_ANKR action"); - } - } -}; - -export default actionGetTokenPrice; diff --git a/packages/plugin-ankr/src/actions/actionGetTokenTransfers.ts b/packages/plugin-ankr/src/actions/actionGetTokenTransfers.ts deleted file mode 100644 index ce97e6a6bf968..0000000000000 --- a/packages/plugin-ankr/src/actions/actionGetTokenTransfers.ts +++ /dev/null @@ -1,321 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { Action, elizaLogger } from "@elizaos/core"; -import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import axios from 'axios'; -import { getConfig, validateankrConfig, ANKR_ENDPOINTS } from '../environment'; -import { APIError, ConfigurationError, ValidationError } from '../error/base'; -import { parseAPIContent, validateRequiredFields } from '../validator/apiParseValidation'; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ -// Get configuration for granular logging -const config = getConfig(); -const GRANULAR_LOG = config.ANKR_GRANULAR_LOG; - -// Enhanced logging helper -const logGranular = (message: string, data?: unknown) => { - if (GRANULAR_LOG) { - elizaLogger.debug(`[GetTokenTransfers] ${message}`, data); - console.log(`[GetTokenTransfers] ${message}`, data ? JSON.stringify(data, null, 2) : ''); - } -}; - -interface GetTokenTransfersContent extends Content { - text: string; - filters?: { - blockchain?: string; - contractAddress?: string; - fromTimestamp?: number; - toTimestamp?: number; - pageSize?: number; - pageToken?: string; - }; - success?: boolean; - data?: { - transfers: Array<{ - fromAddress: string; - toAddress: string; - contractAddress: string; - value: string; - valueRawInteger: string; - blockchain: string; - tokenName: string; - tokenSymbol: string; - tokenDecimals: number; - thumbnail: string; - transactionHash: string; - blockHeight: number; - timestamp: number; - }>; - syncStatus?: { - timestamp: number; - lag: string; - status: string; - }; - }; -} - -// Define transfer type -type TokenTransfer = { - fromAddress: string; - toAddress: string; - contractAddress: string; - value: string; - valueRawInteger: string; - blockchain: string; - tokenName: string; - tokenSymbol: string; - tokenDecimals: number; - thumbnail: string; - transactionHash: string; - blockHeight: number; - timestamp: number; -}; - -export const actionGetTokenTransfers: Action = { - name: "GET_TOKEN_TRANSFERS_ANKR", - similes: ["LIST_TRANSFERS", "SHOW_TRANSFERS", "TOKEN_MOVEMENTS", "TRANSFER_HISTORY"], - description: "Get transfer history for a specific token or address on eth.", - examples: [[ - { - user: "user", - content: { - text: "Show me recent contract [contract]0xff970a61a04b1ca14834a43f5de4533ebddb5cc8[/contract] transfers [chain]eth[/chain] from [fromtimestamp]1655197483[/fromtimestamp] to [totimestamp]1656061483[/totimestamp]", - filters: { - blockchain: "eth", - contractAddress: "0xff970a61a04b1ca14834a43f5de4533ebddb5cc8", - pageSize: 5, - fromTimestamp: 1655197483, - toTimestamp: 1656061483 - } - } as GetTokenTransfersContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Here are the 5 most recent USDC transfers on eth:\n\n" + - "1. Transfer\n" + - " From: 0x1234...5678\n" + - " To: 0xabcd...ef01\n" + - " Amount: 10,000 USDC\n" + - " Time: 2024-01-24 10:30:15\n\n" + - "2. Transfer\n" + - " From: 0x9876...5432\n" + - " To: 0xfedc...ba98\n" + - " Amount: 5,000 USDC\n" + - " Time: 2024-01-24 10:29:45", - success: true, - data: { - transfers: [{ - fromAddress: "0x1234567890123456789012345678901234567890", - toAddress: "0xabcdef0123456789abcdef0123456789abcdef01", - contractAddress: "0xff970a61a04b1ca14834a43f5de4533ebddb5cc8", - value: "10000.0", - valueRawInteger: "10000000000000000000000", - blockchain: "eth", - tokenName: "USD Coin", - tokenSymbol: "USDC", - tokenDecimals: 6, - thumbnail: "", - transactionHash: "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef", - blockHeight: 123456789, - timestamp: 1706093415 - }], - syncStatus: { - timestamp: 1706093415, - lag: "0s", - status: "completed" - } - } - } as GetTokenTransfersContent - } as ActionExample - ]], - - // ------------------------------------------------------------------------------------------------ - // Core Validation implementation - // ------------------------------------------------------------------------------------------------ - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - if (message.content?.type !== "GET_TOKEN_TRANSFERS_ANKR") { - return true; - } - - logGranular("Validating GET_TOKEN_TRANSFERS_ANKR action", { - content: message.content - }); - - try { - const content = message.content as GetTokenTransfersContent; - const parsedContent = parseAPIContent(content.text); - - if (!parsedContent.chain || !parsedContent.contract) { - throw new ValidationError("Blockchain and contract address are required"); - } - - if (parsedContent.fromTimestamp && parsedContent.toTimestamp) { - if (parsedContent.fromTimestamp > parsedContent.toTimestamp) { - throw new ValidationError("From timestamp must be less than to timestamp"); - } - } - - logGranular("Validation successful"); - return true; - } catch (error) { - logGranular("Validation failed", { error }); - if (error instanceof ValidationError) { - throw error; - } - throw new ValidationError(error instanceof Error ? error.message : "Unknown validation error"); - } - }, - // ------------------------------------------------------------------------------------------------ - // Core Handler implementation - // ------------------------------------------------------------------------------------------------ - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - logGranular("Executing GET_TOKEN_TRANSFERS_ANKR action"); - - try { - const messageContent = message.content as GetTokenTransfersContent; - const config = await validateankrConfig(runtime); - console.log("Debug - Config validated:", { - hasWallet: !!config.ANKR_WALLET, - env: config.ANKR_ENV - }); - - const wallet = config.ANKR_WALLET; - if (!wallet) { - throw new ConfigurationError("ANKR_WALLET not found in environment variables"); - } - - const endpoint = `https://rpc.ankr.com/multichain/${wallet}`; - - // Parse the prompt using our API content parser - console.log("Debug - Raw prompt:", { - text: messageContent.text, - promptLength: messageContent.text?.length, - }); - - // Parse the prompt using our API content parser - const parsedContent = parseAPIContent(messageContent.text); - console.log("Debug - Parsed API content:", { - hasContract: !!parsedContent.contract, - hasChain: !!parsedContent.chain, - hasFromTimestamp: !!parsedContent.fromTimestamp, - hasToTimestamp: !!parsedContent.toTimestamp, - contract: parsedContent.contract, - chain: parsedContent.chain, - fromTimestamp: parsedContent.fromTimestamp, - toTimestamp: parsedContent.toTimestamp - }); - - validateRequiredFields(parsedContent, ['contract', 'chain', 'fromTimestamp', 'toTimestamp']); - - try { - const response = await axios.post( - endpoint, - { - jsonrpc: "2.0", - method: "ankr_getTokenTransfers", - params: { - address: parsedContent.contract, - blockchain: [parsedContent.chain], - fromTimestamp: parsedContent.fromTimestamp, - toTimestamp: parsedContent.toTimestamp, - pageSize: messageContent.filters?.pageSize || 10 - }, - id: 1 - }, - { - headers: { - 'Content-Type': 'application/json' - } - } - ); - - logGranular("Received response from Ankr API", { - statusCode: response.status, - data: response.data - }); - - if (response.data.error) { - throw new APIError(`Ankr API error: ${response.data.error.message}`); - } - - const result = response.data.result; - - let formattedText = `Token Transfers on ${parsedContent.chain?.toUpperCase() || 'UNKNOWN'}:\n\n`; - - result.transfers.forEach((transfer: TokenTransfer, index: number) => { - const date = new Date(transfer.timestamp * 1000).toLocaleString(); - const value = Number(transfer.value).toLocaleString(); - - formattedText += `${index + 1}. Transfer\n`; - formattedText += ` From: ${transfer.fromAddress.slice(0, 6)}...${transfer.fromAddress.slice(-4)}\n`; - formattedText += ` To: ${transfer.toAddress.slice(0, 6)}...${transfer.toAddress.slice(-4)}\n`; - formattedText += ` Amount: ${value} ${transfer.tokenSymbol}\n`; - formattedText += ` Token: ${transfer.tokenName}\n`; - formattedText += ` Time: ${date}\n\n`; - }); - - if (result.syncStatus) { - formattedText += `\nSync Status: ${result.syncStatus.status} (lag: ${result.syncStatus.lag})\n`; - } - - if (callback) { - logGranular("Sending success callback with formatted text", { formattedText }); - callback({ - text: formattedText, - success: true, - data: { - transfers: result.transfers, - nextPageToken: result.nextPageToken - } - } as GetTokenTransfersContent); - } - - return true; - - } catch (error) { - logGranular("API request failed", { error }); - if (axios.isAxiosError(error)) { - throw new APIError( - `Failed to fetch token transfers: ${error.message}`, - error.response?.status - ); - } - throw new APIError("Failed to fetch token transfers"); - } - - } catch (error) { - logGranular("Handler execution failed", { error }); - - if (callback) { - const errorMessage = error instanceof Error ? error.message : "Unknown error occurred"; - callback({ - text: `Error getting token transfers: ${errorMessage}`, - success: false - } as GetTokenTransfersContent); - } - - if (error instanceof ConfigurationError || - error instanceof ValidationError || - error instanceof APIError) { - throw error; - } - - throw new APIError("Failed to execute GET_TOKEN_TRANSFERS_ANKR action"); - } - } -}; - -export default actionGetTokenTransfers; diff --git a/packages/plugin-ankr/src/actions/actionGetTransactionsByAddress.ts b/packages/plugin-ankr/src/actions/actionGetTransactionsByAddress.ts deleted file mode 100644 index 9fadc7a7b5e9c..0000000000000 --- a/packages/plugin-ankr/src/actions/actionGetTransactionsByAddress.ts +++ /dev/null @@ -1,333 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { Action, elizaLogger } from "@elizaos/core"; -import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import axios from 'axios'; -import { getConfig, validateankrConfig, ANKR_ENDPOINTS } from '../environment'; -import { APIError, ConfigurationError, ValidationError } from '../error/base'; -import { parseAPIContent, validateRequiredFields } from '../validator/apiParseValidation'; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ -// Get configuration for granular logging -const config = getConfig(); -const GRANULAR_LOG = config.ANKR_GRANULAR_LOG; - -// Enhanced logging helper -const logGranular = (message: string, data?: unknown) => { - if (GRANULAR_LOG) { - elizaLogger.debug(`[GetTransactionsByAddress] ${message}`, data); - console.log(`[GetTransactionsByAddress] ${message}`, data ? JSON.stringify(data, null, 2) : ''); - } -}; - -interface GetTransactionsByAddressContent extends Content { - text: string; - filters?: { - blockchain?: string; - address?: string; - pageSize?: number; - includeLogs?: boolean; - }; - success?: boolean; - data?: { - nextPageToken?: string; - transactions: Array<{ - blockHash: string; - blockNumber: string; - from: string; - to: string; - hash: string; - value: string; - gas: string; - gasPrice: string; - gasUsed: string; - input: string; - nonce: string; - timestamp: string; - status: string; - blockchain: string; - logs?: Array<{ - address: string; - topics: string[]; - data: string; - blockNumber: string; - transactionHash: string; - logIndex: string; - timestamp: string; - }>; - }>; - syncStatus?: { - timestamp: number; - lag: string; - status: string; - }; - }; -} - -// Define transaction type -type Transaction = { - blockHash: string; - blockNumber: string; - from: string; - to: string; - hash: string; - value: string; - gas: string; - gasPrice: string; - gasUsed: string; - input: string; - nonce: string; - timestamp: string; - status: string; - blockchain: string; - logs?: Array<{ - address: string; - topics: string[]; - data: string; - blockNumber: string; - transactionHash: string; - logIndex: string; - timestamp: string; - }>; -}; - -export const actionGetTransactionsByAddress: Action = { - name: "GET_TRANSACTIONS_BY_ADDRESS_ANKR", - similes: ["LIST_TXS", "SHOW_TXS", "VIEW_TRANSACTIONS", "GET_ADDRESS_TXS"], - description: "Get transactions for a specific address on the blockchain", - examples: [[ - { - user: "user", - content: { - text: "Show me the latest transactions for address [contract]0xd8da6bf26964af9d7eed9e03e53415d37aa96045[/contract] [chain]eth[/chain]", - filters: { - blockchain: "eth", - address: "0xd8da6bf26964af9d7eed9e03e53415d37aa96045", - pageSize: 2, - includeLogs: true - } - } as GetTransactionsByAddressContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Here are the latest transactions for the address on eth:\n\n" + - "1. Transfer Out\n" + - " To: 0x1234...5678\n" + - " Amount: 1.5 ETH\n" + - " Time: 2024-01-24 10:30:15\n" + - " Status: Success\n\n" + - "2. Contract Interaction\n" + - " Contract: 0xabcd...ef01 (Uniswap V3)\n" + - " Method: swapExactTokensForTokens\n" + - " Time: 2024-01-24 10:15:22\n" + - " Status: Success", - success: true, - data: { - transactions: [{ - blockchain: "eth", - from: "0xd8da6bf26964af9d7eed9e03e53415d37aa96045", - to: "0x1234567890123456789012345678901234567890", - hash: "0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890", - value: "1500000000000000000", - gas: "21000", - gasPrice: "100000000", - gasUsed: "21000", - timestamp: "2024-01-24T10:30:15Z", - status: "1", - blockNumber: "123456789", - blockHash: "0x9876543210fedcba9876543210fedcba9876543210fedcba9876543210fedcba" - }] - } - } as GetTransactionsByAddressContent - } as ActionExample - ]], - - // ------------------------------------------------------------------------------------------------ - // Core Validation implementation - // ------------------------------------------------------------------------------------------------ - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - if (message.content?.type !== "GET_TRANSACTIONS_BY_ADDRESS_ANKR") { - return true; - } - - logGranular("Validating GET_TRANSACTIONS_BY_ADDRESS_ANKR action", { - content: message.content - }); - - try { - const content = message.content as GetTransactionsByAddressContent; - const parsedContent = parseAPIContent(content.text); - - if (!parsedContent.chain || !parsedContent.contract) { - throw new ValidationError("Blockchain and address are required"); - } - - // Validate pageSize if provided - if (content.filters?.pageSize && (content.filters.pageSize < 1 || content.filters.pageSize > 100)) { - throw new ValidationError("Page size must be between 1 and 100"); - } - - logGranular("Validation successful"); - return true; - } catch (error) { - logGranular("Validation failed", { error }); - if (error instanceof ValidationError) { - throw error; - } - throw new ValidationError(error instanceof Error ? error.message : "Unknown validation error"); - } - }, - // ------------------------------------------------------------------------------------------------ - // Core Handler implementation - // ------------------------------------------------------------------------------------------------ - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - logGranular("Executing GET_TRANSACTIONS_BY_ADDRESS_ANKR action"); - - try { - const messageContent = message.content as GetTransactionsByAddressContent; - console.log("Debug - Full message content:", { - fullContent: message.content, - rawText: messageContent?.text, - type: message.content?.type, - allKeys: Object.keys(message.content || {}) - }); - - console.log("Debug - Message content details:", { - hasText: !!messageContent?.text, - hasFilters: !!messageContent?.filters, - textContent: messageContent?.text, - contentType: typeof messageContent?.text - }); - - const config = await validateankrConfig(runtime); - console.log("Debug - Config validated:", { - hasWallet: !!config.ANKR_WALLET, - env: config.ANKR_ENV - }); - - const wallet = config.ANKR_WALLET; - if (!wallet) { - throw new ConfigurationError("ANKR_WALLET not found in environment variables"); - } - - const endpoint = `https://rpc.ankr.com/multichain/${wallet}`; - - // Parse the prompt using our API content parser - console.log("Debug - Raw prompt:", { - text: messageContent.text, - promptLength: messageContent.text?.length, - }); - - // Parse the prompt using our API content parser - const parsedContent = parseAPIContent(messageContent.text); - console.log("Debug - Parsed API content:", { - hasContract: !!parsedContent.contract, - hasChain: !!parsedContent.chain, - contract: parsedContent.contract, - chain: parsedContent.chain - }); - - validateRequiredFields(parsedContent, ['contract', 'chain']); - - try { - const response = await axios.post( - endpoint, - { - jsonrpc: "2.0", - method: "ankr_getTransactionsByAddress", - params: { - blockchain: [parsedContent.chain], - address: parsedContent.contract, - pageSize: messageContent.filters?.pageSize || 5, - includeLogs: messageContent.filters?.includeLogs || true - }, - id: 1 - }, - { - headers: { - 'Content-Type': 'application/json' - } - } - ); - - if (response.data.error) { - throw new APIError(`Ankr API error: ${response.data.error.message}`); - } - - const result = response.data.result; - let formattedText = `Transactions for ${parsedContent.contract} on ${parsedContent.chain?.toUpperCase() || 'UNKNOWN'}:\n\n`; - - result.transactions.forEach((tx: Transaction, index: number) => { - const date = new Date(Number.parseInt(tx.timestamp, 16) * 1000).toLocaleString(); - const value = Number.parseInt(tx.value, 16) / 1e18; - const status = tx.status === "0x1" ? "Success" : "Failed"; - - formattedText += `${index + 1}. Transaction\n`; - formattedText += ` Hash: ${tx.hash.slice(0, 6)}...${tx.hash.slice(-4)}\n`; - formattedText += ` From: ${tx.from.slice(0, 6)}...${tx.from.slice(-4)}\n`; - formattedText += ` To: ${tx.to.slice(0, 6)}...${tx.to.slice(-4)}\n`; - formattedText += ` Value: ${value.toFixed(4)} ETH\n`; - formattedText += ` Status: ${status}\n`; - formattedText += ` Time: ${date}\n\n`; - }); - - if (callback) { - callback({ - text: formattedText, - success: true, - data: { - transactions: result.transactions, - nextPageToken: result.nextPageToken, - syncStatus: result.syncStatus - } - } as GetTransactionsByAddressContent); - } - - return true; - - } catch (error) { - logGranular("API request failed", { error }); - if (axios.isAxiosError(error)) { - throw new APIError( - `Failed to fetch transactions: ${error.message}`, - error.response?.status - ); - } - throw new APIError("Failed to fetch transactions"); - } - - } catch (error) { - logGranular("Handler execution failed", { error }); - - if (callback) { - const errorMessage = error instanceof Error ? error.message : "Unknown error occurred"; - callback({ - text: `Error getting transactions: ${errorMessage}`, - success: false - } as GetTransactionsByAddressContent); - } - - if (error instanceof ConfigurationError || - error instanceof ValidationError || - error instanceof APIError) { - throw error; - } - - throw new APIError("Failed to execute GET_TRANSACTIONS_BY_ADDRESS_ANKR action"); - } - } -}; - -export default actionGetTransactionsByAddress; diff --git a/packages/plugin-ankr/src/actions/actionGetTransactionsByHash.ts b/packages/plugin-ankr/src/actions/actionGetTransactionsByHash.ts deleted file mode 100644 index ce6c58e440ff3..0000000000000 --- a/packages/plugin-ankr/src/actions/actionGetTransactionsByHash.ts +++ /dev/null @@ -1,296 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { Action, elizaLogger } from "@elizaos/core"; -import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import axios from 'axios'; -import { getConfig, validateankrConfig, ANKR_ENDPOINTS } from '../environment'; -import { APIError, ConfigurationError, ValidationError } from '../error/base'; -import { parseAPIContent, validateRequiredFields } from '../validator/apiParseValidation'; -// ------------------------------------------------------------------------------------------------ -// Granular Logger -// ------------------------------------------------------------------------------------------------ -// Get configuration for granular logging -const config = getConfig(); -const GRANULAR_LOG = config.ANKR_GRANULAR_LOG; - -// Enhanced logging helper -const logGranular = (message: string, data?: unknown) => { - if (GRANULAR_LOG) { - elizaLogger.debug(`[GetTransactionsByHash] ${message}`, data); - console.log(`[GetTransactionsByHash] ${message}`, data ? JSON.stringify(data, null, 2) : ''); - } -}; - -interface GetTransactionsByHashContent extends Content { - text: string; - filters?: { - blockchain?: string; - transactionHash?: string; - includeLogs?: boolean; - }; - success?: boolean; - data?: { - transactions: Array<{ - blockHash: string; - blockNumber: string; - blockchain: string; - from: string; - to: string; - hash: string; - value: string; - gas: string; - gasPrice: string; - gasUsed: string; - input: string; - nonce: string; - timestamp: string; - status: string; - type: string; - v: string; - r: string; - s: string; - transactionIndex: string; - cumulativeGasUsed: string; - }>; - syncStatus?: { - timestamp: number; - lag: string; - status: string; - } | null; - }; -} - -export const actionGetTransactionsByHash: Action = { - name: "GET_TRANSACTIONS_BY_HASH_ANKR", - similes: ["GET_TX", "SHOW_TRANSACTION", "VIEW_TX", "TRANSACTION_DETAILS"], - description: "Get detailed information about a transaction by its hash", - examples: [[ - { - user: "user", - content: { - text: "Show me details for transaction [txHash]0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef[/txHash] [chain]eth[/chain]", - filters: { - blockchain: "eth", - transactionHash: "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef", - includeLogs: true - } - } as GetTransactionsByHashContent - } as ActionExample, - { - user: "assistant", - content: { - text: "Here are the details for the transaction on eth:\n\n" + - "Transaction: 0x1234...cdef\n" + - "Status: Success\n" + - "From: 0xabcd...ef01\n" + - "To: 0x9876...5432\n" + - "Value: 1.5 ETH\n" + - "Gas Used: 150,000\n" + - "Gas Price: 0.1 Gwei\n" + - "Block: 123456789\n" + - "Timestamp: 2024-01-24 10:30:15", - success: true, - data: { - transactions: [{ - blockchain: "eth", - from: "0xabcdef0123456789abcdef0123456789abcdef01", - to: "0x9876543210987654321098765432109876543210", - hash: "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef", - value: "1500000000000000000", - gas: "21000", - gasPrice: "100000000", - gasUsed: "21000", - timestamp: "2024-01-24T10:30:15Z", - status: "1", - blockNumber: "123456789", - blockHash: "0xfedcba0987654321fedcba0987654321fedcba0987654321fedcba0987654321" - }] - } - } as GetTransactionsByHashContent - } as ActionExample - ]], - // ------------------------------------------------------------------------------------------------ - // Core Validation implementation - // ------------------------------------------------------------------------------------------------ - validate: async (_runtime: IAgentRuntime, message: Memory): Promise => { - if (message.content?.type !== "GET_TRANSACTIONS_BY_HASH_ANKR") { - return true; - } - - logGranular("Validating GET_TRANSACTIONS_BY_HASH_ANKR action", { - content: message.content - }); - - try { - const content = message.content as GetTransactionsByHashContent; - const parsedContent = parseAPIContent(content.text); - - if (!parsedContent.chain || !parsedContent.txHash) { - throw new ValidationError("Blockchain and transaction hash are required"); - } - - // Validate transaction hash format - if (!/^0x[a-fA-F0-9]{64}$/.test(parsedContent.txHash)) { - throw new ValidationError("Invalid transaction hash format"); - } - - logGranular("Validation successful"); - return true; - } catch (error) { - logGranular("Validation failed", { error }); - if (error instanceof ValidationError) { - throw error; - } - throw new ValidationError(error instanceof Error ? error.message : "Unknown validation error"); - } - }, - - // ------------------------------------------------------------------------------------------------ - // Core Handler implementation - // ------------------------------------------------------------------------------------------------ - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise => { - logGranular("Executing GET_TRANSACTIONS_BY_HASH_ANKR action"); - - try { - const messageContent = message.content as GetTransactionsByHashContent; - console.log("Debug - Full message content:", { - fullContent: message.content, - rawText: messageContent?.text, - type: message.content?.type, - allKeys: Object.keys(message.content || {}) - }); - - console.log("Debug - Message content details:", { - hasText: !!messageContent?.text, - hasFilters: !!messageContent?.filters, - textContent: messageContent?.text, - contentType: typeof messageContent?.text - }); - - const config = await validateankrConfig(runtime); - console.log("Debug - Config validated:", { - hasWallet: !!config.ANKR_WALLET, - env: config.ANKR_ENV - }); - - const wallet = config.ANKR_WALLET; - if (!wallet) { - throw new ConfigurationError("ANKR_WALLET not found in environment variables"); - } - - const endpoint = `https://rpc.ankr.com/multichain/${wallet}`; - - // Parse the prompt using our API content parser - console.log("Debug - Raw prompt:", { - text: messageContent.text, - promptLength: messageContent.text?.length, - }); - - // Parse the prompt using our API content parser - const parsedContent = parseAPIContent(messageContent.text); - console.log("Debug - Parsed API content:", { - hasTx: !!parsedContent.txHash, - hasChain: !!parsedContent.chain, - tx: parsedContent.txHash, - chain: parsedContent.chain - }); - - validateRequiredFields(parsedContent, ['txHash', 'chain']); - - try { - const response = await axios.post( - endpoint, - { - jsonrpc: "2.0", - method: "ankr_getTransactionsByHash", - params: { - blockchain: parsedContent.chain, - transactionHash: parsedContent.txHash, - includeLogs: true - }, - id: 1 - }, - { - headers: { - 'Content-Type': 'application/json' - } - } - ); - - if (response.data.error) { - throw new APIError(`Ankr API error: ${response.data.error.message}`); - } - - const transaction = response.data.result.transactions[0]; - const timestamp = new Date(Number.parseInt(transaction.timestamp, 16) * 1000).toLocaleString(); - const value = Number.parseInt(transaction.value, 16) / 1e18; - const gasPrice = Number.parseInt(transaction.gasPrice, 16) / 1e9; - const gasUsed = Number.parseInt(transaction.gasUsed, 16); - const blockNumber = Number.parseInt(transaction.blockNumber, 16); - const status = transaction.status === "0x1" ? "Success" : "Failed"; - - let formattedText = `Transaction Details on ${parsedContent.chain?.toUpperCase() || 'UNKNOWN'}:\n\n`; - formattedText += `Hash: ${transaction.hash}\n`; - formattedText += `Status: ${status}\n`; - formattedText += `From: ${transaction.from.slice(0, 6)}...${transaction.from.slice(-4)}\n`; - formattedText += `To: ${transaction.to.slice(0, 6)}...${transaction.to.slice(-4)}\n`; - formattedText += `Value: ${value.toFixed(6)} ETH\n`; - formattedText += `Gas Used: ${gasUsed.toLocaleString()}\n`; - formattedText += `Gas Price: ${gasPrice.toFixed(2)} Gwei\n`; - formattedText += `Block: ${blockNumber.toLocaleString()}\n`; - formattedText += `Time: ${timestamp}`; - - if (callback) { - callback({ - text: formattedText, - success: true, - data: response.data.result - } as GetTransactionsByHashContent); - } - - return true; - - } catch (error) { - logGranular("API request failed", { error }); - if (axios.isAxiosError(error)) { - throw new APIError( - `Failed to fetch transaction: ${error.message}`, - error.response?.status - ); - } - throw new APIError("Failed to fetch transaction"); - } - - } catch (error) { - logGranular("Handler execution failed", { error }); - - if (callback) { - const errorMessage = error instanceof Error ? error.message : "Unknown error occurred"; - callback({ - text: `Error getting transaction: ${errorMessage}`, - success: false - } as GetTransactionsByHashContent); - } - - if (error instanceof ConfigurationError || - error instanceof ValidationError || - error instanceof APIError) { - throw error; - } - - throw new APIError("Failed to execute GET_TRANSACTIONS_BY_HASH_ANKR action"); - } - } -}; - -export default actionGetTransactionsByHash; diff --git a/packages/plugin-ankr/src/assets/ankr.jpg b/packages/plugin-ankr/src/assets/ankr.jpg deleted file mode 100644 index b0c1c03fcc2f1..0000000000000 Binary files a/packages/plugin-ankr/src/assets/ankr.jpg and /dev/null differ diff --git a/packages/plugin-ankr/src/assets/ankr_b.jpg b/packages/plugin-ankr/src/assets/ankr_b.jpg deleted file mode 100644 index dfc252b34f896..0000000000000 Binary files a/packages/plugin-ankr/src/assets/ankr_b.jpg and /dev/null differ diff --git a/packages/plugin-ankr/src/environment.ts b/packages/plugin-ankr/src/environment.ts deleted file mode 100644 index 4043c63eaad8c..0000000000000 --- a/packages/plugin-ankr/src/environment.ts +++ /dev/null @@ -1,104 +0,0 @@ -import { IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -// Environment Variables -let ENV= "production"; - -// ANKR API Configuration -export const ANKR_ENDPOINTS = { - production: { - multichain: "https://rpc.ankr.com/multichain/", - }, - -} as const; - -export const ankrEnvSchema = z.object({ - // API Configuration - ANKR_ENV: z.enum(["production", "staging"]).default("production"), - ANKR_WALLET: z.string().min(1, "ANKR_WALLET is required"), - - // Request Configuration - ANKR_MAX_RETRIES: z.string().transform(Number).default("3"), - ANKR_RETRY_DELAY: z.string().transform(Number).default("1000"), - ANKR_TIMEOUT: z.string().transform(Number).default("5000"), - - // Logging Configuration - ANKR_GRANULAR_LOG: z.boolean().default(true), - ANKR_LOG_LEVEL: z.enum(["error", "warn", "info", "debug"]).default("info"), - - // Runtime Configuration - ANKR_RUNTIME_CHECK_MODE: z.boolean().default(false), - ANKR_SPASH: z.boolean().default(false) -}); - -export type ankrConfig = z.infer; - -export function getConfig( - env: string | undefined | null = ENV || - process.env.ANKR_ENV -): ankrConfig { - ENV = env || "production"; - - return { - ANKR_ENV: (env as "production" | "staging") || "production", - ANKR_WALLET: process.env.ANKR_WALLET || "", - ANKR_MAX_RETRIES: Number(process.env.ANKR_MAX_RETRIES || "3"), - ANKR_RETRY_DELAY: Number(process.env.ANKR_RETRY_DELAY || "1000"), - ANKR_TIMEOUT: Number(process.env.ANKR_TIMEOUT || "5000"), - ANKR_GRANULAR_LOG: process.env.ANKR_GRANULAR_LOG === "true" || false, - ANKR_LOG_LEVEL: (process.env.ANKR_LOG_LEVEL as "error" | "warn" | "info" | "debug") || "info", - ANKR_RUNTIME_CHECK_MODE: process.env.RUNTIME_CHECK_MODE === "true" || false, - ANKR_SPASH: process.env.ANKR_SPASH === "true" || false - }; -} - -export async function validateankrConfig( - runtime: IAgentRuntime -): Promise { - try { - const envConfig = getConfig( - runtime.getSetting("ankr_ENV") ?? undefined - ); - - const config = { - ANKR_ENV: process.env.ANKR_ENV || runtime.getSetting("ANKR_ENV") || envConfig.ANKR_ENV, - ANKR_WALLET: process.env.ANKR_WALLET || runtime.getSetting("ANKR_WALLET") || envConfig.ANKR_WALLET, - ANKR_MAX_RETRIES: process.env.ANKR_MAX_RETRIES || runtime.getSetting("ANKR_MAX_RETRIES") || envConfig.ANKR_MAX_RETRIES.toString(), - ANKR_RETRY_DELAY: process.env.ANKR_RETRY_DELAY || runtime.getSetting("ANKR_RETRY_DELAY") || envConfig.ANKR_RETRY_DELAY.toString(), - ANKR_TIMEOUT: process.env.ANKR_TIMEOUT || runtime.getSetting("ANKR_TIMEOUT") || envConfig.ANKR_TIMEOUT.toString(), - ANKR_GRANULAR_LOG: process.env.ANKR_GRANULAR_LOG === "true" || false, - ANKR_LOG_LEVEL: process.env.ANKR_LOG_LEVEL || runtime.getSetting("ANKR_LOG_LEVEL") || envConfig.ANKR_LOG_LEVEL, - ANKR_RUNTIME_CHECK_MODE: process.env.RUNTIME_CHECK_MODE === "true" || false, - ANKR_SPASH: process.env.ANKR_SPASH === "true" || false - }; - - return ankrEnvSchema.parse(config); - } catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error); - throw new Error(`Failed to validate ANKR configuration: ${errorMessage}`); - } -} - -// Export endpoints configuration -export const getEndpoints = (env: string = ENV) => - ANKR_ENDPOINTS[env as keyof typeof ANKR_ENDPOINTS]; - -// Helper to get full endpoint URL -export function getEndpointUrl(endpoint: string, env: string = ENV): string { - const endpoints = getEndpoints(env); - const parts = endpoint.split('.'); - let current: Record = endpoints; - - for (const part of parts) { - if (current[part] === undefined) { - throw new Error(`Invalid endpoint path: ${endpoint}`); - } - current = current[part] as Record; - } - - if (typeof current !== 'string') { - throw new Error(`Invalid endpoint path: ${endpoint}`); - } - - return current; -} \ No newline at end of file diff --git a/packages/plugin-ankr/src/error/base.ts b/packages/plugin-ankr/src/error/base.ts deleted file mode 100644 index 632b42ff3300e..0000000000000 --- a/packages/plugin-ankr/src/error/base.ts +++ /dev/null @@ -1,47 +0,0 @@ -export class HyperbolicError extends Error { - constructor(message: string) { - super(message); - this.name = 'HyperbolicError'; - Object.setPrototypeOf(this, HyperbolicError.prototype); - } -} - -export class ConfigurationError extends HyperbolicError { - constructor(message: string) { - super(message); - this.name = 'ConfigurationError'; - Object.setPrototypeOf(this, ConfigurationError.prototype); - } -} - -export class APIError extends HyperbolicError { - constructor(message: string, public statusCode?: number) { - super(message); - this.name = 'APIError'; - Object.setPrototypeOf(this, APIError.prototype); - } -} - -export class ValidationError extends HyperbolicError { - constructor(message: string) { - super(message); - this.name = 'ValidationError'; - Object.setPrototypeOf(this, ValidationError.prototype); - } -} - -export class SSHError extends HyperbolicError { - constructor(message: string) { - super(message); - this.name = 'SSHError'; - Object.setPrototypeOf(this, SSHError.prototype); - } -} - -export class GPUError extends HyperbolicError { - constructor(message: string) { - super(message); - this.name = 'GPUError'; - Object.setPrototypeOf(this, GPUError.prototype); - } -} diff --git a/packages/plugin-ankr/src/error/configuration.ts b/packages/plugin-ankr/src/error/configuration.ts deleted file mode 100644 index 4b57ca56c3d0b..0000000000000 --- a/packages/plugin-ankr/src/error/configuration.ts +++ /dev/null @@ -1,9 +0,0 @@ -/** - * Error class for configuration-related errors - */ -export class ConfigurationError extends Error { - constructor(message: string) { - super(message); - this.name = 'ConfigurationError'; - } -} diff --git a/packages/plugin-ankr/src/error/validation.ts b/packages/plugin-ankr/src/error/validation.ts deleted file mode 100644 index 0fd2d3a722133..0000000000000 --- a/packages/plugin-ankr/src/error/validation.ts +++ /dev/null @@ -1,6 +0,0 @@ -export class ValidationError extends Error { - constructor(message: string) { - super(message); - this.name = 'ValidationError'; - } -} diff --git a/packages/plugin-ankr/src/index.ts b/packages/plugin-ankr/src/index.ts deleted file mode 100644 index 9ebdb716f72c1..0000000000000 --- a/packages/plugin-ankr/src/index.ts +++ /dev/null @@ -1,119 +0,0 @@ -import { Plugin } from "@elizaos/core"; -import chalk from 'chalk'; -import Table from 'cli-table3'; -import ora from 'ora'; -import { getConfig } from "./environment"; -import { actionGetTokenHoldersCount } from "./actions/actionGetTokenHoldersCount"; -import { actionGetTokenPrice } from "./actions/actionGetTokenPrice"; -import { actionGetTokenTransfers } from "./actions/actionGetTokenTransfers"; -import { actionGetAccountBalance } from "./actions/actionGetAccountBalance"; -import { actionGetTransactionsByAddress } from "./actions/actionGetTransactionsByAddress"; -import { actionGetTransactionsByHash } from "./actions/actionGetTransactionsByHash"; -import { actionGetBlockchainStats } from "./actions/actionGetBlockchainStats"; -import { actionGetCurrencies } from "./actions/actionGetCurrencies"; -import { actionGetInteractions } from "./actions/actionGetInteractions"; -import { actionGetNFTHolders } from "./actions/actionGetNFTHolders"; -import { actionGetNFTTransfers } from "./actions/actionGetNFTTransfers"; -import { actionGetNFTMetadata } from "./actions/actionGetNFTMetadata"; -import { actionGetNFTsByOwner } from "./actions/actionGetNFTsByOwner"; - - - -// Start the loader -const spinner = ora({ - text: chalk.cyan('Initializing ANKR Plugin...'), - spinner: 'dots12', - color: 'cyan' - }).start(); - -const actions = [ - actionGetTokenHoldersCount, - actionGetTokenPrice, - actionGetTokenTransfers, - actionGetAccountBalance, - actionGetTransactionsByAddress, - actionGetTransactionsByHash, - actionGetBlockchainStats, - actionGetCurrencies, - actionGetInteractions, - actionGetNFTHolders, - actionGetNFTTransfers, - actionGetNFTMetadata, - actionGetNFTsByOwner, -]; - -// Get NVIDIA_NIM_SPASH from config -const ANKR_SPASH = getConfig().ANKR_WALLET; - -// Initial banner -// Only show splash screen if NVIDIA_NIM_SPASH is true -if (ANKR_SPASH) { - // Initial banner with chalk styling - console.log(`\n${chalk.cyan('┌────────────────────────────────────────┐')}`); - console.log(chalk.cyan('│') + chalk.yellow.bold(' ANKR PLUGIN ') + chalk.cyan(' │')); - console.log(chalk.cyan('├────────────────────────────────────────┤')); - console.log(chalk.cyan('│') + chalk.white(' Initializing ANKR Services... ') + chalk.cyan('│')); - console.log(chalk.cyan('│') + chalk.white(' Version: 1.0.0 ') + chalk.cyan('│')); - console.log(chalk.cyan('└────────────────────────────────────────┘')); - - // Stop the loader - spinner.succeed(chalk.green('ANKR Plugin initialized successfully!')); - - // Create a beautiful table for actions - const actionTable = new Table({ - head: [ - chalk.cyan('Action'), - chalk.cyan('H'), - chalk.cyan('V'), - chalk.cyan('E'), - chalk.cyan('Similes') - ], - style: { - head: [], - border: ['cyan'] - } - }); - - // Format and add action information - for (const action of actions) { - actionTable.push([ - chalk.white(action.name), - typeof action.handler === 'function' ? chalk.green('✓') : chalk.red('✗'), - typeof action.validate === 'function' ? chalk.green('✓') : chalk.red('✗'), - action.examples?.length > 0 ? chalk.green('✓') : chalk.red('✗'), - chalk.gray(action.similes?.join(', ') || 'none') - ]); - } - - // Display the action table - console.log(`\n${actionTable.toString()}`); - - // Plugin status with a nice table - const statusTable = new Table({ - style: { - border: ['cyan'] - } - }); - - statusTable.push( - [chalk.cyan('Plugin Status')], - [chalk.white('Name : ') + chalk.yellow('plugin-ankr')], - [chalk.white('Actions : ') + chalk.green(actions.length.toString())], - [chalk.white('Status : ') + chalk.green('Loaded & Ready')] - ); - - console.log(`\n${statusTable.toString()}\n`); - } else { - // Stop the loader silently if splash is disabled - spinner.stop(); - } - - const ankrPlugin: Plugin = { - name: "plugin-ankr", - description: "Ankr Plugin for web3", - actions: actions, - evaluators: [] - }; - -export { ankrPlugin }; -export default ankrPlugin; diff --git a/packages/plugin-ankr/src/types/types.ts b/packages/plugin-ankr/src/types/types.ts deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/packages/plugin-ankr/src/validator/apiParseValidation.ts b/packages/plugin-ankr/src/validator/apiParseValidation.ts deleted file mode 100644 index 07a8ccd778467..0000000000000 --- a/packages/plugin-ankr/src/validator/apiParseValidation.ts +++ /dev/null @@ -1,262 +0,0 @@ -// ------------------------------------------------------------------------------------------------ -// Essential Imports -// ------------------------------------------------------------------------------------------------ -import { elizaLogger } from "@elizaos/core"; -import { APIError, ValidationError } from '../error/base'; - -// ------------------------------------------------------------------------------------------------ -// Types -// ------------------------------------------------------------------------------------------------ -interface ParsedAPIContent { - wallet?: string; - chain?: string; - contract?: string; - token?: string; - txHash?: string; - block?: string; - block2?: string; - fromTimestamp?: number; - toTimestamp?: number; - raw: { - text: string; - matches: { - wallet: boolean; - chain: boolean; - contract: boolean; - token: boolean; - txHash: boolean; - block: boolean; - block2: boolean; - fromTimestamp: boolean; - toTimestamp: boolean; - }; - }; -} - -// ------------------------------------------------------------------------------------------------ -// Constants -// ------------------------------------------------------------------------------------------------ -const SUPPORTED_CHAINS = [ - 'eth', 'ethereum', - 'bsc', 'bnb', - 'polygon', 'matic', - 'avalanche', 'avax', - 'optimism', 'op', - 'base' -]; - -const ADDRESS_REGEX = /^0x[a-fA-F0-9]{40}$/; -const TX_HASH_REGEX = /^0x[a-fA-F0-9]{64}$/; - -// ------------------------------------------------------------------------------------------------ -// Helper Functions -// ------------------------------------------------------------------------------------------------ -const normalizeChainName = (chain: string): string => { - chain = chain.toLowerCase().trim(); - switch (chain) { - case 'eth': - case 'ethereum': - return 'eth'; - case 'bsc': - case 'bnb': - return 'bsc'; - case 'polygon': - case 'matic': - return 'polygon'; - case 'avalanche': - case 'avax': - return 'avalanche'; - case 'optimism': - case 'op': - return 'optimism'; - case 'base': - return 'base'; - default: - throw new ValidationError(`Unsupported blockchain: ${chain}`); - } -}; - -const validateAddress = (address: string): boolean => { - return ADDRESS_REGEX.test(address); -}; - -const validateTxHash = (hash: string): boolean => { - return TX_HASH_REGEX.test(hash); -}; - -// Add block number validation -const validateBlockNumber = (block: string): boolean => { - return /^\d+$/.test(block); -}; - -// Add timestamp validation -const validateTimestamp = (timestamp: string): boolean => { - const num = parseInt(timestamp, 10); - return !isNaN(num) && num > 0; -}; - -// Add token ID validation -const validateTokenId = (tokenId: string): boolean => { - return tokenId.trim() !== ''; // Just ensure it's not empty -}; - -// ------------------------------------------------------------------------------------------------ -// Main Parser -// ------------------------------------------------------------------------------------------------ -/** - * Parses API-related content from text, extracting wallet addresses, chain names, - * contract addresses, token addresses, and transaction hashes. - * - * @param text The input text containing tagged content - * @returns ParsedAPIContent object containing extracted and validated information - * - * @example - * Input text: "Check balance for [wallet]0x123...[/wallet] on [chain]eth[/chain]" - */ -export function parseAPIContent(text: string): ParsedAPIContent { - try { - const parsed: ParsedAPIContent = { - raw: { - text, - matches: { - wallet: false, - chain: false, - contract: false, - token: false, - txHash: false, - block: false, - block2: false, - fromTimestamp: false, - toTimestamp: false - } - } - }; - - // Parse wallet address - const walletMatch = text.match(/\[wallet\]([\s\S]*?)\[\/wallet\]/); - if (walletMatch) { - const wallet = walletMatch[1].trim(); - if (!validateAddress(wallet)) { - throw new ValidationError(`Invalid wallet address: ${wallet}`); - } - parsed.wallet = wallet; - parsed.raw.matches.wallet = true; - } - - // Parse chain name - const chainMatch = text.match(/\[chain\]([\s\S]*?)\[\/chain\]/); - if (chainMatch) { - const chain = chainMatch[1].trim(); - parsed.chain = normalizeChainName(chain); - parsed.raw.matches.chain = true; - } - - // Parse contract address - const contractMatch = text.match(/\[contract\]([\s\S]*?)\[\/contract\]/); - if (contractMatch) { - const contract = contractMatch[1].trim(); - if (!validateAddress(contract)) { - throw new ValidationError(`Invalid contract address: ${contract}`); - } - parsed.contract = contract; - parsed.raw.matches.contract = true; - } - - // Parse token ID (modified from token address) - const tokenMatch = text.match(/\[token\]([\s\S]*?)\[\/token\]/); - if (tokenMatch) { - const token = tokenMatch[1].trim(); - if (!validateTokenId(token)) { - throw new ValidationError(`Invalid token ID: ${token}`); - } - parsed.token = token; - parsed.raw.matches.token = true; - } - - // Parse transaction hash - const txMatch = text.match(/\[txHash\]([\s\S]*?)\[\/txHash\]/); - if (txMatch) { - const txHash = txMatch[1].trim(); - if (!validateTxHash(txHash)) { - throw new ValidationError(`Invalid transaction hash: ${txHash}`); - } - parsed.txHash = txHash; - parsed.raw.matches.txHash = true; - } - - // Parse block number - const blockMatch = text.match(/\[block\]([\s\S]*?)\[\/block\]/); - if (blockMatch) { - const block = blockMatch[1].trim(); - if (!validateBlockNumber(block)) { - throw new ValidationError(`Invalid block number: ${block}`); - } - parsed.block = block; - parsed.raw.matches.block = true; - } - - // Parse second block number if present - const block2Match = text.match(/\[block2\]([\s\S]*?)\[\/block2\]/); - if (block2Match) { - const block2 = block2Match[1].trim(); - if (!validateBlockNumber(block2)) { - throw new ValidationError(`Invalid block number: ${block2}`); - } - parsed.block2 = block2; - parsed.raw.matches.block2 = true; - } - - // Parse fromTimestamp - const fromTimestampMatch = text.match(/\[fromtimestamp\]([\s\S]*?)\[\/fromtimestamp\]/); - if (fromTimestampMatch) { - const timestamp = fromTimestampMatch[1].trim(); - if (!validateTimestamp(timestamp)) { - throw new ValidationError(`Invalid from timestamp: ${timestamp}`); - } - parsed.fromTimestamp = parseInt(timestamp, 10); - parsed.raw.matches.fromTimestamp = true; - } - - // Parse toTimestamp - const toTimestampMatch = text.match(/\[totimestamp\]([\s\S]*?)\[\/totimestamp\]/); - if (toTimestampMatch) { - const timestamp = toTimestampMatch[1].trim(); - if (!validateTimestamp(timestamp)) { - throw new ValidationError(`Invalid to timestamp: ${timestamp}`); - } - parsed.toTimestamp = parseInt(timestamp, 10); - parsed.raw.matches.toTimestamp = true; - } - - return parsed; - - } catch (error) { - elizaLogger.error("API content parsing failed", { - error: error instanceof Error ? error.message : String(error) - }); - throw error; - } -} - -// ------------------------------------------------------------------------------------------------ -// Validation Helpers -// ------------------------------------------------------------------------------------------------ -export function validateRequiredFields( - parsed: ParsedAPIContent, - required: Array -): void { - const missing = required.filter(field => !parsed.raw.matches[field]); - if (missing.length > 0) { - throw new ValidationError( - `Missing required fields: ${missing.join(', ')}. Please provide them in the format [field]value[/field]` - ); - } -} - -export function validateChainSupport(chain: string): void { - if (!SUPPORTED_CHAINS.includes(chain.toLowerCase())) { - throw new ValidationError( - `Unsupported blockchain: ${chain}. Supported chains: ${SUPPORTED_CHAINS.join(', ')}` - ); - } -} \ No newline at end of file diff --git a/packages/plugin-ankr/tsconfig.json b/packages/plugin-ankr/tsconfig.json deleted file mode 100644 index 682bc903c21e7..0000000000000 --- a/packages/plugin-ankr/tsconfig.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "module": "ESNext", - "target": "ESNext", - "lib": [ - "ESNext", - "DOM" - ], - "moduleResolution": "Bundler", - "allowImportingTsExtensions": true, - "isolatedModules": true, - "esModuleInterop": true, - "skipLibCheck": true, - "strict": true, - "declaration": true, - "sourceMap": true, - "types": [ - "vitest/globals", - "node" - ], - "baseUrl": ".", - "preserveSymlinks": true, - "allowSyntheticDefaultImports": true - }, - "include": [ - "src/**/*" - ], - "exclude": [ - "node_modules", - "dist", - "test", - "../../packages/core/**/*", - "src/examples/**/*", - "extra/**/*", - "extra/hyperbolic_agentkit_TS/**/*" - ] -} \ No newline at end of file diff --git a/packages/plugin-ankr/tsup.config.ts b/packages/plugin-ankr/tsup.config.ts deleted file mode 100644 index a2b714de91033..0000000000000 --- a/packages/plugin-ankr/tsup.config.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - format: ["esm"], - dts: true, - splitting: false, - sourcemap: true, - clean: true, -}); diff --git a/packages/plugin-ankr/vitest.config.ts b/packages/plugin-ankr/vitest.config.ts deleted file mode 100644 index a0787d4b2fcb5..0000000000000 --- a/packages/plugin-ankr/vitest.config.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { defineConfig } from 'vitest/config'; -import path from 'node:path'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - include: ['test/**/*.{test,spec}.{js,mjs,cjs,ts,mts,cts,jsx,tsx}'], - exclude: ['node_modules', 'dist', '.idea', '.git', '.cache'], - root: '.', - reporters: ['verbose'], - coverage: { - reporter: ['text', 'json', 'html'], - exclude: [ - 'node_modules/', - 'test/fixtures/', - 'test/setup/' - ] - }, - setupFiles: ['./test/setup/vitest.setup.ts'] - }, - resolve: { - alias: { - '@': path.resolve(__dirname, './src') - } - } -}); \ No newline at end of file diff --git a/packages/plugin-anyone/.npmignore b/packages/plugin-anyone/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-anyone/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-anyone/README.md b/packages/plugin-anyone/README.md deleted file mode 100644 index 19881825fa070..0000000000000 --- a/packages/plugin-anyone/README.md +++ /dev/null @@ -1,69 +0,0 @@ -# @elizaos/plugin-anyone - -A plugin for integrating Anyone protocol proxy services into Eliza agents. - -## Installation - -```bash -pnpm add @elizaos/plugin-anyone -``` - -## Features - -- Start and stop Anyone client services -- Automatic proxy configuration for axios -- SOCKS proxy support (port 9050) -- Clean proxy cleanup and restoration - -## Usage - -Add the plugin to your agent's configuration: - -```typescript -import { anyonePlugin } from "@elizaos/plugin-anyone"; - -const character = { - plugins: [anyonePlugin] -}; -``` - -### Available Actions - -#### START_ANYONE -Starts the Anyone client and configures proxy settings. - -Example commands: -```plaintext -"Can you start Anyone for me?" -"Initialize the Anyone client please" -"Launch Anyone for me" -``` - -#### STOP_ANYONE -Stops the Anyone client and cleans up proxy settings. - -Example commands: -```plaintext -"Can you stop Anyone for me?" -"Please shut down Anyone" -"Close Anyone for me" -``` - -## Technical Details - -The plugin provides two main services: - -1. `AnyoneClientService`: Manages the Anyone client instance - - Singleton pattern implementation - - Handles client initialization and cleanup - - Configures SOCKS proxy on port 9050 - -2. `AnyoneProxyService`: Handles axios proxy configuration - - Preserves original axios settings - - Automatically applies proxy settings - - Provides clean restoration of original config - -## Dependencies - -- @anyone-protocol/anyone-client: ^0.4.3 -- axios: ^1.7.9 diff --git a/packages/plugin-anyone/__tests__/actions/startAnyone.test.ts b/packages/plugin-anyone/__tests__/actions/startAnyone.test.ts deleted file mode 100644 index 582ec14d94df2..0000000000000 --- a/packages/plugin-anyone/__tests__/actions/startAnyone.test.ts +++ /dev/null @@ -1,103 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { startAnyone } from '../../src/actions/startAnyone'; -import { AnyoneClientService } from '../../src/services/AnyoneClientService'; -import { AnyoneProxyService } from '../../src/services/AnyoneProxyService'; - -vi.mock('../../src/services/AnyoneClientService', () => ({ - AnyoneClientService: { - initialize: vi.fn(), - getInstance: vi.fn(), - stop: vi.fn(), - } -})); - -vi.mock('../../src/services/AnyoneProxyService', () => ({ - AnyoneProxyService: { - getInstance: vi.fn(() => ({ - initialize: vi.fn(), - cleanup: vi.fn() - })) - } -})); - -describe('startAnyone Action', () => { - const mockRuntime = { - getSetting: vi.fn(), - getState: vi.fn(), - setState: vi.fn(), - }; - - const mockMessage = { - content: { - text: 'Start Anyone', - type: 'START_ANYONE' - } - }; - - const mockState = {}; - const mockCallback = vi.fn(); - - beforeEach(() => { - vi.clearAllMocks(); - }); - - describe('validate', () => { - it('should validate successfully', async () => { - const result = await startAnyone.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - }); - - describe('handler', () => { - it('should initialize AnyoneClientService and AnyoneProxyService', async () => { - const mockProxyInstance = { - initialize: vi.fn(), - cleanup: vi.fn() - }; - vi.mocked(AnyoneProxyService.getInstance).mockReturnValue(mockProxyInstance); - - const result = await startAnyone.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(AnyoneClientService.initialize).toHaveBeenCalled(); - expect(AnyoneProxyService.getInstance).toHaveBeenCalled(); - expect(mockProxyInstance.initialize).toHaveBeenCalled(); - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Started Anyone' - }); - expect(result).toBe(true); - }); - - it('should handle initialization errors gracefully', async () => { - const error = new Error('Initialization failed'); - vi.mocked(AnyoneClientService.initialize).mockRejectedValue(error); - - await expect( - startAnyone.handler(mockRuntime, mockMessage, mockState, {}, mockCallback) - ).rejects.toThrow('Initialization failed'); - }); - }); - - describe('metadata', () => { - it('should have correct name and similes', () => { - expect(startAnyone.name).toBe('START_ANYONE'); - expect(startAnyone.similes).toEqual(['ANYONE']); - }); - - it('should have valid examples', () => { - expect(Array.isArray(startAnyone.examples)).toBe(true); - expect(startAnyone.examples.length).toBeGreaterThan(0); - - startAnyone.examples.forEach(example => { - expect(Array.isArray(example)).toBe(true); - expect(example.length).toBe(2); - expect(example[1].content.action).toBe('START_ANYONE'); - }); - }); - }); -}); diff --git a/packages/plugin-anyone/__tests__/actions/stopAnyone.test.ts b/packages/plugin-anyone/__tests__/actions/stopAnyone.test.ts deleted file mode 100644 index f3d4dc541e68f..0000000000000 --- a/packages/plugin-anyone/__tests__/actions/stopAnyone.test.ts +++ /dev/null @@ -1,102 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { stopAnyone } from '../../src/actions/stopAnyone'; -import { AnyoneClientService } from '../../src/services/AnyoneClientService'; -import { AnyoneProxyService } from '../../src/services/AnyoneProxyService'; - -vi.mock('../../src/services/AnyoneClientService', () => ({ - AnyoneClientService: { - initialize: vi.fn(), - getInstance: vi.fn(), - stop: vi.fn(), - } -})); - -vi.mock('../../src/services/AnyoneProxyService', () => ({ - AnyoneProxyService: { - getInstance: vi.fn(() => ({ - initialize: vi.fn(), - cleanup: vi.fn() - })) - } -})); - -describe('stopAnyone Action', () => { - const mockRuntime = { - getSetting: vi.fn(), - getState: vi.fn(), - setState: vi.fn(), - }; - - const mockMessage = { - content: { - text: 'Stop Anyone', - type: 'STOP_ANYONE' - } - }; - - const mockState = {}; - const mockCallback = vi.fn(); - - beforeEach(() => { - vi.clearAllMocks(); - }); - - describe('validate', () => { - it('should validate successfully', async () => { - const result = await stopAnyone.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - }); - - describe('handler', () => { - it('should stop AnyoneClientService and cleanup AnyoneProxyService', async () => { - const mockProxyInstance = { - initialize: vi.fn(), - cleanup: vi.fn() - }; - vi.mocked(AnyoneProxyService.getInstance).mockReturnValue(mockProxyInstance); - - const result = await stopAnyone.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockProxyInstance.cleanup).toHaveBeenCalled(); - expect(AnyoneClientService.stop).toHaveBeenCalled(); - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Stopped Anyone and cleaned up proxy' - }); - expect(result).toBe(true); - }); - - it('should handle cleanup errors gracefully', async () => { - const error = new Error('Cleanup failed'); - vi.mocked(AnyoneClientService.stop).mockRejectedValue(error); - - await expect( - stopAnyone.handler(mockRuntime, mockMessage, mockState, {}, mockCallback) - ).rejects.toThrow('Cleanup failed'); - }); - }); - - describe('metadata', () => { - it('should have correct name and similes', () => { - expect(stopAnyone.name).toBe('STOP_ANYONE'); - expect(stopAnyone.similes).toEqual(['STOP_PROXY']); - }); - - it('should have valid examples', () => { - expect(Array.isArray(stopAnyone.examples)).toBe(true); - expect(stopAnyone.examples.length).toBeGreaterThan(0); - - stopAnyone.examples.forEach(example => { - expect(Array.isArray(example)).toBe(true); - expect(example.length).toBe(2); - expect(example[1].content.action).toBe('STOP_ANYONE'); - }); - }); - }); -}); diff --git a/packages/plugin-anyone/biome.json b/packages/plugin-anyone/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-anyone/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-anyone/package.json b/packages/plugin-anyone/package.json deleted file mode 100644 index d8ee7cb8747cb..0000000000000 --- a/packages/plugin-anyone/package.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "@elizaos/plugin-anyone", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@anyone-protocol/anyone-client": "^0.4.3", - "@elizaos/core": "workspace:*", - "axios": "^1.7.9", - "tsup": "8.3.5" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "vitest": "^3.0.0", - "@vitest/coverage-v8": "^1.2.1" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "test:watch": "vitest", - "test:coverage": "vitest run --coverage", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-anyone/src/actions/index.ts b/packages/plugin-anyone/src/actions/index.ts deleted file mode 100644 index 64f64801d8c54..0000000000000 --- a/packages/plugin-anyone/src/actions/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from "./startAnyone.ts"; -export * from "./stopAnyone.ts"; diff --git a/packages/plugin-anyone/src/actions/startAnyone.ts b/packages/plugin-anyone/src/actions/startAnyone.ts deleted file mode 100644 index 57ea69379b043..0000000000000 --- a/packages/plugin-anyone/src/actions/startAnyone.ts +++ /dev/null @@ -1,92 +0,0 @@ -import type { - ActionExample, - HandlerCallback, - IAgentRuntime, - Memory, - State, - Action, -} from "@elizaos/core"; -import { AnyoneClientService } from "../services/AnyoneClientService"; -import { AnyoneProxyService } from "../services/AnyoneProxyService"; - -export const startAnyone: Action = { - name: "START_ANYONE", - similes: ["ANYONE"], - validate: async (_runtime: IAgentRuntime, _message: Memory) => { - return true; - }, - description: "Start the Anyone client and proxy service", - handler: async ( - _runtime: IAgentRuntime, - _message: Memory, - _state: State, - _options: { [key: string]: unknown }, - _callback: HandlerCallback - ): Promise => { - await AnyoneClientService.initialize(); - //lint says unused - //const anon = AnyoneClientService.getInstance(); - const proxyService = AnyoneProxyService.getInstance(); - await proxyService.initialize(); - - _callback({ - text: 'Started Anyone', - }); - - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { text: "Can you start Anyone for me?" }, - }, - { - user: "{{user2}}", - content: { - text: "I'll start Anyone right away", - action: "START_ANYONE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { text: "Initialize the Anyone client please" }, - }, - { - user: "{{user2}}", - content: { - text: "Starting Anyone now", - action: "START_ANYONE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { text: "I need to start using Anyone" }, - }, - { - user: "{{user2}}", - content: { - text: "I'll help you start Anyone", - action: "START_ANYONE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { text: "Launch Anyone for me" }, - }, - { - user: "{{user2}}", - content: { - text: "I'll launch Anyone for you now", - action: "START_ANYONE", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-anyone/src/actions/stopAnyone.ts b/packages/plugin-anyone/src/actions/stopAnyone.ts deleted file mode 100644 index 349e73098a8fe..0000000000000 --- a/packages/plugin-anyone/src/actions/stopAnyone.ts +++ /dev/null @@ -1,91 +0,0 @@ -import type { - ActionExample, - HandlerCallback, - IAgentRuntime, - Memory, - State, - Action, -} from "@elizaos/core"; -import { AnyoneClientService } from "../services/AnyoneClientService"; -import { AnyoneProxyService } from "../services/AnyoneProxyService"; - -export const stopAnyone: Action = { - name: "STOP_ANYONE", - similes: ["STOP_PROXY"], - validate: async (_runtime: IAgentRuntime, _message: Memory) => { - return true; - }, - description: "Stop the Anyone client and proxy service", - handler: async ( - _runtime: IAgentRuntime, - _message: Memory, - _state: State, - _options: { [key: string]: unknown }, - _callback: HandlerCallback - ): Promise => { - const proxyService = AnyoneProxyService.getInstance(); - proxyService.cleanup(); - - await AnyoneClientService.stop(); - - _callback({ - text: 'Stopped Anyone and cleaned up proxy', - }); - - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { text: "Can you stop Anyone for me?" }, - }, - { - user: "{{user2}}", - content: { - text: "I'll stop Anyone right away", - action: "STOP_ANYONE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { text: "Please shut down Anyone" }, - }, - { - user: "{{user2}}", - content: { - text: "Stopping Anyone now", - action: "STOP_ANYONE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { text: "I need to stop using Anyone" }, - }, - { - user: "{{user2}}", - content: { - text: "I'll help you stop Anyone", - action: "STOP_ANYONE", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { text: "Close Anyone for me" }, - }, - { - user: "{{user2}}", - content: { - text: "I'll close Anyone for you now", - action: "STOP_ANYONE", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-anyone/src/index.ts b/packages/plugin-anyone/src/index.ts deleted file mode 100644 index 509463f4d0c15..0000000000000 --- a/packages/plugin-anyone/src/index.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { startAnyone } from "./actions/startAnyone.ts"; -import { stopAnyone } from "./actions/stopAnyone.ts"; -export * as actions from "./actions"; - -export const anyonePlugin: Plugin = { - name: "anyone", - description: "Proxy requests through Anyone", - actions: [startAnyone, stopAnyone], -}; diff --git a/packages/plugin-anyone/src/services/AnyoneClientService.ts b/packages/plugin-anyone/src/services/AnyoneClientService.ts deleted file mode 100644 index 4db63300266f4..0000000000000 --- a/packages/plugin-anyone/src/services/AnyoneClientService.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { Anon } from "@anyone-protocol/anyone-client"; - -export class AnyoneClientService { - private static instance: Anon | null = null; - - static getInstance(): Anon | null { - return this.instance; - } - - static async initialize(): Promise { - if (!this.instance) { - this.instance = new Anon({ - displayLog: true, - socksPort: 9050, - autoTermsAgreement: true, - }); - await this.instance.start(); - } - } - - static async stop(): Promise { - if (this.instance) { - await this.instance.stop(); - this.instance = null; - } - } -} diff --git a/packages/plugin-anyone/src/services/AnyoneProxyService.ts b/packages/plugin-anyone/src/services/AnyoneProxyService.ts deleted file mode 100644 index fde164d20521e..0000000000000 --- a/packages/plugin-anyone/src/services/AnyoneProxyService.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { AnonSocksClient } from "@anyone-protocol/anyone-client"; -import axios from "axios"; -import type { AxiosDefaults, AxiosStatic } from "axios"; -import { AnyoneClientService } from "./AnyoneClientService"; - -export class AnyoneProxyService { - private static instance: AnyoneProxyService | null = null; - private sockClient: AnonSocksClient | null = null; - private originalAxios: Partial | null = null; - private originalDefaults: Partial | null = null; - - static getInstance(): AnyoneProxyService { - if (!AnyoneProxyService.instance) { - AnyoneProxyService.instance = new AnyoneProxyService(); - } - return AnyoneProxyService.instance; - } - - async initialize(): Promise { - await AnyoneClientService.initialize(); - const anon = AnyoneClientService.getInstance(); - if (!anon) { - throw new Error("Anyone client not initialized"); - } - - this.sockClient = new AnonSocksClient(anon); - - // Store original axios configuration - this.originalDefaults = { ...axios.defaults } as typeof axios.defaults; - this.originalAxios = { - request: axios.request, - get: axios.get, - post: axios.post, - put: axios.put, - delete: axios.delete, - patch: axios.patch, - }; - - // Create new defaults object instead of modifying existing one - axios.defaults = { - ...axios.defaults, - ...this.sockClient.axios.defaults, - } as typeof axios.defaults; - - // Apply proxy methods - axios.request = this.sockClient.axios.request.bind( - this.sockClient.axios - ); - axios.get = this.sockClient.axios.get.bind(this.sockClient.axios); - axios.post = this.sockClient.axios.post.bind(this.sockClient.axios); - axios.put = this.sockClient.axios.put.bind(this.sockClient.axios); - axios.delete = this.sockClient.axios.delete.bind(this.sockClient.axios); - axios.patch = this.sockClient.axios.patch.bind(this.sockClient.axios); - } - - cleanup(): void { - if (this.originalAxios && this.originalDefaults) { - // Create fresh axios defaults - axios.defaults = { ...this.originalDefaults } as typeof axios.defaults; - - // Create fresh bindings - axios.request = this.originalAxios.request.bind(axios); - axios.get = this.originalAxios.get.bind(axios); - axios.post = this.originalAxios.post.bind(axios); - axios.put = this.originalAxios.put.bind(axios); - axios.delete = this.originalAxios.delete.bind(axios); - axios.patch = this.originalAxios.patch.bind(axios); - - this.originalAxios = null; - this.originalDefaults = null; - } - AnyoneProxyService.instance = null; - } -} diff --git a/packages/plugin-anyone/tsconfig.json b/packages/plugin-anyone/tsconfig.json deleted file mode 100644 index 834c4dce26957..0000000000000 --- a/packages/plugin-anyone/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "types": [ - "node" - ] - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/plugin-anyone/tsup.config.ts b/packages/plugin-anyone/tsup.config.ts deleted file mode 100644 index e42bf4efeae22..0000000000000 --- a/packages/plugin-anyone/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-anyone/vitest.config.ts b/packages/plugin-anyone/vitest.config.ts deleted file mode 100644 index e11899a25b646..0000000000000 --- a/packages/plugin-anyone/vitest.config.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - include: ['__tests__/**/*.test.ts'], - mockReset: true, - clearMocks: true, - restoreMocks: true, - }, -}); diff --git a/packages/plugin-apro/README.MD b/packages/plugin-apro/README.MD deleted file mode 100644 index 354f70d8d1a63..0000000000000 --- a/packages/plugin-apro/README.MD +++ /dev/null @@ -1,172 +0,0 @@ - -# @elizaos/plugin-apro - -Foundation plugin that enables advanced agent interactions, data verification, and price queries on the Eliza OS platform. It streamlines agent creation, verification processes, and provides a flexible framework for building robust agent-based solutions. - -## Overview - -The Apro plugin bridges agent-based logic with the Eliza ecosystem. It handles agent registration, data verification, and price queries, empowering both automated and user-driven workflows. - -## Features - -### Agent Operations -- **Agent Creation**: Deploy new agents with custom settings -- **Registration**: Register agents on-chain or via standardized processes -- **Multi-Signer Framework**: Supports threshold-based approval flows - -### Data Verification -- **Chain Validation**: Verify data authenticity on-chain -- **Transaction Execution**: Handle verification logic with built-in security checks -- **Auto-Hashing**: Convert raw data to hashed formats when needed -- **Metadata Parsing**: Validate content type, encoding, and compression - -### Price Queries -- **Live Price Data**: Fetch price information for various pairs -- **Format Validation**: Normalize user query inputs to standard trading-pair formats -- **APIs Integration**: Retrieve real-time or near-real-time pricing information - -## Security Features - -### Access Control -- **Private Key Management**: Safe usage of private keys for transaction signing -- **Environment Variables**: Secure injection of credentials -- **On-Chain Validation**: Leverage on-chain contract checks - -### Verification -- **Input Validation**: Strict schema checks before on-chain operations -- **Transaction Receipts**: Provide verifiable transaction details -- **Error Handling**: Detailed error logs for quick debugging - -## Installation - -```bash -npm install @elizaos/plugin-apro -``` - -## Configuration - -Configure the plugin by setting environment variables or runtime settings: -- APRO_RPC_URL -- APRO_PROXY_ADDRESS -- APRO_PRIVATE_KEY -- APRO_CONVERTER_ADDRESS -- APRO_AUTO_HASH_DATA - -## Usage - -### Basic Setup -```typescript -import { aproPlugin } from "@elizaos/plugin-apro"; - -// Initialize the plugin -const runtime = await initializeRuntime({ - plugins: [aproPlugin], -}); -``` - -### Actions - -#### CREATE_AND_REGISTER_AGENT -Creates and registers an agent using specified settings. - -```typescript -const result = await runtime.executeAction("CREATE_AND_REGISTER_AGENT", { - signers: [...], - threshold: 3, - agentHeader: { ... }, - // ...other fields... -}); -``` - -#### VERIFY -Verifies data on-chain via the Agent SDK. - -```typescript -const result = await runtime.executeAction("VERIFY", { - payload: { - data: "0x...hexData", - signatures: [...], - }, - agent: "0x...agentAddress", - digest: "0x...digestString", -}); -``` - -#### PRICE_QUERY -Fetches live price data for a specified trading pair. - -```typescript -const result = await runtime.executeAction("PRICE_QUERY", { - pair: "BTC/USD", -}); -``` - -## Performance Optimization - -1. **Cache Management** - - Implement caching for frequent queries - - Monitor retrieval times and cache hits - -2. **Network Efficiency** - - Batch requests where possible - - Validate response parsing to reduce overhead - -## System Requirements -- Node.js 16.x or higher -- Sufficient network access to on-chain endpoints -- Basic configuration of environment variables -- Minimum 4GB RAM recommended - -## Troubleshooting - -1. **Invalid Agent Settings** - - Ensure signers and threshold are correct - - Validate agentHeader for proper UUIDs and numeric values - -2. **Verification Failures** - - Check the input data formats - - Confirm environment variables are set - -3. **Price Query Errors** - - Verify the trading pair format - - Check external API availability - -## Safety & Security - -1. **Credential Management** - - Store private keys securely - - Do not commit secrets to version control - -2. **Transaction Limits** - - Configure thresholds to mitigate abuse - - Log transaction attempts and failures - -3. **Monitoring & Logging** - - Track unusual activity - - Maintain detailed audit logs - -## Support - -For issues or feature requests: -1. Check existing documentation -2. Submit a GitHub issue with relevant details -3. Include transaction logs and system info if applicable - -## Contributing - -We welcome pull requests! Refer to the project’s CONTRIBUTING.md and open discussions to coordinate efforts. - -## Credits - -- [APRO](https://www.apro.com/) - Plugin sponsor and partner -- [ai-agent-sdk-js](https://github.com/APRO-com/ai-agent-sdk-js) - Underlying agent SDK -- [ethers.js](https://docs.ethers.io/) - Transaction and contract interaction -- Community contributors for feedback and testing - -For more information about Apro plugin capabilities: - -- [Apro Documentation](https://docs.apro.com/en) - -## License - -This plugin is part of the Eliza project. Refer to the main project repository for licensing details. \ No newline at end of file diff --git a/packages/plugin-apro/__tests__/actions/attpsPriceQuery.test.ts b/packages/plugin-apro/__tests__/actions/attpsPriceQuery.test.ts deleted file mode 100644 index 179b829f31cb3..0000000000000 --- a/packages/plugin-apro/__tests__/actions/attpsPriceQuery.test.ts +++ /dev/null @@ -1,174 +0,0 @@ -// Mock declarations must come first -vi.mock('@elizaos/core'); -vi.mock('ai-agent-sdk-js'); - -import { vi, describe, it, expect, beforeEach } from 'vitest'; -import type { IAgentRuntime, Memory, State } from '@elizaos/core'; -import { generateObject } from '@elizaos/core'; -import { attpsPriceQuery } from '../../src/actions/attpsPriceQuery'; - -describe('attpsPriceQuery', () => { - const mockRuntime: IAgentRuntime = { - composeState: vi.fn(), - updateRecentMessageState: vi.fn(), - getSetting: vi.fn() - } as unknown as IAgentRuntime; - - const mockMessage: Memory = { - userId: 'test-user', - agentId: 'test-agent', - roomId: 'test-room', - content: { - text: 'query price' - } - } as Memory; - - const mockState: State = {}; - const mockCallback = vi.fn(); - const mockFetch = vi.fn(); - - beforeEach(() => { - vi.clearAllMocks(); - vi.mocked(mockRuntime.composeState).mockResolvedValue(mockState); - vi.mocked(mockRuntime.updateRecentMessageState).mockResolvedValue(mockState); - global.fetch = mockFetch; - }); - - describe('validate', () => { - it('should always return true', async () => { - const result = await attpsPriceQuery.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - }); - - describe('handler', () => { - const mockPriceQuery = { - sourceAgentId: 'test-source-agent', - feedId: 'test-feed' - }; - - const mockPriceResponse = { - code: 0, - message: 'success', - result: { - askPrice: '100.50', - bidPrice: '100.40', - midPrice: '100.45', - validTimeStamp: '1234567890' - } - }; - - it('should successfully fetch price data', async () => { - // Mock generateObject to return price query params - vi.mocked(generateObject).mockResolvedValueOnce({ - object: mockPriceQuery - }); - - // Mock successful API response - mockFetch.mockResolvedValueOnce({ - json: () => Promise.resolve(mockPriceResponse) - }); - - const result = await attpsPriceQuery.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Ask price: 100.5') - })); - expect(mockFetch).toHaveBeenCalledWith( - expect.stringContaining('sourceAgentId=test-source-agent') - ); - }); - - it('should handle price query params generation failure', async () => { - // Mock generateObject to throw an error - vi.mocked(generateObject).mockRejectedValueOnce( - new Error('Failed to generate params') - ); - - await attpsPriceQuery.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Failed to generate price query params') - }); - }); - - it('should handle API error response', async () => { - // Mock generateObject to return price query params - vi.mocked(generateObject).mockResolvedValueOnce({ - object: mockPriceQuery - }); - - // Mock API error response - mockFetch.mockResolvedValueOnce({ - json: () => Promise.resolve({ - code: 1, - message: 'API Error' - }) - }); - - await attpsPriceQuery.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Error fetching price data, error: API Error' - }); - }); - - it('should handle network failure', async () => { - // Mock generateObject to return price query params - vi.mocked(generateObject).mockResolvedValueOnce({ - object: mockPriceQuery - }); - - // Mock network failure - mockFetch.mockRejectedValueOnce(new Error('Network error')); - - await attpsPriceQuery.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Error fetching price data, error: Network error' - }); - }); - }); - - describe('metadata', () => { - it('should have correct name and description', () => { - expect(attpsPriceQuery.name).toBe('ATTPS_PRICE_QUERY'); - expect(attpsPriceQuery.description).toContain('Call remote API to fetch price data'); - }); - - it('should have valid examples', () => { - expect(Array.isArray(attpsPriceQuery.examples)).toBe(true); - expect(attpsPriceQuery.examples.length).toBeGreaterThan(0); - - attpsPriceQuery.examples.forEach(example => { - expect(Array.isArray(example)).toBe(true); - expect(example.length).toBe(2); - expect(example[1].content.action).toBe('ATTPS_PRICE_QUERY'); - }); - }); - }); -}); diff --git a/packages/plugin-apro/__tests__/actions/createAndRegisterAgent.test.ts b/packages/plugin-apro/__tests__/actions/createAndRegisterAgent.test.ts deleted file mode 100644 index 1dcaf85e7ddd2..0000000000000 --- a/packages/plugin-apro/__tests__/actions/createAndRegisterAgent.test.ts +++ /dev/null @@ -1,155 +0,0 @@ -// Mock declarations must come first -vi.mock('@elizaos/core'); -vi.mock('ai-agent-sdk-js', () => { - const mockCreateAndRegisterAgent = vi.fn(); - return { - AgentSDK: vi.fn().mockImplementation(() => ({ - createAndRegisterAgent: mockCreateAndRegisterAgent - })), - parseNewAgentAddress: vi.fn().mockReturnValue('test-agent-address') - }; -}); -vi.mock('../../src/types', () => ({ - isAgentSettings: vi.fn().mockReturnValue(true), - AgentSettingsSchema: {} -})); - -import { vi, describe, it, expect, beforeEach } from 'vitest'; -import type { IAgentRuntime, Memory, State } from '@elizaos/core'; -import { generateObject } from '@elizaos/core'; -import { createAndRegisterAgent } from '../../src/actions/createAndRegisterAgent'; -import { AgentSDK } from 'ai-agent-sdk-js'; - -describe('createAndRegisterAgent', () => { - const mockRuntime: IAgentRuntime = { - composeState: vi.fn(), - updateRecentMessageState: vi.fn(), - getSetting: vi.fn() - } as unknown as IAgentRuntime; - - const mockMessage: Memory = { - userId: 'test-user', - agentId: 'test-agent', - roomId: 'test-room', - content: { - text: 'create agent' - } - } as Memory; - - const mockState: State = {}; - const mockCallback = vi.fn(); - const mockTx = { - hash: 'test-hash', - wait: vi.fn().mockResolvedValue({ hash: 'test-hash' }) - }; - const mockAgentSettings = { - name: 'test-agent', - description: 'test description', - settings: { - key: 'value' - } - }; - - beforeEach(() => { - vi.clearAllMocks(); - vi.mocked(mockRuntime.composeState).mockResolvedValue(mockState); - vi.mocked(mockRuntime.updateRecentMessageState).mockResolvedValue(mockState); - }); - - describe('validate', () => { - it('should always return true', async () => { - const result = await createAndRegisterAgent.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - }); - - describe('handler', () => { - it('should successfully create and register agent', async () => { - // Mock generateObject to return agent settings - vi.mocked(generateObject).mockResolvedValueOnce({ - object: mockAgentSettings - }); - - // Mock successful registration - const mockAgent = { - createAndRegisterAgent: vi.fn().mockResolvedValue(mockTx) - }; - vi.mocked(AgentSDK).mockImplementation(() => mockAgent); - - await createAndRegisterAgent.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Agent created and registered successfully: test-agent-address' - }); - expect(mockAgent.createAndRegisterAgent).toHaveBeenCalledWith({agentSettings: mockAgentSettings}); - }); - - it('should handle agent settings generation failure', async () => { - // Mock generateObject to throw an error - vi.mocked(generateObject).mockRejectedValueOnce( - new Error('Failed to generate settings') - ); - - await createAndRegisterAgent.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Failed to generate Agent settings. Please provide valid input.' - }); - }); - - it('should handle registration failure', async () => { - // Mock generateObject to return agent settings - vi.mocked(generateObject).mockResolvedValueOnce({ - object: mockAgentSettings - }); - - // Mock registration failure - const mockAgent = { - createAndRegisterAgent: vi.fn().mockRejectedValue(new Error('Registration failed')) - }; - vi.mocked(AgentSDK).mockImplementation(() => mockAgent); - - await createAndRegisterAgent.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Error creating agent: Registration failed' - }); - }); - }); - - describe('metadata', () => { - it('should have correct name and description', () => { - expect(createAndRegisterAgent.name).toBe('CREATE_AND_REGISTER_AGENT'); - expect(createAndRegisterAgent.description).toContain('Create and register an agent with APRO'); - }); - - it('should have valid examples', () => { - expect(Array.isArray(createAndRegisterAgent.examples)).toBe(true); - expect(createAndRegisterAgent.examples.length).toBeGreaterThan(0); - - createAndRegisterAgent.examples.forEach(example => { - expect(Array.isArray(example)).toBe(true); - expect(example.length).toBe(2); - expect(example[1].content.action).toBe('CREATE_AND_REGISTER_AGENT'); - }); - }); - }); -}); diff --git a/packages/plugin-apro/__tests__/actions/verifyData.test.ts b/packages/plugin-apro/__tests__/actions/verifyData.test.ts deleted file mode 100644 index b8dd65c708506..0000000000000 --- a/packages/plugin-apro/__tests__/actions/verifyData.test.ts +++ /dev/null @@ -1,169 +0,0 @@ -// Mock declarations must come first -vi.mock('@elizaos/core', () => ({ - Action: class {}, - composeContext: vi.fn(), - elizaLogger: { - info: vi.fn(), - error: vi.fn() - }, - generateObject: vi.fn(), - ModelClass: { - LARGE: 'LARGE' - } -})); - -vi.mock('ai-agent-sdk-js', () => { - const mockVerify = vi.fn(); - return { - AgentSDK: vi.fn().mockImplementation(() => ({ - verify: mockVerify - })) - }; -}); - -import { vi, describe, it, expect, beforeEach } from 'vitest'; -import type { IAgentRuntime, Memory, State } from '@elizaos/core'; -import { generateObject } from '@elizaos/core'; -import { verifyData } from '../../src/actions/verifyData'; -import { AgentSDK } from 'ai-agent-sdk-js'; - -describe('verifyData', () => { - const mockRuntime: IAgentRuntime = { - composeState: vi.fn(), - updateRecentMessageState: vi.fn(), - getSetting: vi.fn() - } as unknown as IAgentRuntime; - - const mockMessage: Memory = { - userId: 'test-user', - agentId: 'test-agent', - roomId: 'test-room', - content: { - text: 'verify data' - } - } as Memory; - - const mockState: State = {}; - const mockCallback = vi.fn(); - const mockVerify = vi.fn(); - - beforeEach(() => { - vi.clearAllMocks(); - vi.mocked(mockRuntime.composeState).mockResolvedValue(mockState); - vi.mocked(mockRuntime.updateRecentMessageState).mockResolvedValue(mockState); - }); - - describe('validate', () => { - it('should always return true', async () => { - const result = await verifyData.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - }); - - describe('handler', () => { - const mockVerifyParams = { - agent: 'test-agent', - digest: 'test-digest', - payload: { - data: 'test-data', - dataHash: 'test-hash', - signatures: [{ - r: 'test-r', - s: 'test-s', - v: 27 - }] - } - }; - - it('should successfully verify data', async () => { - // Mock generateObject to return verify params - vi.mocked(generateObject).mockResolvedValueOnce({ - object: mockVerifyParams - }); - - const mockTx = { - hash: 'test-hash', - wait: vi.fn().mockResolvedValue({ hash: 'test-hash' }) - }; - - const mockAgent = { - verify: vi.fn().mockResolvedValue(mockTx) - }; - vi.mocked(AgentSDK).mockImplementation(() => mockAgent); - - await verifyData.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Success: Data verified successfully. Transaction ID: test-hash' - }); - expect(mockAgent.verify).toHaveBeenCalledWith(mockVerifyParams); - }); - - it('should handle verify params generation failure', async () => { - // Mock generateObject to throw an error - vi.mocked(generateObject).mockRejectedValueOnce( - new Error('Failed to generate params') - ); - - await verifyData.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Failed to generate verify params. Please provide valid input.' - }); - }); - - it('should handle verification failure', async () => { - // Mock generateObject to return verify params - vi.mocked(generateObject).mockResolvedValueOnce({ - object: mockVerifyParams - }); - - const mockAgent = { - verify: vi.fn().mockRejectedValue(new Error('Verification failed')) - }; - vi.mocked(AgentSDK).mockImplementation(() => mockAgent); - - await verifyData.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Error verifying data: Verification failed' - }); - }); - }); - - describe('metadata', () => { - it('should have correct name and description', () => { - expect(verifyData.name).toBe('VERIFY'); - expect(verifyData.description).toContain('Verify data with APRO'); - }); - - it('should have valid examples', () => { - expect(Array.isArray(verifyData.examples)).toBe(true); - expect(verifyData.examples.length).toBeGreaterThan(0); - - verifyData.examples.forEach(example => { - expect(Array.isArray(example)).toBe(true); - expect(example.length).toBe(2); - expect(example[1].content.action).toBe('VERIFY'); - }); - }); - }); -}); diff --git a/packages/plugin-apro/__tests__/index.test.ts b/packages/plugin-apro/__tests__/index.test.ts deleted file mode 100644 index 03f70546f9364..0000000000000 --- a/packages/plugin-apro/__tests__/index.test.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { vi, describe, it, expect } from 'vitest'; - -vi.mock('@elizaos/core', () => ({ - Plugin: class {}, - Action: class {}, - composeContext: vi.fn(), - elizaLogger: { - info: vi.fn(), - error: vi.fn() - }, - generateObject: vi.fn(), - ModelClass: { - LARGE: 'LARGE' - } -})); - -vi.mock('ai-agent-sdk-js', () => ({ - AgentSDK: { - createAndRegisterAgent: vi.fn(), - verify: vi.fn() - }, - AgentSettings: class {}, - VerifyParams: class {}, - parseNewAgentAddress: vi.fn() -})); - -import { aproPlugin } from '../src'; - -describe('aproPlugin', () => { - it('should have correct plugin metadata', () => { - expect(aproPlugin.name).toBe('apro'); - expect(aproPlugin.description).toBe('Apro Plugin for Eliza'); - }); - - it('should register all required actions', () => { - expect(aproPlugin.actions).toHaveLength(3); - - const actionNames = aproPlugin.actions.map(action => action.name); - expect(actionNames).toContain('CREATE_AND_REGISTER_AGENT'); - expect(actionNames).toContain('VERIFY'); - expect(actionNames).toContain('ATTPS_PRICE_QUERY'); - }); - - it('should have correct similes for each action', () => { - const createAction = aproPlugin.actions.find(a => a.name === 'CREATE_AND_REGISTER_AGENT'); - expect(createAction?.similes).toContain('CREATE_AGENT'); - expect(createAction?.similes).toContain('REGISTER_AGENT'); - - const verifyAction = aproPlugin.actions.find(a => a.name === 'VERIFY'); - expect(verifyAction?.similes).toContain('VERIFY_DATA'); - - const priceAction = aproPlugin.actions.find(a => a.name === 'ATTPS_PRICE_QUERY'); - expect(priceAction?.similes).toContain('ATTPS_PRICE_FETCH'); - }); -}); diff --git a/packages/plugin-apro/biome.json b/packages/plugin-apro/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-apro/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-apro/package.json b/packages/plugin-apro/package.json deleted file mode 100644 index 016ded0200932..0000000000000 --- a/packages/plugin-apro/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "@elizaos/plugin-apro", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "ai-agent-sdk-js": "^0.0.2", - "@ethersproject/contracts": "^5.7.0", - "@ethersproject/providers": "^5.7.0", - "ethers": "^5.7.2" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "tsup": "8.3.5", - "vitest": "2.1.9" - } -} diff --git a/packages/plugin-apro/src/actions/attpsPriceQuery.ts b/packages/plugin-apro/src/actions/attpsPriceQuery.ts deleted file mode 100644 index e9374c676ad42..0000000000000 --- a/packages/plugin-apro/src/actions/attpsPriceQuery.ts +++ /dev/null @@ -1,118 +0,0 @@ -import type { Action, HandlerCallback, IAgentRuntime, Memory, State } from "@elizaos/core"; -import { composeContext, elizaLogger, generateObject, ModelClass } from "@elizaos/core"; -import { attpsPriceQueryTemplate } from "../templates"; -import type { AttpsPriceQuery, AttpsPriceQueryResponse } from "../types"; -import { AttpsPriceQuerySchema, isAttpsPriceQuery } from "../types"; - -async function fetchPriceData(sourceAgentId: string, feedId: string) { - const response = await fetch(`https://ai-agent-test.apro.com/api/ai-agent/price-detail?sourceAgentId=${sourceAgentId}&feedId=${feedId}`); - const { result, code, message } = await response.json(); - if (code !== 0) { - throw new Error(message); - } - return result as AttpsPriceQueryResponse; -} - -function cleanNumber(numStr: string) { - return Number.parseFloat(numStr).toString(); -} - -export const attpsPriceQuery: Action = { - name: "ATTPS_PRICE_QUERY", - similes: [ - 'ATTPS_PRICE_FETCH', - ], - description: "Call remote API to fetch price data for a given source agent id and feed id.", - validate: async (_runtime: IAgentRuntime, _message: Memory) => { - return true; - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state?: State, - _options?: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Generate price query params - let attpsPriceQuery: AttpsPriceQuery; - try { - const response = await generateObject({ - runtime, - context: composeContext({ - state: currentState, - template: attpsPriceQueryTemplate, - }), - modelClass: ModelClass.LARGE, - schema: AttpsPriceQuerySchema, - }); - attpsPriceQuery = response.object as AttpsPriceQuery; - elizaLogger.info('The price query params received:', attpsPriceQuery); - } catch (error: unknown) { - const errorMessage = error instanceof Error ? error.message : 'Unknown error'; - elizaLogger.error('Failed to generate price query params:', errorMessage); - if (callback) { - callback({ - text: 'Failed to generate price query params. Please provide valid input.', - }); - } - return; - } - - // Validate price query params - if (!isAttpsPriceQuery(attpsPriceQuery)) { - elizaLogger.error('Invalid price query params:', attpsPriceQuery); - if (callback) { - callback({ - text: 'Invalid price query params. Please provide valid input.', - }); - } - return; - } - - // Fetch price data - try { - const { sourceAgentId, feedId } = attpsPriceQuery; - const priceData = await fetchPriceData(sourceAgentId, feedId); - elizaLogger.info('The Price data received:', priceData); - - const message = `Ask price: ${cleanNumber(priceData.askPrice)}\nBid price: ${cleanNumber(priceData.bidPrice)}\nMid price: ${cleanNumber(priceData.midPrice)}\nTimestamp: ${priceData.validTimeStamp}`; - if (callback) { - callback({ - text: `Here is the price data:\n${message}`, - }); - } - } catch (error: unknown) { - const errorMessage = error instanceof Error ? error.message : 'Unknown error'; - elizaLogger.error('Error fetching price data:', errorMessage); - if (callback) { - callback({ - text: `Error fetching price data: ${errorMessage}`, - }); - } - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Can you fetch price data for source agent id ... and feed id ...?", - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll fetch price data for you. Give me a moment.", - action: 'ATTPS_PRICE_QUERY', - }, - } - ], - ], -} \ No newline at end of file diff --git a/packages/plugin-apro/src/actions/createAndRegisterAgent.ts b/packages/plugin-apro/src/actions/createAndRegisterAgent.ts deleted file mode 100644 index f31aa16866e13..0000000000000 --- a/packages/plugin-apro/src/actions/createAndRegisterAgent.ts +++ /dev/null @@ -1,148 +0,0 @@ -import type { Action, HandlerCallback, IAgentRuntime, Memory, State } from "@elizaos/core"; -import { composeContext, elizaLogger, generateObject, ModelClass } from "@elizaos/core"; -import { AgentSDK, parseNewAgentAddress } from "ai-agent-sdk-js"; -import type { AgentSettings } from "ai-agent-sdk-js"; -import { createAgentTemplate } from "../templates"; -import type { ContractTransactionResponse } from "ethers"; -import { AgentSettingsSchema, isAgentSettings } from "../types"; - -export const createAndRegisterAgent: Action = { - name: "CREATE_AND_REGISTER_AGENT", - similes: [ - 'CREATE_AGENT', - 'REGISTER_AGENT', - ], - description: "Create and register an agent with APRO. User must provide agent settings.", - validate: async (_runtime: IAgentRuntime, _message: Memory) => { - return true; - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state?: State, - _options?: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Generate agent settings - let agentSettings: AgentSettings - try { - const agentSettingsDetail = await generateObject({ - runtime, - context: composeContext({ - state: currentState, - template: createAgentTemplate, - }), - modelClass: ModelClass.LARGE, - schema: AgentSettingsSchema, - }); - agentSettings = agentSettingsDetail.object as AgentSettings; - elizaLogger.info('The Agent settings received:', agentSettings); - } catch (error: unknown) { - const errorMessage = error instanceof Error ? error.message : 'Unknown error'; - elizaLogger.error('Failed to generate Agent settings:', errorMessage); - if (callback) { - callback({ - text: 'Failed to generate Agent settings. Please provide valid input.', - }); - } - return; - } - - // Validate agent settings - if (!isAgentSettings(agentSettings)) { - elizaLogger.error('Invalid Agent settings:', agentSettings); - if (callback) { - callback({ - text: 'Invalid Agent settings. Please provide valid input.', - }); - } - return; - } - - // Create SDK agent - let agent: AgentSDK; - try { - agent = new AgentSDK({ - proxyAddress: runtime.getSetting('APRO_PROXY_ADDRESS') ?? process.env.APRO_PROXY_ADDRESS, - rpcUrl: runtime.getSetting('APRO_RPC_URL') ?? process.env.APRO_RPC_URL, - privateKey: runtime.getSetting('APRO_PRIVATE_KEY') ?? process.env.APRO_PRIVATE_KEY, - }); - } catch (error: unknown) { - const errorMessage = error instanceof Error ? error.message : 'Unknown error'; - elizaLogger.error('Failed to create Agent SDK:', errorMessage); - if (callback) { - callback({ - text: 'Failed to create Agent SDK. Please check the apro plugin configuration.', - }); - } - return; - } - - // Create and register agent - let tx: ContractTransactionResponse; - try { - tx = await agent.createAndRegisterAgent({agentSettings}); - elizaLogger.info('Successfully send create and register agent transaction:', tx.hash); - - const receipt = await tx.wait(); - const agentAddress = parseNewAgentAddress(receipt); - - elizaLogger.info(`Created agent at address: ${agentAddress}`); - if (callback) { - callback({ text: `Agent created and registered successfully: ${agentAddress}` }); - } - } catch (error: unknown) { - const errorMessage = error instanceof Error ? error.message : 'Unknown error'; - elizaLogger.error(`Error creating agent: ${errorMessage}`); - if (callback) { - const message = tx?.hash - ? `Error creating agent: ${errorMessage}. Transaction hash: ${tx.hash}` - : `Error creating agent: ${errorMessage}`; - await callback({ text: message }); - } - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: `I want to Create and register apro ai-agent with the following settings: - { - signers: [ - '0x003CD3bD8Ac5b045be8E49d4dfd9928E1765E471', - '0xdE3701195b9823E41b3fc2c98922A94399E2a01C', - '0xB54E5D4faa950e8B6a01ed5a790Ac260c81Ad224', - '0x48eE063a6c67144E09684ac8AD9a0044836f348B', - '0xbBbCc052F1277dd94e88e8E5BD6D7FF9a29BaC98' - ], - threshold: 3, - converterAddress: "0x24c36e9996eb84138Ed7cAa483B4c59FF7640E5C", - agentHeader: { - sourceAgentName: 'ElizaOS Test Agent', - targetAgentId: '1105302c-7556-49b2-b6fe-3aedba9c0682', - messageType: 0, - priority: 1, - ttl: 3600, - }, - }` - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll help you create and register the agent.", - action: "CREATE_AND_REGISTER_AGENT", - }, - }, - ] - ], -} \ No newline at end of file diff --git a/packages/plugin-apro/src/actions/priceQuery.ts b/packages/plugin-apro/src/actions/priceQuery.ts deleted file mode 100644 index 5c5869b45047d..0000000000000 --- a/packages/plugin-apro/src/actions/priceQuery.ts +++ /dev/null @@ -1,125 +0,0 @@ -import type { Action, HandlerCallback, IAgentRuntime, Memory, State } from "@elizaos/core"; -import { composeContext, elizaLogger, generateObject, ModelClass } from "@elizaos/core"; -import { priceQueryTemplate } from "../templates"; -import type { PriceData, PriceQueryParams } from "../types"; -import { isPriceQueryParams, PriceQueryParamsSchema } from "../types"; - -async function fetchPriceData(pair: string) { - const response = await fetch(`https://live-api.apro.com/api/live-stream/reports?pair=${pair}`); - const { result } = await response.json(); - return result as PriceData[]; -} - -export const priceQuery: Action = { - name: "PRICE_QUERY", - similes: [ - 'PRICE_FETCH', - ], - description: "Call remote API to fetch price data for a given pair.", - validate: async (_runtime: IAgentRuntime, _message: Memory) => { - return true; - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state?: State, - _options?: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Generate price query params - let priceQueryParams: PriceQueryParams; - try { - const response = await generateObject({ - runtime, - context: composeContext({ - state: currentState, - template: priceQueryTemplate, - }), - modelClass: ModelClass.LARGE, - schema: PriceQueryParamsSchema, - }); - priceQueryParams = response.object as PriceQueryParams; - elizaLogger.info('The price query params received:', priceQueryParams); - } catch (error: unknown) { - const errorMessage = error instanceof Error ? error.message : 'Unknown error'; - elizaLogger.error('Failed to generate price query params:', errorMessage); - if (callback) { - callback({ - text: 'Failed to generate price query params. Please provide valid input.', - }); - } - return; - } - - // Validate price query params - if (!isPriceQueryParams(priceQueryParams)) { - elizaLogger.error('Invalid price query params:', priceQueryParams); - if (callback) { - callback({ - text: 'Invalid price query params. Please provide valid input.', - }); - } - return; - } - - // Fetch price data - try { - const { pair } = priceQueryParams; - const priceData = await fetchPriceData(pair); - elizaLogger.info('The Price data received:', priceData); - - if (!priceData || priceData.length === 0) { - elizaLogger.error('No price data found for pair:', pair); - if (callback) { - callback({ - text: `No price data found for pair ${pair}.`, - }); - } - return; - } - - const priceDataString = priceData.map((data) => { - return `Feed ID: ${data.feedId}\nBid Price: ${data.bidPrice}\nMid Price: ${data.midPrice}\nAsk Price: ${data.askPrice}\nTimestamp: ${data.timestamp}`; - }).join('\n\n'); - - if (callback) { - callback({ - text: `Price data for pair ${pair}: \n${priceDataString}`, - }); - } - } catch (error: unknown) { - const errorMessage = error instanceof Error ? error.message : 'Unknown error'; - elizaLogger.error('Error fetching price data:', errorMessage); - if (callback) { - callback({ - text: `Error fetching price data: ${errorMessage}`, - }); - } - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Can you fetch price data for pair BTC/USD?", - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll fetch price data for pair BTC/USD.", - action: 'PRICE_QUERY', - }, - } - ], - ], -} \ No newline at end of file diff --git a/packages/plugin-apro/src/actions/verifyData.ts b/packages/plugin-apro/src/actions/verifyData.ts deleted file mode 100644 index b92fe0c429fd3..0000000000000 --- a/packages/plugin-apro/src/actions/verifyData.ts +++ /dev/null @@ -1,131 +0,0 @@ -import { type Action, composeContext, elizaLogger, generateObject, type HandlerCallback, type IAgentRuntime, type Memory, ModelClass, type State } from "@elizaos/core"; -import { AgentSDK, type VerifyParams } from "ai-agent-sdk-js"; -import { verifyDataTemplate } from "../templates"; -import { isVerifyParams, VerifyParamsSchema } from "../types"; -import type { ContractTransactionResponse } from "ethers"; - -export const verifyData: Action = { - name: "VERIFY", - similes: [ - 'VERIFY_DATA', - ], - description: "Verify data with APRO. User must provide data to verify.", - validate: async (_runtime: IAgentRuntime, _message: Memory) => { - return true; - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state?: State, - _options?: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Generate verify params - let verifyParams: VerifyParams; - try { - const response = await generateObject({ - runtime, - context: composeContext({ - state: currentState, - template: verifyDataTemplate, - }), - modelClass: ModelClass.LARGE, - schema: VerifyParamsSchema, - }); - - verifyParams = response.object as VerifyParams; - elizaLogger.info('The verify params received:', verifyParams); - } catch (error: unknown) { - if (error instanceof Error) { - elizaLogger.error('Failed to generate verify params:', error.message); - } else { - elizaLogger.error('Failed to generate verify params:', String(error)); - } - callback({ - text: 'Failed to generate verify params. Please provide valid input.', - }); - return; - } - - // Validate verify params - if (!isVerifyParams(verifyParams)) { - elizaLogger.error('Invalid verify params:', verifyParams); - callback({ - text: 'Invalid verify params. Please provide valid input.', - }); - return; - } - - // Create SDK agent - let agent: AgentSDK - try { - agent = new AgentSDK({ - proxyAddress: runtime.getSetting('APRO_PROXY_ADDRESS') ?? process.env.APRO_PROXY_ADDRESS, - rpcUrl: runtime.getSetting('APRO_RPC_URL') ?? process.env.APRO_RPC_URL, - privateKey: runtime.getSetting('APRO_PRIVATE_KEY') ?? process.env.APRO_PRIVATE_KEY, - autoHashData: (runtime.getSetting('APRO_AUTO_HASH_DATA') ?? process.env.APRO_AUTO_HASH_DATA) === 'true', - converterAddress: runtime.getSetting('APRO_CONVERTER_ADDRESS') ?? process.env.APRO_CONVERTER_ADDRESS, - }); - } catch (error: unknown) { - if (error instanceof Error) { - elizaLogger.error('Failed to create Agent SDK:', error.message); - } else { - elizaLogger.error('Failed to create Agent SDK:', String(error)); - } - callback({ - text: 'Failed to create Agent SDK. Please check the apro plugin configuration.', - }); - return; - } - - // Verify data - let tx: ContractTransactionResponse - try { - tx = await agent.verify(verifyParams) - elizaLogger.info('Data verification transaction sent. Transaction ID:', tx.hash); - - const receipt = await tx.wait(); - elizaLogger.info('Data verification transaction confirmed. Transaction ID:', receipt.hash); - - callback({ - text: `Success: Data verified successfully. Transaction ID: ${receipt.hash}`, - }) - } catch (error: unknown) { - if (error instanceof Error) { - elizaLogger.error(`Error verify data: ${error.message}`); - let message = `Error verifying data: ${error.message}`; - if (tx?.hash) { - message = `${message} Transaction hash: ${tx.hash}`; - } - callback({ - text: message, - }) - } - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "I want to verify data: ...", - }, - }, - { - user: "{{user2}}", - content: { - text: "Sure, I'll verify the data.", - action: "VERIFY", - }, - }, - ] - ], -}; \ No newline at end of file diff --git a/packages/plugin-apro/src/index.ts b/packages/plugin-apro/src/index.ts deleted file mode 100644 index 17bc4f6c4b1ec..0000000000000 --- a/packages/plugin-apro/src/index.ts +++ /dev/null @@ -1,18 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { createAndRegisterAgent } from "./actions/createAndRegisterAgent"; -import { verifyData } from "./actions/verifyData"; -import { attpsPriceQuery } from "./actions/attpsPriceQuery"; - -export const aproPlugin: Plugin = { - name: "apro", - description: "Apro Plugin for Eliza", - actions: [ - createAndRegisterAgent, - verifyData, - attpsPriceQuery, - ], - evaluators: [], - providers: [], -}; - -export default aproPlugin; \ No newline at end of file diff --git a/packages/plugin-apro/src/templates.ts b/packages/plugin-apro/src/templates.ts deleted file mode 100644 index 180e6f62c88e4..0000000000000 --- a/packages/plugin-apro/src/templates.ts +++ /dev/null @@ -1,238 +0,0 @@ -export const createAgentTemplate = ` -TASK: Extract ONLY the explicitly mentioned details from the user's input messages to create an agent configuration. DO NOT generate, infer or create any data that is not explicitly present in the input. - -RULES: -1. ONLY extract information that is explicitly present in the user's messages -2. Use null for ANY field where the exact required information is not present -3. Do not make assumptions or generate random values -4. If no valid data can be extracted, return a JSON with all null values -5. Only accept properly formatted addresses and UUIDs - do not create or infer them - -REQUIRED FIELDS: -- signers: Array of valid Ethereum addresses (42-char hex starting with '0x') -- threshold: Explicit number mentioned as threshold -- converterAddress: Valid Ethereum address (42-char hex starting with '0x') -- agentHeader: Object containing: - * messageId: Valid UUID format only - * sourceAgentId: Valid UUID format only - * sourceAgentName: Explicit agent name - * targetAgentId: Valid UUID format only - * messageType: Explicit number - * priority: Explicit number - * ttl: Explicit number in seconds - -OUTPUT FORMAT: -\`\`\`json -{ - "signers": [ - "" - ], - "threshold": null, - "converterAddress": null, - "agentHeader": { - "messageId": null, - "sourceAgentId": null, - "sourceAgentName": null, - "targetAgentId": null, - "messageType": null, - "priority": null, - "ttl": null - } -} -\`\`\` - -VALIDATION: -- Ethereum addresses must be 42 characters starting with '0x' -- UUIDs must match standard UUID format -- Numbers must be explicitly mentioned in the context -- Do not include any fields or values that are not explicitly mentioned in the user's input - -Context messages: -{{recentMessages}} -`; - -export const verifyDataTemplate = ` -TASK: STRICTLY extract ONLY explicitly mentioned verification details from the user's input messages. DO NOT generate, infer, or create any data that is not explicitly present in the input. - -STRICT RULES: -1. ONLY extract information that is EXPLICITLY present in the user's messages -2. Set null for ANY field where the exact required information is not present -3. DO NOT create, generate, or infer any values -4. Return all fields as null if no valid data can be extracted -5. Only accept properly formatted hexadecimal strings and numbers -6. Reject and set to null any values that don't match the required format - -REQUIRED FORMATS: -1. Hexadecimal strings must: - - Start with '0x' - - Contain only valid hex characters (0-9, a-f, A-F) - - Match the expected length for their purpose - -2. Ethereum addresses must: - - Be exactly 42 characters long - - Start with '0x' - - Contain only valid hex characters - -3. Numbers must: - - Be explicitly mentioned - - Be valid integers - - Be in the appropriate range for their purpose - -FIELD SPECIFICATIONS: -payload: - - data: Must be valid hex string starting with '0x' - - dataHash: Must be valid hex string starting with '0x' - - signatures: Array of objects, each containing: - * r: 64-character hex string (without '0x') - * s: 64-character hex string (without '0x') - * v: Integer number - - metadata: - * contentType: String matching known content types - * encoding: String or null - * compression: String or null - -agent: Must be valid 42-character Ethereum address -digest: Must be valid hex string starting with '0x' - -OUTPUT FORMAT: -\`\`\`json -{ - "payload": { - "data": null, - "dataHash": null, - "signatures": [], - "metadata": { - "contentType": null, - "encoding": null, - "compression": null - } - }, - "agent": null, - "digest": null -} -\`\`\` - -VALIDATION RULES: -1. For hex strings: - - Verify proper '0x' prefix where required - - Verify correct length - - Verify only valid hex characters - -2. For signatures: - - Only include if complete r, s, v values are present - - Verify r and s are valid 64-character hex strings - - Verify v is a valid integer - -3. For metadata: - - Only include contentType if it matches known formats - - Set encoding and compression to null if not explicitly specified - -4. General: - - Do not attempt to calculate or derive missing values - - Do not fill in partial information - - Return empty arrays instead of null for array fields when no valid items exist - -Input context to process: -{{recentMessages}} - -Remember: When in doubt, use null. Never generate fake data. -`; - -export const priceQueryTemplate = ` -TASK: Extract cryptocurrency trading pair information from user input. Extract pairs that follow the specified format patterns, regardless of whether the symbols represent actual cryptocurrencies. - -TRADING PAIR RULES: -1. Format Requirements: - - Must contain two symbols separated by a delimiter - - Acceptable delimiters: '/', '-', '_', or space - - Convert all pairs to standardized FORMAT: BASE/QUOTE - - Convert all letters to uppercase - -2. Symbol Requirements: - - Must be 2-5 characters long - - Must contain only letters - - Must be uppercase in output - -3. Pattern Recognition Examples: - - "ABC/USD" -> Valid, return "ABC/USD" - - "ABC-USD" -> Convert to "ABC/USD" - - "ABC USD" -> Convert to "ABC/USD" - - "ABCUSD" -> Convert to "ABC/USD" - - "ABCoin/USD" -> Invalid (symbol too long) - - "ABC to USD" -> Convert to "ABC/USD" - - "123/USD" -> Invalid (contains numbers) - - "A/USD" -> Invalid (symbol too short) - - "ABCDEF/USD" -> Invalid (symbol too long) - -VALIDATION: -1. REJECT and return null if: - - Only one symbol is mentioned - - Symbols are longer than 5 characters - - Symbols are shorter than 2 characters - - Symbols contain non-letter characters - - Format is completely unrecognizable - - More than two symbols are mentioned - -OUTPUT FORMAT: -\`\`\`json -{ - "pair": null -} -\`\`\` - -IMPORTANT NOTES: -1. DO NOT modify or correct user-provided symbols -2. DO NOT validate if symbols represent real cryptocurrencies -3. ONLY check format compliance -4. When format is invalid, return null -5. Accept ANY symbols that meet format requirements - -Input context to process: -{{recentMessages}} -`; - -export const attpsPriceQueryTemplate = ` - -TASK: Extract source agent and message identifiers from user input. Validate and format according to specified patterns. - -PARAMETER RULES: - -1. sourceAgentId Requirements: - - Format: UUID v4 format (8-4-4-4-12 hexadecimal) - - Case insensitive input but output must be lowercase - - Example: "b660e3f4-bbfe-4acb-97bd-c0869a7ea142" - -2. feedId Requirements: - - Format: 64-character hexadecimal prefixed with 0x - - Must be exactly 66 characters long including prefix - - Example: "0x0003665949c883f9e0f6f002eac32e00bd59dfe6c34e92a91c37d6a8322d6489" - -VALIDATION: - -1. REJECT and set to null if: - - Invalid UUID structure for sourceAgentId - - feedId length ≠ 66 characters - - feedId missing 0x prefix - - Contains non-hexadecimal characters - - Extra/missing hyphens in UUID - - Incorrect segment lengths - -OUTPUT FORMAT: -\`\`\`json -{ - "sourceAgentId": null, - "feedId": null -} -\`\`\` - -PROCESSING RULES: -1. Normalize sourceAgentId to lowercase -2. Preserve original feedId casing -3. Strict format validation before acceptance -4. Partial matches should return null -5. Return null for ambiguous formats - -Input context to process: -{{recentMessages}} - -`; \ No newline at end of file diff --git a/packages/plugin-apro/src/types.ts b/packages/plugin-apro/src/types.ts deleted file mode 100644 index 821eac3efbc04..0000000000000 --- a/packages/plugin-apro/src/types.ts +++ /dev/null @@ -1,130 +0,0 @@ -import { z } from "zod"; - -export interface AgentSettings { - signers: string[] - threshold: number - converterAddress: string - agentHeader: { - messageId?: string - sourceAgentId?: string - sourceAgentName: string - targetAgentId: string - timestamp?: number - messageType: number - priority: number - ttl: number - } -} - -export const AgentSettingsSchema = z.object({ - signers: z.array(z.string()), - threshold: z.number(), - converterAddress: z.string(), - agentHeader: z.object({ - messageId: z.string().nullish(), - sourceAgentId: z.string().nullish(), - sourceAgentName: z.string(), - targetAgentId: z.string(), - timestamp: z.number().nullish(), - messageType: z.number(), - priority: z.number(), - ttl: z.number(), - }), -}); - -export const isAgentSettings = (value: unknown): value is AgentSettings => { - return AgentSettingsSchema.safeParse(value).success; -} - -interface Signature { - r: string - s: string - v: 1 | 0 | 27 | 28 - } - -interface MessagePayload { - data: string - dataHash?: string - signatures: Signature[] - metadata?: Metadata - } - -export interface VerifyParams { - agent: string - digest: string - payload: MessagePayload -} - -export const VerifyParamsSchema = z.object({ - agent: z.string(), - digest: z.string(), - payload: z.object({ - data: z.string(), - dataHash: z.string().nullish(), - signatures: z.array(z.object({ - r: z.string(), - s: z.string(), - v: z.number(), - })), - metadata: z.object({ - contentType: z.string().nullish(), - encoding: z.string().nullish(), - compression: z.string().nullish(), - }).nullish(), - }), -}); - -export const isVerifyParams = (value: unknown): value is VerifyParams => { - return VerifyParamsSchema.safeParse(value).success; -} - -export interface PriceQueryParams { - pair: string -} - -export const PriceQueryParamsSchema = z.object({ - pair: z.string(), -}); - -export const isPriceQueryParams = (value: unknown): value is PriceQueryParams => { - return PriceQueryParamsSchema.safeParse(value).success; -} - -export interface PriceData { - feedId: string - pair: string - networks: string[] - bidPrice: string - askPrice: string - midPrice: string - bidPriceChange: number - askPriceChange: number - midPriceChange: number - timestamp: number -} - -export const AttpsPriceQuerySchema = z.object({ - sourceAgentId: z.string(), - feedId: z.string(), -}); - -export const isAttpsPriceQuery = (value: unknown): value is AttpsPriceQuery => { - return AttpsPriceQuerySchema.safeParse(value).success; -} - -export interface AttpsPriceQuery { - sourceAgentId: string - feedId: string -} - -export interface AttpsPriceQueryResponse { - feedId: string - validTimeStamp: number - observeTimeStamp: number - nativeFee: number - tokenFee: number - expireTimeStamp: number - midPrice: string - askPrice: string - bidPrice: string -} \ No newline at end of file diff --git a/packages/plugin-apro/tsconfig.json b/packages/plugin-apro/tsconfig.json deleted file mode 100644 index 005fbac9d3634..0000000000000 --- a/packages/plugin-apro/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/plugin-apro/tsup.config.ts b/packages/plugin-apro/tsup.config.ts deleted file mode 100644 index 7c51f1a4ca22d..0000000000000 --- a/packages/plugin-apro/tsup.config.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-apro/vitest.config.ts b/packages/plugin-apro/vitest.config.ts deleted file mode 100644 index 19d97aa7c6d22..0000000000000 --- a/packages/plugin-apro/vitest.config.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - }, - resolve: { - alias: [ - { - find: /^@elizaos\/core$/, - replacement: '../core/src/index.ts' - }, - { - find: /^ai-agent-sdk-js$/, - replacement: '../node_modules/ai-agent-sdk-js/src/index.ts' - } - ] - } -}); diff --git a/packages/plugin-aptos/.npmignore b/packages/plugin-aptos/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-aptos/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-aptos/README.md b/packages/plugin-aptos/README.md deleted file mode 100644 index 15c6ae3668d4a..0000000000000 --- a/packages/plugin-aptos/README.md +++ /dev/null @@ -1,249 +0,0 @@ -# @elizaos/plugin-aptos - -A plugin for interacting with the Aptos blockchain network within the ElizaOS ecosystem. - -## Description - -The Aptos plugin enables seamless token transfers and wallet management on the Aptos blockchain. It provides functionality to transfer APT tokens and monitor wallet balances with real-time price tracking. - -## Installation - -```bash -pnpm install @elizaos/plugin-aptos -``` - -## Configuration - -The plugin requires the following environment variables to be set: - -```typescript -APTOS_PRIVATE_KEY= -APTOS_NETWORK=<"mainnet" | "testnet"> -``` - -## Usage - -### Basic Integration - -```typescript -import { - aptosPlugin, - WalletProvider, - TransferAptosToken, -} from "@elizaos/plugin-aptos"; -``` - -### Transfer Examples - -```typescript -// The plugin responds to natural language commands like: - -"Send 69 APT tokens to 0x4f2e63be8e7fe287836e29cde6f3d5cbc96eefd0c0e3f3747668faa2ae7324b0"; -"Transfer APT to [address]"; -"Pay [amount] APT to [recipient]"; -``` - -## API Reference - -### Actions - -#### SEND_TOKEN - -Transfers APT tokens from the agent's wallet to another address. - -**Aliases:** - -- TRANSFER_TOKEN -- TRANSFER_TOKENS -- SEND_TOKENS -- SEND_APT -- PAY - -**Configuration:** - -```typescript -{ - APT_DECIMALS: 8; // Decimal places for APT token -} -``` - -### Providers - -#### WalletProvider - -Provides wallet information and portfolio tracking. - -**Features:** - -- Real-time APT price tracking -- Portfolio value calculation -- Cached wallet information (5-minute TTL) -- Formatted portfolio reports - -## Common Issues & Troubleshooting - -1. **Transaction Failures** - - - Verify wallet has sufficient APT balance - - Check recipient address format - - Ensure private key is correctly set - - Verify network connectivity - -2. **Price Fetching Issues** - - Check connection to DexScreener API - - Verify cache functionality - - Monitor retry mechanism (3 attempts with exponential backoff) - -## Security Best Practices - -1. **Private Key Management** - - Store private key securely using environment variables - - Never commit private keys to version control - - Use separate wallets for development and production - - Monitor wallet activity regularly - -## Development Guide - -### Setting Up Development Environment - -1. Clone the repository -2. Install dependencies: - -```bash -pnpm install -``` - -3. Build the plugin: - -```bash -pnpm run build -``` - -4. Run tests: - -```bash -pnpm run test -``` - -5. Development mode: - -```bash -pnpm run dev -``` - -## Dependencies - -- @aptos-labs/ts-sdk: ^1.26.0 -- bignumber.js: 9.1.2 -- node-cache: 5.1.2 - -## Future Enhancements - -The following features and improvements are planned for future releases: - -1. **Advanced Token Operations** - - - Batch token transfers - - Token creation templates - - NFT minting and management - - Token metadata management - - Custom tokenomics implementation - - Token upgrade mechanisms - -2. **DeFi Integration** - - - Liquidity pool management - - Yield farming automation - - Staking optimization - - AMM integration - - Cross-chain bridges - - Price impact analysis - -3. **Move Contract Management** - - - Contract deployment tools - - Contract verification - - Contract upgrade system - - Testing framework - - Gas optimization tools - - Security audit integration - -4. **Wallet Enhancements** - - - Multi-wallet support - - Hardware wallet integration - - Transaction batching - - Address book management - - Custom signature schemes - - Account abstraction - -5. **Price Feed Improvements** - - - Additional data sources - - Real-time price alerts - - Historical data analysis - - Custom price aggregation - - Price prediction tools - - Market sentiment analysis - -6. **Developer Tools** - - - Enhanced debugging capabilities - - Move language IDE integration - - Documentation generator - - Performance profiling - - Testing utilities - - Deployment automation - -7. **Security Features** - - - Transaction simulation - - Risk assessment tools - - Rate limiting controls - - Fraud detection - - Emergency shutdown - - Multi-signature support - -8. **Analytics and Monitoring** - - Transaction tracking - - Portfolio analytics - - Network statistics - - Gas usage optimization - - Performance metrics - - Custom reporting tools - -We welcome community feedback and contributions to help prioritize these enhancements. - -## Contributing - -Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information. - -## Credits - -This plugin integrates with and builds upon several key technologies: - -- [Aptos](https://aptoslabs.com/): Layer 1 blockchain platform -- [@aptos-labs/ts-sdk](https://github.com/aptos-labs/aptos-core/tree/main/ecosystem/typescript/sdk): Official TypeScript SDK -- [Petra Wallet](https://petra.app/): Aptos wallet integration -- [DexScreener](https://dexscreener.com/): Price feed integration -- [Move Language](https://github.com/move-language/move): Smart contract language - -Special thanks to: - -- The Aptos Labs team for developing the blockchain -- The Petra Wallet development team -- The DexScreener team for price data -- The Move language developers -- The Aptos Developer community -- The Eliza community for their contributions and feedback - -For more information about Aptos capabilities: - -- [Aptos Documentation](https://aptos.dev/) -- [Move Language Guide](https://move-language.github.io/move/) -- [Petra Wallet Docs](https://petra.app/docs) -- [DexScreener API](https://docs.dexscreener.com/) - -## License - -This plugin is part of the Eliza project. See the main project repository for license information. diff --git a/packages/plugin-aptos/biome.json b/packages/plugin-aptos/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-aptos/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-aptos/package.json b/packages/plugin-aptos/package.json deleted file mode 100644 index 190bb4667217e..0000000000000 --- a/packages/plugin-aptos/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "@elizaos/plugin-aptos", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@aptos-labs/ts-sdk": "^1.26.0", - "@elizaos/core": "workspace:*", - "bignumber.js": "9.1.2", - "node-cache": "5.1.2", - "tsup": "8.3.5", - "vitest": "2.1.9" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "devDependencies": { - "@biomejs/biome": "1.9.4" - }, - "peerDependencies": { - "form-data": "4.0.1", - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-aptos/src/actions/transfer.ts b/packages/plugin-aptos/src/actions/transfer.ts deleted file mode 100644 index 8eefe8c8c0634..0000000000000 --- a/packages/plugin-aptos/src/actions/transfer.ts +++ /dev/null @@ -1,230 +0,0 @@ -import { elizaLogger } from "@elizaos/core"; -import { - type ActionExample, - type Content, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action, -} from "@elizaos/core"; -import { composeContext } from "@elizaos/core"; -import { generateObjectDeprecated } from "@elizaos/core"; -import { - Account, - Aptos, - AptosConfig, - Ed25519PrivateKey, - type Network, - PrivateKey, - PrivateKeyVariants, -} from "@aptos-labs/ts-sdk"; -import { walletProvider } from "../providers/wallet"; - -export interface TransferContent extends Content { - recipient: string; - amount: string | number; -} - -function isTransferContent(content: unknown): content is TransferContent { - elizaLogger.log("Content for transfer", content); - if (typeof content !== "object" || content === null) { - return false; - } - - const c = content as Record; - return ( - typeof c.recipient === "string" && - (typeof c.amount === "string" || - typeof c.amount === "number") - ); -} - -const transferTemplate = `Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined. - -Example response: -\`\`\`json -{ - "recipient": "0x2badda48c062e861ef17a96a806c451fd296a49f45b272dee17f85b0e32663fd", - "amount": "1000" -} -\`\`\` - -{{recentMessages}} - -Given the recent messages, extract the following information about the requested token transfer: -- Recipient wallet address -- Amount to transfer - -Respond with a JSON markdown block containing only the extracted values.`; - -export default { - name: "SEND_TOKEN", - similes: [ - "TRANSFER_TOKEN", - "TRANSFER_TOKENS", - "SEND_TOKENS", - "SEND_APT", - "PAY", - ], - validate: async (_runtime: IAgentRuntime, message: Memory) => { - elizaLogger.log("Validating apt transfer from user:", message.userId); - //add custom validate logic here - /* - const adminIds = runtime.getSetting("ADMIN_USER_IDS")?.split(",") || []; - //elizaLogger.log("Admin IDs from settings:", adminIds); - - const isAdmin = adminIds.includes(message.userId); - - if (isAdmin) { - //elizaLogger.log(`Authorized transfer from user: ${message.userId}`); - return true; - } - else - { - //elizaLogger.log(`Unauthorized transfer attempt from user: ${message.userId}`); - return false; - } - */ - return false; - }, - description: "Transfer tokens from the agent's wallet to another address", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - elizaLogger.log("Starting SEND_TOKEN handler..."); - - const walletInfo = await walletProvider.get(runtime, message, state); - state.walletInfo = walletInfo; - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Compose transfer context - const transferContext = composeContext({ - state: currentState, - template: transferTemplate, - }); - - // Generate transfer content - const content = await generateObjectDeprecated({ - runtime, - context: transferContext, - modelClass: ModelClass.SMALL, - }); - - // Validate transfer content - if (!isTransferContent(content)) { - elizaLogger.error("Invalid content for TRANSFER_TOKEN action."); - if (callback) { - callback({ - text: "Unable to process transfer request. Invalid content provided.", - content: { error: "Invalid transfer content" }, - }); - } - return false; - } - - try { - const privateKey = runtime.getSetting("APTOS_PRIVATE_KEY"); - const aptosAccount = Account.fromPrivateKey({ - privateKey: new Ed25519PrivateKey( - PrivateKey.formatPrivateKey( - privateKey, - PrivateKeyVariants.Ed25519 - ) - ), - }); - const network = runtime.getSetting("APTOS_NETWORK") as Network; - const aptosClient = new Aptos( - new AptosConfig({ - network, - }) - ); - - const APT_DECIMALS = 8; - const adjustedAmount = BigInt( - Number(content.amount) * (10 ** APT_DECIMALS) - ); - elizaLogger.log( - `Transferring: ${content.amount} tokens (${adjustedAmount} base units)` - ); - - const tx = await aptosClient.transaction.build.simple({ - sender: aptosAccount.accountAddress.toStringLong(), - data: { - function: "0x1::aptos_account::transfer", - typeArguments: [], - functionArguments: [content.recipient, adjustedAmount], - }, - }); - const committedTransaction = - await aptosClient.signAndSubmitTransaction({ - signer: aptosAccount, - transaction: tx, - }); - const executedTransaction = await aptosClient.waitForTransaction({ - transactionHash: committedTransaction.hash, - }); - - elizaLogger.log("Transfer successful:", executedTransaction.hash); - - if (callback) { - callback({ - text: `Successfully transferred ${content.amount} APT to ${content.recipient}, Transaction: ${executedTransaction.hash}`, - content: { - success: true, - hash: executedTransaction.hash, - amount: content.amount, - recipient: content.recipient, - }, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error during token transfer:", error); - if (callback) { - callback({ - text: `Error transferring tokens: ${error.message}`, - content: { error: error.message }, - }); - } - return false; - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Send 69 APT tokens to 0x4f2e63be8e7fe287836e29cde6f3d5cbc96eefd0c0e3f3747668faa2ae7324b0", - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll send 69 APT tokens now...", - action: "SEND_TOKEN", - }, - }, - { - user: "{{user2}}", - content: { - text: "Successfully sent 69 APT tokens to 0x4f2e63be8e7fe287836e29cde6f3d5cbc96eefd0c0e3f3747668faa2ae7324b0, Transaction: 0x39a8c432d9bdad993a33cc1faf2e9b58fb7dd940c0425f1d6db3997e4b4b05c0", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-aptos/src/constants.ts b/packages/plugin-aptos/src/constants.ts deleted file mode 100644 index 2df89e284077b..0000000000000 --- a/packages/plugin-aptos/src/constants.ts +++ /dev/null @@ -1 +0,0 @@ -export const APT_DECIMALS = 8; diff --git a/packages/plugin-aptos/src/enviroment.ts b/packages/plugin-aptos/src/enviroment.ts deleted file mode 100644 index e94e629e8a097..0000000000000 --- a/packages/plugin-aptos/src/enviroment.ts +++ /dev/null @@ -1,36 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -export const aptosEnvSchema = z.object({ - APTOS_PRIVATE_KEY: z.string().min(1, "Aptos private key is required"), - APTOS_NETWORK: z.enum(["mainnet", "testnet"]), -}); - -export type AptosConfig = z.infer; - -export async function validateAptosConfig( - runtime: IAgentRuntime -): Promise { - try { - const config = { - APTOS_PRIVATE_KEY: - runtime.getSetting("APTOS_PRIVATE_KEY") || - process.env.APTOS_PRIVATE_KEY, - APTOS_NETWORK: - runtime.getSetting("APTOS_NETWORK") || - process.env.APTOS_NETWORK, - }; - - return aptosEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `Aptos configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/plugin-aptos/src/index.ts b/packages/plugin-aptos/src/index.ts deleted file mode 100644 index 79b4d413f019a..0000000000000 --- a/packages/plugin-aptos/src/index.ts +++ /dev/null @@ -1,15 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import transferToken from "./actions/transfer.ts"; -import { WalletProvider, walletProvider } from "./providers/wallet.ts"; - -export { WalletProvider, transferToken as TransferAptosToken }; - -export const aptosPlugin: Plugin = { - name: "aptos", - description: "Aptos Plugin for Eliza", - actions: [transferToken], - evaluators: [], - providers: [walletProvider], -}; - -export default aptosPlugin; diff --git a/packages/plugin-aptos/src/providers/wallet.ts b/packages/plugin-aptos/src/providers/wallet.ts deleted file mode 100644 index bfc299e1501a5..0000000000000 --- a/packages/plugin-aptos/src/providers/wallet.ts +++ /dev/null @@ -1,256 +0,0 @@ -import type { - IAgentRuntime, - ICacheManager, - Memory, - Provider, - State, -} from "@elizaos/core"; -import { - Account, - Aptos, - AptosConfig, - Ed25519PrivateKey, - type Network, - PrivateKey, - PrivateKeyVariants, -} from "@aptos-labs/ts-sdk"; -import BigNumber from "bignumber.js"; -import NodeCache from "node-cache"; -import * as path from "node:path"; -import { APT_DECIMALS } from "../constants"; - -// Provider configuration -const PROVIDER_CONFIG = { - MAX_RETRIES: 3, - RETRY_DELAY: 2000, -}; - -interface WalletPortfolio { - totalUsd: string; - totalApt: string; -} - -interface Prices { - apt: { usd: string }; -} - -export class WalletProvider { - private cache: NodeCache; - private cacheKey = "aptos/wallet"; - - constructor( - private aptosClient: Aptos, - private address: string, - private cacheManager: ICacheManager - ) { - this.cache = new NodeCache({ stdTTL: 300 }); // Cache TTL set to 5 minutes - } - - private async readFromCache(key: string): Promise { - const cached = await this.cacheManager.get( - path.join(this.cacheKey, key) - ); - return cached; - } - - private async writeToCache(key: string, data: T): Promise { - await this.cacheManager.set(path.join(this.cacheKey, key), data, { - expires: Date.now() + 5 * 60 * 1000, - }); - } - - private async getCachedData(key: string): Promise { - // Check in-memory cache first - const cachedData = this.cache.get(key); - if (cachedData) { - return cachedData; - } - - // Check file-based cache - const fileCachedData = await this.readFromCache(key); - if (fileCachedData) { - // Populate in-memory cache - this.cache.set(key, fileCachedData); - return fileCachedData; - } - - return null; - } - - private async setCachedData(cacheKey: string, data: T): Promise { - // Set in-memory cache - this.cache.set(cacheKey, data); - - // Write to file-based cache - await this.writeToCache(cacheKey, data); - } - - private async fetchPricesWithRetry() { - let lastError: Error; - - for (let i = 0; i < PROVIDER_CONFIG.MAX_RETRIES; i++) { - try { - const cellanaAptUsdcPoolAddr = - "0x234f0be57d6acfb2f0f19c17053617311a8d03c9ce358bdf9cd5c460e4a02b7c"; - const response = await fetch( - `https://api.dexscreener.com/latest/dex/pairs/aptos/${cellanaAptUsdcPoolAddr}` - ); - - if (!response.ok) { - const errorText = await response.text(); - throw new Error( - `HTTP error! status: ${response.status}, message: ${errorText}` - ); - } - - const data = await response.json(); - return data; - } catch (error) { - console.error(`Attempt ${i + 1} failed:`, error); - lastError = error; - if (i < PROVIDER_CONFIG.MAX_RETRIES - 1) { - const delay = PROVIDER_CONFIG.RETRY_DELAY * (2 ** i); - await new Promise((resolve) => setTimeout(resolve, delay)); - } - } - } - - console.error( - "All attempts failed. Throwing the last error:", - lastError - ); - throw lastError; - } - - async fetchPortfolioValue(): Promise { - try { - const cacheKey = `portfolio-${this.address}`; - const cachedValue = - await this.getCachedData(cacheKey); - - if (cachedValue) { - console.log("Cache hit for fetchPortfolioValue", cachedValue); - return cachedValue; - } - console.log("Cache miss for fetchPortfolioValue"); - - const prices = await this.fetchPrices().catch((error) => { - console.error("Error fetching APT price:", error); - throw error; - }); - const aptAmountOnChain = await this.aptosClient - .getAccountAPTAmount({ - accountAddress: this.address, - }) - .catch((error) => { - console.error("Error fetching APT amount:", error); - throw error; - }); - - const aptAmount = new BigNumber(aptAmountOnChain).div( - new BigNumber(10).pow(APT_DECIMALS) - ); - const totalUsd = new BigNumber(aptAmount).times(prices.apt.usd); - - const portfolio = { - totalUsd: totalUsd.toString(), - totalApt: aptAmount.toString(), - }; - this.setCachedData(cacheKey, portfolio); - console.log("Fetched portfolio:", portfolio); - return portfolio; - } catch (error) { - console.error("Error fetching portfolio:", error); - throw error; - } - } - - async fetchPrices(): Promise { - try { - const cacheKey = "prices"; - const cachedValue = await this.getCachedData(cacheKey); - - if (cachedValue) { - console.log("Cache hit for fetchPrices"); - return cachedValue; - } - console.log("Cache miss for fetchPrices"); - - const aptPriceData = await this.fetchPricesWithRetry().catch( - (error) => { - console.error("Error fetching APT price:", error); - throw error; - } - ); - const prices: Prices = { - apt: { usd: aptPriceData.pair.priceUsd }, - }; - this.setCachedData(cacheKey, prices); - return prices; - } catch (error) { - console.error("Error fetching prices:", error); - throw error; - } - } - - formatPortfolio(runtime, portfolio: WalletPortfolio): string { - let output = `${runtime.character.name}\n`; - output += `Wallet Address: ${this.address}\n`; - - const totalUsdFormatted = new BigNumber(portfolio.totalUsd).toFixed(2); - const totalAptFormatted = new BigNumber(portfolio.totalApt).toFixed(4); - - output += `Total Value: $${totalUsdFormatted} (${totalAptFormatted} APT)\n`; - - return output; - } - - async getFormattedPortfolio(runtime): Promise { - try { - const portfolio = await this.fetchPortfolioValue(); - return this.formatPortfolio(runtime, portfolio); - } catch (error) { - console.error("Error generating portfolio report:", error); - return "Unable to fetch wallet information. Please try again later."; - } - } -} - -const walletProvider: Provider = { - get: async ( - runtime: IAgentRuntime, - _message: Memory, - _state?: State - ): Promise => { - const privateKey = runtime.getSetting("APTOS_PRIVATE_KEY"); - const aptosAccount = Account.fromPrivateKey({ - privateKey: new Ed25519PrivateKey( - PrivateKey.formatPrivateKey( - privateKey, - PrivateKeyVariants.Ed25519 - ) - ), - }); - const network = runtime.getSetting("APTOS_NETWORK") as Network; - - try { - const aptosClient = new Aptos( - new AptosConfig({ - network, - }) - ); - const provider = new WalletProvider( - aptosClient, - aptosAccount.accountAddress.toStringLong(), - runtime.cacheManager - ); - return await provider.getFormattedPortfolio(runtime); - } catch (error) { - console.error("Error in wallet provider:", error); - return null; - } - }, -}; - -// Module exports -export { walletProvider }; diff --git a/packages/plugin-aptos/src/tests/wallet.test.ts b/packages/plugin-aptos/src/tests/wallet.test.ts deleted file mode 100644 index f7d2829413dca..0000000000000 --- a/packages/plugin-aptos/src/tests/wallet.test.ts +++ /dev/null @@ -1,104 +0,0 @@ -import { describe, it, expect, beforeEach, vi, afterEach } from "vitest"; -import { WalletProvider } from "../providers/wallet.ts"; -import { - Account, - Aptos, - AptosConfig, - Ed25519PrivateKey, - Network, - PrivateKey, - PrivateKeyVariants, -} from "@aptos-labs/ts-sdk"; -import { defaultCharacter } from "@elizaos/core"; -import BigNumber from "bignumber.js"; -import { APT_DECIMALS } from "../constants.ts"; - -// Mock NodeCache -vi.mock("node-cache", () => { - return { - default: vi.fn().mockImplementation(() => ({ - set: vi.fn(), - get: vi.fn().mockReturnValue(null), - })), - }; -}); - -// Mock path module -vi.mock("path", async () => { - const actual = await vi.importActual("path"); - return { - ...actual, - join: vi.fn().mockImplementation((...args) => args.join("/")), - }; -}); - -// Mock the ICacheManager -const mockCacheManager = { - get: vi.fn().mockResolvedValue(null), - set: vi.fn(), - delete: vi.fn(), -}; - -describe("WalletProvider", () => { - let walletProvider; - let mockedRuntime; - - beforeEach(() => { - vi.clearAllMocks(); - mockCacheManager.get.mockResolvedValue(null); - - const aptosClient = new Aptos( - new AptosConfig({ - network: Network.TESTNET, - }) - ); - const aptosAccount = Account.fromPrivateKey({ - privateKey: new Ed25519PrivateKey( - PrivateKey.formatPrivateKey( - // this is a testnet private key - "0x90e02bf2439492bd9be1ec5f569704accefd65ba88a89c4dcef1977e0203211e", - PrivateKeyVariants.Ed25519 - ) - ), - }); - - // Create new instance of TokenProvider with mocked dependencies - walletProvider = new WalletProvider( - aptosClient, - aptosAccount.accountAddress.toStringLong(), - mockCacheManager - ); - - mockedRuntime = { - character: defaultCharacter, - }; - }); - - afterEach(() => { - vi.clearAllTimers(); - }); - - describe("Wallet Integration", () => { - it("should check wallet address", async () => { - const result = - await walletProvider.getFormattedPortfolio(mockedRuntime); - - const prices = await walletProvider.fetchPrices(); - const aptAmountOnChain = - await walletProvider.aptosClient.getAccountAPTAmount({ - accountAddress: walletProvider.address, - }); - const aptAmount = new BigNumber(aptAmountOnChain) - .div(new BigNumber(10).pow(APT_DECIMALS)) - .toFixed(4); - const totalUsd = new BigNumber(aptAmount) - .times(prices.apt.usd) - .toFixed(2); - - expect(result).toEqual( - `Eliza\nWallet Address: ${walletProvider.address}\n` + - `Total Value: $${totalUsd} (${aptAmount} APT)\n` - ); - }); - }); -}); diff --git a/packages/plugin-aptos/tsconfig.json b/packages/plugin-aptos/tsconfig.json deleted file mode 100644 index 005fbac9d3634..0000000000000 --- a/packages/plugin-aptos/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/plugin-aptos/tsup.config.ts b/packages/plugin-aptos/tsup.config.ts deleted file mode 100644 index dd25475bb630f..0000000000000 --- a/packages/plugin-aptos/tsup.config.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "safe-buffer", - "base-x", - "bs58", - "borsh", - "@solana/buffer-layout", - "stream", - "buffer", - "querystring", - "amqplib", - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-arbitrage/__tests__/actions/arbitrageAction.test.ts b/packages/plugin-arbitrage/__tests__/actions/arbitrageAction.test.ts deleted file mode 100644 index 9de78c328955c..0000000000000 --- a/packages/plugin-arbitrage/__tests__/actions/arbitrageAction.test.ts +++ /dev/null @@ -1,110 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { executeArbitrageAction } from '../../src/actions/arbitrageAction'; -import { ServiceType } from '@elizaos/core'; -import { ArbitrageService } from '../../src/services/ArbitrageService'; - -describe('executeArbitrageAction', () => { - const mockRuntime = { - getSetting: vi.fn(), - getService: vi.fn() - }; - - const mockMessage = { - userId: 'test-user', - content: { - text: 'Execute arbitrage' - } - }; - - const mockArbitrageService = { - evaluateMarkets: vi.fn(), - executeArbitrage: vi.fn() - }; - - beforeEach(() => { - vi.clearAllMocks(); - mockRuntime.getService.mockReturnValue(mockArbitrageService); - }); - - describe('metadata', () => { - it('should have correct name and description', () => { - expect(executeArbitrageAction.name).toBe('EXECUTE_ARBITRAGE'); - expect(executeArbitrageAction.description).toContain('Execute arbitrage trades'); - }); - - it('should have valid examples', () => { - expect(Array.isArray(executeArbitrageAction.examples)).toBe(true); - executeArbitrageAction.examples.forEach(example => { - expect(Array.isArray(example)).toBe(true); - expect(example.length).toBe(2); - expect(example[1].content.action).toBe('EXECUTE_ARBITRAGE'); - }); - }); - }); - - describe('validation', () => { - it('should validate required settings', async () => { - mockRuntime.getSetting.mockReturnValue('test-key'); - const isValid = await executeArbitrageAction.validate(mockRuntime, mockMessage); - expect(isValid).toBe(true); - }); - - it('should fail validation when settings are missing', async () => { - mockRuntime.getSetting.mockReturnValue(undefined); - const isValid = await executeArbitrageAction.validate(mockRuntime, mockMessage); - expect(isValid).toBe(false); - }); - }); - - describe('handler', () => { - it('should execute arbitrage when opportunities exist', async () => { - const mockOpportunities = [ - { - buyFromMarket: { id: 'market1' }, - sellToMarket: { id: 'market2' }, - profit: '100' - } - ]; - - mockArbitrageService.evaluateMarkets.mockResolvedValue(mockOpportunities); - mockArbitrageService.executeArbitrage.mockResolvedValue(true); - - const result = await executeArbitrageAction.handler(mockRuntime, mockMessage); - expect(result).toBe(true); - expect(mockArbitrageService.evaluateMarkets).toHaveBeenCalled(); - expect(mockArbitrageService.executeArbitrage).toHaveBeenCalledWith(mockOpportunities); - }); - - it('should handle case when no opportunities exist', async () => { - mockArbitrageService.evaluateMarkets.mockResolvedValue([]); - - const result = await executeArbitrageAction.handler(mockRuntime, mockMessage); - expect(result).toBe(true); - expect(mockArbitrageService.evaluateMarkets).toHaveBeenCalled(); - expect(mockArbitrageService.executeArbitrage).not.toHaveBeenCalled(); - }); - - it('should handle evaluation errors', async () => { - mockArbitrageService.evaluateMarkets.mockRejectedValue(new Error('Evaluation failed')); - - await expect(executeArbitrageAction.handler(mockRuntime, mockMessage)) - .rejects.toThrow('Evaluation failed'); - }); - - it('should handle execution errors', async () => { - const mockOpportunities = [ - { - buyFromMarket: { id: 'market1' }, - sellToMarket: { id: 'market2' }, - profit: '100' - } - ]; - - mockArbitrageService.evaluateMarkets.mockResolvedValue(mockOpportunities); - mockArbitrageService.executeArbitrage.mockRejectedValue(new Error('Execution failed')); - - await expect(executeArbitrageAction.handler(mockRuntime, mockMessage)) - .rejects.toThrow('Execution failed'); - }); - }); -}); diff --git a/packages/plugin-arbitrage/__tests__/core/Arbitrage.test.ts b/packages/plugin-arbitrage/__tests__/core/Arbitrage.test.ts deleted file mode 100644 index 783a0919cc7ef..0000000000000 --- a/packages/plugin-arbitrage/__tests__/core/Arbitrage.test.ts +++ /dev/null @@ -1,81 +0,0 @@ -import { describe, it, expect, vi } from 'vitest'; -import { Arbitrage } from '../../src/core/Arbitrage'; -import { BigNumber } from '@ethersproject/bignumber'; -import { TestMarket } from '../utils/TestMarket'; -import { CrossedMarketDetails } from '../../src/type'; - -describe('Arbitrage', () => { - let arbitrage: Arbitrage; - let mockProvider: any; - let mockWallet: any; - - beforeEach(() => { - mockProvider = { - getGasPrice: vi.fn().mockResolvedValue(BigNumber.from('50000000000')), - getBlock: vi.fn().mockResolvedValue({ number: 1 }) - }; - - mockWallet = { - provider: mockProvider, - address: '0xmockaddress' - }; - - arbitrage = new Arbitrage(mockWallet, mockProvider); - }); - - describe('market evaluation', () => { - it('should filter out markets with insufficient liquidity', async () => { - const mockMarkets = { - '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2': [ - new TestMarket('0xmarket1', '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'), - new TestMarket('0xmarket2', '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2') - ] - }; - - // Mock insufficient liquidity - vi.spyOn(mockMarkets['0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'][0], 'getReserves').mockResolvedValue(BigNumber.from('100')); - vi.spyOn(mockMarkets['0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'][1], 'getReserves').mockResolvedValue(BigNumber.from('100')); - - const opportunities = await arbitrage.evaluateMarkets(mockMarkets); - expect(opportunities.length).toBe(0); - }); - }); - - describe('bundle execution', () => { - it('should handle simulation success', async () => { - const mockMarket = new TestMarket('0xmarket1', '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'); - const mockOpportunities: CrossedMarketDetails[] = [{ - marketPairs: [{ - buyFromMarket: mockMarket, - sellToMarket: mockMarket - }], - profit: BigNumber.from('1000000'), - volume: BigNumber.from('1000000'), - tokenAddress: '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', - buyFromMarket: mockMarket, - sellToMarket: mockMarket - }]; - - await expect(arbitrage.takeCrossedMarkets(mockOpportunities, 1, 1)).resolves.not.toThrow(); - }); - - it('should handle simulation failure', async () => { - const mockMarket = new TestMarket('0xmarket1', '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'); - vi.spyOn(mockMarket, 'sellTokensToNextMarket').mockRejectedValue(new Error('Simulation failed')); - - const mockOpportunities: CrossedMarketDetails[] = [{ - marketPairs: [{ - buyFromMarket: mockMarket, - sellToMarket: mockMarket - }], - profit: BigNumber.from('1000000'), - volume: BigNumber.from('1000000'), - tokenAddress: '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', - buyFromMarket: mockMarket, - sellToMarket: mockMarket - }]; - - await expect(arbitrage.takeCrossedMarkets(mockOpportunities, 1, 1)).resolves.not.toThrow(); - }); - }); -}); diff --git a/packages/plugin-arbitrage/__tests__/index.test.ts b/packages/plugin-arbitrage/__tests__/index.test.ts deleted file mode 100644 index a77cccb9f7218..0000000000000 --- a/packages/plugin-arbitrage/__tests__/index.test.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import arbitragePlugin from '../src/index'; -import { executeArbitrageAction } from '../src/actions/arbitrageAction'; -import { marketProvider } from '../src/providers/marketProvider'; -import { ArbitrageService } from '../src/services/ArbitrageService'; - -describe('arbitragePlugin', () => { - it('should have correct name and description', () => { - expect(arbitragePlugin.name).toBe('arbitrage-plugin'); - expect(arbitragePlugin.description).toBe('Automated arbitrage trading plugin'); - }); - - it('should register the correct action', () => { - expect(arbitragePlugin.actions).toContain(executeArbitrageAction); - }); - - it('should register the correct provider', () => { - expect(arbitragePlugin.providers).toContain(marketProvider); - }); - - it('should register the arbitrage service', () => { - expect(arbitragePlugin.services.length).toBe(1); - expect(arbitragePlugin.services[0]).toBeInstanceOf(ArbitrageService); - }); -}); diff --git a/packages/plugin-arbitrage/__tests__/services/ArbitrageService.test.ts b/packages/plugin-arbitrage/__tests__/services/ArbitrageService.test.ts deleted file mode 100644 index 256cbbbe62413..0000000000000 --- a/packages/plugin-arbitrage/__tests__/services/ArbitrageService.test.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { describe, it, expect, vi } from 'vitest'; -import { ArbitrageService } from '../../src/services/ArbitrageService'; -import { ServiceType, IAgentRuntime } from '@elizaos/core'; - -describe('ArbitrageService', () => { - let arbitrageService: ArbitrageService; - let mockRuntime: IAgentRuntime; - - beforeEach(() => { - mockRuntime = { - getSetting: vi.fn((key: string) => { - switch (key) { - case 'ARBITRAGE_ETHEREUM_WS_URL': - return 'ws://test.com'; - case 'ARBITRAGE_EVM_PROVIDER_URL': - return 'http://test.com'; - case 'ARBITRAGE_EVM_PRIVATE_KEY': - return '0x1234567890123456789012345678901234567890123456789012345678901234'; - case 'FLASHBOTS_RELAY_SIGNING_KEY': - return '0x1234567890123456789012345678901234567890123456789012345678901234'; - default: - return undefined; - } - }), - getLogger: vi.fn().mockReturnValue({ - log: vi.fn(), - error: vi.fn(), - warn: vi.fn() - }), - getBlocksApi: vi.fn().mockReturnValue({ - getRecentBlocks: vi.fn().mockResolvedValue([]) - }) - } as unknown as IAgentRuntime; - - arbitrageService = new ArbitrageService(); - }); - - describe('basic functionality', () => { - it('should have correct service type', () => { - expect(arbitrageService.serviceType).toBe(ServiceType.ARBITRAGE); - }); - - it('should throw error if required settings are missing', async () => { - mockRuntime.getSetting = vi.fn().mockReturnValue(undefined); - await expect(arbitrageService.initialize(mockRuntime)).rejects.toThrow(); - }); - }); -}); diff --git a/packages/plugin-arbitrage/__tests__/setup.ts b/packages/plugin-arbitrage/__tests__/setup.ts deleted file mode 100644 index 3efccda5cc25a..0000000000000 --- a/packages/plugin-arbitrage/__tests__/setup.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { vi } from 'vitest'; -import { WebSocket } from 'ws'; -import { FlashbotsBundleProvider } from '@flashbots/ethers-provider-bundle'; - -// Mock WebSocket -vi.mock('ws', () => ({ - WebSocket: vi.fn().mockImplementation(() => ({ - on: vi.fn(), - close: vi.fn(), - send: vi.fn() - })) -})); - -// Mock ethers providers -vi.mock('@ethersproject/providers', () => ({ - WebSocketProvider: vi.fn().mockImplementation(() => ({ - on: vi.fn(), - getGasPrice: vi.fn().mockResolvedValue('1000000000'), - getBlock: vi.fn().mockResolvedValue({ number: 1 }) - })) -})); - -// Mock Flashbots provider -vi.mock('@flashbots/ethers-provider-bundle', () => ({ - FlashbotsBundleProvider: { - create: vi.fn().mockResolvedValue({ - sendBundle: vi.fn().mockResolvedValue({ - wait: vi.fn().mockResolvedValue(true) - }), - simulate: vi.fn().mockResolvedValue({ - success: true, - profit: '1000000000000000' - }) - }) - } -})); - -// Mock @elizaos/core -vi.mock('@elizaos/core', () => ({ - Service: class {}, - ServiceType: { - ARBITRAGE: 'arbitrage' - }, - elizaLogger: { - info: vi.fn(), - error: vi.fn(), - log: vi.fn() - } -})); diff --git a/packages/plugin-arbitrage/__tests__/utils/TestMarket.ts b/packages/plugin-arbitrage/__tests__/utils/TestMarket.ts deleted file mode 100644 index 28f6b06abd2e5..0000000000000 --- a/packages/plugin-arbitrage/__tests__/utils/TestMarket.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { EthMarket } from '../../src/core/EthMarket'; -import { BigNumber } from '@ethersproject/bignumber'; - -export class TestMarket extends EthMarket { - constructor(address: string, tokenAddress: string) { - super(address, tokenAddress, [tokenAddress], {}); - } - - receiveDirectly(tokenAddress: string): boolean { - return true; - } - - async getReserves(tokenAddress: string): Promise { - return BigNumber.from('1000000'); - } - - async getTokensOut(tokenIn: string, tokenOut: string, amountIn: BigNumber): Promise { - return amountIn.mul(95).div(100); // 5% slippage - } - - async sellTokens(tokenAddress: string, volume: BigNumber, recipient: string): Promise { - return '0xmocktx'; - } - - async sellTokensToNextMarket(tokenAddress: string, volume: BigNumber, nextMarket: EthMarket): Promise<{ targets: string[], data: string[] }> { - return { - targets: ['0xmocktarget'], - data: ['0xmockdata'] - }; - } -} diff --git a/packages/plugin-arbitrage/examples/trader.character.json b/packages/plugin-arbitrage/examples/trader.character.json deleted file mode 100644 index 28149b6f3d223..0000000000000 --- a/packages/plugin-arbitrage/examples/trader.character.json +++ /dev/null @@ -1,115 +0,0 @@ -{ - "name": "Trader", - "description": "A trading bot that specializes in crypto arbitrage", - "clients": ["direct"], - "modelProvider": "anthropic", - "settings": { - "secrets": { - "EVM_PRIVATE_KEY": "YOUR_PRIVATE_KEY_HERE", - "FLASHBOTS_RELAY_SIGNING_KEY": "YOUR_FLASHBOTS_KEY_HERE", - "BUNDLE_EXECUTOR_ADDRESS": "YOUR_EXECUTOR_ADDRESS_HERE" - }, - "arbitrage": { - "ethereumWsUrl": "YOUR_ETH_WSS_URL", - "rpcUrl": "YOUR_ETH_RPC_URL" - } - }, - "plugins": [ - "@elizaos/plugin-arbitrage", - "@elizaos/plugin-evm" - ], - "modelSettings": { - "provider": "anthropic", - "model": "claude-3-sonnet-20240229" - }, - "bio": [ - "Expert in cryptocurrency trading and arbitrage.", - "Specializes in identifying profitable trading opportunities.", - "Monitors multiple exchanges for price differences.", - "Provides real-time market analysis and insights." - ], - "lore": [ - "Created to help traders identify and execute profitable arbitrage opportunities.", - "Trained on extensive market data and trading patterns." - ], - "knowledge": [ - "Understands cryptocurrency market dynamics", - "Knows how to identify arbitrage opportunities", - "Can analyze trading pairs across different exchanges", - "Monitors market conditions in real-time" - ], - "messageExamples": [ - [ - { - "user": "{{user1}}", - "content": { "text": "analyze BTC/ETH pair for arbitrage opportunities" } - }, - { - "user": "Trader", - "content": { - "text": "I'll analyze the BTC/ETH trading pair for potential arbitrage opportunities across different exchanges." - } - } - ], - [ - { - "user": "{{user1}}", - "content": { "text": "check current market conditions" } - }, - { - "user": "Trader", - "content": { - "text": "I'll check the current market conditions and look for profitable trading opportunities." - } - } - ] - ], - "postExamples": [ - "Market Analysis: Current arbitrage opportunities in the BTC/ETH market", - "Trading Update: Identified profitable arbitrage paths between exchanges", - "Market Alert: Significant price divergence detected between exchanges", - "Strategy Overview: Best practices for arbitrage trading" - ], - "topics": [ - "cryptocurrency trading", - "arbitrage opportunities", - "market analysis", - "trading strategies", - "price analysis", - "exchange monitoring", - "risk management", - "trading automation" - ], - "adjectives": [ - "analytical", - "precise", - "professional", - "strategic", - "vigilant", - "data-driven", - "methodical", - "efficient" - ], - "style": { - "all": [ - "Keep responses clear and data-driven", - "Focus on market opportunities", - "Provide actionable insights", - "Be professional and precise", - "Use clear market terminology", - "Always consider risk management" - ], - "chat": [ - "Provide detailed market analysis", - "Be direct and informative", - "Focus on actionable opportunities", - "Maintain professional tone" - ], - "post": [ - "Share clear market insights", - "Highlight significant opportunities", - "Include relevant market data", - "Maintain analytical perspective" - ] - } -} diff --git a/packages/plugin-arbitrage/package.json b/packages/plugin-arbitrage/package.json deleted file mode 100644 index dbbd2bcf5677f..0000000000000 --- a/packages/plugin-arbitrage/package.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "name": "@elizaos/plugin-arbitrage", - "version": "0.25.6-alpha.1", - "description": "Arbitrage trading plugin for Eliza", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - ".": { - "types": "./dist/index.d.ts", - "import": "./dist/index.mjs", - "require": "./dist/index.js", - "default": "./dist/index.js" - } - }, - "scripts": { - "clean": "rm -rf dist", - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "test:watch": "vitest", - "test:coverage": "vitest run --coverage" - }, - "dependencies": { - "@elizaos/adapter-sqlite": "^0.1.8", - "@elizaos/core": "workspace:*", - "@ethersproject/abi": "^5.7.0", - "@ethersproject/abstract-provider": "^5.7.0", - "@ethersproject/address": "^5.7.0", - "@ethersproject/bignumber": "^5.7.0", - "@ethersproject/contracts": "^5.7.0", - "@ethersproject/providers": "^5.7.2", - "@ethersproject/units": "^5.7.0", - "@ethersproject/wallet": "^5.7.0", - "@flashbots/ethers-provider-bundle": "0.6.2", - "dotenv": "^16.4.7", - "ethers": "5.7.2", - "lodash": "^4.17.21", - "ws": "^8.18.0" - }, - "devDependencies": { - "@types/lodash": "^4.17.14", - "@types/node": "^22.10.9", - "@types/ws": "^8.5.13", - "rimraf": "^5.0.5", - "typescript": "^5.7.3", - "@types/dotenv": "^8.2.0", - "tsup": "^8.0.2", - "vitest": "^3.0.0", - "@vitest/coverage-v8": "^2.1.4" - }, - "peerDependencies": { - "@elizaos/core": "workspace:*" - } -} diff --git a/packages/plugin-arbitrage/src/actions/arbitrageAction.ts b/packages/plugin-arbitrage/src/actions/arbitrageAction.ts deleted file mode 100644 index 43d0d3619ad0f..0000000000000 --- a/packages/plugin-arbitrage/src/actions/arbitrageAction.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { Action, IAgentRuntime, Memory, ServiceType } from "@elizaos/core"; -import { ArbitrageService } from "../services/ArbitrageService"; - -export const executeArbitrageAction: Action = { - name: "EXECUTE_ARBITRAGE", - similes: ["TRADE_ARBITRAGE", "RUN_ARBITRAGE"], - description: "Execute arbitrage trades across markets", - - validate: async (runtime: IAgentRuntime, _message: Memory) => { - // Validate settings are present - return runtime.getSetting("arbitrage.walletPrivateKey") !== undefined; - }, - - handler: async (runtime: IAgentRuntime, _message: Memory) => { - const service = runtime.getService(ServiceType.ARBITRAGE) as ArbitrageService; - const markets = await service.evaluateMarkets(); - - if (markets.length > 0) { - await service.executeArbitrage(markets); - } - - return true; - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { text: "Find arbitrage opportunities" } - }, - { - user: "{{user2}}", - content: { - text: "Scanning for arbitrage trades", - action: "EXECUTE_ARBITRAGE" - } - } - ] - ] -}; \ No newline at end of file diff --git a/packages/plugin-arbitrage/src/config/addresses.ts b/packages/plugin-arbitrage/src/config/addresses.ts deleted file mode 100644 index ca0f0f994cbda..0000000000000 --- a/packages/plugin-arbitrage/src/config/addresses.ts +++ /dev/null @@ -1,18 +0,0 @@ -export const UNISWAP_LOOKUP_CONTRACT_ADDRESS = '0x5EF1009b9FCD4fec3094a5564047e190D72Bd511' -//mainnet ^^ goerli vv -//export const UNISWAP_LOOKUP_CONTRACT_ADDRESS = '0xF52FE911458C6a3279832b764cDF0189e49f073A' -export const WETH_ADDRESS = '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'; -export const SUSHISWAP_FACTORY_ADDRESS = '0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac'; -export const UNISWAP_FACTORY_ADDRESS = '0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f'; - -//export const CRO_FACTORY_ADDRESS = "0x9DEB29c9a4c7A88a3C0257393b7f3335338D9A9D"; -//export const ZEUS_FACTORY_ADDRESS = "0xbdda21dd8da31d5bee0c9bb886c044ebb9b8906a"; -//export const LUA_FACTORY_ADDRESS = "0x0388c1e0f210abae597b7de712b9510c6c36c857"; - -export const FACTORY_ADDRESSES = [ - //CRO_FACTORY_ADDRESS, - //ZEUS_FACTORY_ADDRESS, - //LUA_FACTORY_ADDRESS, - SUSHISWAP_FACTORY_ADDRESS, - UNISWAP_FACTORY_ADDRESS, -] diff --git a/packages/plugin-arbitrage/src/config/thresholds.ts b/packages/plugin-arbitrage/src/config/thresholds.ts deleted file mode 100644 index ed913a590f0eb..0000000000000 --- a/packages/plugin-arbitrage/src/config/thresholds.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { BigNumber } from "ethers"; - -export interface MarketThresholds { - minProfitThreshold: BigNumber; - maxTradeSize: BigNumber; - gasLimit: number; - minerRewardPercentage: number; -} - -export const DEFAULT_THRESHOLDS: MarketThresholds = { - minProfitThreshold: BigNumber.from("100000000000000"), // 0.0001 ETH - maxTradeSize: BigNumber.from("1000000000000000000"), // 1 ETH - gasLimit: 500000, - minerRewardPercentage: 90 -}; \ No newline at end of file diff --git a/packages/plugin-arbitrage/src/core/Arbitrage.ts b/packages/plugin-arbitrage/src/core/Arbitrage.ts deleted file mode 100644 index 94e205e8bc049..0000000000000 --- a/packages/plugin-arbitrage/src/core/Arbitrage.ts +++ /dev/null @@ -1,553 +0,0 @@ -import { BigNumber } from "@ethersproject/bignumber"; -import { Contract } from "@ethersproject/contracts"; -import { Provider, TransactionResponse } from "@ethersproject/providers"; -import { Wallet } from "@ethersproject/wallet"; -import { FlashbotsBundleProvider } from "@flashbots/ethers-provider-bundle"; -import { DEFAULT_THRESHOLDS, MarketThresholds } from '../config/thresholds'; -import { WETH_ADDRESS } from "./addresses"; -import { EthMarket } from "./EthMarket"; -import { CrossedMarketDetails, MarketsByToken, MarketType } from "./types"; -import { ETHER } from "./utils"; -import { elizaLogger } from "@elizaos/core"; - -export interface BundleEntry { - to: string; - gas: number; - gas_price: string; - value: number; - input: string; - from: string; - signedTransaction: string; - signer: string; -} - -export class Arbitrage { - private bundleEntries: { bundle: BundleEntry[], blockNumber: number }[] = []; - private thresholds: MarketThresholds = DEFAULT_THRESHOLDS; - private MAX_RETRIES = 3; - private RETRY_DELAY = 1000; // 1 second - - constructor( - private wallet: Wallet, - private flashbotsProvider: FlashbotsBundleProvider, - private bundleExecutorContract: Contract - ) {} - - async evaluateMarkets(marketsByToken: MarketsByToken): Promise { - elizaLogger.log("Starting market evaluation..."); - const opportunities: CrossedMarketDetails[] = []; - - for (const [tokenAddress, markets] of Object.entries(marketsByToken)) { - // Filter out markets with insufficient liquidity - const validMarkets = await this.filterValidMarkets(markets, tokenAddress); - - // Compare each market pair for arbitrage opportunities - for (let i = 0; i < validMarkets.length; i++) { - for (let j = i + 1; j < validMarkets.length; j++) { - const opportunity = await this.checkArbitrageOpportunity( - validMarkets[i], - validMarkets[j], - tokenAddress - ); - if (opportunity) { - opportunities.push(opportunity); - } - } - } - } - - // Sort opportunities by profit - return opportunities.sort((a, b) => b.profit.sub(a.profit).toNumber()); - } - - private async filterValidMarkets(markets: EthMarket[], tokenAddress: string): Promise { - const validMarkets: EthMarket[] = []; - for (const market of markets) { - try { - const reserves = await market.getReserves(tokenAddress); - if (reserves.gt(this.thresholds.minProfitThreshold)) { - validMarkets.push(market); - } - } catch (error) { - console.error(`Error checking market ${market.marketAddress}:`, error); - } - } - return validMarkets; - } - - private async checkArbitrageOpportunity( - market1: EthMarket, - market2: EthMarket, - tokenAddress: string - ): Promise { - try { - // Get prices from both markets - const price1 = await market1.getTokensOut(WETH_ADDRESS, tokenAddress, ETHER); - const price2 = await market2.getTokensOut(WETH_ADDRESS, tokenAddress, ETHER); - - // Calculate potential profit - const [buyMarket, sellMarket] = price1.gt(price2) - ? [market2, market1] - : [market1, market2]; - - const profit = price1.gt(price2) - ? price1.sub(price2) - : price2.sub(price1); - - if (profit.gt(this.thresholds.minProfitThreshold)) { - // Calculate optimal trade volume - const volume = await this.calculateOptimalVolume(buyMarket, sellMarket, tokenAddress, profit); - - return { - marketPairs: [{ - buyFromMarket: buyMarket, - sellToMarket: sellMarket - }], - profit, - volume, - tokenAddress, - buyFromMarket: buyMarket, - sellToMarket: sellMarket - }; - } - } catch (error) { - console.error("Error checking arbitrage opportunity:", error); - } - return null; - } - - async takeCrossedMarkets( - markets: CrossedMarketDetails[], - currentBlock: number, - maxAttempts: number - ): Promise { - for (const market of markets) { - for (let attempt = 1; attempt <= maxAttempts; attempt++) { - try { - const transaction = await this.executeArbitrageTrade(market, currentBlock); - if (transaction) { - elizaLogger.log(`Successful arbitrage execution: ${transaction.hash}`); - // Wait for confirmation - await transaction.wait(1); - break; - } - } catch (error) { - console.error(`Attempt ${attempt} failed:`, error); - if (attempt === maxAttempts) { - console.error("Max attempts reached for market", market); - } else { - await new Promise(r => setTimeout(r, this.RETRY_DELAY)); - } - } - } - } - } - - private async executeArbitrageTrade( - market: CrossedMarketDetails, - blockNumber: number - ): Promise { - // Prepare the trade calls - const buyCalls = await market.buyFromMarket.sellTokensToNextMarket( - WETH_ADDRESS, - market.volume, - market.sellToMarket - ); - - // Calculate intermediate amounts - const intermediateAmount = await market.buyFromMarket.getTokensOut( - WETH_ADDRESS, - market.tokenAddress, - market.volume - ); - - // Prepare sell call - const sellCallData = await market.sellToMarket.sellTokens( - market.tokenAddress, - intermediateAmount, - this.bundleExecutorContract.address - ); - - // Combine all calls - const targets = [...buyCalls.targets, market.sellToMarket.marketAddress]; - const payloads = [...buyCalls.data, sellCallData]; - - // Calculate miner reward (90% of profit) - const minerReward = market.profit.mul(90).div(100); - - // Create and simulate bundle - const bundle = await this.createBundle( - market.volume, - minerReward, - targets, - payloads, - blockNumber - ); - - // Execute if simulation successful - return this.executeBundleWithRetry(bundle, blockNumber); - } - - private async createBundle( - volume: BigNumber, - minerReward: BigNumber, - targets: string[], - payloads: string[], - blockNumber: number - ): Promise { - // Estimate gas - const gasEstimate = await this.estimateGasWithBuffer( - volume, - minerReward, - targets, - payloads - ); - - // Get optimal gas price - const gasPrice = await this.getOptimalGasPrice(blockNumber); - - // Create transaction - const transaction = await this.bundleExecutorContract.populateTransaction.uniswapWeth( - volume, - minerReward, - targets, - payloads, - { gasLimit: gasEstimate, gasPrice } - ); - - // Sign transaction - const signedTx = await this.wallet.signTransaction(transaction); - - // Create bundle entry - const bundleEntry = await this.createBundleEntry(signedTx); - - return [bundleEntry]; - } - - private async estimateGasWithBuffer( - volume: BigNumber, - minerReward: BigNumber, - targets: string[], - payloads: string[] - ): Promise { - const estimate = await this.bundleExecutorContract.estimateGas.uniswapWeth( - volume, - minerReward, - targets, - payloads - ); - return estimate.mul(120).div(100); // Add 20% buffer - } - - private async getOptimalGasPrice(blockNumber: number): Promise { - const { currentGasPrice, avgGasPrice } = await getGasPriceInfo(this.wallet.provider as Provider); - const basePrice = currentGasPrice.gt(avgGasPrice) ? currentGasPrice : avgGasPrice; - return basePrice.mul(110).div(100); // Add 10% to be competitive - } - - private async executeBundleWithRetry( - bundle: BundleEntry[], - blockNumber: number - ): Promise { - for (let i = 0; i < this.MAX_RETRIES; i++) { - try { - // Simulate first - await this.simulateBundle(bundle, blockNumber); - - // If simulation successful, submit - const response = await this.flashbotsProvider.sendBundle( - bundle.map(entry => ({ - signedTransaction: entry.signedTransaction, - signer: this.wallet, - transaction: { - to: entry.to, - gasLimit: entry.gas, - gasPrice: entry.gas_price, - value: entry.value, - data: entry.input - } - })), - blockNumber + 1 - ); - - if ('error' in response) { - throw new Error(response.error.message); - } - - return response as unknown as TransactionResponse; - } catch (error) { - console.error(`Bundle execution attempt ${i + 1} failed:`, error); - if (i === this.MAX_RETRIES - 1) throw error; - await new Promise(r => setTimeout(r, this.RETRY_DELAY)); - } - } - return null; - } - - private async createBundleEntry(signedTx: string): Promise { - const tx = await this.wallet.provider.getTransaction(signedTx); - if (!tx?.to || !tx?.gasPrice || !tx?.value) { - throw new Error("Invalid transaction"); - } - - return { - to: tx.to, - gas: tx.gasLimit.toNumber(), - gas_price: tx.gasPrice.toString(), - value: tx.value.toNumber(), - input: tx.data, - from: this.wallet.address, - signedTransaction: signedTx, - signer: this.wallet.address - }; - } - - private async simulateBundle(bundle: BundleEntry[], blockNumber: number): Promise { - const stringBundle = bundle.map(entry => entry.signedTransaction); - const simulation = await this.flashbotsProvider.simulate(stringBundle, blockNumber); - - if ('error' in simulation) { - throw new Error(`Simulation failed: ${simulation.error.message}`); - } - - // Verify profitability - const { bundleGasPrice, coinbaseDiff, totalGasUsed } = simulation; - const cost = bundleGasPrice.mul(totalGasUsed); - const profit = coinbaseDiff.sub(cost); - - if (profit.lte(this.thresholds.minProfitThreshold)) { - throw new Error("Bundle not profitable enough"); - } - } - - async submitBundleWithAdjustedGasPrice(bundle: BundleEntry[], blockNumber: number, blocksApi: any): Promise { - elizaLogger.log(`Submitting bundle with adjusted gas price for block ${blockNumber}`); - - try { - // Get current gas prices - const { currentGasPrice, avgGasPrice } = await getGasPriceInfo(this.wallet.provider as Provider); - - // Monitor competing bundles - const competingBundlesGasPrices = await monitorCompetingBundlesGasPrices(blocksApi); - let competingBundleGasPrice = BigNumber.from(0); - - // Find highest competing gas price - for (const price of competingBundlesGasPrices) { - const currentPrice = BigNumber.from(price); - if (currentPrice.gt(competingBundleGasPrice)) { - competingBundleGasPrice = currentPrice; - } - } - - // Calculate adjusted gas price - const adjustedGasPrice = await this.adjustGasPriceForTransaction( - currentGasPrice, - avgGasPrice, - competingBundleGasPrice - ); - - // Validate adjusted gas price - if (adjustedGasPrice.lte(currentGasPrice)) { - throw new Error("Adjusted gas price is not competitive"); - } - - // Validate bundle gas - const isValidBundleGas = await checkBundleGas(adjustedGasPrice); - if (!isValidBundleGas) { - throw new Error("Invalid bundle gas"); - } - - // Set submission window - const currentTimestamp = Math.floor(Date.now() / 1000); - const maxTimestamp = currentTimestamp + 60; // 1 minute window - - // Submit bundle - const targetBlockNumber = blockNumber + 1; - const bundleSubmission = await this.flashbotsProvider.sendBundle( - bundle.map(entry => ({ - signedTransaction: entry.signedTransaction, - signer: this.wallet, - transaction: { - to: entry.to, - gasLimit: entry.gas, - gasPrice: entry.gas_price, - value: entry.value, - data: entry.input - } - })), - targetBlockNumber, - { - minTimestamp: currentTimestamp, - maxTimestamp: maxTimestamp - } - ); - - // Check submission result - if ('error' in bundleSubmission) { - throw new Error(`Bundle submission failed: ${bundleSubmission.error.message}`); - } - - elizaLogger.log("Bundle submitted successfully:", { - blockNumber: targetBlockNumber, - adjustedGasPrice: adjustedGasPrice.toString(), - bundleHash: bundleSubmission.bundleHash - }); - - } catch (error) { - console.error("Failed to submit bundle with adjusted gas price:", error); - throw error; - } - } - - private async adjustGasPriceForTransaction( - currentGasPrice: BigNumber, - avgGasPrice: BigNumber, - competingBundleGasPrice: BigNumber - ): Promise { - elizaLogger.log("Calculating adjusted gas price", { - current: currentGasPrice.toString(), - average: avgGasPrice.toString(), - competing: competingBundleGasPrice.toString() - }); - - // Find highest gas price - let adjustedGasPrice = currentGasPrice; - if (avgGasPrice.gt(adjustedGasPrice)) { - adjustedGasPrice = avgGasPrice; - } - if (competingBundleGasPrice.gt(adjustedGasPrice)) { - adjustedGasPrice = competingBundleGasPrice; - } - - // Add premium to ensure priority (10% increase) - const premium = adjustedGasPrice.mul(10).div(100); - adjustedGasPrice = adjustedGasPrice.add(premium); - - elizaLogger.log("Adjusted gas price:", adjustedGasPrice.toString()); - return adjustedGasPrice; - } - - private async calculateOptimalVolume( - buyFromMarket: MarketType, - sellToMarket: MarketType, - tokenAddress: string, - profit: BigNumber - ): Promise { - elizaLogger.log("Entering calculateOptimalVolume"); - - // Determine the available liquidity in both markets - const availableLiquidityBuy = await buyFromMarket.getReserves(tokenAddress); - const availableLiquiditySell = await sellToMarket.getReserves(tokenAddress); - - // Set a maximum trade size limit to manage risk - const maxTradeSize = BigNumber.from(100000); // Adjust as needed - - // Calculate price impacts and trading fees - const priceImpactBuy = await buyFromMarket.getPriceImpact(tokenAddress, maxTradeSize); - const priceImpactSell = await sellToMarket.getPriceImpact(tokenAddress, maxTradeSize); - - const tradingFeeBuy = await buyFromMarket.getTradingFee(tokenAddress); - const tradingFeeSell = await sellToMarket.getTradingFee(tokenAddress); - - // Binary Search Initialization - let left = BigNumber.from(1); - let right = maxTradeSize; - let optimalVolume = BigNumber.from(0); - let maxExpectedProfit = BigNumber.from(0); - - while (left.lt(right)) { - const mid = left.add(right).div(2); - - // Calculate expected profit at mid - const expectedProfit = profit - .mul(mid) - .sub(priceImpactBuy.mul(mid)) - .sub(priceImpactSell.mul(mid)) - .sub(tradingFeeBuy.mul(mid)) - .sub(tradingFeeSell.mul(mid)); - - if (expectedProfit.gt(maxExpectedProfit) && expectedProfit.gte(this.thresholds.minProfitThreshold)) { - maxExpectedProfit = expectedProfit; - optimalVolume = mid; - left = mid.add(1); - } else { - right = mid.sub(1); - } - } - - // Ensure that the optimal volume does not exceed available liquidity - optimalVolume = BigNumber.from(Math.min( - optimalVolume.toNumber(), - availableLiquidityBuy.toNumber(), - availableLiquiditySell.toNumber() - )); - - elizaLogger.log(`calculateOptimalVolume: optimalVolume = ${optimalVolume}`); - return optimalVolume; - } -} - -// Helper functions -async function checkBundleGas(bundleGas: BigNumber): Promise { - const isValid = bundleGas.gte(42000); - elizaLogger.log(`checkBundleGas: bundleGas = ${bundleGas}, isValid = ${isValid}`); - return isValid; -} - -export async function monitorCompetingBundlesGasPrices(blocksApi: { getRecentBlocks: () => any; }): Promise> { - elizaLogger.log("Entering monitorCompetingBundlesGasPrices"); - const recentBlocks = await blocksApi.getRecentBlocks(); - const competingBundlesGasPrices = recentBlocks.map((block: { bundleGasPrice: any; }) => block.bundleGasPrice); - elizaLogger.log(`monitorCompetingBundlesGasPrices: competingBundlesGasPrices = ${competingBundlesGasPrices}`); - return competingBundlesGasPrices; -} - -export async function getGasPriceInfo(provider: Provider): Promise<{ - currentGasPrice: BigNumber, - avgGasPrice: BigNumber -}> { - const feeData = await provider.getFeeData(); - const currentGasPrice = feeData.gasPrice || BigNumber.from(0); - - // Get average from last few blocks - const block = await provider.getBlock("latest"); - const prices: BigNumber[] = []; - for (let i = 0; i < 5; i++) { - const historicalBlock = await provider.getBlock(block.number - i); - if (historicalBlock.baseFeePerGas) { - prices.push(historicalBlock.baseFeePerGas); - } - } - - const avgGasPrice = prices.length > 0 - ? prices.reduce((a, b) => a.add(b)).div(prices.length) - : currentGasPrice; - - return { currentGasPrice, avgGasPrice }; -} - -export async function calculateOptimalVolume( - buyFromMarket: MarketType, - sellToMarket: MarketType, - tokenAddress: string, - profit: BigNumber -): Promise { - const buyLiquidity = await buyFromMarket.getReserves(tokenAddress); - const sellLiquidity = await sellToMarket.getReserves(tokenAddress); - - // Start with 1% of the smaller liquidity pool - let optimalVolume = buyLiquidity.lt(sellLiquidity) - ? buyLiquidity.div(100) - : sellLiquidity.div(100); - - // Adjust based on price impact - const buyImpact = await buyFromMarket.getPriceImpact(tokenAddress, optimalVolume); - const sellImpact = await sellToMarket.getPriceImpact(tokenAddress, optimalVolume); - - // If price impact is too high, reduce volume - if (buyImpact.add(sellImpact).gt(BigNumber.from(300))) { // 3% total impact - optimalVolume = optimalVolume.mul(80).div(100); // Reduce by 20% - } - - return optimalVolume; -} \ No newline at end of file diff --git a/packages/plugin-arbitrage/src/core/EthMarket.ts b/packages/plugin-arbitrage/src/core/EthMarket.ts deleted file mode 100644 index 09869d6f1bdb6..0000000000000 --- a/packages/plugin-arbitrage/src/core/EthMarket.ts +++ /dev/null @@ -1,70 +0,0 @@ -import { BigNumber } from "@ethersproject/bignumber"; -import { MarketType } from "../type"; - - -export interface CallDetails { - target: string; - data: string; - value?: BigNumber; - } - - export interface TokenBalances { - [tokenAddress: string]: BigNumber - } - - export interface MultipleCallData { - targets: Array - data: Array - } - - -export abstract class EthMarket implements MarketType { - constructor( - public marketAddress: string, - public tokenAddress: string, - public tokens: string[], - public protocol: any - ) {} - - async getReserves(_tokenAddress: string): Promise { - // Implementation - return BigNumber.from(0); - } - - async getTokensOut(_tokenIn: string, _tokenOut: string, _amountIn: BigNumber): Promise { - // Implementation - return BigNumber.from(0); - } - - async getPriceImpact(_tokenAddress: string, _tradeSize: BigNumber): Promise { - // Implementation - return BigNumber.from(0); - } - - async getTradingFee(_tokenAddress: string): Promise { - // Implementation - return BigNumber.from(0); - } - - async getBalance(_tokenAddress: string): Promise { - // Implementation - return BigNumber.from(0); - } - - async sellTokens(_tokenAddress: string, _volume: BigNumber, _recipient: string): Promise { - // Implementation - return ""; - } - - async sellTokensToNextMarket(_tokenAddress: string, _volume: BigNumber, _nextMarket: MarketType): Promise { - // Implementation - return { targets: [], data: [] }; - } - - abstract receiveDirectly(tokenAddress: string): boolean; -} - -export interface BuyCalls { - targets: string[]; - data: string[]; -} \ No newline at end of file diff --git a/packages/plugin-arbitrage/src/core/UniswapV2EthPair.ts b/packages/plugin-arbitrage/src/core/UniswapV2EthPair.ts deleted file mode 100644 index 8ee99daf441ee..0000000000000 --- a/packages/plugin-arbitrage/src/core/UniswapV2EthPair.ts +++ /dev/null @@ -1,526 +0,0 @@ -import * as _ from "lodash"; -import { BigNumber } from "@ethersproject/bignumber"; -import { Contract } from "@ethersproject/contracts"; -import { JsonRpcProvider, StaticJsonRpcProvider } from "@ethersproject/providers"; -import { Interface } from "@ethersproject/abi"; -import { formatEther, parseEther } from "@ethersproject/units"; -import { isAddress } from "@ethersproject/address"; -import { ethers } from "ethers"; -import { UNISWAP_PAIR_ABI, UNISWAP_QUERY_ABI, UNISWAP_FACTORY_ABI, WETH_ABI} from "././abi"; -import { FACTORY_ADDRESSES, UNISWAP_LOOKUP_CONTRACT_ADDRESS } from "./addresses"; -import { CallDetails, MultipleCallData, TokenBalances } from "./EthMarket"; -import { ETHER } from "./utils"; -import { MarketType } from '../type'; -import { EthMarket, CrossedMarketDetails, MarketsByToken, BuyCalls } from "./types"; -require('dotenv').config(); -import pLimit from 'p-limit'; - -const DEFAULT_WETH_ADDRESS = '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2'; -const ETHEREUM_RPC_URL = process.env.ETHEREUM_RPC_URL; -//const factoryAddress = UNISWAP_FACTORY_ADDRESS; - -const WETH_ADDRESS = process.env.WETH_ADDRESS || DEFAULT_WETH_ADDRESS; - -// batch count limit helpful for testing, loading entire set of uniswap markets takes a long time to load -const BATCH_COUNT_LIMIT = 100; -const UNISWAP_BATCH_SIZE = 1000; -const provider = new StaticJsonRpcProvider(ETHEREUM_RPC_URL); - -// Not necessary, slightly speeds up loading initialization when we know tokens are bad -// Estimate gas will ensure we aren't submitting bad bundles, but bad tokens waste time -const blacklistTokens = [ - '0xD75EA151a61d06868E31F8988D28DFE5E9df57B4', - //'0x06AF07097C9Eeb7fD685c692751D5C66dB49c215' -] - -export interface ImpactAndFeeFuncs { - getPriceImpact: (tokenAddress: string, tradeSize: BigNumber, reserve: BigNumber) => Promise; - getTradingFee: (tokenAddress: string) => Promise; -} - -export interface GroupedMarkets { - marketsByToken: MarketsByToken; - allMarketPairs: Array; - getPriceImpact(tokenAddress: string, tradeSize: BigNumber): Promise; - getTradingFee(tokenAddress: string): Promise; -} - -export class UniswapV2EthPair implements EthMarket, MarketType { - static filteredPairs: any; - private static limit = pLimit(75); // Limit concurrent operations to 100 - private static BATCH_SIZE = 500; // Smaller batch size for better management - tokens: any; - _tokens: string[]; // Add this line - tokenAddress: string; // Add this line - protocol: string; - provider: StaticJsonRpcProvider; - static buyFromMarket(_buyFromMarket: any, _sellToMarket: EthMarket, _tokenAddress: string, _profit: number) { - throw new Error("Method not implemented."); - } - static impactAndFeeFuncs(provider: StaticJsonRpcProvider, FACTORY_ADDRESSES: string[], impactAndFeeFuncs: any) { - throw new Error("Method not implemented."); - } - static updateReservesFromResults(pairs: Array, results: Array): void { - for (let i = 0; i < pairs.length; i++) { - const pair = pairs[i]; - const result = results[i]; - // Assuming result is an array of BigNumber representing reserves - pair.setReservesViaOrderedBalances(result); - } - } - reserve: BigNumber; - async getTradingFee(): Promise { - // Uniswap V2 has a fixed trading fee of 0.3% (30 basis points) - const tradingFee: BigNumber = BigNumber.from(30).div(10000); - return tradingFee; - } - static uniswapInterface = new Contract(WETH_ADDRESS, UNISWAP_PAIR_ABI); - private _tokenBalances: TokenBalances; - - constructor(marketAddress: string, tokens: Array, protocol: string, tokenAddress: string, provider: StaticJsonRpcProvider) { - this.marketAddress = marketAddress; - this._tokens = tokens; - this.protocol = protocol; - this.tokenAddress = tokenAddress; - this.reserve = BigNumber.from(0); // Initialize reserve in the constructor - const initialBalances = tokens.map(() => BigNumber.from(0)); - this._tokenBalances = _.zipObject(tokens, initialBalances); - this.provider = provider; -} - marketAddress: string; - - private static async exponentialBackoff(attempt: number): Promise { - const delay = Math.min(Math.pow(2, attempt) * 1000, 10000); // Cap at 10 seconds - await new Promise(resolve => setTimeout(resolve, delay)); - } -async getPriceImpact(tokenAddress: string, tradeSize: BigNumber): Promise { - const reserve = await this.getReserves(tokenAddress); - const impact = tradeSize.mul(BigNumber.from(10000)).div(reserve.add(tradeSize)); - return impact; // Returns price impact as a basis point value (1/100 of a percent) -} -async getReserves(tokenAddress: string): Promise { - const pairContract = new Contract(this.marketAddress, UNISWAP_PAIR_ABI, provider); - const [reserve0, reserve1] = await pairContract.getReserves(); - // Normalize addresses to lowercase for comparison - const normalizedTokenAddress = tokenAddress.toLowerCase(); - const normalizedTokens = this._tokens.map(token => token.toLowerCase()); - return normalizedTokenAddress === normalizedTokens[0] ? reserve0 : reserve1; -} - receiveDirectly(tokenAddress: string): boolean { - return tokenAddress in this._tokenBalances; - } - - async prepareReceive(tokenAddress: string, amountIn: BigNumber): Promise> { - if (this._tokenBalances[tokenAddress] === undefined) { - throw new Error(`Market does not operate on token ${tokenAddress}`) - } - if (! amountIn.gt(0)) { - throw new Error(`Invalid amount: ${amountIn.toString()}`) - } - // No preparation necessary - return [] - } - // Example: Advanced error handling and potential gas optimization placeholder - - static async fetchWETHBalance( - provider: StaticJsonRpcProvider, - marketAddress: string, - WETH_ADDRESS: string - ): Promise { - for (let attempt = 0; attempt < 3; attempt++) { - try { - const wethContract = new Contract(WETH_ADDRESS, WETH_ABI, provider); - const balance = await wethContract.balanceOf(marketAddress); - return BigNumber.from(balance); - } catch (error: any) { - if (attempt === 2) { - console.error( - `Failed to fetch WETH balance for address ${marketAddress}`, - error.message - ); - return BigNumber.from(0); - } - await this.exponentialBackoff(attempt); - } - } - return BigNumber.from(0); // Typescript requires a return here even though it won't be reached - } - - static ImpactAndFeeFuncs: ImpactAndFeeFuncs = { - getPriceImpact: async (tokenAddress: string, tradeSize: BigNumber, reserve: BigNumber) => { - if (!reserve || reserve.isZero()) { - throw new Error("Reserve is zero"); - } - const impact = tradeSize.mul(BigNumber.from(10000)).div(reserve.add(tradeSize)); - return impact; // Returns price impact as a basis point value (1/100 of a percent) - }, - getTradingFee: async (tokenAddress: string): Promise => { - // compute trading fee here - const tradingFee: BigNumber = BigNumber.from(30).div(10000); - return tradingFee; // don't convert BigNumber to number, keep it as BigNumber - }, - }; - static async getUniswapMarkets(provider: StaticJsonRpcProvider, factoryAddress: string): Promise> { - // Setup the contract to query Uniswap market pairs - const uniswapQuery = new Contract(UNISWAP_LOOKUP_CONTRACT_ADDRESS, UNISWAP_QUERY_ABI, provider); - - // Initialize an array to hold the market pairs - const marketPairs: UniswapV2EthPair[] = []; - - // Calculate the total number of batches to process based on the batch limit and size - const allPairsLength = await new Contract(factoryAddress, UNISWAP_FACTORY_ABI, provider).allPairsLength(); - const totalBatches = Math.ceil(allPairsLength.toNumber() / UNISWAP_BATCH_SIZE); - - // Iterate over all pairs in batches, with consideration for the batch count limit - for (let batch = 0; batch < Math.min(totalBatches, BATCH_COUNT_LIMIT); batch++) { - const startIndex = batch * UNISWAP_BATCH_SIZE; - const endIndex = Math.min(startIndex + UNISWAP_BATCH_SIZE, allPairsLength.toNumber()); - - const batchPairs = await uniswapQuery.functions.getPairsByIndexRange(factoryAddress, startIndex, endIndex); - // Validate the response format - if (Array.isArray(batchPairs) && batchPairs.length > 0 && Array.isArray(batchPairs[0])) { - batchPairs[0].forEach(pairArray => { // Adjusted to access the first element of batchPairs, which is the actual array of pairs - if(Array.isArray(pairArray) && pairArray.length === 3) { - const [token0, token1, pairAddress] = pairArray; - - //console.log(`Processing pair: Token0: ${token0}, Token1: ${token1}, PairAddress: ${pairAddress}`); - - // Validate each address individually - if (!isAddress(token0) || !isAddress(token1) || !isAddress(pairAddress)) { - console.error(`Invalid address detected. Token0: ${token0}, Token1: ${token1}, PairAddress: ${pairAddress}`); - return; - } - // Exclude pairs involving blacklisted tokens - if (!blacklistTokens.includes(token0) && !blacklistTokens.includes(token1)) { - // Process valid pairs here - const marketPair = new UniswapV2EthPair(pairAddress, [token0, token1], 'UniswapV2', token0, provider); // Example processing - marketPairs.push(marketPair); - } else { - //console.log(`Skipping blacklisted pair. Token0: ${token0}, Token1: ${token1}`); - } - } else { - // Handle unexpected format - console.error('Unexpected pair data format:', pairArray); - } - }); - } else { - throw new Error("Expected an array of pairs data"); - } - - // Break the loop if the last batch was smaller than the UNISWAP_BATCH_SIZE, indicating we've processed all available pairs - if (batchPairs[0].length < UNISWAP_BATCH_SIZE) break; - } - - // Return the array containing all the market pairs that were processed - return marketPairs; -} - -static async getUniswapMarketsByToken( - provider: StaticJsonRpcProvider, - factoryAddresses: string[], - impactAndFeeFuncs: any, - progressCallback?: (progress: number) => void -): Promise<{ - marketsByToken: { [token: string]: UniswapV2EthPair[] }; - allMarketPairs: UniswapV2EthPair[]; - getPriceImpact: (tokenAddress: string, tradeSize: BigNumber) => Promise; - getTradingFee: (tokenAddress: string) => Promise; -}> { - try { - // Fetch all pairs from factory addresses - const allPairs = await Promise.all( - factoryAddresses.map(factoryAddress => UniswapV2EthPair.getUniswapMarkets(provider, factoryAddress)) - ); - const allPairsFlat: UniswapV2EthPair[] = _.flatten(allPairs); - - // Update reserves for all pairs - await UniswapV2EthPair.updateReserves(provider, allPairsFlat, WETH_ADDRESS); - - // Validate and filter pairs based on WETH balance - const allPairsWithBalance = await Promise.all( - allPairsFlat.map(async (pair: UniswapV2EthPair) => { - try { - // Now we're calling getBalance on the instance (pair), not the class - const balance = await pair.getBalance(WETH_ADDRESS); - return balance.gt(ETHER) ? pair : null; - } catch (error: any) { - return null; - } - }) - ); - - - // Only retain valid pairs - const filteredPairs: UniswapV2EthPair[] = allPairsWithBalance.filter(pair => pair !== null) as UniswapV2EthPair[]; - - // Grouping markets by token - const marketsByToken = _.groupBy(filteredPairs, pair => - pair._tokens[0] === WETH_ADDRESS ? pair._tokens[1] : pair._tokens[0] - ) as { [token: string]: UniswapV2EthPair[] }; - - // Logging market information - console.log(`Grouped markets by token:`, marketsByToken); - console.log(`Filtered pairs count:`, filteredPairs.length); - - // Add progress reporting - let processedPairs = 0; - const totalPairs = filteredPairs.length; - - // During processing - processedPairs++; - if (progressCallback) { - progressCallback(processedPairs / totalPairs); - } - - // Return structured market data along with impact and fee calculation methods - return { - marketsByToken, - allMarketPairs: filteredPairs, - getPriceImpact: async (tokenAddress: string, tradeSize: BigNumber) => { - const pair = filteredPairs.find(pair => pair._tokens.includes(tokenAddress)); - if (!pair) { - throw new Error(`No pair found for token ${tokenAddress}`); - } - const reserve = await pair.getReserves(tokenAddress); - return impactAndFeeFuncs.getPriceImpact(tokenAddress, tradeSize, reserve); - }, - getTradingFee: impactAndFeeFuncs.getTradingFee, - }; - } catch (error: any) { - // Handling unexpected failures - console.error('Error details:', error.message, error.stack); - console.error('An error occurred while getting Uniswap Markets By Token:', error); - // Fallback return structure in case of failure - return { - marketsByToken: {}, - allMarketPairs: [], - getPriceImpact: async () => { throw new Error("Not implemented"); }, - getTradingFee: () => { throw new Error("Not implemented"); }, - }; - } -} -static async updateReserves(provider: StaticJsonRpcProvider, pairsInArbitrage: UniswapV2EthPair[], WETH_ADDRESS: string) { - console.log(`Updating reserves for ${pairsInArbitrage.length} markets`); - let filteredPairsInArbitrage = []; - - // Process in smaller batches - for (let i = 0; i < pairsInArbitrage.length; i += this.BATCH_SIZE) { - const batchPairs = pairsInArbitrage.slice(i, i + this.BATCH_SIZE); - - // Add delay between batches - if (i > 0) { - await new Promise(resolve => setTimeout(resolve, 100)); - } - - const promises = batchPairs.map(marketPair => - this.limit(async () => { - try { - const pairContract = new Contract( - marketPair.marketAddress, - UNISWAP_PAIR_ABI, - provider - ); - - // Retry logic now using exponentialBackoff - for (let attempt = 0; attempt < 3; attempt++) { - try { - const [reserve0, reserve1] = await pairContract.getReserves(); - const totalReserves = reserve0.add(reserve1); - const totalReservesInEth = formatEther(totalReserves); - - if (parseFloat(totalReservesInEth) < 3) { - return null; - } - - const wethBalance = await this.limit(async () => - this.fetchWETHBalance(provider, marketPair.marketAddress, WETH_ADDRESS) - ); - - if (!wethBalance.isZero()) { - await marketPair.setReservesViaOrderedBalances([wethBalance]); - return marketPair; - } - return null; - } catch (error) { - if (attempt === 2) throw error; - await this.exponentialBackoff(attempt); // Using the helper method here - } - } - } catch (error) { - console.error( - `Failed to update reserves for pair ${marketPair.marketAddress}:`, - error - ); - return null; - } - }) - ); - - try { - const results = await Promise.all(promises); - const validResults = results.filter(pair => pair !== null); - filteredPairsInArbitrage.push(...validResults); - } catch (error) { - console.error('Batch processing error:', error); - } - } - - console.log(`Filtered pairs for arbitrage calculation: ${filteredPairsInArbitrage.length}`); - return filteredPairsInArbitrage; -} -// In UniswapV2EthPair getBalance method: - -async getBalance(tokenAddress: string): Promise { - tokenAddress = tokenAddress.toLowerCase(); - - if (tokenAddress === WETH_ADDRESS.toLowerCase()) { - const wethBalance = await UniswapV2EthPair.fetchWETHBalance(this.provider, this.marketAddress, WETH_ADDRESS); - return wethBalance; - } - - const balance = this._tokenBalances[tokenAddress]; - if (balance === undefined) { - console.warn(`Invalid or unrecognized token address: ${tokenAddress}`); - return BigNumber.from(0); - } - return balance as BigNumber; // Add type assertion if needed -} - async setReservesViaOrderedBalances(balances: Array): Promise { - await this.setReservesViaMatchingArray(this._tokens, balances) // Change this line - } - // Optimizing setReservesViaMatchingArray for clearer balance updating: - - async setReservesViaMatchingArray(tokens: Array, balances: Array): Promise { - const tokenBalances = _.zipObject(tokens, balances); - if (!_.isEqual(this._tokenBalances, tokenBalances)) { - this._tokenBalances = tokenBalances; - } - } - - async getTokensIn(tokenIn: string, tokenOut: string, amountOut: BigNumber): Promise { - const reserveIn = this._tokenBalances[tokenIn] - const reserveOut = this._tokenBalances[tokenOut] - return this.getAmountIn(reserveIn, reserveOut, amountOut); - } - - async getTokensOut(tokenIn: string, tokenOut: string, amountIn: BigNumber): Promise { - const reserveIn = this._tokenBalances[tokenIn]; - const reserveOut = this._tokenBalances[tokenOut]; - return Promise.resolve(this.getAmountOut(reserveIn, reserveOut, amountIn)); - } - - getAmountIn(reserveIn: BigNumber, reserveOut: BigNumber, amountOut: BigNumber): BigNumber { - const numerator: BigNumber = reserveIn.mul(amountOut).mul(1000); - const denominator: BigNumber = reserveOut.sub(amountOut).mul(997); - return numerator.div(denominator).add(1); - } - - getAmountOut(reserveIn: BigNumber, reserveOut: BigNumber, amountIn: BigNumber): BigNumber { - const amountInWithFee: BigNumber = amountIn.mul(997); - const numerator = amountInWithFee.mul(reserveOut); - const denominator = reserveIn.mul(1000).add(amountInWithFee); - return numerator.div(denominator); - } - async sellTokensToNextMarket(tokenIn: string, amountIn: BigNumber, ethMarket: EthMarket): Promise { - if (ethMarket.receiveDirectly(tokenIn) === true) { - const exchangeCall = await this.sellTokens(tokenIn, amountIn, ethMarket.marketAddress); - return { - data: [exchangeCall], - targets: [this.marketAddress] - }; - } - - const exchangeCall = await this.sellTokens(tokenIn, amountIn, ethMarket.marketAddress); - return { - data: [exchangeCall], - targets: [this.marketAddress] - }; - } - - async sellTokens(tokenIn: string, amountIn: BigNumber, recipient: string): Promise { - let amount0Out = BigNumber.from(0); - let amount1Out = BigNumber.from(0); - let tokenOut: string; - if (tokenIn === this._tokens[0]) { - tokenOut = this._tokens[1]; - amount1Out = await this.getTokensOut(tokenIn, tokenOut, amountIn); - } else if (tokenIn === this._tokens[1]) { - tokenOut = this._tokens[0]; - amount0Out = await this.getTokensOut(tokenIn, tokenOut, amountIn); - } else { - throw new Error("Bad token input address"); - } - const populatedTransaction = await UniswapV2EthPair.uniswapInterface.populateTransaction.swap(amount0Out, amount1Out, recipient, []); - if (populatedTransaction === undefined || populatedTransaction.data === undefined) throw new Error("HI"); - return populatedTransaction.data; - } - - - - // Example updated method for batch-updating reserves via Multicall2 - static async updateReservesWithMulticall( - provider: StaticJsonRpcProvider, - pairs: UniswapV2EthPair[] - ): Promise { - console.log('Attempting to update reserves in one or more multicall batches'); - const MULTICALL2_ADDRESS = "0x5BA1e12693Dc8F9c48aAD8770482f4739bEeD696"; - const MULTICALL2_ABI = [ - "function aggregate(tuple(address target, bytes callData)[] calls) public returns (uint256 blockNumber, bytes[] returnData)" - ]; - - const limit = pLimit(50); // Adjust concurrency to your preference - const multicall2 = new Contract(MULTICALL2_ADDRESS, MULTICALL2_ABI, provider); - - // This chunk size helps control how many calls are sent per block of multicall - const CHUNK_SIZE = 200; - let updatedPairs: UniswapV2EthPair[] = []; - - // Break the pairs array into chunks to avoid block gas limit issues - for (let i = 0; i < pairs.length; i += CHUNK_SIZE) { - const chunk = pairs.slice(i, i + CHUNK_SIZE); - - // Build calls array for the chunk - const calls = chunk.map(pair => { - const callData = new Interface(UNISWAP_PAIR_ABI).encodeFunctionData("getReserves"); - return { - target: pair.marketAddress, - callData: callData - }; - }); - - // Execute multicall for this chunk - try { - const [, returnData] = await limit(() => multicall2.aggregate(calls)); - // Return data is an array of ABI-encoded results for each call - - chunk.forEach((pair, idx) => { - try { - const result = returnData[idx]; - if (result) { - const [reserve0, reserve1] = new Interface(UNISWAP_PAIR_ABI).decodeFunctionResult( - "getReserves", - result - ) as [BigNumber, BigNumber, number]; - - // If reserves are below a threshold, skip them - const totalReserve = reserve0.add(reserve1); - if (totalReserve.gt(parseEther("3"))) { - // For example if you have a method to set reserves or store them - // pair.setReservesViaOrderedBalances(...) - // or directly store into your pair class - } - updatedPairs.push(pair); - } - } catch (e) { - console.error(`Decoding error for pair ${pair.marketAddress}`, e); - } - }); - } catch (e) { - console.error(`Multicall chunk failed: ${e}`); - } - } - - console.log(`Finished multicall update. Updated pairs: ${updatedPairs.length}`); - return updatedPairs; - } -} diff --git a/packages/plugin-arbitrage/src/core/abi.ts b/packages/plugin-arbitrage/src/core/abi.ts deleted file mode 100644 index 7b35d39e91589..0000000000000 --- a/packages/plugin-arbitrage/src/core/abi.ts +++ /dev/null @@ -1,80 +0,0 @@ -export const UNISWAP_QUERY_ABI = [{"inputs":[{"internalType":"contract UniswapV2Factory","name":"_uniswapFactory","type":"address"},{"internalType":"uint256","name":"_start","type":"uint256"},{"internalType":"uint256","name":"_stop","type":"uint256"}],"name":"getPairsByIndexRange","outputs":[{"internalType":"address[3][]","name":"","type":"address[3][]"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"contract IUniswapV2Pair[]","name":"_pairs","type":"address[]"}],"name":"getReservesByPairs","outputs":[{"internalType":"uint256[3][]","name":"","type":"uint256[3][]"}],"stateMutability":"view","type":"function"}]; -export const BUNDLE_EXECUTOR_ABI = [ - { - "inputs": [ - { - "internalType": "address", - "name": "_executor", - "type": "address" - } - ], - "stateMutability": "payable", - "type": "constructor" - }, - { - "inputs": [ - { - "internalType": "address payable", - "name": "_to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "_value", - "type": "uint256" - }, - { - "internalType": "bytes", - "name": "_data", - "type": "bytes" - } - ], - "name": "call", - "outputs": [ - { - "internalType": "bytes", - "name": "", - "type": "bytes" - } - ], - "stateMutability": "payable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_wethAmountToFirstMarket", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_ethAmountToCoinbase", - "type": "uint256" - }, - { - "internalType": "address[]", - "name": "_targets", - "type": "address[]" - }, - { - "internalType": "bytes[]", - "name": "_payloads", - "type": "bytes[]" - } - ], - "name": "uniswapWeth", - "outputs": [], - "stateMutability": "payable", - "type": "function" - }, - { - "stateMutability": "payable", - "type": "receive" - } -]; - -export const UNISWAP_PAIR_ABI = [{"inputs":[],"payable":false,"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":true,"internalType":"address","name":"spender","type":"address"},{"indexed":false,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount0","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1","type":"uint256"},{"indexed":true,"internalType":"address","name":"to","type":"address"}],"name":"Burn","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount0","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1","type":"uint256"}],"name":"Mint","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount0In","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1In","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount0Out","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1Out","type":"uint256"},{"indexed":true,"internalType":"address","name":"to","type":"address"}],"name":"Swap","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint112","name":"reserve0","type":"uint112"},{"indexed":false,"internalType":"uint112","name":"reserve1","type":"uint112"}],"name":"Sync","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"from","type":"address"},{"indexed":true,"internalType":"address","name":"to","type":"address"},{"indexed":false,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Transfer","type":"event"},{"constant":true,"inputs":[],"name":"DOMAIN_SEPARATOR","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"MINIMUM_LIQUIDITY","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"PERMIT_TYPEHASH","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"internalType":"address","name":"","type":"address"},{"internalType":"address","name":"","type":"address"}],"name":"allowance","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"approve","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"to","type":"address"}],"name":"burn","outputs":[{"internalType":"uint256","name":"amount0","type":"uint256"},{"internalType":"uint256","name":"amount1","type":"uint256"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"internalType":"uint8","name":"","type":"uint8"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"factory","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"getReserves","outputs":[{"internalType":"uint112","name":"_reserve0","type":"uint112"},{"internalType":"uint112","name":"_reserve1","type":"uint112"},{"internalType":"uint32","name":"_blockTimestampLast","type":"uint32"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"_token0","type":"address"},{"internalType":"address","name":"_token1","type":"address"}],"name":"initialize","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"kLast","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"to","type":"address"}],"name":"mint","outputs":[{"internalType":"uint256","name":"liquidity","type":"uint256"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"nonces","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"permit","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"price0CumulativeLast","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"price1CumulativeLast","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"to","type":"address"}],"name":"skim","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"internalType":"uint256","name":"amount0Out","type":"uint256"},{"internalType":"uint256","name":"amount1Out","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"bytes","name":"data","type":"bytes"}],"name":"swap","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[],"name":"sync","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"token0","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"token1","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"transfer","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"transferFrom","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"}]; - -export const WETH_ABI = [{"constant":true,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"guy","type":"address"},{"name":"wad","type":"uint256"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"src","type":"address"},{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"wad","type":"uint256"}],"name":"withdraw","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint8"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transfer","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"deposit","outputs":[],"payable":true,"stateMutability":"payable","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"},{"name":"","type":"address"}],"name":"allowance","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"payable":true,"stateMutability":"payable","type":"fallback"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"guy","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Deposit","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Withdrawal","type":"event"}]; -export const UNISWAP_FACTORY_ABI = [{"inputs":[{"internalType":"address","name":"_feeToSetter","type":"address"}],"payable":false,"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"token0","type":"address"},{"indexed":true,"internalType":"address","name":"token1","type":"address"},{"indexed":false,"internalType":"address","name":"pair","type":"address"},{"indexed":false,"internalType":"uint256","name":"","type":"uint256"}],"name":"PairCreated","type":"event"},{"constant":true,"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"allPairs","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"allPairsLength","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"tokenA","type":"address"},{"internalType":"address","name":"tokenB","type":"address"}],"name":"createPair","outputs":[{"internalType":"address","name":"pair","type":"address"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"feeTo","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"feeToSetter","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"internalType":"address","name":"","type":"address"},{"internalType":"address","name":"","type":"address"}],"name":"getPair","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"_feeTo","type":"address"}],"name":"setFeeTo","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"_feeToSetter","type":"address"}],"name":"setFeeToSetter","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"}]; \ No newline at end of file diff --git a/packages/plugin-arbitrage/src/core/addresses.ts b/packages/plugin-arbitrage/src/core/addresses.ts deleted file mode 100644 index 4071e7ac0ea1c..0000000000000 --- a/packages/plugin-arbitrage/src/core/addresses.ts +++ /dev/null @@ -1,11 +0,0 @@ -export const WETH_ADDRESS = "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"; - -// Uniswap V2 Factory addresses for major DEXes -export const FACTORY_ADDRESSES = { - UNISWAP: "0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f", - SUSHISWAP: "0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac", - SHIBASWAP: "0x115934131916C8b277DD010Ee02de363c09d037c" -}; - -// Uniswap lookup contract for price checks -export const UNISWAP_LOOKUP_CONTRACT_ADDRESS = "0x5EF1009b9FCD4fec3094a5564047e190D72Bd511"; \ No newline at end of file diff --git a/packages/plugin-arbitrage/src/core/types.ts b/packages/plugin-arbitrage/src/core/types.ts deleted file mode 100644 index 6154cc66b34d4..0000000000000 --- a/packages/plugin-arbitrage/src/core/types.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { BigNumber } from "@ethersproject/bignumber"; -import { - MarketType as BaseMarketType, - EthMarket as BaseEthMarket, - MarketPair as BaseMarketPair, - CrossedMarketDetails as BaseCrossedMarketDetails, - MarketsByToken as BaseMarketsByToken, - BuyCalls as BaseBuyCalls -} from "../type"; - -export type EthMarket = BaseEthMarket; -export type MarketPair = BaseMarketPair; -export type CrossedMarketDetails = BaseCrossedMarketDetails; -export type MarketsByToken = BaseMarketsByToken; -export type BuyCalls = BaseBuyCalls; -export type MarketType = BaseMarketType; \ No newline at end of file diff --git a/packages/plugin-arbitrage/src/core/utils.ts b/packages/plugin-arbitrage/src/core/utils.ts deleted file mode 100644 index 4b6f8b4a18807..0000000000000 --- a/packages/plugin-arbitrage/src/core/utils.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { BigNumber, Wallet } from "ethers"; - -// Constants -export const ETHER = BigNumber.from(10).pow(18); -export const DEFAULT_GAS_LIMIT = 250000; - -// Math utilities -export function bigNumberToDecimal(value: BigNumber, base = 18): number { - const divisor = BigNumber.from(10).pow(base); - return value.mul(10000).div(divisor).toNumber() / 10000; -} - -// Authentication utilities -export const getDefaultRelaySigningKey = (): string => { - console.warn( - "No FLASHBOTS_RELAY_SIGNING_KEY specified. Creating temporary key..." - ); - return Wallet.createRandom().privateKey; -}; - -// Add error handling utilities -export const handleArbitrageError = (error: Error): void => { - console.error(`Arbitrage Error: ${error.message}`); - // Add any specific error handling logic -}; \ No newline at end of file diff --git a/packages/plugin-arbitrage/src/core/websocketmanager.ts b/packages/plugin-arbitrage/src/core/websocketmanager.ts deleted file mode 100644 index e30031becc862..0000000000000 --- a/packages/plugin-arbitrage/src/core/websocketmanager.ts +++ /dev/null @@ -1,433 +0,0 @@ -import { WebSocket } from 'ws'; -import { BigNumber } from 'ethers/lib/ethers'; -import { Arbitrage } from './Arbitrage'; -import { UniswapV2EthPair } from './UniswapV2EthPair'; -import * as dotenv from "dotenv"; -dotenv.config(); - - -export interface SubscriptionConfig { - DEX_ADDRESSES: string[]; - TRANSFER_TOPIC: string; - SWAP_TOPIC: string; -} - -export class EnhancedWebSocketManager { - private arbitrage: Arbitrage; - private markets: { [token: string]: UniswapV2EthPair[] }; - ws: any; - config: SubscriptionConfig; - subscriptions: any; - pendingTxs: any; - start: any; - - constructor( - websocketUrl: string, - config: SubscriptionConfig, - arbitrage: Arbitrage, - initialMarkets: { [token: string]: UniswapV2EthPair[] } - ) { - this.ws = new WebSocket(websocketUrl); - this.config = config; - this.arbitrage = arbitrage; - this.markets = initialMarkets; - this.subscriptions = new Map(); // Add this line - this.pendingTxs = new Set(); - this.setupWebSocket(); - } - - private setupWebSocket() { - this.ws.on('open', () => { - console.log('WebSocket connection established'); - this.subscribeToAll(); - }); - - this.ws.on('message', (data: string) => { - const message = JSON.parse(data); - this.handleMessage(message); - }); - - this.ws.on('close', () => { - console.log('WebSocket connection closed'); - this.reconnect(); - }); - - this.ws.on('error', (error: any) => { - console.error('WebSocket error:', error); - }); - } - - private async subscribeToAll() { - // Subscribe to pending transactions specific to DEX addresses - await this.subscribeToPendingTransactions(); - - // Subscribe to new blocks - await this.subscribeToNewHeads(); - - // Subscribe to DEX-specific logs (transfers and swaps) - await this.subscribeToLogs(); - } - - private async subscribeToPendingTransactions() { - const subscribeMsg = { - jsonrpc: "2.0", - id: 1, - method: "eth_subscribe", - params: [ - "alchemy_pendingTransactions", - { - toAddress: this.config.DEX_ADDRESSES, - hashesOnly: false - } - ] - }; - - this.ws.send(JSON.stringify(subscribeMsg)); - } - - private async subscribeToNewHeads() { - const subscribeMsg = { - jsonrpc: "2.0", - id: 2, - method: "eth_subscribe", - params: ["newHeads"] - }; - - this.ws.send(JSON.stringify(subscribeMsg)); - } - - private async subscribeToLogs() { - const subscribeMsg = { - jsonrpc: "2.0", - id: 3, - method: "eth_subscribe", - params: [ - "logs", - { - address: this.config.DEX_ADDRESSES, - topics: [ - [this.config.TRANSFER_TOPIC, this.config.SWAP_TOPIC] - ] - } - ] - }; - - this.ws.send(JSON.stringify(subscribeMsg)); - } - - private async handleMessage(message: any) { - // Handle subscription confirmations - if (message.id) { - this.subscriptions.set(message.id.toString(), message.result); - return; - } - - // Handle subscription messages - if (message.method === "eth_subscription") { - const { subscription, result } = message.params; - - switch(subscription) { - case this.subscriptions.get("1"): // Pending Transactions - await this.handlePendingTransaction(result); - break; - - case this.subscriptions.get("2"): // New Heads - await this.handleNewBlock(result); - break; - - case this.subscriptions.get("3"): // Logs - await this.handleLog(result); - break; - } - } - } - - private async handlePendingTransaction(tx: any) { - if (this.pendingTxs.has(tx.hash)) return; - - this.pendingTxs.add(tx.hash); - - // Check if this is a DEX interaction - if (this.config.DEX_ADDRESSES.includes(tx.to?.toLowerCase())) { - await this.analyzePotentialArbitrage(tx); - } - } - - private async handleNewBlock(block: any) { - // Clear pending transactions from previous block - this.pendingTxs.clear(); - - // Trigger price updates and arbitrage checks - await this.checkArbitrageOpportunities(block.number); - } - - private async handleLog(log: any) { - const isSwap = log.topics[0] === this.config.SWAP_TOPIC; - const isTransfer = log.topics[0] === this.config.TRANSFER_TOPIC; - - if (isSwap) { - await this.handleSwapEvent(log); - } else if (isTransfer) { - await this.handleTransferEvent(log); - } - } - - private async analyzePotentialArbitrage(tx: any) { - try { - const decodedInput = this.decodeTransactionInput(tx.input, tx); - - if (this.isSwapTransaction(decodedInput)) { - // Special handling for ETH swaps - if (decodedInput.swapType === 'ETH_FOR_TOKENS') { - console.log(`Detected ETH swap: ${tx.value} ETH for ${decodedInput.tokenOut}`); - - // Calculate potential arbitrage opportunity with actual ETH value - const opportunity = await this.calculateArbitrageOpportunity( - tx.to, - decodedInput.tokenIn, // WETH address - decodedInput.tokenOut, - decodedInput.amountIn // Actual ETH value from tx - ); - - if (opportunity.profit.gt(0)) { - await this.executeArbitrage(opportunity); - } - } else { - // Calculate potential arbitrage opportunity - const opportunity = await this.calculateArbitrageOpportunity( - tx.to, - decodedInput.tokenIn, - decodedInput.tokenOut, - decodedInput.amountIn - ); - - if (opportunity.profit.gt(0)) { - await this.executeArbitrage(opportunity); - } - } - } - } catch (error) { - console.error('Error analyzing potential arbitrage:', error); - } - } - - private async checkArbitrageOpportunities(blockNumber: string) { - // Implement your cross-DEX arbitrage checking logic here - console.log(`Checking arbitrage opportunities for block ${blockNumber}`); - } - - private decodeTransactionInput(input: string, tx?: any): any { - try { - // Common DEX function signatures - const SWAP_EXACT_TOKENS = '0x38ed1739'; - const SWAP_TOKENS_EXACT = '0x8803dbee'; - const SWAP_ETH_FOR_TOKENS = '0x7ff36ab5'; // swapExactETHForTokens - - // Remove '0x' prefix - const cleanInput = input.slice(2); - const functionSignature = cleanInput.slice(0, 8); - - switch (functionSignature) { - case SWAP_ETH_FOR_TOKENS: - // Format: swapExactETHForTokens(uint256 amountOutMin, address[] path, address to, uint256 deadline) - return { - isSwap: true, - swapType: 'ETH_FOR_TOKENS', - tokenIn: '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', // WETH address - tokenOut: '0x' + cleanInput.slice(34, 74), // First token in path - amountIn: BigNumber.from(tx?.value || '0'), // ETH value from tx - amountOutMin: BigNumber.from('0x' + cleanInput.slice(10, 74)), // minimum amount of tokens to receive - deadline: BigNumber.from('0x' + cleanInput.slice(138, 202)) // transaction deadline - }; - case SWAP_EXACT_TOKENS: - case SWAP_TOKENS_EXACT: - return { - isSwap: true, - tokenIn: '0x' + cleanInput.slice(34, 74), // First token address parameter - tokenOut: '0x' + cleanInput.slice(98, 138), // Second token address parameter - amountIn: BigNumber.from('0x' + cleanInput.slice(138, 178)) // Amount parameter - }; - default: - return { isSwap: false }; - } - } catch (error) { - console.error('Error decoding transaction input:', error); - return { isSwap: false }; - } - } - - private isSwapTransaction(decodedInput: any): boolean { - return decodedInput.isSwap === true; - } - - private async calculateArbitrageOpportunity(dex: string, tokenIn: string, tokenOut: string, amountIn: BigNumber) { - try { - // Get prices from different DEXes - const prices = await Promise.all(this.config.DEX_ADDRESSES.map(async (dexAddress) => { - if (dexAddress.toLowerCase() === dex.toLowerCase()) return null; - - // Get quote from other DEX - const quote = await this.arbitrage.getOutputAmount( - dexAddress, - tokenIn, - tokenOut, - amountIn - ); - - return { - dexAddress, - outputAmount: quote - }; - })); - - // Find best arbitrage opportunity - let bestProfit = BigNumber.from(0); - let bestRoute = null; - - for (const price of prices) { - if (!price) continue; - - // Calculate potential profit - const profit = price.outputAmount.sub(amountIn); - - if (profit.gt(bestProfit)) { - bestProfit = profit; - bestRoute = { - sourceRouter: dex, - targetRouter: price.dexAddress, - tokenIn, - tokenOut, - amountIn, - expectedOutput: price.outputAmount - }; - } - } - - return { - profit: bestProfit, - route: bestRoute - }; - } catch (error) { - console.error('Error calculating arbitrage opportunity:', error); - return { profit: BigNumber.from(0), route: null }; - } - } - - private async executeArbitrage(opportunity: any) { - if (!opportunity.route) return; - - try { - // Check if profit meets minimum threshold (e.g., covers gas) - const minProfitThreshold = BigNumber.from(process.env.MIN_PROFIT_THRESHOLD || '0'); - if (opportunity.profit.lt(minProfitThreshold)) { - console.log('Profit too low to execute arbitrage'); - return; - } - - // Execute the arbitrage transaction - const tx = await this.arbitrage.executeArbitrage( - opportunity.route.sourceRouter, - opportunity.route.targetRouter, - opportunity.route.tokenIn, - opportunity.route.tokenOut, - opportunity.route.amountIn - ); - - console.log(`Arbitrage executed: ${tx.hash}`); - - // Wait for confirmation - const receipt = await tx.wait(); - console.log(`Arbitrage confirmed in block ${receipt.blockNumber}`); - } catch (error) { - console.error('Error executing arbitrage:', error); - } - } - - private async handleSwapEvent(log: any) { - try { - const dexAddress = log.address; - // Update market state for this DEX - if (!this.markets[dexAddress]) { - this.markets[dexAddress] = []; - } - // Track latest swap - const marketInfo = { - lastUpdate: Date.now(), - lastSwap: log - }; - // Update first entry or add new one - if (this.markets[dexAddress].length > 0) { - Object.assign(this.markets[dexAddress][0], marketInfo); - } else { - this.markets[dexAddress].push(marketInfo as unknown as UniswapV2EthPair); - } - - // Extract relevant data from the swap event - const topics = log.topics; - const data = log.data; - - // Update local price state - await this.arbitrage.updatePrices(dexAddress); - - // Check for arbitrage opportunities - const tokens = await this.arbitrage.getTokenPair(dexAddress); - if (!tokens) return; - - const amount = BigNumber.from('1000000000000000000'); // 1 token as base amount - const opportunity = await this.calculateArbitrageOpportunity( - dexAddress, - tokens.token0, - tokens.token1, - amount - ); - - if (opportunity.profit.gt(0)) { - await this.executeArbitrage(opportunity); - } - } catch (error) { - console.error('Error handling swap event:', error); - } - } - - private async handleTransferEvent(log: any) { - try { - // Extract transfer details - const from = '0x' + log.topics[1].slice(26); - const to = '0x' + log.topics[2].slice(26); - const amount = BigNumber.from(log.data); - - // If transfer involves a DEX, update prices - if (this.config.DEX_ADDRESSES.includes(from.toLowerCase()) || - this.config.DEX_ADDRESSES.includes(to.toLowerCase())) { - const dexAddress = this.config.DEX_ADDRESSES.find( - addr => addr.toLowerCase() === from.toLowerCase() || - addr.toLowerCase() === to.toLowerCase() - ); - - if (dexAddress) { - await this.arbitrage.updatePrices(dexAddress); - } - } - } catch (error) { - console.error('Error handling transfer event:', error); - } - } - - private reconnect() { - setTimeout(() => { - console.log('Attempting to reconnect...'); - this.ws = new WebSocket(this.ws.url); - this.setupWebSocket(); - }, 5000); // Wait 5 seconds before reconnecting - } -} - -// Example usage -const config: SubscriptionConfig = { - DEX_ADDRESSES: [ - '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', // Uniswap V2 Router - '0xd9e1cE17f2641f24aE83637ab66a2cca9C378B9F' // Sushiswap Router - ], - TRANSFER_TOPIC: '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', - SWAP_TOPIC: '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822' -}; \ No newline at end of file diff --git a/packages/plugin-arbitrage/src/index.ts b/packages/plugin-arbitrage/src/index.ts deleted file mode 100644 index df1fea1ffe7c4..0000000000000 --- a/packages/plugin-arbitrage/src/index.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Plugin, Action, Provider, IAgentRuntime } from "@elizaos/core"; -import { executeArbitrageAction } from "./actions/arbitrageAction"; -import { marketProvider } from "./providers/marketProvider"; -import { ArbitrageService } from "./services/ArbitrageService"; -// Create a single instance of the service - -const arbitrageService = new ArbitrageService(); - -const arbitragePlugin: Plugin = { - name: "arbitrage-plugin", - description: "Automated arbitrage trading plugin", - actions: [executeArbitrageAction], - providers: [marketProvider], - services: [arbitrageService] -}; - -export default arbitragePlugin; \ No newline at end of file diff --git a/packages/plugin-arbitrage/src/providers/marketProvider.ts b/packages/plugin-arbitrage/src/providers/marketProvider.ts deleted file mode 100644 index 3e2f977dc001a..0000000000000 --- a/packages/plugin-arbitrage/src/providers/marketProvider.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Provider, IAgentRuntime, Memory, ServiceType } from "@elizaos/core"; -import { ArbitrageService } from "../services/ArbitrageService"; -import { ArbitrageState } from "../type"; - -export const marketProvider: Provider = { - get: async (runtime: IAgentRuntime, _message: Memory): Promise => { - const service = runtime.getService(ServiceType.ARBITRAGE) as ArbitrageService; - const markets = await service.evaluateMarkets(); - - return { - opportunities: markets.length, - totalProfit: "0", // Calculate total profit - lastUpdate: new Date().toISOString(), - markets: {} // This will be populated by the service - }; - } -}; \ No newline at end of file diff --git a/packages/plugin-arbitrage/src/services/ArbitrageService.ts b/packages/plugin-arbitrage/src/services/ArbitrageService.ts deleted file mode 100644 index 8377eb4fe5013..0000000000000 --- a/packages/plugin-arbitrage/src/services/ArbitrageService.ts +++ /dev/null @@ -1,165 +0,0 @@ -import { Service, ServiceType, IAgentRuntime } from "@elizaos/core"; -import { Arbitrage } from "../core/Arbitrage"; -import { WebSocket } from 'ws'; -import { CrossedMarketDetails, MarketsByToken } from "../type"; -import { WebSocketProvider } from "@ethersproject/providers"; -import { Wallet } from "@ethersproject/wallet"; -import { FlashbotsBundleProvider } from "@flashbots/ethers-provider-bundle"; -import { Contract } from "@ethersproject/contracts"; - -// Declare the ARBITRAGE service type -declare module "@elizaos/core" { - interface ServiceTypeMap { - arbitrage: Service & ArbitrageService; - } - - export enum ServiceType { - ARBITRAGE = "arbitrage" - } -} - -export class ArbitrageService extends Service { - private arbitrage: Arbitrage | null = null; - private wsConnection: WebSocket | null = null; - private marketsByToken = {}; - private currentBlock = 0; - private runtime!: IAgentRuntime; - - static get serviceType(): ServiceType { - return ServiceType.ARBITRAGE; - } - - get serviceType(): ServiceType { - return ServiceType.ARBITRAGE; - } - - // Remove unnecessary constructor - // constructor() { - // super(); - // } - - async initialize(runtime: IAgentRuntime): Promise { - this.runtime = runtime; - - // Get WebSocket URL with multiple fallback options - let wsUrl = runtime.getSetting("ARBITRAGE_ETHEREUM_WS_URL") - - let rpcUrl = runtime.getSetting("ARBITRAGE_EVM_PROVIDER_URL") - - - // Debug logging - console.log('ArbitrageService initialize - URLs:', { - wsUrl, - rpcUrl - }); - - if (!wsUrl && !rpcUrl) { - throw new Error("Missing both ARBITRAGE_ETHEREUM_WS_URL and ARBITRAGE_EVM_PROVIDER_URL envs"); - } - - // If we only have RPC URL, derive WS URL - if (!wsUrl && rpcUrl) { - wsUrl = rpcUrl.replace('https://', 'wss://'); - console.log('Using derived WebSocket URL:', wsUrl); - } - - if (!wsUrl) { - throw new Error("No WebSocket URL available after all fallbacks"); - } - - // Initialize wallet and providers - const walletKey = runtime.getSetting("ARBITRAGE_EVM_PRIVATE_KEY") - if (!walletKey) throw new Error("Missing ARBITRAGE_EVM_PRIVATE_KEY env"); - - // Initialize provider - console.log('Initializing WebSocketProvider with URL:', wsUrl); - const provider = new WebSocketProvider(wsUrl as string); - const wallet = new Wallet(walletKey, provider); - - // Initialize Flashbots provider - const flashbotsKey = runtime.getSetting("FLASHBOTS_RELAY_SIGNING_KEY") - if (!flashbotsKey) throw new Error("Missing FLASHBOTS_RELAY_SIGNING_KEY env"); - - const flashbotsProvider = await FlashbotsBundleProvider.create( - provider, - wallet, - flashbotsKey - ); - - // Initialize bundle executor contract - const bundleExecutorAddress = runtime.getSetting("BUNDLE_EXECUTOR_ADDRESS"); - if (!bundleExecutorAddress) throw new Error("Missing BUNDLE_EXECUTOR_ADDRESS env"); - - // Create Contract instance - const bundleExecutorContract = new Contract( - bundleExecutorAddress, - [ - 'function execute(bytes[] calldata calls) external payable', - 'function executeWithToken(bytes[] calldata calls, address tokenAddress, uint256 tokenAmount) external payable' - ], - wallet - ); - - // Initialize Arbitrage instance with Contract instance - this.arbitrage = new Arbitrage( - wallet, - flashbotsProvider, - bundleExecutorContract - ); - - // Setup WebSocket connection - console.log('Setting up WebSocket connection to:', wsUrl); - this.wsConnection = new WebSocket(wsUrl); - this.setupWebSocketHandlers(); - } - - private setupWebSocketHandlers(): void { - if (!this.wsConnection) return; - - this.wsConnection.on('open', () => { - console.log('WebSocket connection established'); - // Subscribe to new blocks - this.wsConnection?.send(JSON.stringify({ - jsonrpc: '2.0', - id: 1, - method: 'eth_subscribe', - params: ['newHeads'] - })); - }); - - this.wsConnection.on('message', async (data: string) => { - const message = JSON.parse(data); - if (message.params?.result?.number) { - this.currentBlock = Number.parseInt(message.params.result.number, 16); - } - }); - - this.wsConnection.on('error', (error) => { - console.error('WebSocket error:', error); - }); - - this.wsConnection.on('close', () => { - console.log('WebSocket connection closed'); - // Attempt to reconnect after a delay - setTimeout(() => this.initialize(this.runtime), 5000); - }); - } - - async evaluateMarkets(): Promise { - if (!this.arbitrage) throw new Error("ArbitrageService not initialized"); - return this.arbitrage.evaluateMarkets(this.marketsByToken); - } - - async executeArbitrage(markets: CrossedMarketDetails[]): Promise { - if (!this.arbitrage) throw new Error("ArbitrageService not initialized"); - const maxAttempts = 10; - return this.arbitrage.takeCrossedMarkets(markets, this.currentBlock, maxAttempts); - } - - async stop(): Promise { - if (this.wsConnection) { - this.wsConnection.close(); - this.wsConnection = null; - } - } -} \ No newline at end of file diff --git a/packages/plugin-arbitrage/src/type.ts b/packages/plugin-arbitrage/src/type.ts deleted file mode 100644 index 737d54972f6a9..0000000000000 --- a/packages/plugin-arbitrage/src/type.ts +++ /dev/null @@ -1,123 +0,0 @@ -import { BigNumber } from "@ethersproject/bignumber"; -import { IAgentRuntime, Memory, Provider } from "@elizaos/core"; - -// Core Arbitrage Types -export interface CrossedMarketDetails { - marketPairs: MarketPair[]; - profit: BigNumber; - volume: BigNumber; - tokenAddress: string; - buyFromMarket: EthMarket; - sellToMarket: EthMarket; -} - -export interface MarketPair { - buyFromMarket: EthMarket; - sellToMarket: EthMarket; -} - -// Market Management Types -export type MarketsByToken = { - [tokenAddress: string]: Array; -} - -export interface MarketType { - marketAddress: string; - getReserves(tokenAddress: string): Promise; - getPriceImpact(tokenAddress: string, tradeSize: BigNumber): Promise; - getTradingFee(tokenAddress: string): Promise; -} - -// Trading Operation Types -export interface BuyCalls { - targets: string[]; - data: string[]; -} - -export interface EthMarket extends MarketType { - tokenAddress: string; - marketAddress: string; - tokens: string[]; - protocol: any; - getBalance(address: string): Promise; - sellTokensToNextMarket(WETH_ADDRESS: string, volume: BigNumber, sellToMarket: EthMarket): Promise; - getTokensOut(WETH_ADDRESS: string, tokenAddress: string, volume: BigNumber): Promise; - sellTokens(tokenAddress: string, amount: BigNumber, address: string): Promise; - receiveDirectly(tokenAddress: string): boolean; -} - -// Eliza Plugin Integration Types -export interface ArbitrageAction { - name: string; - handler: (runtime: IAgentRuntime, message: Memory) => Promise; - validate: (runtime: IAgentRuntime, message: Memory) => Promise; -} - -export interface ArbitrageProvider extends Provider { - get: (runtime: IAgentRuntime, message: Memory) => Promise; -} - -export interface ArbitrageState { - opportunities: number; - totalProfit: string; - lastUpdate: string; - markets: MarketsByToken; -} - -// WebSocket Types -export interface SubscriptionConfig { - DEX_ADDRESSES: string[]; - TRANSFER_TOPIC: string; - SWAP_TOPIC: string; -} - -export interface ArbitrageOpportunity { - profit: BigNumber; - route: { - sourceRouter: string; - targetRouter: string; - tokenIn: string; - tokenOut: string; - amountIn: BigNumber; - expectedOutput: BigNumber; - } | null; -} - -// Runtime Configuration Types -export interface ArbitrageConfig { - minProfitThreshold: BigNumber; - maxTradeSize: BigNumber; - gasLimit: number; - minerRewardPercentage: number; -} - -// Error Types -export interface ArbitrageError extends Error { - type: 'EXECUTION' | 'VALIDATION' | 'CONFIGURATION'; - details?: any; -} - -// Event Types for WebSocket -export interface SwapEvent { - address: string; - topics: string[]; - data: string; - transactionHash: string; - blockNumber: number; -} - -export interface TransferEvent { - from: string; - to: string; - value: BigNumber; - tokenAddress: string; -} - -// Add this to your existing types -export interface ExtendedAgentRuntime extends IAgentRuntime { - wallet: any; // Replace 'any' with proper wallet type - flashbotsProvider: any; // Replace 'any' with proper provider type - bundleExecutorContract: any; - marketsByToken: MarketsByToken; - currentBlock: number; -} \ No newline at end of file diff --git a/packages/plugin-arbitrage/tsconfig.json b/packages/plugin-arbitrage/tsconfig.json deleted file mode 100644 index 357a2159f21f2..0000000000000 --- a/packages/plugin-arbitrage/tsconfig.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src", - "target": "es2020", - "module": "commonjs", - "noEmit": false, - "declaration": true, - "strict": true, - "esModuleInterop": true, - "skipLibCheck": true, - "forceConsistentCasingInFileNames": true, - "allowImportingTsExtensions": false, - "lib": [ - "es2020", - "dom" - ], - "moduleResolution": "node" - }, - "include": [ - "src/**/*" - ], - "exclude": [ - "node_modules", - "dist" - ] -} \ No newline at end of file diff --git a/packages/plugin-arbitrage/tsup.config.ts b/packages/plugin-arbitrage/tsup.config.ts deleted file mode 100644 index a1149adf967b6..0000000000000 --- a/packages/plugin-arbitrage/tsup.config.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { defineConfig } from 'tsup'; - -export default defineConfig({ - entry: ['src/index.ts'], - format: ['esm'], - dts: true, - splitting: false, - sourcemap: true, - clean: true, - treeshake: true, - target: 'node18' -}); \ No newline at end of file diff --git a/packages/plugin-arbitrage/vitest.config.ts b/packages/plugin-arbitrage/vitest.config.ts deleted file mode 100644 index 45670e50fc15d..0000000000000 --- a/packages/plugin-arbitrage/vitest.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - setupFiles: ['./__tests__/setup.ts'], - include: ['__tests__/**/*.test.ts'], - coverage: { - reporter: ['text', 'json', 'html'], - include: ['src/**/*.ts'], - exclude: ['**/*.d.ts', '**/*.test.ts'] - } - }, - resolve: { - alias: { - '@elizaos/core': '/packages/core/src' - } - } -}); diff --git a/packages/plugin-arthera/README.md b/packages/plugin-arthera/README.md deleted file mode 100644 index b634635d46986..0000000000000 --- a/packages/plugin-arthera/README.md +++ /dev/null @@ -1,68 +0,0 @@ -# `@elizaos/plugin-arthera` - -This plugin provides actions and providers for interacting with Arthera. - ---- - -## Configuration - -### Default Setup - -By default, **Arthera** is enabled. To use it, simply add your private key to the `.env` file: - -```env -ARTHERA_PRIVATE_KEY=your-private-key-here -``` - -### Custom RPC URLs - -By default, the RPC URL is inferred from the `viem/chains` config. To use a custom RPC URL for a specific chain, add the following to your `.env` file: - -```env -ETHEREUM_PROVIDER_=https://your-custom-rpc-url -``` - -**Example usage:** - -```env -ETHEREUM_PROVIDER_ARTHERA=https://rpc.arthera.net -``` - -## Provider - -The **Wallet Provider** initializes with Arthera. It: - -- Provides the **context** of the currently connected address and its balance. -- Creates **Public** and **Wallet clients** to interact with the supported chain. - ---- - -## Actions - -### Transfer - -Transfer tokens from one address to another on Arthera. Just specify the: - -- **Amount** -- **Chain** -- **Recipient Address** - -**Example usage:** - -```bash -Transfer 1 AA to 0xRecipient on arthera. -``` - ---- - -## Contribution - -The plugin contains tests. Whether you're using **TDD** or not, please make sure to run the tests before submitting a PR. - -### Running Tests - -Navigate to the `plugin-arthera` directory and run: - -```bash -pnpm test -``` diff --git a/packages/plugin-arthera/biome.json b/packages/plugin-arthera/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-arthera/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-arthera/package.json b/packages/plugin-arthera/package.json deleted file mode 100644 index 7287d2d37b3c6..0000000000000 --- a/packages/plugin-arthera/package.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "name": "@elizaos/plugin-arthera", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "tsup": "8.3.5", - "viem": "2.21.58" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "devDependencies": { - "whatwg-url": "7.1.0", - "@biomejs/biome": "1.9.4" - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-arthera/src/actions/transfer.ts b/packages/plugin-arthera/src/actions/transfer.ts deleted file mode 100644 index 57b86c0e3d541..0000000000000 --- a/packages/plugin-arthera/src/actions/transfer.ts +++ /dev/null @@ -1,173 +0,0 @@ -import { type ByteArray, formatEther, parseEther, type Hex } from "viem"; -import { - composeContext, - generateObjectDeprecated, - type HandlerCallback, - ModelClass, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; - -import { initWalletProvider, type WalletProvider } from "../providers/wallet"; -import type { Transaction, TransferParams } from "../types"; -import { transferTemplate } from "../templates"; - -export { transferTemplate }; - -// Exported for tests -export class TransferAction { - constructor(private walletProvider: WalletProvider) {} - - async transfer(params: TransferParams): Promise { - const walletClient = this.walletProvider.getWalletClient( - params.fromChain - ); - - console.log( - `Transferring: ${params.amount} tokens from (${walletClient.account.address} to (${params.toAddress} on ${params.fromChain})` - ); - - if (!params.data) { - params.data = "0x"; - } - - try { - const hash = await walletClient.sendTransaction({ - account: walletClient.account, - to: params.toAddress, - value: parseEther(params.amount), - data: params.data as Hex, - kzg: { - blobToKzgCommitment: (_: ByteArray): ByteArray => { - throw new Error("Function not implemented."); - }, - computeBlobKzgProof: ( - _blob: ByteArray, - _commitment: ByteArray - ): ByteArray => { - throw new Error("Function not implemented."); - }, - }, - chain: undefined, - }); - - return { - hash, - from: walletClient.account.address, - to: params.toAddress, - value: parseEther(params.amount), - data: params.data as Hex, - }; - } catch (error) { - throw new Error(`Transfer failed: ${error.message}`); - } - } -} - -const buildTransferDetails = async ( - state: State, - runtime: IAgentRuntime, - wp: WalletProvider -): Promise => { - const context = composeContext({ - state, - template: transferTemplate, - }); - - const chains = Object.keys(wp.chains); - - const contextWithChains = context.replace( - "SUPPORTED_CHAINS", - chains.map((item) => `"${item}"`).join("|") - ); - - const transferDetails = (await generateObjectDeprecated({ - runtime, - context: contextWithChains, - modelClass: ModelClass.SMALL, - })) as TransferParams; - - const existingChain = wp.chains[transferDetails.fromChain]; - - if (!existingChain) { - throw new Error( - `The chain ${transferDetails.fromChain} not configured yet. Add the chain or choose one from configured: ${chains.toString()}` - ); - } - - return transferDetails; -}; - -export const transferAction = { - name: "transfer", - description: "Transfer tokens between addresses on the same chain", - handler: async ( - runtime: IAgentRuntime, - _message: Memory, - state: State, - _options: Record, - callback?: HandlerCallback - ) => { - console.log("Transfer action handler called"); - const walletProvider = initWalletProvider(runtime); - const action = new TransferAction(walletProvider); - - // Compose transfer context - const paramOptions = await buildTransferDetails( - state, - runtime, - walletProvider - ); - - try { - const transferResp = await action.transfer(paramOptions); - if (callback) { - callback({ - text: `Successfully transferred ${paramOptions.amount} tokens to ${paramOptions.toAddress}\nTransaction Hash: ${transferResp.hash}`, - content: { - success: true, - hash: transferResp.hash, - amount: formatEther(transferResp.value), - recipient: transferResp.to, - chain: paramOptions.fromChain, - }, - }); - } - return true; - } catch (error) { - console.error("Error during token transfer:", error); - if (callback) { - callback({ - text: `Error transferring tokens: ${error.message}`, - content: { error: error.message }, - }); - } - return false; - } - }, - template: transferTemplate, - validate: async (runtime: IAgentRuntime) => { - const privateKey = runtime.getSetting("ARTHERA_PRIVATE_KEY"); - return typeof privateKey === "string" && privateKey.startsWith("0x"); - }, - examples: [ - [ - { - user: "assistant", - content: { - text: "I'll help you transfer 1 AA to 0x742d35Cc6634C0532925a3b844Bc454e4438f44e", - action: "SEND_TOKENS", - }, - }, - { - user: "user", - content: { - text: "Transfer 1 AA to 0x742d35Cc6634C0532925a3b844Bc454e4438f44e", - action: "SEND_TOKENS", - }, - }, - ], - ], - similes: ["SEND_TOKENS", "TOKEN_TRANSFER", "MOVE_TOKENS"], -}; diff --git a/packages/plugin-arthera/src/index.ts b/packages/plugin-arthera/src/index.ts deleted file mode 100644 index 3fe8d585945ee..0000000000000 --- a/packages/plugin-arthera/src/index.ts +++ /dev/null @@ -1,18 +0,0 @@ -export * from "./actions/transfer"; -export * from "./providers/wallet"; -export * from "./types"; - -import type { Plugin } from "@elizaos/core"; -import { transferAction } from "./actions/transfer"; -import { artheraWalletProvider } from "./providers/wallet"; - -export const artheraPlugin: Plugin = { - name: "arthera", - description: "Arthera blockchain integration plugin", - providers: [artheraWalletProvider], - evaluators: [], - services: [], - actions: [transferAction], -}; - -export default artheraPlugin; diff --git a/packages/plugin-arthera/src/providers/wallet.ts b/packages/plugin-arthera/src/providers/wallet.ts deleted file mode 100644 index e5daa7924aff9..0000000000000 --- a/packages/plugin-arthera/src/providers/wallet.ts +++ /dev/null @@ -1,203 +0,0 @@ -import { - createPublicClient, - createWalletClient, - formatUnits, - http, -} from "viem"; -import { privateKeyToAccount } from "viem/accounts"; -import type { IAgentRuntime, Provider, Memory, State } from "@elizaos/core"; -import type { - Address, - WalletClient, - PublicClient, - Chain, - HttpTransport, - Account, - PrivateKeyAccount, -} from "viem"; -import * as viemChains from "viem/chains"; - -import type { SupportedChain } from "../types"; - -export class WalletProvider { - private currentChain: SupportedChain = "arthera"; - chains: Record = { arthera: viemChains.arthera }; - account: PrivateKeyAccount; - - constructor(privateKey: `0x${string}`, chains?: Record) { - this.setAccount(privateKey); - this.setChains(chains); - - if (chains && Object.keys(chains).length > 0) { - this.setCurrentChain(Object.keys(chains)[0] as SupportedChain); - } - } - - getAddress(): Address { - return this.account.address; - } - - getCurrentChain(): Chain { - return this.chains[this.currentChain]; - } - - getPublicClient( - chainName: SupportedChain - ): PublicClient { - const transport = this.createHttpTransport(chainName); - - const publicClient = createPublicClient({ - chain: this.chains[chainName], - transport, - }); - return publicClient; - } - - getWalletClient(chainName: SupportedChain): WalletClient { - const transport = this.createHttpTransport(chainName); - - const walletClient = createWalletClient({ - chain: this.chains[chainName], - transport, - account: this.account, - }); - - return walletClient; - } - - getChainConfigs(chainName: SupportedChain): Chain { - const chain = viemChains[chainName]; - - if (!chain?.id) { - throw new Error("Invalid chain name"); - } - - return chain; - } - - async getWalletBalance(): Promise { - try { - const client = this.getPublicClient(this.currentChain); - const balance = await client.getBalance({ - address: this.account.address, - }); - return formatUnits(balance, 18); - } catch (error) { - console.error("Error getting wallet balance:", error); - return null; - } - } - - async getWalletBalanceForChain( - chainName: SupportedChain - ): Promise { - try { - const client = this.getPublicClient(chainName); - const balance = await client.getBalance({ - address: this.account.address, - }); - return formatUnits(balance, 18); - } catch (error) { - console.error("Error getting wallet balance:", error); - return null; - } - } - - private setAccount = (pk: `0x${string}`) => { - this.account = privateKeyToAccount(pk); - }; - - private setChains = (chains?: Record) => { - if (!chains) { - return; - } - for (const chain of Object.keys(chains)) { - this.chains[chain] = chains[chain]; - } - }; - - private setCurrentChain = (chain: SupportedChain) => { - this.currentChain = chain; - }; - - private createHttpTransport = (chainName: SupportedChain) => { - const chain = this.chains[chainName]; - - if (chain.rpcUrls.custom) { - return http(chain.rpcUrls.custom.http[0]); - } - return http(chain.rpcUrls.default.http[0]); - }; - - static genChainFromName( - chainName: string, - customRpcUrl?: string | null - ): Chain { - const baseChain = viemChains[chainName]; - - if (!baseChain?.id) { - throw new Error("Invalid chain name"); - } - - const viemChain: Chain = customRpcUrl - ? { - ...baseChain, - rpcUrls: { - ...baseChain.rpcUrls, - custom: { - http: [customRpcUrl], - }, - }, - } - : baseChain; - - return viemChain; - } -} - -const genChainsFromRuntime = ( - runtime: IAgentRuntime -): Record => { - const chainNames = ["arthera"]; - const chains = {}; - - for (const chainName of chainNames) { - const rpcUrl = runtime.getSetting( - `ETHEREUM_PROVIDER_${chainName.toUpperCase()}` - ); - const chain = WalletProvider.genChainFromName(chainName, rpcUrl); - chains[chainName] = chain; - } - - return chains; -}; - -export const initWalletProvider = (runtime: IAgentRuntime) => { - const privateKey = runtime.getSetting("ARTHERA_PRIVATE_KEY"); - if (!privateKey) { - throw new Error("ARTHERA_PRIVATE_KEY is missing"); - } - - const chains = genChainsFromRuntime(runtime); - - return new WalletProvider(privateKey as `0x${string}`, chains); -}; - -export const artheraWalletProvider: Provider = { - async get( - runtime: IAgentRuntime, - _message: Memory, - _state?: State - ): Promise { - try { - const walletProvider = initWalletProvider(runtime); - const address = walletProvider.getAddress(); - const balance = await walletProvider.getWalletBalance(); - const chain = walletProvider.getCurrentChain(); - return `Arthera Wallet Address: ${address}\nBalance: ${balance} ${chain.nativeCurrency.symbol}\nChain ID: ${chain.id}, Name: ${chain.name}`; - } catch (error) { - console.error("Error in Arthera wallet provider:", error); - return null; - } - }, -}; diff --git a/packages/plugin-arthera/src/templates/index.ts b/packages/plugin-arthera/src/templates/index.ts deleted file mode 100644 index d8206074bcedf..0000000000000 --- a/packages/plugin-arthera/src/templates/index.ts +++ /dev/null @@ -1,23 +0,0 @@ -export const transferTemplate = `Given the recent messages and wallet information below: - -{{recentMessages}} - -{{walletInfo}} - -Extract the following information about the requested transfer: -- Chain to execute on: Must be one of ["arthera", "base", ...] (like in viem/chains) -- Amount to transfer: Must be a string representing the amount in AA (only number without coin symbol, e.g., "0.1") -- Recipient address: Must be a valid Arthera address starting with "0x" -- Token symbol or address (if not native token): Optional, leave as null for AA transfers - -Respond with a JSON markdown block containing only the extracted values. All fields except 'token' are required: - -\`\`\`json -{ - "fromChain": SUPPORTED_CHAINS, - "amount": string, - "toAddress": string, - "token": string | null -} -\`\`\` -`; diff --git a/packages/plugin-arthera/src/tests/transfer.test.ts b/packages/plugin-arthera/src/tests/transfer.test.ts deleted file mode 100644 index efb9935e989c9..0000000000000 --- a/packages/plugin-arthera/src/tests/transfer.test.ts +++ /dev/null @@ -1,83 +0,0 @@ -import { describe, it, expect, beforeEach } from "vitest"; -import { generatePrivateKey } from "viem/accounts"; -import type { Chain } from "viem"; -import { getEnvVariable } from "@elizaos/core"; - -import { TransferAction } from "../actions/transfer"; -import { WalletProvider } from "../providers/wallet"; - -describe("Transfer Action", () => { - let wp: WalletProvider; - let wp1: WalletProvider; - - beforeEach(async () => { - const pk = generatePrivateKey(); - const pk1 = getEnvVariable("ARTHERA_PRIVATE_KEY") as `0x${string}`; - const customChains = prepareChains(); - wp = new WalletProvider(pk, customChains); - if (pk1) { - wp1 = new WalletProvider(pk1, customChains); - } - }); - describe("Constructor", () => { - it("should initialize with wallet provider", () => { - const ta = new TransferAction(wp); - - expect(ta).toBeDefined(); - }); - }); - describe("Transfer", () => { - let ta: TransferAction; - let ta1: TransferAction; - let receiverAddress: `0x${string}`; - - beforeEach(() => { - ta = new TransferAction(wp); - if (wp1) { - ta1 = new TransferAction(wp1); - receiverAddress = wp1.getAddress(); - } - else { - receiverAddress = wp.getAddress(); - } - }); - - it("throws if not enough gas", async () => { - await expect( - ta.transfer({ - fromChain: "arthera", - toAddress: receiverAddress, - amount: "1", - }) - ).rejects.toThrow( - "Transfer failed: The total cost (gas * gas fee + value) of executing this transaction exceeds the balance of the account." - ); - }); - - if (wp1) { - it("transfers tokens", async () => { - const tx = await ta1.transfer({ - fromChain: "arthera", - toAddress: receiverAddress, - amount: "0.001", - }); - - expect(tx).toBeDefined(); - expect(tx.from).toEqual(wp1.getAddress()); - expect(tx.to).toEqual(receiverAddress); - expect(tx.value).toEqual(1000000000000000n); - }); - } - }); -}); - -const prepareChains = () => { - const customChains: Record = {}; - const chainNames = ["arthera"]; - chainNames.forEach( - (chain) => - (customChains[chain] = WalletProvider.genChainFromName(chain)) - ); - - return customChains; -}; diff --git a/packages/plugin-arthera/src/tests/wallet.test.ts b/packages/plugin-arthera/src/tests/wallet.test.ts deleted file mode 100644 index dec22d57051ff..0000000000000 --- a/packages/plugin-arthera/src/tests/wallet.test.ts +++ /dev/null @@ -1,175 +0,0 @@ -import { describe, it, expect, beforeAll, beforeEach } from "vitest"; -import { generatePrivateKey, privateKeyToAccount } from "viem/accounts"; -import { arthera, type Chain } from "viem/chains"; - -import { WalletProvider } from "../providers/wallet"; - -const customRpcUrls = { - arthera: "custom-rpc.arthera.io", -}; - -describe("Wallet provider", () => { - let walletProvider: WalletProvider; - let pk: `0x${string}`; - const customChains: Record = {}; - - beforeAll(() => { - pk = generatePrivateKey(); - - const chainNames = ["arthera"]; - chainNames.forEach( - (chain) => - (customChains[chain] = WalletProvider.genChainFromName(chain)) - ); - }); - - describe("Constructor", () => { - it("sets address", () => { - const account = privateKeyToAccount(pk); - const expectedAddress = account.address; - - walletProvider = new WalletProvider(pk); - - expect(walletProvider.getAddress()).toEqual(expectedAddress); - }); - it("sets default chain to arthera", () => { - walletProvider = new WalletProvider(pk); - - expect(walletProvider.chains.arthera.id).toEqual(arthera.id); - expect(walletProvider.getCurrentChain().id).toEqual(arthera.id); - }); - it("sets custom chains", () => { - walletProvider = new WalletProvider(pk, customChains); - - expect(walletProvider.chains.arthera.id).toEqual(arthera.id); - }); - it("sets the first provided custom chain as current chain", () => { - walletProvider = new WalletProvider(pk, customChains); - - expect(walletProvider.getCurrentChain().id).toEqual(arthera.id); - }); - }); - describe("Clients", () => { - beforeEach(() => { - walletProvider = new WalletProvider(pk); - }); - it("generates public client", () => { - const client = walletProvider.getPublicClient("arthera"); - expect(client.chain.id).toEqual(arthera.id); - expect(client.transport.url).toEqual( - arthera.rpcUrls.default.http[0] - ); - }); - it("generates public client with custom rpcurl", () => { - const chain = WalletProvider.genChainFromName( - "arthera", - customRpcUrls.arthera - ); - const wp = new WalletProvider(pk, { ["arthera"]: chain }); - - const client = wp.getPublicClient("arthera"); - expect(client.chain.id).toEqual(arthera.id); - expect(client.chain.rpcUrls.default.http[0]).toEqual( - arthera.rpcUrls.default.http[0] - ); - expect(client.chain.rpcUrls.custom.http[0]).toEqual( - customRpcUrls.arthera - ); - expect(client.transport.url).toEqual(customRpcUrls.arthera); - }); - it("generates wallet client", () => { - const account = privateKeyToAccount(pk); - const expectedAddress = account.address; - - const client = walletProvider.getWalletClient("arthera"); - - expect(client.account.address).toEqual(expectedAddress); - expect(client.transport.url).toEqual( - arthera.rpcUrls.default.http[0] - ); - }); - it("generates wallet client with custom rpcurl", () => { - const account = privateKeyToAccount(pk); - const expectedAddress = account.address; - const chain = WalletProvider.genChainFromName( - "arthera", - customRpcUrls.arthera - ); - const wp = new WalletProvider(pk, { ["arthera"]: chain }); - - const client = wp.getWalletClient("arthera"); - - expect(client.account.address).toEqual(expectedAddress); - expect(client.chain.id).toEqual(arthera.id); - expect(client.chain.rpcUrls.default.http[0]).toEqual( - arthera.rpcUrls.default.http[0] - ); - expect(client.chain.rpcUrls.custom.http[0]).toEqual( - customRpcUrls.arthera - ); - expect(client.transport.url).toEqual(customRpcUrls.arthera); - }); - }); - describe("Balance", () => { - beforeEach(() => { - walletProvider = new WalletProvider(pk, customChains); - }); - it("should fetch balance", async () => { - const bal = await walletProvider.getWalletBalance(); - - expect(bal).toEqual("0"); - }); - it("should fetch balance for a specific added chain", async () => { - const bal = await walletProvider.getWalletBalanceForChain("arthera"); - - expect(bal).toEqual("0"); - }); - it("should return null if chain is not added", async () => { - const bal = await walletProvider.getWalletBalanceForChain("base"); - expect(bal).toBeNull(); - }); - }); - describe("Chain", () => { - beforeEach(() => { - walletProvider = new WalletProvider(pk, customChains); - }); - it("generates chains from chain name", () => { - const chainName = "arthera"; - const chain: Chain = WalletProvider.genChainFromName(chainName); - - expect(chain.rpcUrls.default.http[0]).toEqual( - arthera.rpcUrls.default.http[0] - ); - }); - it("generates chains from chain name with custom rpc url", () => { - const chainName = "arthera"; - const customRpcUrl = customRpcUrls.arthera; - const chain: Chain = WalletProvider.genChainFromName( - chainName, - customRpcUrl - ); - - expect(chain.rpcUrls.default.http[0]).toEqual( - arthera.rpcUrls.default.http[0] - ); - expect(chain.rpcUrls.custom.http[0]).toEqual(customRpcUrl); - }); - it("gets chain configs", () => { - const chain = walletProvider.getChainConfigs("arthera"); - - expect(chain.id).toEqual(arthera.id); - }); - it("throws if unsupported chain name", () => { - // intentionally set unsupported chain, ts will complain - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - expect(() => WalletProvider.genChainFromName("ethereum")).toThrow(); - }); - it("throws if invalid chain name", () => { - // intentionally set incorrect chain, ts will complain - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - expect(() => WalletProvider.genChainFromName("eth")).toThrow(); - }); - }); -}); diff --git a/packages/plugin-arthera/src/types/index.ts b/packages/plugin-arthera/src/types/index.ts deleted file mode 100644 index f772c842a6282..0000000000000 --- a/packages/plugin-arthera/src/types/index.ts +++ /dev/null @@ -1,73 +0,0 @@ -import type { - Account, - Address, - Chain, - Hash, - HttpTransport, - PublicClient, - WalletClient, -} from "viem"; -import * as viemChains from "viem/chains"; - -const _SupportedChainList = Object.keys(viemChains) as Array< - keyof typeof viemChains ->; -export type SupportedChain = (typeof _SupportedChainList)[number]; - -// Transaction types -export interface Transaction { - hash: Hash; - from: Address; - to: Address; - value: bigint; - data?: `0x${string}`; - chainId?: number; -} - -// Chain configuration -export interface ChainMetadata { - chainId: number; - name: string; - chain: Chain; - rpcUrl: string; - nativeCurrency: { - name: string; - symbol: string; - decimals: number; - }; - blockExplorerUrl: string; -} - -export interface ChainConfig { - chain: Chain; - publicClient: PublicClient; - walletClient?: WalletClient; -} - -// Action parameters -export interface TransferParams { - fromChain: SupportedChain; - toAddress: Address; - amount: string; - data?: `0x${string}`; -} - -// Plugin configuration -export interface ArtheraPluginConfig { - rpcUrl?: { - arthera?: string; - }; - secrets?: { - ARTHERA_PRIVATE_KEY: string; - }; - testMode?: boolean; - multicall?: { - batchSize?: number; - wait?: number; - }; -} - -export interface ProviderError extends Error { - code?: number; - data?: unknown; -} diff --git a/packages/plugin-arthera/tsconfig.json b/packages/plugin-arthera/tsconfig.json deleted file mode 100644 index b6ce190d98941..0000000000000 --- a/packages/plugin-arthera/tsconfig.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "./src", - "typeRoots": [ - "./node_modules/@types", - "./src/types" - ], - "declaration": true - }, - "include": [ - "src" - ] -} diff --git a/packages/plugin-arthera/tsup.config.ts b/packages/plugin-arthera/tsup.config.ts deleted file mode 100644 index 04abb28556202..0000000000000 --- a/packages/plugin-arthera/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "viem", - ], -}); diff --git a/packages/plugin-asterai/.npmignore b/packages/plugin-asterai/.npmignore deleted file mode 100644 index 0468b4b3648ec..0000000000000 --- a/packages/plugin-asterai/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts diff --git a/packages/plugin-asterai/README.md b/packages/plugin-asterai/README.md deleted file mode 100644 index c84bf32f3a5b2..0000000000000 --- a/packages/plugin-asterai/README.md +++ /dev/null @@ -1,80 +0,0 @@ -# @elizaos/plugin-asterai - -A plugin for interacting with [asterai](https://asterai.io) plugins and agents. - -## Description - -This plugin provides functionality to allow Eliza agents to interact with -asterai plugins and agents. - -This will expand your Eliza character's utility by giving it access to all -the functionality of asterai's ecosystem of marketplace and private plugins -and agents. - -## Installation - -```bash -pnpm install @elizaos/plugin-asterai -``` - -## Configuration - -The plugin requires the following environment variables to be set: - -```typescript -ASTERAI_AGENT_ID= -ASTERAI_PUBLIC_QUERY_KEY= -``` - -## Usage - -### Basic Integration - -```typescript -import { asteraiPlugin } from '@elizaos/plugin-asterai'; -``` - -### Example Usage - -The plugin supports natural language for interacting with the asterai agent -through your Eliza character. - -For example, if your asterai agent can fetch weather data: - -```typescript -"Hey Eliza, how's the weather in LA?" -``` - -Eliza will then query the asterai agent to fetch the information. - -## Development Guide - -### Setting Up Development Environment - -1. Clone the repository -2. Install dependencies: - -```bash -pnpm install -``` - -3. Build the plugin: - -```bash -pnpm run build -``` - -4. Run tests: - -```bash -pnpm run test -``` - -## Contributing - -Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information. - -## License - -This plugin is part of the Eliza project. See the main project repository for license information. - diff --git a/packages/plugin-asterai/__tests__/actions/query.test.ts b/packages/plugin-asterai/__tests__/actions/query.test.ts deleted file mode 100644 index bd9745cfa8f8f..0000000000000 --- a/packages/plugin-asterai/__tests__/actions/query.test.ts +++ /dev/null @@ -1,110 +0,0 @@ -const mockQuery = vi.fn(); - -vi.mock('@asterai/client', () => ({ - AsteraiClient: vi.fn(() => ({ - query: mockQuery - })) -})); - -vi.mock('../../src/index', () => ({ - getInitAsteraiClient: vi.fn(() => ({ - query: mockQuery - })) -})); - -import { vi, describe, it, expect, beforeEach } from 'vitest'; -import type { IAgentRuntime, Memory, State } from '@elizaos/core'; -import { queryAction } from '../../src/actions/query'; - -describe('queryAction', () => { - const mockRuntime: IAgentRuntime = { - getSetting: vi.fn(), - } as unknown as IAgentRuntime; - - const mockMessage: Memory = { - userId: 'test-user', - agentId: 'test-agent', - roomId: 'test-room', - content: { - text: 'test query' - } - } as Memory; - - const mockState: State = {}; - const mockCallback = vi.fn(); - - beforeEach(() => { - vi.clearAllMocks(); - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => { - const settings = { - ASTERAI_AGENT_ID: 'test-agent-id', - ASTERAI_PUBLIC_QUERY_KEY: 'test-query-key' - }; - return settings[key as keyof typeof settings] || null; - }); - }); - - describe('validate', () => { - it('should validate with correct configuration', async () => { - const result = await queryAction.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - - it('should throw error with invalid configuration', async () => { - vi.mocked(mockRuntime.getSetting).mockReturnValue(null); - - await expect(queryAction.validate(mockRuntime, mockMessage)) - .rejects - .toThrow('Asterai plugin configuration validation failed'); - }); - }); - - describe('handler', () => { - it('should handle query and return response', async () => { - mockQuery.mockResolvedValueOnce({ text: () => Promise.resolve('mocked response') }); - - const result = await queryAction.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(result).toBe(true); - expect(mockCallback).toHaveBeenCalledWith({ - text: 'mocked response' - }); - expect(mockQuery).toHaveBeenCalledWith({ - query: 'test query' - }); - }); - - it('should handle query errors gracefully', async () => { - mockQuery.mockRejectedValueOnce(new Error('Query failed')); - - await expect( - queryAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback) - ).rejects.toThrow('Query failed'); - }); - }); - - describe('metadata', () => { - it('should have correct name and similes', () => { - expect(queryAction.name).toBe('QUERY_ASTERAI_AGENT'); - expect(queryAction.similes).toContain('MESSAGE_ASTERAI_AGENT'); - expect(queryAction.similes).toContain('TALK_TO_ASTERAI_AGENT'); - }); - - it('should have valid examples', () => { - expect(Array.isArray(queryAction.examples)).toBe(true); - expect(queryAction.examples.length).toBeGreaterThan(0); - - queryAction.examples.forEach(example => { - expect(Array.isArray(example)).toBe(true); - expect(example.length).toBe(2); - expect(example[1].content.action).toBe('QUERY_ASTERAI_AGENT'); - }); - }); - }); -}); diff --git a/packages/plugin-asterai/__tests__/environment.test.ts b/packages/plugin-asterai/__tests__/environment.test.ts deleted file mode 100644 index 2ff08992b1d68..0000000000000 --- a/packages/plugin-asterai/__tests__/environment.test.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { describe, it, expect, vi } from 'vitest'; -import { validateAsteraiConfig } from '../src/environment'; -import type { IAgentRuntime } from '@elizaos/core'; - -describe('environment configuration', () => { - const mockRuntime: IAgentRuntime = { - getSetting: vi.fn(), - } as unknown as IAgentRuntime; - - beforeEach(() => { - vi.clearAllMocks(); - }); - - it('should validate correct configuration', async () => { - vi.mocked(mockRuntime.getSetting) - .mockImplementation((key: string) => { - const settings = { - ASTERAI_AGENT_ID: 'test-agent-id', - ASTERAI_PUBLIC_QUERY_KEY: 'test-query-key' - }; - return settings[key as keyof typeof settings] || null; - }); - - const config = await validateAsteraiConfig(mockRuntime); - expect(config).toEqual({ - ASTERAI_AGENT_ID: 'test-agent-id', - ASTERAI_PUBLIC_QUERY_KEY: 'test-query-key' - }); - }); - - it('should throw error for missing ASTERAI_AGENT_ID', async () => { - vi.mocked(mockRuntime.getSetting) - .mockImplementation((key: string) => { - const settings = { - ASTERAI_PUBLIC_QUERY_KEY: 'test-query-key' - }; - return settings[key as keyof typeof settings] || null; - }); - - await expect(validateAsteraiConfig(mockRuntime)) - .rejects - .toThrow('Asterai plugin configuration validation failed'); - }); - - it('should throw error for missing ASTERAI_PUBLIC_QUERY_KEY', async () => { - vi.mocked(mockRuntime.getSetting) - .mockImplementation((key: string) => { - const settings = { - ASTERAI_AGENT_ID: 'test-agent-id' - }; - return settings[key as keyof typeof settings] || null; - }); - - await expect(validateAsteraiConfig(mockRuntime)) - .rejects - .toThrow('Asterai plugin configuration validation failed'); - }); -}); diff --git a/packages/plugin-asterai/__tests__/providers/asterai.provider.test.ts b/packages/plugin-asterai/__tests__/providers/asterai.provider.test.ts deleted file mode 100644 index 0f307bfaf4aec..0000000000000 --- a/packages/plugin-asterai/__tests__/providers/asterai.provider.test.ts +++ /dev/null @@ -1,97 +0,0 @@ -const mockFetchSummary = vi.fn(); - -vi.mock('@asterai/client', () => ({ - AsteraiClient: vi.fn(() => ({ - fetchSummary: mockFetchSummary - })) -})); - -vi.mock('../../src/index', () => ({ - getInitAsteraiClient: vi.fn(() => ({ - fetchSummary: mockFetchSummary - })) -})); - -import { vi, describe, it, expect, beforeEach } from 'vitest'; -import type { IAgentRuntime, Memory, State } from '@elizaos/core'; -import { asteraiProvider } from '../../src/providers/asterai.provider'; - -describe('asteraiProvider', () => { - const mockRuntime: IAgentRuntime = { - getSetting: vi.fn(), - knowledgeManager: { - getMemoryById: vi.fn(), - createMemory: vi.fn() - } - } as unknown as IAgentRuntime; - - const mockMessage: Memory = { - userId: 'test-user', - agentId: 'test-agent', - roomId: 'test-room', - content: { - text: 'test message' - } - } as Memory; - - const mockState: State = {}; - - beforeEach(() => { - vi.clearAllMocks(); - vi.mocked(mockRuntime.getSetting).mockImplementation((key: string) => { - const settings = { - ASTERAI_AGENT_ID: 'test-agent-id', - ASTERAI_PUBLIC_QUERY_KEY: 'test-query-key' - }; - return settings[key as keyof typeof settings] || null; - }); - }); - - describe('get', () => { - it('should return null if environment is not configured', async () => { - vi.mocked(mockRuntime.getSetting).mockReturnValue(null); - - const result = await asteraiProvider.get(mockRuntime, mockMessage, mockState); - expect(result).toBeNull(); - }); - - it('should return existing summary from knowledge manager', async () => { - const mockSummary = { - content: { text: 'existing summary' } - }; - vi.mocked(mockRuntime.knowledgeManager.getMemoryById).mockResolvedValue(mockSummary); - - const result = await asteraiProvider.get(mockRuntime, mockMessage, mockState); - expect(result).toBe('existing summary'); - }); - - it('should fetch and store new summary if none exists', async () => { - vi.mocked(mockRuntime.knowledgeManager.getMemoryById) - .mockResolvedValueOnce(null) - .mockResolvedValueOnce({ content: { text: 'new summary' } }); - - mockFetchSummary.mockResolvedValueOnce('new summary'); - vi.mocked(mockRuntime.knowledgeManager.createMemory).mockResolvedValueOnce(undefined); - - const result = await asteraiProvider.get(mockRuntime, mockMessage, mockState); - - expect(mockRuntime.knowledgeManager.createMemory).toHaveBeenCalledWith(expect.objectContaining({ - id: 'test-agent-id', - content: { text: 'new summary' } - })); - expect(result).toBe('new summary'); - }); - - it('should handle errors when fetching summary', async () => { - vi.mocked(mockRuntime.knowledgeManager.getMemoryById).mockResolvedValue(null); - mockFetchSummary.mockRejectedValue(new Error('Failed to fetch summary')); - - try { - await asteraiProvider.get(mockRuntime, mockMessage, mockState); - } catch (error) { - expect(error).toBeInstanceOf(Error); - expect(error.message).toBe('Failed to fetch summary'); - } - }); - }); -}); diff --git a/packages/plugin-asterai/biome.json b/packages/plugin-asterai/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-asterai/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-asterai/package.json b/packages/plugin-asterai/package.json deleted file mode 100644 index 0343334ab58d7..0000000000000 --- a/packages/plugin-asterai/package.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "name": "@elizaos/plugin-asterai", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@asterai/client": "0.1.6", - "@elizaos/core": "workspace:*", - "bignumber.js": "9.1.2", - "bs58": "6.0.0", - "elliptic": "6.6.1", - "node-cache": "5.1.2", - "sha3": "2.1.4", - "uuid": "11.0.3" - }, - "devDependencies": { - "@types/elliptic": "6.4.18", - "@types/uuid": "10.0.0", - "tsup": "8.3.5", - "vitest": "^3.0.0", - "@biomejs/biome": "1.9.4" - }, - "scripts": { - "lines": "find . \\( -name '*.cdc' -o -name '*.ts' \\) -not -path '*/node_modules/*' -not -path '*/tests/*' -not -path '*/deps/*' -not -path '*/dist/*' -not -path '*/imports*' | xargs wc -l", - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "test:watch": "vitest", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-asterai/src/actions/query.ts b/packages/plugin-asterai/src/actions/query.ts deleted file mode 100644 index c59fbbe632c2c..0000000000000 --- a/packages/plugin-asterai/src/actions/query.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { - elizaLogger, - type Action, - type ActionExample, - type HandlerCallback, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; -import { validateAsteraiConfig } from "../environment"; -import {getInitAsteraiClient} from "../index.ts"; - -export const queryAction = { - name: "QUERY_ASTERAI_AGENT", - similes: [ - "MESSAGE_ASTERAI_AGENT", - "TALK_TO_ASTERAI_AGENT", - "SEND_MESSAGE_TO_ASTERAI_AGENT", - "COMMUNICATE_WITH_ASTERAI_AGENT", - ], - description: - "Call this action to send a message to the asterai agent which " + - "has access to external plugins and functionality to answer " + - "the user you are assisting, to help perform a workflow task, etc.", - validate: async (runtime: IAgentRuntime, _message: Memory) => { - const config = await validateAsteraiConfig(runtime); - getInitAsteraiClient( - config.ASTERAI_AGENT_ID, - config.ASTERAI_PUBLIC_QUERY_KEY - ); - return true; - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - const config = await validateAsteraiConfig(runtime); - const asteraiClient = getInitAsteraiClient( - config.ASTERAI_AGENT_ID, - config.ASTERAI_PUBLIC_QUERY_KEY - ); - elizaLogger.debug("called QUERY_ASTERAI_AGENT action with message:", message.content); - const response = await asteraiClient.query({ - query: message.content.text - }); - const textResponse = await response.text(); - callback({ - text: textResponse - }); - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "How's the weather in LA?", - }, - }, - { - user: "{{user2}}", - content: { - text: "Let me check that for you, just a moment.", - action: "QUERY_ASTERAI_AGENT", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-asterai/src/environment.ts b/packages/plugin-asterai/src/environment.ts deleted file mode 100644 index 016186c590e0b..0000000000000 --- a/packages/plugin-asterai/src/environment.ts +++ /dev/null @@ -1,39 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -const envSchema = z.object({ - ASTERAI_AGENT_ID: z - .string() - .min(1, "ASTERAI_AGENT_ID is required"), - ASTERAI_PUBLIC_QUERY_KEY: z - .string() - .min(1, "ASTERAI_PUBLIC_QUERY_KEY is required"), -}); - -export type AsteraiConfig = z.infer; - -export async function validateAsteraiConfig( - runtime: IAgentRuntime -): Promise { - try { - const config = { - ASTERAI_AGENT_ID: - runtime.getSetting("ASTERAI_AGENT_ID") || - process.env.ASTERAI_AGENT_ID, - ASTERAI_PUBLIC_QUERY_KEY: - runtime.getSetting("ASTERAI_PUBLIC_QUERY_KEY") || process.env.ASTERAI_PUBLIC_QUERY_KEY, - }; - - return envSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `Asterai plugin configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/plugin-asterai/src/index.ts b/packages/plugin-asterai/src/index.ts deleted file mode 100644 index 7f77f154b2613..0000000000000 --- a/packages/plugin-asterai/src/index.ts +++ /dev/null @@ -1,33 +0,0 @@ -import {asteraiProvider} from "./providers/asterai.provider.ts"; -import type { Plugin } from "@elizaos/core"; -import { queryAction } from "./actions/query"; -import { AsteraiClient } from "@asterai/client"; - -export * from "./environment"; -export * from "./providers/asterai.provider"; - -let asteraiClient: AsteraiClient | null = null; - -export const getInitAsteraiClient = ( - agentId: string, - publicQueryKey: string -): AsteraiClient => { - if (!asteraiClient) { - asteraiClient = new AsteraiClient({ - appId: agentId, - queryKey: publicQueryKey, - }) - } - return asteraiClient; -}; - -export const asteraiPlugin: Plugin = { - name: "asterai", - description: "asterai Plugin for Eliza", - providers: [asteraiProvider], - actions: [queryAction], - evaluators: [], - services: [], -}; - -export default asteraiPlugin; diff --git a/packages/plugin-asterai/src/providers/asterai.provider.ts b/packages/plugin-asterai/src/providers/asterai.provider.ts deleted file mode 100644 index b9d70036b3b17..0000000000000 --- a/packages/plugin-asterai/src/providers/asterai.provider.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { - elizaLogger, - type IAgentRuntime, - type Memory, - type Provider, - type State, type UUID, -} from "@elizaos/core"; -import {validateAsteraiConfig} from "../environment.ts"; -import {getInitAsteraiClient} from "../index.ts"; - -const asteraiProvider: Provider = { - get: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State - ): Promise => { - const hasConfiguredEnv = - !!runtime.getSetting("ASTERAI_AGENT_ID") && - !!runtime.getSetting("ASTERAI_PUBLIC_QUERY_KEY"); - if (!hasConfiguredEnv) { - elizaLogger.error( - "ASTERAI_AGENT_ID or ASTERAI_PUBLIC_QUERY_KEY " + - "not configured, skipping provider" - ); - return null; - } - const config = await validateAsteraiConfig(runtime); - const asteraiClient = getInitAsteraiClient( - config.ASTERAI_AGENT_ID, - config.ASTERAI_PUBLIC_QUERY_KEY - ); - if (!asteraiClient) { - elizaLogger.error("asteraiClient is not initialised"); - return null; - } - const agentId = runtime.getSetting("ASTERAI_AGENT_ID") as UUID; - let agentSummaryMemory = await runtime.knowledgeManager.getMemoryById(agentId); - if (!agentSummaryMemory) { - // Fetch & set summary memory. - const summary = await asteraiClient.fetchSummary(); - elizaLogger.debug("asterai agent summary fetched:", summary); - await runtime.knowledgeManager.createMemory({ - id: agentId, - userId: message.userId, - agentId: message.agentId, - roomId: message.roomId, - createdAt: Date.now(), - content: { - text: summary - } - }); - agentSummaryMemory = await runtime.knowledgeManager.getMemoryById(agentId); - } - if (!agentSummaryMemory) { - elizaLogger.error("failed to initialise agent's summary memory"); - return null; - } - return agentSummaryMemory.content.text; - }, -}; - -// Module exports -export { asteraiProvider }; diff --git a/packages/plugin-asterai/tsconfig.json b/packages/plugin-asterai/tsconfig.json deleted file mode 100644 index 73993deaaf7cb..0000000000000 --- a/packages/plugin-asterai/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/plugin-asterai/tsup.config.ts b/packages/plugin-asterai/tsup.config.ts deleted file mode 100644 index 7f072ccb78412..0000000000000 --- a/packages/plugin-asterai/tsup.config.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - loader: { - ".cdc": "text", - }, - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "safe-buffer", - "base-x", - "bs58", - "borsh", - "stream", - "buffer", - "querystring", - "amqplib", - // Add other modules you want to externalize - "@onflow/fcl", - "@onflow/types", - "sha3", - "elliptic", - ], -}); diff --git a/packages/plugin-asterai/vitest.config.ts b/packages/plugin-asterai/vitest.config.ts deleted file mode 100644 index 3b66182de9127..0000000000000 --- a/packages/plugin-asterai/vitest.config.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - include: ['__tests__/**/*.test.ts'], - mockReset: true, - clearMocks: true, - restoreMocks: true, - reporters: ['default'], - testTimeout: 10000 - }, -}); diff --git a/packages/plugin-autonome/.npmignore b/packages/plugin-autonome/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-autonome/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-autonome/README.md b/packages/plugin-autonome/README.md deleted file mode 100644 index 97e79ddad5c41..0000000000000 --- a/packages/plugin-autonome/README.md +++ /dev/null @@ -1,84 +0,0 @@ -# @elizaos/plugin-autonome - -A plugin that enables launching new Eliza agents through the [Autonome platform](https://dev.autonome.fun). - -## Installation - -```bash -pnpm add @elizaos/plugin-autonome -``` - -## Configuration - -### Environment Variables - -```env -# Required: JWT token from Autonome platform -AUTONOME_JWT_TOKEN=your_jwt_token - -# Required: Autonome RPC endpoint (fixed production endpoint) -AUTONOME_RPC=https://wizard-bff-rpc.alt.technology/v1/bff/aaa/apps -``` - -To get your JWT token: -1. Login to [dev.autonome.fun](https://dev.autonome.fun) -2. Open browser developer console -3. Extract your JWT token - -### Character Configuration - -Add the plugin to your character's configuration: - -```typescript -import { autonomePlugin } from "@elizaos/plugin-autonome"; - -const character = { - plugins: [autonomePlugin] -}; -``` - -## Features - -- Launch new Eliza agents through the Autonome platform -- Configure agent settings via natural language -- Track deployment status -- Direct integration with Autonome dashboard - -## Usage - -The plugin responds to various deployment commands: - -```plaintext -"Launch an agent, name is xiaohuo" -"Create a new agent" -"Deploy an Eliza agent" -``` - -Upon successful deployment, you'll receive a dashboard link: -``` -https://dev.autonome.fun/autonome/[app-id]/details -``` - -## API Reference - -### Actions - -#### LAUNCH_AGENT -Creates and deploys a new agent to the Autonome platform. - -Aliases: -- CREATE_AGENT -- DEPLOY_AGENT -- DEPLOY_ELIZA -- DEPLOY_BOT - -Parameters: -- `name`: Name of the agent to deploy -- `config`: Agent configuration in JSON format - -## Dependencies - -- @coral-xyz/anchor: 0.30.1 -- @elizaos/plugin-tee: workspace:* -- @elizaos/plugin-trustdb: workspace:* -- axios: ^1.7.9 diff --git a/packages/plugin-autonome/__tests__/actions/launchAgent.test.ts b/packages/plugin-autonome/__tests__/actions/launchAgent.test.ts deleted file mode 100644 index be37ab5fe92e6..0000000000000 --- a/packages/plugin-autonome/__tests__/actions/launchAgent.test.ts +++ /dev/null @@ -1,153 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import launchAgent from '../../src/actions/launchAgent'; -import axios from 'axios'; -import { ModelClass, elizaLogger, composeContext, generateObjectDeprecated } from '@elizaos/core'; - -vi.mock('axios'); -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - log: vi.fn(), - error: vi.fn(), - }, - composeContext: vi.fn().mockReturnValue('mock-context'), - generateObjectDeprecated: vi.fn(), - ModelClass: { - LARGE: 'large', - }, -})); - -describe('launchAgent', () => { - let mockRuntime; - let mockMessage; - let mockState; - let mockCallback; - - beforeEach(() => { - mockRuntime = { - composeState: vi.fn().mockResolvedValue({}), - updateRecentMessageState: vi.fn().mockResolvedValue({}), - getSetting: vi.fn((key) => { - if (key === 'AUTONOME_JWT_TOKEN') return 'mock-jwt-token'; - if (key === 'AUTONOME_RPC') return 'mock-rpc-url'; - return null; - }), - }; - - mockMessage = {}; - mockState = {}; - mockCallback = vi.fn(); - - vi.mocked(axios.post).mockReset(); - vi.mocked(generateObjectDeprecated).mockReset(); - vi.mocked(composeContext).mockReset().mockReturnValue('mock-context'); - }); - - it('should validate correctly', async () => { - const result = await launchAgent.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - - it('should have correct action properties', () => { - expect(launchAgent.name).toBe('LAUNCH_AGENT'); - expect(launchAgent.description).toBe('Launch an Eliza agent'); - expect(launchAgent.similes).toContain('CREATE_AGENT'); - expect(launchAgent.examples).toBeDefined(); - expect(Array.isArray(launchAgent.examples)).toBe(true); - }); - - it('should handle successful agent launch', async () => { - const mockContent = { - name: 'test-agent', - config: '{"key": "value"}', - }; - - vi.mocked(axios.post).mockResolvedValueOnce({ - data: { - app: { - id: 'mock-app-id', - }, - }, - }); - - vi.mocked(generateObjectDeprecated).mockResolvedValueOnce(mockContent); - - const result = await launchAgent.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(result).toBe(true); - expect(mockCallback).toHaveBeenCalledWith({ - text: `Successfully launch agent ${mockContent.name}`, - content: { - success: true, - appId: 'https://dev.autonome.fun/autonome/mock-app-id/details', - }, - }); - - expect(axios.post).toHaveBeenCalledWith( - 'mock-rpc-url', - { - name: mockContent.name, - config: mockContent.config, - creationMethod: 2, - envList: {}, - templateId: 'Eliza', - }, - { - headers: { - Authorization: 'Bearer mock-jwt-token', - 'Content-Type': 'application/json', - }, - } - ); - }); - - it('should handle invalid launch content', async () => { - const mockInvalidContent = { - invalidField: 'test', - }; - - vi.mocked(generateObjectDeprecated).mockResolvedValueOnce(mockInvalidContent); - - const result = await launchAgent.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Unable to process launch agent request. Invalid content provided.', - content: { error: 'Invalid launch agent content' }, - }); - }); - - it('should handle API error', async () => { - const mockContent = { - name: 'test-agent', - config: '{"key": "value"}', - }; - - vi.mocked(generateObjectDeprecated).mockResolvedValueOnce(mockContent); - vi.mocked(axios.post).mockResolvedValueOnce(undefined); - - await launchAgent.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Error launching agent: Cannot read properties of undefined (reading \'data\')', - content: { error: 'Cannot read properties of undefined (reading \'data\')' }, - }); - }); -}); diff --git a/packages/plugin-autonome/biome.json b/packages/plugin-autonome/biome.json deleted file mode 100644 index ec7d14c614f96..0000000000000 --- a/packages/plugin-autonome/biome.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-autonome/package.json b/packages/plugin-autonome/package.json deleted file mode 100644 index 5f7d8180f768f..0000000000000 --- a/packages/plugin-autonome/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "@elizaos/plugin-autonome", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@coral-xyz/anchor": "0.28.0", - "@elizaos/core": "workspace:*", - "@elizaos/plugin-tee": "workspace:*", - "@elizaos/plugin-trustdb": "workspace:*", - "axios": "^1.7.9" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "peerDependencies": { - "form-data": "4.0.1", - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-autonome/src/actions/launchAgent.ts b/packages/plugin-autonome/src/actions/launchAgent.ts deleted file mode 100644 index 241d258bdacdb..0000000000000 --- a/packages/plugin-autonome/src/actions/launchAgent.ts +++ /dev/null @@ -1,179 +0,0 @@ -import axios from "axios"; -import { - type ActionExample, - composeContext, - type Content, - elizaLogger, - generateObjectDeprecated, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action, -} from "@elizaos/core"; - -export interface LaunchAgentContent extends Content { - name: string; - config: string; -} - -// Rafactoring -function isLaunchAgentContent(content: unknown): content is LaunchAgentContent { - elizaLogger.log("Content for launchAgent", content); - return ( - typeof content === "object" && - content !== null && - "name" in content && - "config" in content && - typeof (content as LaunchAgentContent).name === "string" && - typeof (content as LaunchAgentContent).config === "string" - ); -} - -const launchTemplate = `Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined. - -Example response: -\`\`\`json -{ - "name": "xiaohuo", -} -\`\`\` - -{{recentMessages}} - -Given the recent messages, extract the following information about the requested agent launch: -- Agent name -- Character json config -`; - -export default { - name: "LAUNCH_AGENT", - similes: ["CREATE_AGENT", "DEPLOY_AGENT", "DEPLOY_ELIZA", "DEPLOY_BOT"], - validate: async (_runtime: IAgentRuntime, _message: Memory) => { - return true; - }, - description: "Launch an Eliza agent", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - elizaLogger.log("Starting LAUNCH_AGENT handler..."); - - // Initialize or update state also in lanuchContext - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Compose launch context - const launchContext = composeContext({ - state: currentState, - template: launchTemplate, - }); - - // Generate launch content - const content = await generateObjectDeprecated({ - runtime, - context: launchContext, - modelClass: ModelClass.LARGE, - }); - - // Validate launch content - if (!isLaunchAgentContent(content)) { - elizaLogger.error("Invalid launch content", content); - if (callback) { - callback({ - text: "Unable to process launch agent request. Invalid content provided.", - content: { error: "Invalid launch agent content" }, - }); - } - return false; - } - - const autonomeJwt = runtime.getSetting("AUTONOME_JWT_TOKEN"); - const autonomeRpc = runtime.getSetting("AUTONOME_RPC"); - - const requestBody = { - name: content.name, - config: content.config, - creationMethod: 2, - envList: {}, - templateId: "Eliza", - }; - - const sendPostRequest = async () => { - try { - const response = await axios.post(autonomeRpc, requestBody, { - headers: { - Authorization: `Bearer ${autonomeJwt}`, - "Content-Type": "application/json", - }, - }); - return response; - } catch (error) { - console.error("Error making RPC call:", error); - } - }; - - try { - const resp = await sendPostRequest(); - if (resp?.data?.app?.id) { - elizaLogger.log( - "Launching successful, please find your agent on" - ); - elizaLogger.log( - `https://dev.autonome.fun/autonome/${resp.data.app.id}/details` - ); - } - if (callback) { - callback({ - text: `Successfully launch agent ${content.name}`, - content: { - success: true, - appId: `https://dev.autonome.fun/autonome/${resp.data.app.id}/details`, - }, - }); - } - return true; - } catch (error) { - if (callback) { - elizaLogger.error("Error during launching agent"); - elizaLogger.error(error); - callback({ - text: `Error launching agent: ${error.message}`, - content: { error: error.message }, - }); - } - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Launch an agent, name is xiaohuo", - }, - }, - { - user: "{{user2}}", - content: { - text: "I'll launch the agent now...", - action: "LAUNCH_AGENT", - }, - }, - { - user: "{{user2}}", - content: { - text: "Successfully launch agent, id is ba2e8369-e256-4a0d-9f90-9c64e306dc9f", - }, - }, - ], - ] as ActionExample[][], -} satisfies Action; - diff --git a/packages/plugin-autonome/src/index.ts b/packages/plugin-autonome/src/index.ts deleted file mode 100644 index e1a23ac24b86d..0000000000000 --- a/packages/plugin-autonome/src/index.ts +++ /dev/null @@ -1,13 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import launchAgent from "./actions/launchAgent"; - -// Action setup -export const autonomePlugin: Plugin = { - name: "autonome", - description: "Autonome Plugin for Eliza", - actions: [launchAgent], - evaluators: [], - providers: [], -}; - -export default autonomePlugin; diff --git a/packages/plugin-autonome/tsconfig.json b/packages/plugin-autonome/tsconfig.json deleted file mode 100644 index 73993deaaf7cb..0000000000000 --- a/packages/plugin-autonome/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/plugin-autonome/tsup.config.ts b/packages/plugin-autonome/tsup.config.ts deleted file mode 100644 index a47c9eb64b0e0..0000000000000 --- a/packages/plugin-autonome/tsup.config.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - ], -}); diff --git a/packages/plugin-autonome/vitest.config.ts b/packages/plugin-autonome/vitest.config.ts deleted file mode 100644 index dc8b1b5ea44f3..0000000000000 --- a/packages/plugin-autonome/vitest.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - include: ['__tests__/**/*.test.ts'], - exclude: ['**/node_modules/**', '**/dist/**'], - coverage: { - provider: 'v8', - reporter: ['text', 'json', 'html'], - exclude: [ - 'node_modules/**', - 'dist/**', - '**/*.d.ts', - '**/*.test.ts', - 'coverage/**' - ] - } - } -}); diff --git a/packages/plugin-avail/.npmignore b/packages/plugin-avail/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-avail/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-avail/README.md b/packages/plugin-avail/README.md deleted file mode 100644 index 8a3f25ba44ab1..0000000000000 --- a/packages/plugin-avail/README.md +++ /dev/null @@ -1,42 +0,0 @@ -# @elizaos/plugin-avail - Plugin for Avail - -This is a plugin for using Eliza to interact with the Avail DA network. Defaults to Turing testnet, but can be customized to use Mainnet by changing the RPC in the `.env` file at `AVAIL_RPC_URL`. - -## Actions -- **transfer**: This action enables the transfer of AVAIL tokens from the agent's wallet (as defined by the keyring generated from `AVAIL_SEED`) to another wallet. To use just mention the transfer of AVAIL tokens to an Avail account. - - - name: `SEND_AVAIL` - - - Message sample: `Send 100 AVAIL to 5GWbvXjefEvXXETtKQH7YBsUaPc379KAQATW1eqeJT26cbsK` - -- **submitData**: This action enables the submission of any arbitrary data to the Avail DA network. To use just mention that you need to send "any data" to Avail. You can customize the Avail `appID` through which the agent submits the data by modifying the `AVAIL_APP_ID` env config. - - - name: `SUBMIT_DATA` - - - Message sample: `Submit the following data to Avail "Hello World!"` - -## Usage & Testing - -### Detailed testing steps -- In `.env` you should set the value for `AVAIL_ADDRESS` (this is the public address for the agent account - [learn how to get one here](https://docs.availproject.org/user-guides/accounts#seed-phrases)) and `AVAIL_SEED` (seed phrase for the same account). - -- **Transfer AVAIL** - - To test transfer function, you need tokens in your Avail account. On testnet, you can use the [Avail Faucet](https://faucet.avail.tools/). If you need more please ping us on [Discord](https://discord.gg/y6fHnxZQX8), and we can send it over. - - Run the agent and prompt it with: "send AVAIL to " - e.g. `send 1 AVAIL to 5GWbvXjefEvXXETtKQH7YBsUaPc379KAQATW1eqeJT26cbsK` - - Assuming everything goes smoothly the agent returns with the Tx Hash, and Block Hash. -The tx hash can be checked on the Avail block explorer at https://avail-turing.subscan.io/ - -- **Submit Data** - - To test data submission, you need tokens in your Avail account to pay fees. On testnet, you can use the [Avail Faucet](https://faucet.avail.tools/). If you need more please ping us on [Discord](https://discord.gg/y6fHnxZQX8), and we can send it over. - - Run the agent and prompt it with: "Submit the following data to Avail " - e.g. `Submit the following data to Avail "Hello World!"` - - Assuming everything goes smoothly the agent returns with the Tx Hash, and Block Hash. The tx hash can be checked on the Avail block explorer at https://avail-turing.subscan.io/ - - -## Resources -- [Avail Documentation](https://docs.availproject.org/) -- [Set up an Avail Account](https://docs.availproject.org/user-guides/accounts#seed-phrases) - Learn how to get your `AVAIL_SEED` -- [Find more Network Information like RPC endpoints](https://docs.availproject.org/docs/networks) -- [Learn more about appIDs](https://docs.availproject.org/docs/build-with-avail/interact-with-avail-da/app-id) -- [Learn more about Avail](https://www.availproject.org/) -- [Awesome Avail Repo](https://github.com/availproject/awesome-avail) - diff --git a/packages/plugin-avail/__tests__/actions/submitData.test.ts b/packages/plugin-avail/__tests__/actions/submitData.test.ts deleted file mode 100644 index 65c16b260f01b..0000000000000 --- a/packages/plugin-avail/__tests__/actions/submitData.test.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { describe, expect, it, vi, beforeEach } from 'vitest'; -import submitData, { isDataContent } from '../../src/actions/submitData'; -import { generateObjectDeprecated } from '@elizaos/core'; -import { composeContext } from '@elizaos/core'; -import * as availJsSdk from 'avail-js-sdk'; - -vi.mock('@elizaos/core', () => ({ - generateObjectDeprecated: vi.fn(), - composeContext: vi.fn(), -})); - -vi.mock('avail-js-sdk', () => ({ - initialize: vi.fn(), - getKeyringFromSeed: vi.fn(), -})); - -describe('submitData', () => { - let mockRuntime; - let mockMessage; - let mockState; - let mockCallback; - - beforeEach(() => { - mockRuntime = { - getSetting: vi.fn((key: string) => { - switch (key) { - case 'AVAIL_SEED': - return 'mock-seed'; - case 'AVAIL_RPC_URL': - return 'mock-node-url'; - case 'AVAIL_APP_ID': - return '0'; - case 'AVAIL_ADDRESS': - return '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY'; - default: - return undefined; - } - }), - composeState: vi.fn().mockResolvedValue({}), - updateRecentMessageState: vi.fn().mockResolvedValue({}), - }; - - mockMessage = { - content: { - data: 'test data' - } - }; - mockState = {}; - mockCallback = vi.fn(); - - vi.mocked(generateObjectDeprecated).mockReset(); - vi.mocked(composeContext).mockReset(); - }); - - it('should validate correctly', async () => { - const result = await submitData.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - - it('should have correct action properties', () => { - expect(submitData.name).toBe('SUBMIT_DATA'); - expect(submitData.description).toBe('Submit data to Avail as per user command'); - expect(submitData.examples).toBeDefined(); - expect(Array.isArray(submitData.examples)).toBe(true); - }); - - it('should validate data content correctly', () => { - const invalidContent = { invalidField: 'test' }; - expect(isDataContent(invalidContent)).toBe(false); - - const invalidDataType = { data: 123 }; - expect(isDataContent(invalidDataType)).toBe(false); - }); -}); diff --git a/packages/plugin-avail/__tests__/actions/transfer.test.ts b/packages/plugin-avail/__tests__/actions/transfer.test.ts deleted file mode 100644 index 490757ec5f18e..0000000000000 --- a/packages/plugin-avail/__tests__/actions/transfer.test.ts +++ /dev/null @@ -1,138 +0,0 @@ -import { describe, expect, it, vi, beforeEach } from 'vitest'; -import transfer, { isTransferContent } from '../../src/actions/transfer'; -import { generateObjectDeprecated } from '@elizaos/core'; -import { composeContext } from '@elizaos/core'; -import * as availJsSdk from 'avail-js-sdk'; - -vi.mock('@elizaos/core', () => ({ - generateObjectDeprecated: vi.fn(), - composeContext: vi.fn(), -})); - -vi.mock('avail-js-sdk', () => ({ - initialize: vi.fn(), - getKeyringFromSeed: vi.fn(), - isValidAddress: vi.fn(), - getDecimals: vi.fn(), - formatNumberToBalance: vi.fn(), -})); - -describe('transfer', () => { - let mockRuntime; - let mockMessage; - let mockState; - let mockCallback; - const mockTxHash = '0x1234567890abcdef'; - const mockBlockHash = '0xabcdef1234567890'; - - beforeEach(() => { - mockRuntime = { - getSetting: vi.fn((key: string) => { - switch (key) { - case 'AVAIL_SEED': - return 'mock-seed'; - case 'AVAIL_RPC_URL': - return 'mock-node-url'; - case 'AVAIL_APP_ID': - return '0'; - case 'AVAIL_ADDRESS': - return '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY'; - default: - return undefined; - } - }), - composeState: vi.fn().mockResolvedValue({}), - updateRecentMessageState: vi.fn().mockResolvedValue({}), - }; - - mockMessage = { - content: { - recipient: '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY', - amount: '1.5' - } - }; - mockState = {}; - mockCallback = vi.fn(); - - const mockApi = { - registry: { - findMetaError: vi.fn().mockReturnValue({ - docs: ['Transaction failed'], - name: 'InsufficientBalance', - section: 'balances' - }) - }, - tx: { - balances: { - transferKeepAlive: vi.fn().mockReturnValue({ - paymentInfo: vi.fn().mockResolvedValue({ - class: { toString: () => 'normal' }, - weight: { toString: () => '1000' }, - partialFee: { toHuman: () => '0.1 AVAIL' }, - }), - signAndSend: vi.fn().mockImplementation((signer, options, callback) => { - const result = { - status: { - isFinalized: true, - asFinalized: mockBlockHash, - toString: () => 'Finalized' - }, - txHash: mockTxHash, - isError: false, - dispatchError: undefined, - }; - callback(result); - return Promise.resolve(); - }), - }), - }, - }, - query: { - system: { - account: vi.fn().mockResolvedValue({ - data: { - free: { - toHuman: () => '100 AVAIL' - } - } - }), - }, - }, - }; - - vi.mocked(availJsSdk.initialize).mockResolvedValue(mockApi); - vi.mocked(availJsSdk.getKeyringFromSeed).mockReturnValue({ address: 'mock-address' }); - vi.mocked(availJsSdk.isValidAddress).mockReturnValue(true); - vi.mocked(availJsSdk.getDecimals).mockResolvedValue(18); - vi.mocked(availJsSdk.formatNumberToBalance).mockReturnValue('1500000000000000000'); - }); - - it('should validate correctly', async () => { - const result = await transfer.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - - it('should have correct action properties', () => { - expect(transfer.name).toBe('SEND_AVAIL'); - expect(transfer.description).toBe('Transfer AVAIL tokens from the agent\'s wallet to another address'); - expect(transfer.examples).toBeDefined(); - expect(Array.isArray(transfer.examples)).toBe(true); - }); - - it('should validate transfer content correctly', () => { - const validContent = { - recipient: '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY', - amount: '1.5' - }; - expect(isTransferContent(validContent)).toBe(true); - - const invalidContent = { invalidField: 'test' }; - expect(isTransferContent(invalidContent)).toBe(false); - - const invalidAmountType = { - recipient: '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY', - amount: {} - }; - expect(isTransferContent(invalidAmountType)).toBe(false); - }); -}); diff --git a/packages/plugin-avail/biome.json b/packages/plugin-avail/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-avail/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-avail/package.json b/packages/plugin-avail/package.json deleted file mode 100644 index 6800c4a0c01e3..0000000000000 --- a/packages/plugin-avail/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "@elizaos/plugin-avail", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "@elizaos/plugin-trustdb": "workspace:*", - "avail-js-sdk": "^0.3.0" - }, - "devDependencies": { - "@types/node": "^20.0.0", - "tsup": "8.3.5", - "@polkadot/types": "^10.11.3", - "vitest": "^3.0.0", - "@vitest/coverage-v8": "^2.1.8", - "@biomejs/biome": "1.9.4" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "test:watch": "vitest watch", - "test:coverage": "vitest run --coverage", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - } -} diff --git a/packages/plugin-avail/src/actions/submitData.ts b/packages/plugin-avail/src/actions/submitData.ts deleted file mode 100644 index afde061c88aeb..0000000000000 --- a/packages/plugin-avail/src/actions/submitData.ts +++ /dev/null @@ -1,247 +0,0 @@ -import { - type ActionExample, - type Content, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action, - elizaLogger, - composeContext, - generateObjectDeprecated, -} from "@elizaos/core"; -import { validateAvailConfig } from "../environment"; -import { - //getDecimals, - initialize, - getKeyringFromSeed, -} from "avail-js-sdk"; -import type { H256 } from "@polkadot/types/interfaces/runtime"; -import type { ISubmittableResult } from "@polkadot/types/types"; - -export interface DataContent extends Content { - data: string; -} - -export function isDataContent(content: DataContent): content is DataContent { - // Validate types - const validTypes = typeof content.data === "string"; - if (!validTypes) { - return false; - } -} - -const submitDataTemplate = `Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined. - - -Example response: -\`\`\`json -{ - "data": "Hello World, this is the data I submitted" -} -\`\`\` - -{{recentMessages}} - -Given the recent messages, extract the following information about the requested AVAIL token transfer: -- Data to be submitted - -Respond with a JSON markdown block containing only the extracted values.`; - -export default { - name: "SUBMIT_DATA", - similes: [ - "SUBMIT_DATA_TO_AVAIL", - "SEND_DATA", - "SEND_DATA_TO_AVAIL", - "POST_DATA", - "POST_DATA_TO_AVAIL", - "POST_DATA_ON_AVAIL_NETWORK", - "POST_DATA_TO_AVAIL_NETWORK", - "SEND_DATA_ON_AVAIL_NETWORK", - "SEND_DATA_TO_AVAIL_NETWORK", - "SUBMIT_DATA_ON_AVAIL_NETWORK", - "SUBMIT_DATA_TO_AVAIL_NETWORK", - ], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateAvailConfig(runtime); - return true; - }, - description: "Submit data to Avail as per user command", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - elizaLogger.log("Starting SUBMIT_DATA handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Compose transfer context - const submitDataContext = composeContext({ - state: currentState, - template: submitDataTemplate, - }); - - // Generate transfer content - const content = await generateObjectDeprecated({ - runtime, - context: submitDataContext, - modelClass: ModelClass.SMALL, - }); - - // Validate transfer content - // if (!isDataContent(content)) { - // console.log(content + typeof(content.data)) - // console.error("Invalid content for SUBMIT_DATA action."); - // if (callback) { - // callback({ - // text: "Unable to process submit data request. Invalid content provided.", - // content: { error: "Invalid data content" }, - // }); - // } - // return false; - // } - if (content.data != null) { - try { - const SEED = runtime.getSetting("AVAIL_SEED"); - if (!SEED) throw new Error("AVAIL_SEED not set"); - //const ACCOUNT = runtime.getSetting("AVAIL_ADDRESS")!; - const ENDPOINT = runtime.getSetting("AVAIL_RPC_URL"); - const APP_ID = runtime.getSetting("AVAIL_APP_ID"); - - const api = await initialize(ENDPOINT); - const keyring = getKeyringFromSeed(SEED); - const options = { app_id: APP_ID, nonce: -1 }; - //const decimals = getDecimals(api); - const data = content.data; - - const submitDataInfo = await api.tx.dataAvailability - .submitData(data) - .paymentInfo(keyring); - //print estimated fees - elizaLogger.log(`Transaction Fee for Submit Data: - class=${submitDataInfo.class.toString()}, - weight=${submitDataInfo.weight.toString()}, - partialFee=${submitDataInfo.partialFee.toHuman()} - `); - - //submit data - const txResult:ISubmittableResult = await new Promise( - (res) => { - api.tx.dataAvailability - .submitData(data) - .signAndSend( - keyring, - options, - (result) => { - elizaLogger.log( - `Tx status: ${result.status}` - ); - if (result.isFinalized || result.isError) { - res(result); - } - } - ); - } - ); - - // Rejected Transaction handling - if (txResult.isError) { - console.log('Transaction was not executed'); - } - - // Failed Transaction handling - const error = txResult.dispatchError; - if (error !== undefined) { - if (error.isModule) { - const decoded = api.registry.findMetaError( - error.asModule - ); - const { docs, name, section } = decoded; - console.log(`${section}.${name}: ${docs.join(" ")}`); - } else { - console.log(error.toString()); - } - } - - elizaLogger.success( - `Data submitted successfully! tx: \nTx Hash: ${txResult.txHash as H256}, Block Hash: ${txResult.status.asFinalized as H256}` - ); - if (callback) { - callback({ - text: `Data submitted successfully! tx hash: ${txResult.txHash as H256} Block Hash: ${txResult.status.asFinalized as H256} `, - content: {}, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error during data submission:", error); - if (callback) { - callback({ - text: `Error submitting data: ${error.message}`, - content: { error: error.message }, - }); - } - return false; - } - } else { - elizaLogger.log("No data mentioned to be submitted"); - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Submit the following data to Avail 'Hello World!'", - }, - }, - { - user: "{{agent}}", - content: { - text: "Sure, I'll send the data 'Hello World!' to Avail now.", - action: "SUBMIT_DATA", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully submitted the data 'Hello World!' to Avail \nTransaction: 0x748057951ff79cea6de0e13b2ef70a1e9f443e9c83ed90e5601f8b45144a4ed4", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Submit 'Don't Fight, Unite!' to Avail", - }, - }, - { - user: "{{agent}}", - content: { - text: "Sure, I'll send the data 'Don't Fight, Unite!' to Avail now.", - action: "SUBMIT_DATA", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully submitted the data 'Don't Fight, Unite!' to Avail \nTransaction: 0x748057951ff79cea6de0e13b2ef70a1e9f443e9c83ed90e5601f8b45144a4ed4", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-avail/src/actions/transfer.ts b/packages/plugin-avail/src/actions/transfer.ts deleted file mode 100644 index 85e66280ed252..0000000000000 --- a/packages/plugin-avail/src/actions/transfer.ts +++ /dev/null @@ -1,259 +0,0 @@ -import { - type ActionExample, - type Content, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action, - elizaLogger, - composeContext, - generateObjectDeprecated, -} from "@elizaos/core"; -import { validateAvailConfig } from "../environment"; -import { - getDecimals, - initialize, - formatNumberToBalance, - getKeyringFromSeed, - isValidAddress, -} from "avail-js-sdk"; -import type { ISubmittableResult } from "@polkadot/types/types/extrinsic"; -import type { H256 } from "@polkadot/types/interfaces/runtime"; - -export interface TransferContent extends Content { - recipient: string; - amount: string | number; -} - -export function isTransferContent( - content: TransferContent -): content is TransferContent { - // Validate types - const validTypes = - typeof content.recipient === "string" && - (typeof content.amount === "string" || - typeof content.amount === "number"); - if (!validTypes) { - return false; - } - - // Validate addresses - const validAddresses = isValidAddress(content.recipient); - return validAddresses; -} - -const transferTemplate = `Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined. - - -Example response: -\`\`\`json -{ - "recipient": "5GWbvXjefEvXXETtKQH7YBsUaPc379KAQATW1eqeJT26cbsK", - "amount": "1000" -} -\`\`\` - -{{recentMessages}} - -Given the recent messages, extract the following information about the requested AVAIL token transfer: -- Recipient wallet address -- Amount of AVAIL to transfer - -Respond with a JSON markdown block containing only the extracted values.`; - -export default { - name: "SEND_AVAIL", - similes: [ - "TRANSFER_AVAIL_TOKEN", - "TRANSFER_TOKEN", - "TRANSFER_TOKENS_ON_AVAIL", - "TRANSFER_TOKEN_ON_AVAIL", - "SEND_TOKENS_ON_AVAIL", - "SEND_TOKENS_ON_AVAIL_NETWORK", - "SEND_AVAIL_ON_AVAIL_NETWORK", - "SEND_AVAIL_TOKEN_ON_AVAIL_DA", - "PAY_ON_AVAIL", - ], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateAvailConfig(runtime); - return true; - }, - description: - "Transfer AVAIL tokens from the agent's wallet to another address", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - elizaLogger.log("Starting SEND_TOKEN handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Compose transfer context - const transferContext = composeContext({ - state: currentState, - template: transferTemplate, - }); - - // Generate transfer content - const content = await generateObjectDeprecated({ - runtime, - context: transferContext, - modelClass: ModelClass.SMALL, - }); - - // Validate transfer content - if (!isTransferContent(content)) { - console.log(content); - console.error("Invalid content for TRANSFER_TOKEN action."); - if (callback) { - callback({ - text: "Unable to process transfer request. Invalid content provided.", - content: { error: "Invalid transfer content" }, - }); - } - return false; - } - - if (content.amount != null && content.recipient != null) { - try { - const SEED = runtime.getSetting("AVAIL_SEED"); - if (!SEED) throw new Error("AVAIL_SEED not set"); - //const PUBLIC_KEY = runtime.getSetting("AVAIL_ADDRESS")!; - const ENDPOINT = runtime.getSetting("AVAIL_RPC_URL"); - - const api = await initialize(ENDPOINT); - const keyring = getKeyringFromSeed(SEED); - const options = { app_id: 0, nonce: -1 }; - const decimals = getDecimals(api); - const amount = formatNumberToBalance(content.amount, decimals); - - const oldBalance = await api.query.system.account(content.recipient); - elizaLogger.log( - `Balance before the transfer call: ${oldBalance.toString()}` - ); - - // Transaction call - const txResult:ISubmittableResult = await new Promise( - (res) => { - api.tx.balances - .transferKeepAlive(content.recipient, amount) - .signAndSend( - keyring, - options, - (result) => { - elizaLogger.log( - `Tx status: ${result.status}` - ); - if (result.isFinalized || result.isError) { - res(result); - } - } - ); - } - ); - - // Error handling - const error = txResult.dispatchError; - if (txResult.isError) { - elizaLogger.log("Transaction was not executed"); - } else if (error !== undefined) { - if (error.isModule) { - const decoded = api.registry.findMetaError( - error.asModule - ); - const { docs, name, section } = decoded; - elizaLogger.log( - `${section}.${name}: ${docs.join(" ")}` - ); - } else { - elizaLogger.log(error.toString()); - } - } - - const newBalance = await api.query.system.account(content.recipient); - elizaLogger.log( - `Balance after the transfer call: ${newBalance.toString()}` - ); - - elizaLogger.success( - `Transfer completed successfully! tx: \nTx Hash: ${txResult.txHash as H256}, Block Hash: ${txResult.status.asFinalized as H256}` - ); - if (callback) { - callback({ - text: `Transfer completed successfully! tx hash: ${txResult.txHash as H256} Block Hash: ${txResult.status.asFinalized as H256} `, - content: {}, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error during token transfer:", error); - if (callback) { - callback({ - text: `Error transferring tokens: ${error.message}`, - content: { error: error.message }, - }); - } - return false; - } - } else { - elizaLogger.log("Either amount or recipient not specified"); - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Send 100 AVAIL to 5GWbvXjefEvXXETtKQH7YBsUaPc379KAQATW1eqeJT26cbsK", - }, - }, - { - user: "{{agent}}", - content: { - text: "Sure, I'll send 100 AVAIL to that address now.", - action: "SEND_AVAIL", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully sent 100 AVAIL to 5GWbvXjefEvXXETtKQH7YBsUaPc379KAQATW1eqeJT26cbsK\nTransaction: 0x748057951ff79cea6de0e13b2ef70a1e9f443e9c83ed90e5601f8b45144a4ed4", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Please send 100 AVAIL tokens to 5GWbvXjefEvXXETtKQH7YBsUaPc379KAQATW1eqeJT26cbsK", - }, - }, - { - user: "{{agent}}", - content: { - text: "Of course. Sending 100 AVAIL to that address now.", - action: "SEND_TOKEN", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully sent 100 AVAIL to 5GWbvXjefEvXXETtKQH7YBsUaPc379KAQATW1eqeJT26cbsK\nTransaction: 0x0b9f23e69ea91ba98926744472717960cc7018d35bc3165bdba6ae41670da0f0", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-avail/src/environment.ts b/packages/plugin-avail/src/environment.ts deleted file mode 100644 index 68b711526813c..0000000000000 --- a/packages/plugin-avail/src/environment.ts +++ /dev/null @@ -1,35 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -export const availEnvSchema = z.object({ - AVAIL_ADDRESS: z.string().min(1, "Avail address is required"), - AVAIL_SEED: z.string().min(1, "Avail account seed phrase is required"), -}); - -export type availConfig = z.infer; - -export async function validateAvailConfig( - runtime: IAgentRuntime -): Promise { - try { - const config = { - AVAIL_ADDRESS: - runtime.getSetting("AVAIL_ADDRESS") || - process.env.AVAIL_ADDRESS, - AVAIL_SEED: - runtime.getSetting("AVAIL_SEED") || process.env.AVAIL_SEED, - }; - - return availEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `Avail configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/plugin-avail/src/index.ts b/packages/plugin-avail/src/index.ts deleted file mode 100644 index 0f1a9e466dfbe..0000000000000 --- a/packages/plugin-avail/src/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type { Plugin } from "@elizaos/core"; - -//export * from "./actions/bridge"; -export * from "./actions/submitData"; -export * from "./actions/transfer"; - -// import { bridgeAction } from "./actions/bridge"; -import transfer from "./actions/transfer"; -import submitData from "./actions/submitData"; - -export const availPlugin: Plugin = { - name: "avail", - description: "Avail DA integration plugin", - providers: [], - evaluators: [], - services: [], - actions: [transfer, submitData], -}; - -export default availPlugin; diff --git a/packages/plugin-avail/tsconfig.json b/packages/plugin-avail/tsconfig.json deleted file mode 100644 index 18c600eec05a2..0000000000000 --- a/packages/plugin-avail/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "types": [ - "node" - ] - }, - "include": [ - "src/**/*.ts", - ] -} \ No newline at end of file diff --git a/packages/plugin-avail/tsup.config.ts b/packages/plugin-avail/tsup.config.ts deleted file mode 100644 index 1a96f24afa1eb..0000000000000 --- a/packages/plugin-avail/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "safe-buffer", - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-avail/vitest.config.ts b/packages/plugin-avail/vitest.config.ts deleted file mode 100644 index 33dbc11090daa..0000000000000 --- a/packages/plugin-avail/vitest.config.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - include: ['__tests__/**/*.test.ts'], - exclude: ['**/node_modules/**', '**/dist/**'], - } -}); diff --git a/packages/plugin-avalanche/README.md b/packages/plugin-avalanche/README.md deleted file mode 100644 index 15cc16d26c7b8..0000000000000 --- a/packages/plugin-avalanche/README.md +++ /dev/null @@ -1,253 +0,0 @@ -# @elizaos/plugin-avalanche - -A plugin for interacting with the Avalanche blockchain network within the ElizaOS ecosystem. - -## Description - -The Avalanche plugin enables comprehensive DeFi operations on the Avalanche network, including token transfers, YAK swaps, yield strategy management, and token creation via Token Mill. - -## Installation - -```bash -pnpm install @elizaos/plugin-avalanche -``` - -## Configuration - -The plugin requires the following environment variable: - -```typescript -AVALANCHE_PRIVATE_KEY= -``` - -## Features - -### 1. Token Transfers - -- Send native AVAX and ERC20 tokens -- Support for multiple token standards -- Built-in address validation - -### 2. YAK Swaps - -- Decentralized token swaps -- Automatic best path finding -- Slippage protection (default: 0.2%) -- Support for all major tokens - -### 3. Yield Strategies - -- Deposit tokens into yield-generating strategies -- Support for multiple strategies including: - - YAK staking - - USDC Benqi - - gmYAK Token Mill - - PRINCESS staking - - JOE staking - -### 4. Token Mill - -- Create new tokens -- Configure custom tokenomics -- Automatic market creation - -## Supported Tokens - -```typescript -const TOKENS = { - AVAX: "0x0000000000000000000000000000000000000000", - WAVAX: "0xB31f66AA3C1e785363F0875A1B74E27b85FD66c7", - YAK: "0x59414b3089ce2AF0010e7523Dea7E2b35d776ec7", - gmYAK: "0x3A30784c1af928CdFce678eE49370220aA716DC3", - USDC: "0xB97EF9Ef8734C71904D8002F8b6Bc66Dd9c48a6E", - // ... and more -}; -``` - -## Usage Examples - -### Token Transfer - -```typescript -// Send AVAX -"Send 10 AVAX to 0x049d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7"; - -// Send ERC20 -"Transfer 100 USDC to [address]"; -``` - -### YAK Swap - -```typescript -// Swap tokens -"Swap 1 AVAX for USDC"; -"Swap 10 USDC for gmYAK"; -``` - -### Yield Strategy - -```typescript -// Deposit into strategies -"Deposit 1 USDC into the strategy"; -"Deposit 10 gmYAK to earn yield"; -``` - -### Token Creation - -```typescript -// Create new token -"Create a new memecoin called 'Test Token' with the symbol 'TEST'"; -``` - -## Providers - -### 1. Wallet Provider - -- Displays wallet balances -- Shows tokens in yield strategies -- Real-time balance updates - -### 2. Strategies Provider - -- Lists available yield strategies -- Shows deposit token requirements - -### 3. Tokens Provider - -- Lists supported tokens -- Shows token addresses - -## Development - -1. Clone the repository -2. Install dependencies: - -```bash -pnpm install -``` - -3. Build the plugin: - -```bash -pnpm run build -``` - -4. Run linting: - -```bash -pnpm run lint -``` - -## Dependencies - -- viem: ^2.21.49 -- @elizaos/core: workspace:\* - -## Future Enhancements - -1. **Advanced DeFi Operations** - - - Multi-hop yield strategies - - Auto-compounding features - - Yield optimization algorithms - - Risk assessment tools - - Portfolio rebalancing automation - - Cross-chain yield farming - -2. **Enhanced Token Management** - - - Batch token operations - - Advanced token creation templates - - Token migration tools - - Automated token listing - - Token analytics dashboard - - Custom tokenomics implementation - -3. **YAK Protocol Integration** - - - Advanced routing algorithms - - MEV protection features - - Gas optimization strategies - - Liquidity analysis tools - - Price impact predictions - - Custom trading strategies - -4. **Benqi Protocol Features** - - - Collateral optimization - - Liquidation protection - - Interest rate monitoring - - Position management tools - - Risk assessment dashboard - - Auto-repayment features - -5. **Token Mill Improvements** - - - Advanced token customization - - Automated market making - - Token distribution tools - - Vesting schedule management - - Governance token features - - Token upgrade mechanisms - -6. **Security Enhancements** - - - Transaction simulation - - Smart contract auditing tools - - Real-time monitoring - - Automated safety checks - - Emergency shutdown features - - Multi-signature support - -7. **Developer Tools** - - - Enhanced debugging capabilities - - Testing framework improvements - - Documentation generator - - CLI tools for common operations - - Integration templates - - Performance monitoring - -8. **Analytics and Reporting** - - Portfolio tracking - - Performance metrics - - Gas usage optimization - - Transaction history analysis - - Yield comparison tools - - Risk assessment reports - -We welcome community feedback and contributions to help prioritize these enhancements. - -## Contributing - -Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information. - -## Credits - -This plugin integrates with and builds upon several key technologies: - -- [Avalanche](https://www.avax.network/): High-performance blockchain platform -- [avalanchejs](https://github.com/ava-labs/avalanchejs): Official Avalanche JavaScript library -- [YAK Protocol](https://yak.exchange/): Decentralized exchange aggregator -- [Benqi](https://benqi.fi/): Lending and borrowing protocol -- [Token Mill](https://tokenmill.xyz/): Token creation platform - -Special thanks to: - -- The Ava Labs team for developing Avalanche -- The YAK Protocol development team -- The Benqi protocol developers -- The Token Mill platform team -- The Avalanche Developer community -- The Eliza community for their contributions and feedback - -For more information about Avalanche capabilities: - -- [Avalanche Documentation](https://docs.avax.network/) -- [YAK Protocol Docs](https://yak.exchange/docs) -- [Benqi Documentation](https://docs.benqi.fi/) -- [Token Mill Guide](https://docs.tokenmill.xyz/) - -## License - -This plugin is part of the Eliza project. See the main project repository for license information. diff --git a/packages/plugin-avalanche/__tests__/actions/tokenMillCreate.test.ts b/packages/plugin-avalanche/__tests__/actions/tokenMillCreate.test.ts deleted file mode 100644 index c0a821ceab41b..0000000000000 --- a/packages/plugin-avalanche/__tests__/actions/tokenMillCreate.test.ts +++ /dev/null @@ -1,153 +0,0 @@ -import { describe, expect, it, vi, beforeEach } from 'vitest'; -import tokenMillCreate from '../../src/actions/tokenMillCreate'; -import { generateObject, composeContext } from '@elizaos/core'; -import * as tokenMill from '../../src/utils/tokenMill'; - -vi.mock('@elizaos/core', () => ({ - generateObject: vi.fn(), - composeContext: vi.fn(), - elizaLogger: { - log: vi.fn(), - debug: vi.fn(), - error: vi.fn(), - success: vi.fn(), - }, - ModelClass: { - SMALL: 'small' - } -})); - -vi.mock('../../src/utils/tokenMill', () => ({ - createMarketAndToken: vi.fn(), -})); - -describe('tokenMillCreate', () => { - let mockRuntime; - let mockMessage; - let mockState; - let mockCallback; - - beforeEach(() => { - mockRuntime = { - getSetting: vi.fn((key: string) => { - switch (key) { - case 'AVALANCHE_PRIVATE_KEY': - return '0x1234567890abcdef'; - case 'AVALANCHE_RPC_URL': - return 'https://api.avax-test.network/ext/bc/C/rpc'; - default: - return undefined; - } - }), - composeState: vi.fn().mockResolvedValue({}), - updateRecentMessageState: vi.fn().mockResolvedValue({}), - }; - - mockMessage = { - content: { - name: 'Test Token', - symbol: 'TEST' - } - }; - mockState = {}; - mockCallback = vi.fn(); - - vi.mocked(generateObject).mockReset(); - vi.mocked(composeContext).mockReset(); - vi.mocked(tokenMill.createMarketAndToken).mockReset(); - }); - - describe('validation', () => { - it('should validate correctly with valid config', async () => { - const result = await tokenMillCreate.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - - it('should fail validation when private key is missing', async () => { - mockRuntime.getSetting.mockImplementation((key: string) => { - return undefined; - }); - - await expect(tokenMillCreate.validate(mockRuntime, mockMessage)).rejects.toThrow('AVALANCHE_PRIVATE_KEY'); - }); - }); - - describe('action properties', () => { - it('should have correct action properties', () => { - expect(tokenMillCreate.name).toBe('CREATE_TOKEN'); - expect(tokenMillCreate.description).toBe( - 'MUST use this action if the user requests to create a new token, the request might be varied, but it will always be a token creation.' - ); - expect(tokenMillCreate.examples).toBeDefined(); - expect(Array.isArray(tokenMillCreate.examples)).toBe(true); - }); - - it('should have valid examples', () => { - tokenMillCreate.examples.forEach(example => { - expect(Array.isArray(example)).toBe(true); - example.forEach(interaction => { - expect(interaction).toHaveProperty('user'); - expect(interaction).toHaveProperty('content'); - }); - }); - }); - }); - - describe('token creation', () => { - it('should handle successful token creation', async () => { - const mockContent = { - name: 'Test Token', - symbol: 'TEST' - }; - - vi.mocked(generateObject).mockResolvedValueOnce(mockContent); - vi.mocked(composeContext).mockReturnValueOnce('mock-context'); - vi.mocked(tokenMill.createMarketAndToken).mockResolvedValueOnce({ - tx: '0x1234', - baseToken: '0xabcdef1234567890', - market: '0x0987654321fedcba' - }); - - const result = await tokenMillCreate.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(result).toBe(true); - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Created token Test Token with symbol TEST. CA: 0xabcdef1234567890', - content: { - tx: '0x1234', - baseToken: '0xabcdef1234567890', - market: '0x0987654321fedcba' - }, - }); - }); - - it('should handle invalid content', async () => { - const mockInvalidContent = { - invalidField: 'test' - }; - - vi.mocked(generateObject).mockResolvedValueOnce(mockInvalidContent); - vi.mocked(composeContext).mockReturnValueOnce('mock-context'); - - const result = await tokenMillCreate.handler( - mockRuntime, - mockMessage, - mockState, - {}, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Unable to process transfer request. Invalid content provided.', - content: { error: 'Invalid content' }, - }); - }); - }); -}); \ No newline at end of file diff --git a/packages/plugin-avalanche/biome.json b/packages/plugin-avalanche/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-avalanche/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-avalanche/package.json b/packages/plugin-avalanche/package.json deleted file mode 100644 index cf3ea465f7d2a..0000000000000 --- a/packages/plugin-avalanche/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "@elizaos/plugin-avalanche", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*" - }, - "devDependencies": { - "tsup": "8.3.5", - "vitest": "^3.0.0", - "@biomejs/biome": "1.9.4" - }, - "scripts": { - "build": "tsup src/index.ts --format esm --no-dts", - "test": "vitest run", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-avalanche/src/actions/tokenMillCreate.ts b/packages/plugin-avalanche/src/actions/tokenMillCreate.ts deleted file mode 100644 index 805bcaaba9712..0000000000000 --- a/packages/plugin-avalanche/src/actions/tokenMillCreate.ts +++ /dev/null @@ -1,167 +0,0 @@ -import { - type Action, - type ActionExample, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - composeContext, - generateObject, - ModelClass, - type Content, -} from "@elizaos/core"; -import { validateAvalancheConfig } from "../environment"; -import { createMarketAndToken } from "../utils/tokenMill"; - -export interface TokenMillCreateContent extends Content { - name: string; - symbol: string; -} - -function isTokenMillCreateContent( - _runtime: IAgentRuntime, - content: any -): content is TokenMillCreateContent { - elizaLogger.debug("Content for create", content); - return ( - typeof content.name === "string" && typeof content.symbol === "string" - ); -} - -const transferTemplate = `Respond with a JSON markdown block containing only the extracted values. - -If the user did not provide enough details, respond with what you can. Name and Symbol are required. - -Example response for a new token: -\`\`\`json -{ - "name": "Test Token", - "symbol": "TEST" -} -\`\`\` - -## Recent Messages - -{{recentMessages}} - -Given the recent messages, extract the following information about the requested token creation: -- Name -- Symbol - -Respond with a JSON markdown block containing only the extracted values.`; - -export default { - name: "CREATE_TOKEN", - similes: [ - "LAUNCH_TOKEN", - "NEW_TOKEN", - "CREATE_MEMECOIN", - "CREATE_MEME_TOKEN", - ], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateAvalancheConfig(runtime); - return true; - }, - description: - "MUST use this action if the user requests to create a new token, the request might be varied, but it will always be a token creation.", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - elizaLogger.log("Starting CREATE_TOKEN handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Compose transfer context - const transferContext = composeContext({ - state: currentState, - template: transferTemplate, - }); - - // Generate transfer content - const content = await generateObject({ - runtime, - context: transferContext, - modelClass: ModelClass.SMALL, - }); - - elizaLogger.debug("Create content:", content); - - // Validate transfer content - if (!isTokenMillCreateContent(runtime, content)) { - elizaLogger.error("Invalid content for CREATE_TOKEN action."); - callback?.({ - text: "Unable to process transfer request. Invalid content provided.", - content: { error: "Invalid content" }, - }); - return false; - } - - const { tx, baseToken, market } = await createMarketAndToken( - runtime, - content.name, - content.symbol - ); - callback?.({ - text: `Created token ${content.name} with symbol ${content.symbol}. CA: ${baseToken}`, - content: { tx, baseToken, market }, - }); - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Create a new memecoin called 'Test Token' with the symbol 'TEST'", - }, - }, - { - user: "{{user2}}", - content: { - action: "CREATE_TOKEN", - name: "Test Token", - symbol: "TEST", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { text: "Create a token called news" }, - }, - { - user: "{{user2}}", - content: { - action: "CREATE_TOKEN", - name: "News Token", - symbol: "NEWS", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { text: "Create a token" }, - }, - { - user: "{{user2}}", - content: { - action: "CREATE_TOKEN", - name: "Okay", - symbol: "OK", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-avalanche/src/actions/transfer.ts b/packages/plugin-avalanche/src/actions/transfer.ts deleted file mode 100644 index 4fed8d0d0d93b..0000000000000 --- a/packages/plugin-avalanche/src/actions/transfer.ts +++ /dev/null @@ -1,203 +0,0 @@ -import { - type Action, - type ActionExample, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - composeContext, - generateObject, - ModelClass, - type Content, -} from "@elizaos/core"; -import { getTxReceipt, sendNativeAsset, sendToken } from "../utils"; -import type { Address } from "viem"; -import { validateAvalancheConfig } from "../environment"; -import { TOKEN_ADDRESSES } from "../utils/constants"; - -export interface TransferContent extends Content { - tokenAddress: Address; - recipient: Address; - amount: string | number; -} - -function isTransferContent( - _runtime: IAgentRuntime, - content: unknown -): content is TransferContent { - elizaLogger.debug("Content for transfer", content); - return ( - typeof content === "object" && - content !== null && - "tokenAddress" in content && - "recipient" in content && - "amount" in content && - typeof (content as TransferContent).tokenAddress === "string" && - (content as TransferContent).tokenAddress.startsWith("0x") && - typeof (content as TransferContent).recipient === "string" && - (content as TransferContent).recipient.startsWith("0x") && - (typeof (content as TransferContent).amount === "string" || - typeof (content as TransferContent).amount === "number") - ); -} - -const transferTemplate = `Respond with a JSON markdown block containing only the extracted values -- Use null for any values that cannot be determined. -- Use address zero for native AVAX transfers. - -Example response for a 10 WAVAX transfer: -\`\`\`json -{ - "tokenAddress": "0xB31f66AA3C1e785363F0875A1B74E27b85FD66c7", - "recipient": "0xDcEDF06Fd33E1D7b6eb4b309f779a0e9D3172e44", - "amount": "10" -} -\`\`\` - -Example response for a 0.1 AVAX transfer: -\`\`\`json -{ - "tokenAddress": "0x0000000000000000000000000000000000000000", - "recipient": "0xDcEDF06Fd33E1D7b6eb4b309f779a0e9D3172e44", - "amount": "0.1" -} -\`\`\` - -## Token Addresses - -${Object.entries(TOKEN_ADDRESSES) - .map(([key, value]) => `- ${key}: ${value}`) - .join("\n")} - -## Recent Messages - -{{recentMessages}} - -Given the recent messages, extract the following information about the requested token transfer: -- Token contract address -- Recipient wallet address -- Amount to transfer - -Respond with a JSON markdown block containing only the extracted values.`; - -export default { - name: "SEND_TOKEN", - similes: [ - "TRANSFER_TOKEN_ON_AVALANCHE", - "TRANSFER_TOKENS_ON_AVALANCHE", - "SEND_TOKENS_ON_AVALANCHE", - "SEND_AVAX_ON_AVALANCHE", - "PAY_ON_AVALANCHE", - ], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateAvalancheConfig(runtime); - return true; - }, - description: - "MUST use this action if the user requests send a token or transfer a token, the request might be varied, but it will always be a token transfer.", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - elizaLogger.log("Starting SEND_TOKEN handler..."); - - // Validate transfer - if (message.content.source === "direct") { - // - } else { - callback?.({ - text: "i can't do that for you.", - content: { error: "Transfer not allowed" }, - }); - return false; - } - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Compose transfer context - const transferContext = composeContext({ - state: currentState, - template: transferTemplate, - }); - - // Generate transfer content - const content = await generateObject({ - runtime, - context: transferContext, - modelClass: ModelClass.SMALL, - }); - - elizaLogger.debug("Transfer content:", content); - - // Validate transfer content - if (!isTransferContent(runtime, content)) { - elizaLogger.error("Invalid content for TRANSFER_TOKEN action."); - callback?.({ - text: "Unable to process transfer request. Invalid content provided.", - content: { error: "Invalid transfer content" }, - }); - return false; - } - - let tx: `0x${string}` | undefined; - if ( - content.tokenAddress === - "0x0000000000000000000000000000000000000000" - ) { - tx = await sendNativeAsset( - runtime, - content.recipient, - content.amount as number - ); - } else { - tx = await sendToken( - runtime, - content.tokenAddress, - content.recipient, - content.amount as number - ); - } - - if (tx) { - const receipt = await getTxReceipt(runtime, tx); - if (receipt.status === "success") { - callback?.({ - text: "transfer successful", - content: { success: true, txHash: tx }, - }); - } else { - callback?.({ - text: "transfer failed", - content: { error: "Transfer failed" }, - }); - } - } else { - callback?.({ - text: "transfer failed", - content: { error: "Transfer failed" }, - }); - } - - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Send 10 AVAX to 0x049d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-avalanche/src/actions/yakStrategy.ts b/packages/plugin-avalanche/src/actions/yakStrategy.ts deleted file mode 100644 index 7b5c9e008539b..0000000000000 --- a/packages/plugin-avalanche/src/actions/yakStrategy.ts +++ /dev/null @@ -1,220 +0,0 @@ -import { - type Action, - type ActionExample, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - composeContext, - generateObject, - ModelClass, - type Content, -} from "@elizaos/core"; -import { approve, deposit, getTxReceipt } from "../utils"; -import type { Address } from "viem"; -import { validateAvalancheConfig } from "../environment"; -import { STRATEGY_ADDRESSES, TOKEN_ADDRESSES } from "../utils/constants"; - -export interface StrategyContent extends Content { - depositTokenAddress: string; - strategyAddress: string; - amount: string | number; -} - -// refactoring zone -function isStrategyContent( - _runtime: IAgentRuntime, - content: unknown -): content is StrategyContent { - elizaLogger.debug("Content for strategy", content); - return ( - typeof content === "object" && - content !== null && - "depositTokenAddress" in content && - "strategyAddress" in content && - "amount" in content && - typeof (content as StrategyContent).depositTokenAddress === "string" && - typeof (content as StrategyContent).strategyAddress === "string" && - (typeof (content as StrategyContent).amount === "string" || - typeof (content as StrategyContent).amount === "number") - ); -} - -const strategyTemplate = `Respond with a JSON markdown block containing only the extracted values -- Use null for any values that cannot be determined. -- Use address zero for native AVAX. - -Example response for a 100 USDC deposit into a strategy: -\`\`\`json -{ - "depositTokenAddress": "0xB97EF9Ef8734C71904D8002F8b6Bc66Dd9c48a6E", - "strategyAddress": "0xFB692D03BBEA21D8665035779dd3082c2B1622d0", - "amount": "100" -} -\`\`\` - -Example response for a 10 WAVAX deposit into a strategy: -\`\`\`json -{ - "depositTokenAddress": "0xB31f66AA3C1e785363F0875A1B74E27b85FD66c7", - "strategyAddress": "0x8B414448de8B609e96bd63Dcf2A8aDbd5ddf7fdd", - "amount": "10" -} -\`\`\` - -## Token Addresses - -${Object.entries(TOKEN_ADDRESSES) - .map(([key, value]) => `- ${key}: ${value}`) - .join("\n")} - -## Strategy Addresses - -${Object.entries(STRATEGY_ADDRESSES) - .map(([key, value]) => `- ${key}: ${value}`) - .join("\n")} - -## Recent Messages - -{{recentMessages}} - -Given the recent messages, extract the following information about the requested strategy management: -- Deposit token address (the token to deposit) -- Strategy address (the strategy to deposit into) -- Amount to deposit - -Respond with a JSON markdown block containing only the extracted values.`; - -export default { - name: "DEPOSIT_TO_STRATEGY", - similes: ["DEPOSIT_FOR_YIELD", "DEPOSIT_TOKENS"], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateAvalancheConfig(runtime); - return true; - }, - description: - "MUST use this action if the user requests to deposit into a yield-earning strategy, the request might be varied, but it will always be a deposit into a strategy.", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - elizaLogger.log("Starting DEPOSIT_TO_STRATEGY handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Compose context - const strategyContext = composeContext({ - state: currentState, - template: strategyTemplate, - }); - - // Generate content - const content = await generateObject({ - runtime, - context: strategyContext, - modelClass: ModelClass.SMALL, - }); - - // Validate content - if (!isStrategyContent(runtime, content)) { - elizaLogger.error( - "Invalid content for DEPOSIT_TO_STRATEGY action." - ); - callback?.({ - text: "Unable to process deposit request. Invalid content provided.", - content: { error: "Invalid deposit content" }, - }); - return false; - } - - // Log the swap content - elizaLogger.debug("Deposit content:", content); - - if ( - content.depositTokenAddress === - "0x0000000000000000000000000000000000000000" - ) { - // todo: deposit from native - elizaLogger.log("Swapping from native AVAX"); - } else { - const tx = await approve( - runtime, - content.depositTokenAddress as Address, - content.strategyAddress as Address, - content.amount as number - ); - callback?.({ - text: "approving token...", - content: { success: true }, - }); - - if (tx) { - let receipt = await getTxReceipt(runtime, tx); - - if (receipt.status === "success") { - callback?.({ - text: "token approved, depositing...", - content: { success: true, txHash: tx }, - }); - - const depositTx = await deposit( - runtime, - content.depositTokenAddress as Address, - content.strategyAddress as Address, - content.amount as number - ); - if (depositTx) { - receipt = await getTxReceipt(runtime, depositTx); - if (receipt.status === "success") { - callback?.({ - text: "deposit successful", - content: { success: true, txHash: depositTx }, - }); - } else { - callback?.({ - text: "deposit failed", - content: { error: "Deposit failed" }, - }); - } - } - } else { - callback?.({ - text: "approve failed", - content: { error: "Approve failed" }, - }); - } - } else { - callback?.({ - text: "approve failed", - content: { error: "Approve failed" }, - }); - } - } - - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { text: "Deposit 1 USDC into the strategy" }, - }, - ], - [ - { - user: "{{user1}}", - content: { text: "Deposit 10 gmYAK to earn yield" }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-avalanche/src/actions/yakSwap.ts b/packages/plugin-avalanche/src/actions/yakSwap.ts deleted file mode 100644 index 1a788f7981f09..0000000000000 --- a/packages/plugin-avalanche/src/actions/yakSwap.ts +++ /dev/null @@ -1,248 +0,0 @@ -import { - type Action, - type ActionExample, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - composeContext, - generateObject, - ModelClass, - type Content, -} from "@elizaos/core"; -import { approve, getTxReceipt, swap, getQuote } from "../utils"; -import type { Address } from "viem"; -import { validateAvalancheConfig } from "../environment"; -import { TOKEN_ADDRESSES, YAK_SWAP_CONFIG } from "../utils/constants"; - -export interface SwapContent extends Content { - fromTokenAddress: string; - toTokenAddress: string; - recipient?: string; - amount: string | number; -} - -// refactoring zone -function isSwapContent( - _runtime: IAgentRuntime, - content: unknown -): content is SwapContent { - elizaLogger.debug("Content for swap", content); - return ( - typeof content === "object" && - content !== null && - "fromTokenAddress" in content && - "toTokenAddress" in content && - typeof (content as SwapContent).fromTokenAddress === "string" && - typeof (content as SwapContent).toTokenAddress === "string" && - (typeof (content as SwapContent).recipient === "string" || !(content as SwapContent).recipient) && - (typeof (content as SwapContent).amount === "string" || - typeof (content as SwapContent).amount === "number") - ); -} - -const transferTemplate = `Respond with a JSON markdown block containing only the extracted values -- Use null for any values that cannot be determined. -- Use address zero for native AVAX transfers. -- If our balance is not enough, use null for the amount. - -Example response for a 10 AVAX to USDC swap: -\`\`\`json -{ - "fromTokenAddress": "0x0000000000000000000000000000000000000000", - "toTokenAddress": "0xB97EF9Ef8734C71904D8002F8b6Bc66Dd9c48a6E", - "recipient": null, - "amount": "10" -} -\`\`\` - -Example response for a 10 WAVAX to USDC swap: -\`\`\`json -{ - "fromTokenAddress": "0xB31f66AA3C1e785363F0875A1B74E27b85FD66c7", - "toTokenAddress": "0xB97EF9Ef8734C71904D8002F8b6Bc66Dd9c48a6E", - "recipient": "0xDcEDF06Fd33E1D7b6eb4b309f779a0e9D3172e44", - "amount": "10" -} -\`\`\` - -Example response to buy WAVAX with 5 USDC: -\`\`\`json -{ - "fromTokenAddress": "0xB97EF9Ef8734C71904D8002F8b6Bc66Dd9c48a6E", - "toTokenAddress": "0xB31f66AA3C1e785363F0875A1B74E27b85FD66c7", - "recipient": "0xDcEDF06Fd33E1D7b6eb4b309f779a0e9D3172e44", - "amount": "5" -} -\`\`\` - -Example response to sell 5 USDC for gmYAK: -\`\`\`json -{ - "fromTokenAddress": "0xB97EF9Ef8734C71904D8002F8b6Bc66Dd9c48a6E", - "toTokenAddress": "0x3A30784c1af928CdFce678eE49370220aA716DC3", - "recipient": "0xDcEDF06Fd33E1D7b6eb4b309f779a0e9D3172e44", - "amount": "5" -} -\`\`\` - -## Token Addresses - -${Object.entries(TOKEN_ADDRESSES) - .map(([key, value]) => `- ${key}: ${value}`) - .join("\n")} - -## Recent Messages - -{{recentMessages}} - -Given the recent messages, extract the following information about the requested token transfer: -- From token address (the token to sell) -- To token address (the token to buy) -- Recipient wallet address (optional) -- Amount to sell - -Respond with a JSON markdown block containing only the extracted values.`; - -export default { - name: "SWAP_TOKEN", - similes: ["TRADE_TOKEN", "BUY_TOKEN", "SELL_TOKEN"], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateAvalancheConfig(runtime); - return true; - }, - description: - "MUST use this action if the user requests swap a token, the request might be varied, but it will always be a token swap.", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - elizaLogger.log("Starting SWAP_TOKEN handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Compose swap context - const swapContext = composeContext({ - state: currentState, - template: transferTemplate, - }); - - // Generate swap content - const content = await generateObject({ - runtime, - context: swapContext, - modelClass: ModelClass.SMALL, - }); - - // Validate swap content - if (!isSwapContent(runtime, content)) { - elizaLogger.error("Invalid content for SWAP_TOKEN action."); - callback?.({ - text: "Unable to process swap request. Invalid content provided.", - content: { error: "Invalid swap content" }, - }); - return false; - } - - // Log the swap content - elizaLogger.debug("Swap content:", content); - const quote = await getQuote( - runtime, - content.fromTokenAddress as Address, - content.toTokenAddress as Address, - content.amount as number - ); - // return - - if ( - content.fromTokenAddress === - "0x0000000000000000000000000000000000000000" - ) { - // todo: swap from native - elizaLogger.log("Swapping from native AVAX"); - } else if ( - content.toTokenAddress === - "0x0000000000000000000000000000000000000000" - ) { - // todo: swap to native - elizaLogger.log("Swapping to native AVAX"); - } else { - const yakRouterAddress = YAK_SWAP_CONFIG.router as Address; - const tx = await approve( - runtime, - content.fromTokenAddress as Address, - yakRouterAddress, - content.amount as number - ); - callback?.({ - text: "approving token...", - content: { success: true }, - }); - - if (tx) { - let receipt = await getTxReceipt(runtime, tx); - - if (receipt.status === "success") { - callback?.({ - text: "token approved, swapping...", - content: { success: true, txHash: tx }, - }); - const swapTx = await swap(runtime, quote); - if (swapTx) { - receipt = await getTxReceipt(runtime, swapTx); - if (receipt.status === "success") { - elizaLogger.log("Swap successful"); - callback?.({ - text: "swap successful", - content: { success: true, txHash: swapTx }, - }); - } else { - elizaLogger.error("Swap failed"); - callback?.({ - text: "swap failed", - content: { error: "Swap failed" }, - }); - } - } - } else { - elizaLogger.error("Approve failed"); - callback?.({ - text: "approve failed", - content: { error: "Approve failed" }, - }); - } - } else { - callback?.({ - text: "approve failed", - content: { error: "Approve failed" }, - }); - } - } - - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { text: "Swap 1 AVAX for USDC" }, - }, - ], - [ - { - user: "{{user1}}", - content: { text: "Swap 10 USDC for gmYAK" }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-avalanche/src/environment.ts b/packages/plugin-avalanche/src/environment.ts deleted file mode 100644 index 6a544454e29c4..0000000000000 --- a/packages/plugin-avalanche/src/environment.ts +++ /dev/null @@ -1,31 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -export const avalancheEnvSchema = z.object({ - AVALANCHE_PRIVATE_KEY: z - .string() - .min(1, "Avalanche private key is required"), -}); - -export type AvalancheConfig = z.infer; -export async function validateAvalancheConfig( - runtime: IAgentRuntime -): Promise { - try { - const config = { - AVALANCHE_PRIVATE_KEY: - runtime.getSetting("AVALANCHE_PRIVATE_KEY") || - process.env.AVALANCHE_PRIVATE_KEY, - }; - - return avalancheEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error(errorMessages); - } - throw error; - } -} diff --git a/packages/plugin-avalanche/src/index.ts b/packages/plugin-avalanche/src/index.ts deleted file mode 100644 index 4f6c683da7c40..0000000000000 --- a/packages/plugin-avalanche/src/index.ts +++ /dev/null @@ -1,31 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import tokenMillCreate from "./actions/tokenMillCreate"; -import transfer from "./actions/transfer"; -import yakSwap from "./actions/yakSwap"; -import yakStrategy from "./actions/yakStrategy"; -import { tokensProvider } from "./providers/tokens"; -import { strategiesProvider } from "./providers/strategies"; -import { walletProvider } from "./providers/wallet"; -import { - TOKEN_ADDRESSES, - STRATEGY_ADDRESSES, - YAK_SWAP_CONFIG, - TOKEN_MILL_CONFIG, -} from "./utils/constants"; - -export const PROVIDER_CONFIG = { - TOKEN_ADDRESSES: TOKEN_ADDRESSES, - STRATEGY_ADDRESSES: STRATEGY_ADDRESSES, - YAK_SWAP_CONFIG: YAK_SWAP_CONFIG, - TOKEN_MILL_CONFIG: TOKEN_MILL_CONFIG, -}; - -export const avalanchePlugin: Plugin = { - name: "avalanche", - description: "Avalanche Plugin for Eliza", - actions: [transfer, yakSwap, yakStrategy, tokenMillCreate], - evaluators: [], - providers: [tokensProvider, strategiesProvider, walletProvider], -}; - -export default avalanchePlugin; diff --git a/packages/plugin-avalanche/src/providers/strategies.ts b/packages/plugin-avalanche/src/providers/strategies.ts deleted file mode 100644 index 496f8115099ef..0000000000000 --- a/packages/plugin-avalanche/src/providers/strategies.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { - type IAgentRuntime, - type Memory, - type Provider, - type State, - elizaLogger, -} from "@elizaos/core"; -import { STRATEGY_ADDRESSES } from "../utils/constants"; - -const strategiesProvider: Provider = { - get: async (_runtime: IAgentRuntime, _message: Memory, _state?: State) => { - elizaLogger.debug("strategiesProvider::get"); - const strategies = Object.entries(STRATEGY_ADDRESSES) - .map(([key, value]) => `${key}: ${value}`) - .join("\n"); - return `The available strategy addresses and their deposit tokens are:\n${strategies}`; - }, -}; - -export { strategiesProvider }; diff --git a/packages/plugin-avalanche/src/providers/tokens.ts b/packages/plugin-avalanche/src/providers/tokens.ts deleted file mode 100644 index 0f6ef9734ad67..0000000000000 --- a/packages/plugin-avalanche/src/providers/tokens.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { - type IAgentRuntime, - type Memory, - type Provider, - type State, - elizaLogger, -} from "@elizaos/core"; -import { TOKEN_ADDRESSES } from "../utils/constants"; - -const tokensProvider: Provider = { - get: async (_runtime: IAgentRuntime, _message: Memory, _state?: State) => { - elizaLogger.debug("tokensProvider::get"); - const tokens = Object.entries(TOKEN_ADDRESSES) - .map(([key, value]) => `${key}: ${value}`) - .join("\n"); - return `The available tokens and their addresses are:\n${tokens}`; - }, -}; - -export { tokensProvider }; diff --git a/packages/plugin-avalanche/src/providers/wallet.ts b/packages/plugin-avalanche/src/providers/wallet.ts deleted file mode 100644 index 9ee31035638cc..0000000000000 --- a/packages/plugin-avalanche/src/providers/wallet.ts +++ /dev/null @@ -1,56 +0,0 @@ -import { - type IAgentRuntime, - type Memory, - type Provider, - type State, - elizaLogger, -} from "@elizaos/core"; -import { formatUnits } from "viem"; -import { getAccount, getDecimals, getTokenBalance } from "../utils"; -import { STRATEGY_ADDRESSES, TOKEN_ADDRESSES } from "../utils/constants"; - -const walletProvider: Provider = { - get: async (runtime: IAgentRuntime, _message: Memory, _state?: State) => { - elizaLogger.debug("walletProvider::get"); - const privateKey = runtime.getSetting("AVALANCHE_PRIVATE_KEY"); - if (!privateKey) { - throw new Error( - "AVALANCHE_PRIVATE_KEY not found in environment variables" - ); - } - - const account = getAccount(runtime); - - let output = "# Wallet Balances\n\n"; - output += `## Wallet Address\n\n\`${account.address}\`\n\n`; - - output += "## Latest Token Balances\n\n"; - for (const [token, address] of Object.entries(TOKEN_ADDRESSES)) { - const decimals = await getDecimals(runtime, address); - const balance = await getTokenBalance( - runtime, - address, - account.address - ); - output += `${token}: ${formatUnits(balance, decimals)}\n`; - } - output += "Note: These balances can be used at any time.\n\n"; - - output += "## Balances in Yield Strategies\n\n"; - for (const [strategy, address] of Object.entries(STRATEGY_ADDRESSES)) { - const balance = await getTokenBalance( - runtime, - address, - account.address - ); - const decimals = await getDecimals(runtime, address); - output += `${strategy}: ${formatUnits(balance, decimals)}\n`; - } - output += "Note: These balances must be withdrawn from the strategy before they can be used.\n\n"; - - elizaLogger.debug("walletProvider::get output:", output); - return output; - }, -}; - -export { walletProvider }; diff --git a/packages/plugin-avalanche/src/types/index.ts b/packages/plugin-avalanche/src/types/index.ts deleted file mode 100644 index b1104592eac05..0000000000000 --- a/packages/plugin-avalanche/src/types/index.ts +++ /dev/null @@ -1,35 +0,0 @@ -import type { Address } from "viem"; - -interface YakSwapQuote { - amounts: bigint[]; - adapters: Address[]; - path: Address[]; - gasEstimate: bigint; -} - -// struct MarketCreationParameters { -// uint96 tokenType; -// string name; -// string symbol; -// address quoteToken; -// uint256 totalSupply; -// uint16 creatorShare; -// uint16 stakingShare; -// uint256[] bidPrices; -// uint256[] askPrices; -// bytes args; -// } -interface TokenMillMarketCreationParameters { - tokenType: number; - name: string; - symbol: string; - quoteToken: Address; - totalSupply: bigint; - creatorShare: number; - stakingShare: number; - bidPrices: bigint[]; - askPrices: bigint[]; - args: string; -} - -export type { YakSwapQuote, TokenMillMarketCreationParameters }; diff --git a/packages/plugin-avalanche/src/utils/constants.ts b/packages/plugin-avalanche/src/utils/constants.ts deleted file mode 100644 index 815a8a2f26f4f..0000000000000 --- a/packages/plugin-avalanche/src/utils/constants.ts +++ /dev/null @@ -1,37 +0,0 @@ -import type { Address } from "viem"; - -const TOKEN_ADDRESSES: Record = { - AVAX: "0x0000000000000000000000000000000000000000", - WAVAX: "0xB31f66AA3C1e785363F0875A1B74E27b85FD66c7", - YAK: "0x59414b3089ce2AF0010e7523Dea7E2b35d776ec7", - gmYAK: "0x3A30784c1af928CdFce678eE49370220aA716DC3", - USDC: "0xB97EF9Ef8734C71904D8002F8b6Bc66Dd9c48a6E", - JOE: "0x6e84a6216eA6dACC71eE8E6b0a5B7322EEbC0fDd", - AUSD: "0x00000000eFE302BEAA2b3e6e1b18d08D69a9012a", - PRINCESS: "0xB310Ed3A7F4Ae79E59dCa99784b312c2D19fFC7C", - KIMBO: "0x184ff13B3EBCB25Be44e860163A5D8391Dd568c1", - COQ: "0x420FcA0121DC28039145009570975747295f2329", -}; - -const STRATEGY_ADDRESSES: Record = { - YAK: "0x0C4684086914D5B1525bf16c62a0FF8010AB991A", // Yield Yak YAK - USDC: "0xFB692D03BBEA21D8665035779dd3082c2B1622d0", // Benqi USDC - gmYAK: "0x9db213cE52155A9462A869Af495234e4734DC08a", // Token Mill gmYAK - PRINCESS: "0xA714d1f61D14F0beDecC0e0812A5641BD01424eD", - JOE: "0x714e06410B4960D3C1FC033bCd53ad9EB2d1f874", // sJOE -}; - -const YAK_SWAP_CONFIG = { - router: "0xC4729E56b831d74bBc18797e0e17A295fA77488c", -}; - -const TOKEN_MILL_CONFIG = { - factory: "0x501ee2D4AA611C906F785e10cC868e145183FCE4", -}; - -export { - TOKEN_ADDRESSES, - STRATEGY_ADDRESSES, - YAK_SWAP_CONFIG, - TOKEN_MILL_CONFIG, -}; diff --git a/packages/plugin-avalanche/src/utils/index.ts b/packages/plugin-avalanche/src/utils/index.ts deleted file mode 100644 index efa1b461a3cc4..0000000000000 --- a/packages/plugin-avalanche/src/utils/index.ts +++ /dev/null @@ -1,464 +0,0 @@ -import { type IAgentRuntime, elizaLogger } from "@elizaos/core"; -import { - createPublicClient, - createWalletClient, - type Hash, - http, - type Address, - parseUnits, -} from "viem"; -import { privateKeyToAccount } from "viem/accounts"; -import { avalanche } from "viem/chains"; -import type { YakSwapQuote } from "../types"; -import { YAK_SWAP_CONFIG } from "./constants"; - -export const getAccount = (runtime: IAgentRuntime) => { - const privateKey = - runtime.getSetting("AVALANCHE_PRIVATE_KEY") || - process.env.AVALANCHE_PRIVATE_KEY; - return privateKeyToAccount(`0x${privateKey.replace("0x", "")}`); -}; - -export const getPublicClient = (_runtime: IAgentRuntime) => { - return createPublicClient({ - chain: avalanche, - transport: http(), - }); -}; - -export const getWalletClient = (runtime: IAgentRuntime) => { - return createWalletClient({ - account: getAccount(runtime), - chain: avalanche, - transport: http(), - }); -}; - -export const getTxReceipt = async (runtime: IAgentRuntime, tx: Hash) => { - const publicClient = getPublicClient(runtime); - const receipt = await publicClient.waitForTransactionReceipt({ - hash: tx, - }); - return receipt; -}; - -export const getDecimals = async ( - runtime: IAgentRuntime, - tokenAddress: Address -) => { - if (tokenAddress === "0x0000000000000000000000000000000000000000") { - return avalanche.nativeCurrency.decimals; - } - const publicClient = getPublicClient(runtime); - const decimals = await publicClient.readContract({ - address: tokenAddress, - abi: [ - { - inputs: [], - name: "decimals", - outputs: [{ internalType: "uint8", name: "", type: "uint8" }], - stateMutability: "view", - type: "function", - }, - ], - functionName: "decimals", - }); - return decimals; -}; - -export const getNativeBalance = async ( - runtime: IAgentRuntime, - owner: Address -) => { - const publicClient = getPublicClient(runtime); - const balance = await publicClient.getBalance({ - address: owner, - }); - return balance; -}; - -export const getTokenBalance = async ( - runtime: IAgentRuntime, - tokenAddress: Address, - owner: Address -) => { - if (tokenAddress === "0x0000000000000000000000000000000000000000") { - return getNativeBalance(runtime, owner); - } - const publicClient = getPublicClient(runtime); - const balance = await publicClient.readContract({ - address: tokenAddress, - abi: [ - { - inputs: [ - { - internalType: "address", - name: "account", - type: "address", - }, - ], - name: "balanceOf", - outputs: [ - { internalType: "uint256", name: "", type: "uint256" }, - ], - stateMutability: "view", - type: "function", - }, - ], - functionName: "balanceOf", - args: [owner], - }); - return balance; -}; - -export const getQuote = async ( - runtime: IAgentRuntime, - fromTokenAddress: Address, - toTokenAddress: Address, - amount: number -) => { - const publicClient = getPublicClient(runtime); - const decimals = await getDecimals(runtime, fromTokenAddress); - const maxSteps = 2; - const gasPrice = parseUnits("25", "gwei"); // todo: get gas price from runtime - const quote = await publicClient.readContract({ - address: YAK_SWAP_CONFIG.router, - abi: [ - { - inputs: [ - { - internalType: "uint256", - name: "_amountIn", - type: "uint256", - }, - { - internalType: "address", - name: "_tokenIn", - type: "address", - }, - { - internalType: "address", - name: "_tokenOut", - type: "address", - }, - { - internalType: "uint256", - name: "_maxSteps", - type: "uint256", - }, - { - internalType: "uint256", - name: "_gasPrice", - type: "uint256", - }, - ], - name: "findBestPathWithGas", - outputs: [ - { - components: [ - { - internalType: "uint256[]", - name: "amounts", - type: "uint256[]", - }, - { - internalType: "address[]", - name: "adapters", - type: "address[]", - }, - { - internalType: "address[]", - name: "path", - type: "address[]", - }, - { - internalType: "uint256", - name: "gasEstimate", - type: "uint256", - }, - ], - internalType: "struct YakRouter.FormattedOfferWithGas", - name: "", - type: "tuple", - }, - ], - stateMutability: "view", - type: "function", - }, - ], - functionName: "findBestPathWithGas", - args: [ - parseUnits(amount.toString(), decimals), - fromTokenAddress, - toTokenAddress, - maxSteps, - gasPrice, - ], - }); - elizaLogger.log("Quote:", quote); - return quote as YakSwapQuote; -}; - -export const sendNativeAsset = async ( - runtime: IAgentRuntime, - recipient: Address, - amount: number -) => { - const walletClient = getWalletClient(runtime); - const decimals = await getDecimals( - runtime, - "0x0000000000000000000000000000000000000000" - ); - const tx = await walletClient.sendTransaction({ - to: recipient, - value: parseUnits(amount.toString(), decimals), - }); - return tx as Hash; -}; - -export const sendToken = async ( - runtime: IAgentRuntime, - tokenAddress: Address, - recipient: Address, - amount: number -) => { - const decimals = await getDecimals(runtime, tokenAddress); - const publicClient = getPublicClient(runtime); - - try { - const { result, request } = await publicClient.simulateContract({ - account: getAccount(runtime), - address: tokenAddress, - abi: [ - { - inputs: [ - { - internalType: "address", - name: "dst", - type: "address", - }, - { - internalType: "uint256", - name: "amount", - type: "uint256", - }, - ], - name: "transfer", - outputs: [ - { - internalType: "bool", - name: "", - type: "bool", - }, - ], - stateMutability: "nonpayable", - type: "function", - }, - ], - functionName: "transfer", - args: [recipient, parseUnits(amount.toString(), decimals)], - }); - - if (!result) { - throw new Error("Transfer failed"); - } - - elizaLogger.debug("Request:", request); - - const walletClient = getWalletClient(runtime); - const tx = await walletClient.writeContract(request); - elizaLogger.log("Transaction:", tx); - return tx as Hash; - } catch (error) { - elizaLogger.error("Error simulating contract:", error); - return; - } -}; - -export const approve = async ( - runtime: IAgentRuntime, - tokenAddress: Address, - spender: Address, - amount: number -) => { - try { - const decimals = await getDecimals(runtime, tokenAddress); - const publicClient = getPublicClient(runtime); - const { result, request } = await publicClient.simulateContract({ - account: getAccount(runtime), - address: tokenAddress, - abi: [ - { - inputs: [ - { - internalType: "address", - name: "_spender", - type: "address", - }, - { - internalType: "uint256", - name: "_value", - type: "uint256", - }, - ], - name: "approve", - outputs: [ - { - internalType: "bool", - name: "", - type: "bool", - }, - ], - stateMutability: "nonpayable", - type: "function", - }, - ], - functionName: "approve", - args: [spender, parseUnits(amount.toString(), decimals)], - }); - - if (!result) { - throw new Error("Approve failed"); - } - - elizaLogger.debug("Request:", request); - - const walletClient = getWalletClient(runtime); - const tx = await walletClient.writeContract(request); - elizaLogger.log("Transaction:", tx); - return tx; - } catch (error) { - elizaLogger.error("Error approving:", error); - return; - } -}; - -export const swap = async ( - runtime: IAgentRuntime, - quote: YakSwapQuote, - recipient?: Address -) => { - const slippageBips = 20n; - const amountOut = quote.amounts[quote.amounts.length - 1]; - const allowedSlippage = (amountOut * slippageBips) / 10000n; - const trade = { - amountIn: quote.amounts[0], - amountOut: amountOut - allowedSlippage, - path: quote.path, - adapters: quote.adapters, - }; - try { - const account = getAccount(runtime); - const publicClient = getPublicClient(runtime); - const { _result, request } = await publicClient.simulateContract({ - account: account, - address: YAK_SWAP_CONFIG.router, - abi: [ - { - inputs: [ - { - components: [ - { - internalType: "uint256", - name: "amountIn", - type: "uint256", - }, - { - internalType: "uint256", - name: "amountOut", - type: "uint256", - }, - { - internalType: "address[]", - name: "path", - type: "address[]", - }, - { - internalType: "address[]", - name: "adapters", - type: "address[]", - }, - ], - internalType: "struct YakRouter.Trade", - name: "_trade", - type: "tuple", - }, - { - internalType: "address", - name: "_to", - type: "address", - }, - { - internalType: "uint256", - name: "_fee", - type: "uint256", - }, - ], - name: "swapNoSplit", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - ], - functionName: "swapNoSplit", - args: [trade, recipient || account.address, 0n], - }); - - elizaLogger.debug("Request:", request); - - const walletClient = getWalletClient(runtime); - const tx = await walletClient.writeContract(request); - elizaLogger.log("Transaction:", tx); - return tx; - } catch (error) { - elizaLogger.error("Error simulating contract:", error); - return; - } -}; - -export const deposit = async ( - runtime: IAgentRuntime, - depositTokenAddress: Address, - strategyAddress: Address, - amount: number -) => { - try { - const decimals = await getDecimals(runtime, depositTokenAddress); - const publicClient = getPublicClient(runtime); - const { _result, request } = await publicClient.simulateContract({ - account: getAccount(runtime), - address: strategyAddress, - abi: [ - { - inputs: [ - { - internalType: "uint256", - name: "_amount", - type: "uint256", - }, - ], - name: "deposit", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - ], - functionName: "deposit", - args: [parseUnits(amount.toString(), decimals)], - }); - - // if (!result) { - // throw new Error('Deposit failed') - // } - - elizaLogger.debug("Request:", request); - - const walletClient = getWalletClient(runtime); - const tx = await walletClient.writeContract(request); - elizaLogger.log("Transaction:", tx); - return tx; - } catch (error) { - elizaLogger.error("Error depositing:", error); - return; - } -}; diff --git a/packages/plugin-avalanche/src/utils/tokenMill.ts b/packages/plugin-avalanche/src/utils/tokenMill.ts deleted file mode 100644 index 37803418b69c1..0000000000000 --- a/packages/plugin-avalanche/src/utils/tokenMill.ts +++ /dev/null @@ -1,157 +0,0 @@ -import { getAccount, getWalletClient, getPublicClient } from "./index"; -import { TOKEN_ADDRESSES, TOKEN_MILL_CONFIG } from "./constants"; -import { type IAgentRuntime, elizaLogger } from "@elizaos/core"; -import type { TokenMillMarketCreationParameters } from "../types"; -import { type Address, encodeAbiParameters, parseUnits } from "viem"; - -export const createMarketAndToken = async ( - runtime: IAgentRuntime, - name: string, - symbol: string -) => { - const account = getAccount(runtime); - const publicClient = getPublicClient(runtime); - const abi = [ - { - inputs: [ - { - components: [ - { - internalType: "uint96", - name: "tokenType", - type: "uint96", - }, - { - internalType: "string", - name: "name", - type: "string", - }, - { - internalType: "string", - name: "symbol", - type: "string", - }, - { - internalType: "address", - name: "quoteToken", - type: "address", - }, - { - internalType: "uint256", - name: "totalSupply", - type: "uint256", - }, - { - internalType: "uint16", - name: "creatorShare", - type: "uint16", - }, - { - internalType: "uint16", - name: "stakingShare", - type: "uint16", - }, - { - internalType: "uint256[]", - name: "bidPrices", - type: "uint256[]", - }, - { - internalType: "uint256[]", - name: "askPrices", - type: "uint256[]", - }, - { - internalType: "bytes", - name: "args", - type: "bytes", - }, - ], - internalType: "struct ITMFactory.MarketCreationParameters", - name: "parameters", - type: "tuple", - }, - ], - name: "createMarketAndToken", - outputs: [ - { - internalType: "address", - name: "baseToken", - type: "address", - }, - { - internalType: "address", - name: "market", - type: "address", - }, - ], - stateMutability: "nonpayable", - type: "function", - }, - ]; - - if (name.length === 0) { - throw new Error("Name must be provided"); - } - - if (name.length > 32) { - throw new Error("Name must be less than 12 characters"); - } - - if (symbol.length === 0) { - throw new Error("Symbol must be provided"); - } - - if (symbol.length > 8) { - throw new Error("Symbol must be less than 8 characters"); - } - - const params: TokenMillMarketCreationParameters = { - tokenType: 1, - name, - symbol, - quoteToken: TOKEN_ADDRESSES.WAVAX, - totalSupply: parseUnits("100000000", 18), - creatorShare: 2000, - stakingShare: 6000, - bidPrices: [ - 0, 0.018117, 0.042669, 0.075735, 0.12078, 0.18018, 0.26235, - 0.37124999999999997, 0.51975, 0.71973, 0.99, - ].map((price) => parseUnits(price.toString(), 18)), - askPrices: [ - 0, 0.0183, 0.0431, 0.0765, 0.122, 0.182, 0.265, 0.375, 0.525, 0.727, - 1, - ].map((price) => parseUnits(price.toString(), 18)), - args: encodeAbiParameters( - [{ name: "decimals", type: "uint256" }], - [18n] - ), - }; - - const { result, request } = await publicClient.simulateContract({ - account, - address: TOKEN_MILL_CONFIG.factory as Address, - abi, - functionName: "createMarketAndToken", - args: [params], - }); - - if (!result) { - throw new Error("Create failed"); - } - - elizaLogger.debug("request", request); - elizaLogger.debug("result", result); - - elizaLogger.debug("Request:", request); - - const walletClient = getWalletClient(runtime); - const tx = await walletClient.writeContract(request); - elizaLogger.log("Transaction:", tx); - - return { - tx: tx, - baseToken: result[0], - market: result[1], - }; -}; diff --git a/packages/plugin-avalanche/tsconfig.json b/packages/plugin-avalanche/tsconfig.json deleted file mode 100644 index 005fbac9d3634..0000000000000 --- a/packages/plugin-avalanche/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/plugin-avalanche/tsup.config.ts b/packages/plugin-avalanche/tsup.config.ts deleted file mode 100644 index 062c43212f6f5..0000000000000 --- a/packages/plugin-avalanche/tsup.config.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - format: ["esm"], - dts: true, - clean: true, - skipNodeModulesBundle: true, - noExternal: ["viem"], -}); diff --git a/packages/plugin-avalanche/vitest.config.ts b/packages/plugin-avalanche/vitest.config.ts deleted file mode 100644 index adbf725538008..0000000000000 --- a/packages/plugin-avalanche/vitest.config.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - }, -}); diff --git a/packages/plugin-b2/.npmignore b/packages/plugin-b2/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-b2/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-b2/README.md b/packages/plugin-b2/README.md deleted file mode 100644 index 1e5ba623cabf7..0000000000000 --- a/packages/plugin-b2/README.md +++ /dev/null @@ -1,169 +0,0 @@ -# @elizaos/plugin-b2 - -A plugin for interacting with the B2-Network within the ElizaOS ecosystem. - -## Description - -The B2 Network Plugin offers a set of features that can be integrated into the Eliza platform to enhance its capabilities. This plugin enables seamless token transfers on the B2-Network. It provides functionality to transfer both native B2-BTC and ERC20 tokens using secure wallet operations. - -## Installation - -```bash -pnpm install @elizaos/plugin-b2 -``` - -## Configuration - -The plugin requires the following environment variable: - -```typescript -B2_PRIVATE_KEY= -``` - -## Features - -### 1. Token Transfers - -- Send native B2-BTC and ERC20 tokens -- Support for multiple token standards -- Built-in address validation - -## Supported Tokens - -```typescript -const TOKENS = { - "B2-BTC": "0x0000000000000000000000000000000000000000", - uBTC: "0x796e4D53067FF374B89b2Ac101ce0c1f72ccaAc2", - USDC: "0xE544e8a38aDD9B1ABF21922090445Ba93f74B9E5", - USDT: "0x681202351a488040Fa4FdCc24188AfB582c9DD62", - // ... and more -}; -``` - -## Usage Examples - -### Token Transfer - -```typescript -// Send B2-BTC -"Send 1 B2-BTC to 0x4f9e2dc50B4Cd632CC2D24edaBa3Da2a9338832a"; - -// Send ERC20 -"Transfer 100 USDC to [address]"; -``` - -## Providers - -### 1. Wallet Provider - -- Displays wallet balances -- Real-time balance updates - -### 2. Tokens Provider - -- Lists supported tokens -- Shows token addresses - -## Development - -1. Clone the repository -2. Install dependencies: -3. Build the plugin: - -```bash -pnpm run build -``` - -4. Run linting: - -```bash -pnpm run lint -``` - -## Dependencies - -- viem: ^2.21.49 -- @elizaos/core: workspace:\* - -## Future Enhancements - -1. **Advanced DeFi Operations** - - - Multi-hop yield strategies - - Auto-compounding features - - Yield optimization algorithms - - Risk assessment tools - - Portfolio rebalancing automation - - Cross-chain yield farming - -2. **Enhanced Token Management** - - - Batch token operations - - Advanced token creation templates - - Token migration tools - - Automated token listing - - Token analytics dashboard - - Custom tokenomics implementation - -3. **YAK Protocol Integration** - - - Advanced routing algorithms - - MEV protection features - - Gas optimization strategies - - Liquidity analysis tools - - Price impact predictions - - Custom trading strategies - -4. **Benqi Protocol Features** - - - Collateral optimization - - Liquidation protection - - Interest rate monitoring - - Position management tools - - Risk assessment dashboard - - Auto-repayment features - -5. **Token Mill Improvements** - - - Advanced token customization - - Automated market making - - Token distribution tools - - Vesting schedule management - - Governance token features - - Token upgrade mechanisms - -6. **Security Enhancements** - - - Transaction simulation - - Smart contract auditing tools - - Real-time monitoring - - Automated safety checks - - Emergency shutdown features - - Multi-signature support - -7. **Developer Tools** - - - Enhanced debugging capabilities - - Testing framework improvements - - Documentation generator - - CLI tools for common operations - - Integration templates - - Performance monitoring - -8. **Analytics and Reporting** - - Portfolio tracking - - Performance metrics - - Gas usage optimization - - Transaction history analysis - - Yield comparison tools - - Risk assessment reports - -We welcome community feedback and contributions to help prioritize these enhancements. - -## Contributing - -Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information. - -## License - -This plugin is part of the Eliza project. See the main project repository for license information. diff --git a/packages/plugin-b2/biome.json b/packages/plugin-b2/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-b2/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-b2/package.json b/packages/plugin-b2/package.json deleted file mode 100644 index fd4f0b9495f9d..0000000000000 --- a/packages/plugin-b2/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "@elizaos/plugin-b2", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "tsup": "8.3.5" - }, - "devDependencies": { - "@biomejs/biome": "1.5.3", - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup src/index.ts --format esm --no-dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "lint": "biome lint .", - "format": "biome format . --write", - "check": "biome check --apply ." - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-b2/src/actions/stake.ts b/packages/plugin-b2/src/actions/stake.ts deleted file mode 100644 index 1d605c874ed71..0000000000000 --- a/packages/plugin-b2/src/actions/stake.ts +++ /dev/null @@ -1,147 +0,0 @@ -import { - type Action, - type ActionExample, - type IAgentRuntime, - generateObjectDeprecated, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - composeContext, - ModelClass, -} from "@elizaos/core"; -import { getTxReceipt, depositBTC } from "../utils"; -import type { Hash } from "viem"; -import { validateB2NetworkConfig } from "../environment"; -import { stakeTemplate } from "../templates"; -import type { WalletProvider } from "../providers"; -import type { StakeParams } from "../types"; -import { initWalletProvider } from "../providers"; -import { FARM_ADDRESS } from "../utils/constants"; - -// Exported for tests -export class StakeAction { - - constructor(private walletProvider: WalletProvider) {} - - async stake(params: StakeParams): Promise { - try { - const balance = await this.walletProvider.getNativeBalance(this.walletProvider.getAddress()); - if ( balance == BigInt(0) ) { - throw new Error(`The total cost (gas * gas fee + value) of executing this transaction exceeds the balance of the account.`); - } - const txHash = await depositBTC( - this.walletProvider, - FARM_ADDRESS, - params.amount, - ); - return txHash; - } catch(error) { - elizaLogger.error(`Stake failed: ${error.message}`); - throw new Error(`Stake failed: ${error.message}`); - } - } - - async txReceipt(tx: Hash) { - const receipt = await getTxReceipt(this.walletProvider, tx); - if (receipt.status === "success") { - return true; - } else { - return false; - } - } - - async buildStakeDetails( - state: State, - runtime: IAgentRuntime, - ): Promise { - const context = composeContext({ - state, - template: stakeTemplate, - }); - - const stakeDetails = (await generateObjectDeprecated({ - runtime, - context, - modelClass: ModelClass.SMALL, - })) as StakeParams; - - return stakeDetails; - } -} - -export const stakeAction: Action = { - name: "STAKE", - similes: [ - "STAKE_BTC_ON_B2", - "STAKE_NATIVE_BTC_ON_B2", - "DEPOSIT_BTC_ON_B2", - "DEPOSIT_NATIVE_BTC_ON_B2", - ], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateB2NetworkConfig(runtime); - return true; - }, - description: - "stake B2-BTC.", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - elizaLogger.debug("Starting STAKE handler..."); - - // Initialize or update state - if (!state) { - state = (await runtime.composeState(message)) as State; - } else { - state = await runtime.updateRecentMessageState(state); - } - - elizaLogger.debug("stake action handler called"); - const walletProvider = await initWalletProvider(runtime); - const action = new StakeAction(walletProvider); - - // Compose stake context - const paramOptions = await action.buildStakeDetails( - state, - runtime, - ); - - elizaLogger.debug("Stake paramOptions:", paramOptions); - - const txHash = await action.stake(paramOptions); - if (txHash) { - const result = await action.txReceipt(txHash); - if (result) { - callback?.({ - text: "stake successful", - content: { success: true, txHash: txHash }, - }); - } else { - callback?.({ - text: "stake failed", - content: { error: "Stake failed" }, - }); - } - } else { - callback?.({ - text: "stake failed", - content: { error: "Stake failed" }, - }); - } - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Stake 1 B2-BTC", - }, - }, - ], - ] as ActionExample[][], -}; diff --git a/packages/plugin-b2/src/actions/transfer.ts b/packages/plugin-b2/src/actions/transfer.ts deleted file mode 100644 index 27a1f5bce7a61..0000000000000 --- a/packages/plugin-b2/src/actions/transfer.ts +++ /dev/null @@ -1,159 +0,0 @@ -import { - type Action, - type ActionExample, - type IAgentRuntime, - generateObjectDeprecated, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - composeContext, - ModelClass, -} from "@elizaos/core"; -import { getTxReceipt, sendNativeAsset, sendToken } from "../utils"; -import type { Address, Hash } from "viem"; -import { validateB2NetworkConfig } from "../environment"; -import { transferTemplate } from "../templates"; -import type { WalletProvider } from "../providers"; -import type { Transaction, TransferParams } from "../types"; -import { initWalletProvider } from "../providers"; -import { TOKEN_ADDRESSES } from "../utils/constants" -// Exported for tests -export class TransferAction { - - constructor(private walletProvider: WalletProvider) {} - - async transfer(params: TransferParams): Promise { - try { - let txHash: Hash; - if (params.tokenAddress === TOKEN_ADDRESSES["B2-BTC"]) { - txHash = await sendNativeAsset( - this.walletProvider, - params.recipient as Address, - params.amount as number - ); - } else { - txHash = await sendToken( - this.walletProvider, - params.tokenAddress as Address, - params.recipient as Address, - params.amount as number - ); - } - return { - hash: txHash, - from: this.walletProvider.getAddress(), - tokenAddress: params.tokenAddress, - recipient: params.recipient, - amount: params.amount, - }; - } catch(error) { - elizaLogger.error(`Transfer failed: ${error.message}`); - throw new Error(`Transfer failed: ${error.message}`); - } - } - - async txReceipt(tx: Hash) { - const receipt = await getTxReceipt(this.walletProvider, tx); - if (receipt.status === "success") { - return true; - } else { - return false; - } - } - - async buildTransferDetails( - state: State, - runtime: IAgentRuntime, - ): Promise { - const context = composeContext({ - state, - template: transferTemplate, - }); - - const transferDetails = (await generateObjectDeprecated({ - runtime, - context, - modelClass: ModelClass.SMALL, - })) as TransferParams; - - return transferDetails; - } -} - -export const transferAction: Action = { - name: "SEND_TOKEN", - similes: [ - "TRANSFER_TOKEN_ON_B2", - "TRANSFER_TOKENS_ON_B2", - "SEND_TOKENS_ON_B2", - "SEND_B2BTC_ON_B2", - "PAY_ON_B2", - ], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateB2NetworkConfig(runtime); - return true; - }, - description: - "MUST use this action if the user requests send a token or transfer a token, the request might be varied, but it will always be a token transfer.", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - elizaLogger.debug("Starting SEND_TOKEN handler..."); - - // Initialize or update state - if (!state) { - state = (await runtime.composeState(message)) as State; - } else { - state = await runtime.updateRecentMessageState(state); - } - - elizaLogger.debug("Transfer action handler called"); - const walletProvider = await initWalletProvider(runtime); - const action = new TransferAction(walletProvider); - - // Compose transfer context - const paramOptions = await action.buildTransferDetails( - state, - runtime, - ); - - elizaLogger.debug("Transfer paramOptions:", paramOptions); - - const tx = await action.transfer(paramOptions); - if (tx) { - const result = await action.txReceipt(tx.hash); - if (result) { - callback?.({ - text: "transfer successful", - content: { success: true, txHash: tx.hash }, - }); - } else { - callback?.({ - text: "transfer failed", - content: { error: "Transfer failed" }, - }); - } - } else { - callback?.({ - text: "transfer failed", - content: { error: "Transfer failed" }, - }); - } - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Send 1 B2-BTC to 0x4f9e2dc50B4Cd632CC2D24edaBa3Da2a9338832a", - }, - }, - ], - ] as ActionExample[][], -}; diff --git a/packages/plugin-b2/src/actions/unstake.ts b/packages/plugin-b2/src/actions/unstake.ts deleted file mode 100644 index 10876f775917b..0000000000000 --- a/packages/plugin-b2/src/actions/unstake.ts +++ /dev/null @@ -1,147 +0,0 @@ -import { - type Action, - type ActionExample, - type IAgentRuntime, - generateObjectDeprecated, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - composeContext, - ModelClass, -} from "@elizaos/core"; -import { getTxReceipt, unstake } from "../utils"; -import type { Hash } from "viem"; -import { validateB2NetworkConfig } from "../environment"; -import { unstakeTemplate } from "../templates"; -import type { WalletProvider } from "../providers"; -import type { UnstakeParams } from "../types"; -import { initWalletProvider } from "../providers"; -import { FARM_ADDRESS } from "../utils/constants"; - -// Exported for tests -export class UnstakeAction { - - constructor(private walletProvider: WalletProvider) {} - - async unstake(params: UnstakeParams): Promise { - try { - const balance = await this.walletProvider.getNativeBalance(this.walletProvider.getAddress()); - if ( balance == BigInt(0) ) { - throw new Error(`The total cost (gas * gas fee + value) of executing this transaction exceeds the balance of the account.`); - } - const txHash = await unstake( - this.walletProvider, - FARM_ADDRESS, - params.amount, - ); - return txHash; - } catch(error) { - elizaLogger.error(`Unstake failed: ${error.message}`); - throw new Error(`Unstake failed: ${error.message}`); - } - } - - async txReceipt(tx: Hash) { - const receipt = await getTxReceipt(this.walletProvider, tx); - if (receipt.status === "success") { - return true; - } else { - return false; - } - } - - async buildUnstakeDetails( - state: State, - runtime: IAgentRuntime, - ): Promise { - const context = composeContext({ - state, - template: unstakeTemplate, - }); - - const unstakeDetails = (await generateObjectDeprecated({ - runtime, - context, - modelClass: ModelClass.SMALL, - })) as UnstakeParams; - - return unstakeDetails; - } -} - -export const unstakeAction: Action = { - name: "UNSTAKE", - similes: [ - "UNSTAKE_BTC_ON_B2", - "UNSTAKE_NATIVE_BTC_ON_B2", - "UNSTAKE_BTC_ON_B2", - "UNSTAKE_NATIVE_BTC_ON_B2", - ], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateB2NetworkConfig(runtime); - return true; - }, - description: - "unstake B2-BTC.", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - elizaLogger.debug("Starting UNSTAKE handler..."); - - // Initialize or update state - if (!state) { - state = (await runtime.composeState(message)) as State; - } else { - state = await runtime.updateRecentMessageState(state); - } - - elizaLogger.debug("unstake action handler called"); - const walletProvider = await initWalletProvider(runtime); - const action = new UnstakeAction(walletProvider); - - // Compose unstake context - const paramOptions = await action.buildUnstakeDetails( - state, - runtime, - ); - - elizaLogger.debug("Unstake paramOptions:", paramOptions); - - const txHash = await action.unstake(paramOptions); - if (txHash) { - const result = await action.txReceipt(txHash); - if (result) { - callback?.({ - text: "unstake successful", - content: { success: true, txHash: txHash }, - }); - } else { - callback?.({ - text: "unstake failed", - content: { error: "Unstake failed" }, - }); - } - } else { - callback?.({ - text: "unstake failed", - content: { error: "Unstake failed" }, - }); - } - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Unstake 1 B2-BTC", - }, - }, - ], - ] as ActionExample[][], -}; diff --git a/packages/plugin-b2/src/actions/withdraw.ts b/packages/plugin-b2/src/actions/withdraw.ts deleted file mode 100644 index e8b0ccc064b03..0000000000000 --- a/packages/plugin-b2/src/actions/withdraw.ts +++ /dev/null @@ -1,145 +0,0 @@ -import { - type Action, - type ActionExample, - type IAgentRuntime, - generateObjectDeprecated, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - composeContext, - ModelClass, -} from "@elizaos/core"; -import { getTxReceipt, withdraw } from "../utils"; -import type { Hash } from "viem"; -import { validateB2NetworkConfig } from "../environment"; -import { withdrawTemplate } from "../templates"; -import type { WalletProvider } from "../providers"; -import type { WithdrawParams } from "../types"; -import { initWalletProvider } from "../providers"; -import { FARM_ADDRESS } from "../utils/constants"; - -// Exported for tests -export class WithdrawAction { - - constructor(private walletProvider: WalletProvider) {} - - async withdraw(_params: WithdrawParams): Promise { - try { - const balance = await this.walletProvider.getNativeBalance(this.walletProvider.getAddress()); - if ( balance === BigInt(0) ) { - throw new Error("The total cost (gas * gas fee + value) of executing this transaction exceeds the balance of the account."); - } - const txHash = await withdraw( - this.walletProvider, - FARM_ADDRESS, - ); - return txHash; - } catch(error) { - elizaLogger.log(`Withdraw failed: ${error.message}`); - throw new Error(`Withdraw failed: ${error.message}`); - } - } - - async txReceipt(tx: Hash) { - const receipt = await getTxReceipt(this.walletProvider, tx); - if (receipt.status === "success") { - return true; - } - return false; - } - - async buildWithdrawDetails( - state: State, - runtime: IAgentRuntime, - ): Promise { - const context = composeContext({ - state, - template: withdrawTemplate, - }); - - const withdrawDetails = (await generateObjectDeprecated({ - runtime, - context, - modelClass: ModelClass.SMALL, - })) as WithdrawParams; - - return withdrawDetails; - } -} - -export const withdrawAction: Action = { - name: "WITHDRAW", - similes: [ - "WITHDRAW_BTC_ON_B2", - "WITHDRAW_NATIVE_BTC_ON_B2", - "WITHDRAW_BTC_ON_B2", - "WITHDRAW_NATIVE_BTC_ON_B2", - ], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateB2NetworkConfig(runtime); - return true; - }, - description: - "withdraw B2-BTC.", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - elizaLogger.debug("Starting WITHDRAW handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - elizaLogger.debug("withdraw action handler called"); - const walletProvider = await initWalletProvider(runtime); - const action = new WithdrawAction(walletProvider); - - // Compose withdraw context - const paramOptions = await action.buildWithdrawDetails( - currentState, - runtime, - ); - elizaLogger.debug("Withdraw paramOptions:", paramOptions); - - const txHash = await action.withdraw(paramOptions); - if (txHash) { - const result = await action.txReceipt(txHash); - if (result) { - callback?.({ - text: "withdraw successful", - content: { success: true, txHash: txHash }, - }); - } else { - callback?.({ - text: "withdraw failed", - content: { error: "Withdraw failed" }, - }); - } - } else { - callback?.({ - text: "withdraw failed", - content: { error: "Withdraw failed" }, - }); - } - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Withdraw B2-BTC", - }, - }, - ], - ] as ActionExample[][], -}; diff --git a/packages/plugin-b2/src/environment.ts b/packages/plugin-b2/src/environment.ts deleted file mode 100644 index b6d4cb50d9030..0000000000000 --- a/packages/plugin-b2/src/environment.ts +++ /dev/null @@ -1,31 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -export const b2NetworkEnvSchema = z.object({ - B2_PRIVATE_KEY: z - .string() - .min(1, "b2 network private key is required"), -}); - -export type b2NetworkConfig = z.infer; -export async function validateB2NetworkConfig( - runtime: IAgentRuntime -): Promise { - try { - const config = { - B2_PRIVATE_KEY: - runtime.getSetting("B2_PRIVATE_KEY") || - process.env.B2_PRIVATE_KEY, - }; - - return b2NetworkEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error(errorMessages); - } - throw error; - } -} diff --git a/packages/plugin-b2/src/index.ts b/packages/plugin-b2/src/index.ts deleted file mode 100644 index a5d670d8f03b9..0000000000000 --- a/packages/plugin-b2/src/index.ts +++ /dev/null @@ -1,18 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { transferAction } from "./actions/transfer"; -import { stakeAction } from "./actions/stake"; -import { unstakeAction } from "./actions/unstake"; -import { withdrawAction } from "./actions/withdraw"; -import { walletProvider } from "./providers"; - -export const b2Plugin: Plugin = { - name: "b2", - description: "B2 Network Plugin for Eliza", - actions: [transferAction, stakeAction, unstakeAction, withdrawAction], - providers: [walletProvider], - evaluators: [], - services: [], - clients: [], -}; - -export default b2Plugin; diff --git a/packages/plugin-b2/src/providers/index.ts b/packages/plugin-b2/src/providers/index.ts deleted file mode 100644 index 6293c760b8e03..0000000000000 --- a/packages/plugin-b2/src/providers/index.ts +++ /dev/null @@ -1,226 +0,0 @@ -import { - type IAgentRuntime, - type Memory, - type Provider, - type State, - elizaLogger, -} from "@elizaos/core"; -import { privateKeyToAccount } from "viem/accounts"; -import { - formatUnits, - type Address, - type Chain, - type Account, - type WalletClient, - type PrivateKeyAccount, - type PublicClient, - type HttpTransport, - http, - createPublicClient, - createWalletClient, -} from "viem"; -import { TOKEN_ADDRESSES } from "../utils/constants"; -import { b2Network } from "../utils/chains"; - -export class WalletProvider implements Provider { - private account: PrivateKeyAccount; - - constructor(accountOrPrivateKey: PrivateKeyAccount | `0x${string}`) { - this.setAccount(accountOrPrivateKey); - } - - private setAccount = ( - accountOrPrivateKey: PrivateKeyAccount | `0x${string}` - ) => { - if (typeof accountOrPrivateKey === "string") { - this.account = privateKeyToAccount(accountOrPrivateKey); - } else { - this.account = accountOrPrivateKey; - } - }; - - async getNativeBalance ( - owner: Address - ) { - const publicClient = this.getPublicClient(); - const balance = await publicClient.getBalance({ - address: owner, - }); - return balance; - }; - - async getTokenBalance ( - tokenAddress: Address, - owner: Address - ) { - if (tokenAddress === TOKEN_ADDRESSES["B2-BTC"]) { - return this.getNativeBalance(owner); - } - const publicClient = this.getPublicClient(); - const balance = await publicClient.readContract({ - address: tokenAddress, - abi: [ - { - inputs: [ - { - internalType: "address", - name: "account", - type: "address", - }, - ], - name: "balanceOf", - outputs: [ - { internalType: "uint256", name: "", type: "uint256" }, - ], - stateMutability: "view", - type: "function", - }, - ], - functionName: "balanceOf", - args: [owner], - }); - return balance; - }; - - getAccount(): Account { - return this.account; - } - - getAddress(): Address { - return this.account.address; - } - - - // Refactor area - getPublicClient(): PublicClient { - const transport = http(b2Network.rpcUrls.default.http[0]); - return createPublicClient({ - chain: b2Network, - transport, - }) as PublicClient; - } - - getWalletClient(): WalletClient { - const transport = http(b2Network.rpcUrls.default.http[0]); - const walletClient = createWalletClient({ - chain: b2Network, - transport, - account: this.account, - }); - return walletClient; - } - - async getDecimals(tokenAddress: Address) { - if (tokenAddress === TOKEN_ADDRESSES["B2-BTC"]) { - return b2Network.nativeCurrency.decimals; - } - const publicClient = this.getPublicClient(); - const decimals = await publicClient.readContract({ - address: tokenAddress, - abi: [ - { - inputs: [], - name: "decimals", - outputs: [{ internalType: "uint8", name: "", type: "uint8" }], - stateMutability: "view", - type: "function", - }, - ], - functionName: "decimals", - }); - return decimals; - } - - async get( - runtime: IAgentRuntime, - _message: Memory, - _state?: State - ): Promise { - elizaLogger.debug("walletProvider::get"); - try { - const privateKey = runtime.getSetting("B2_PRIVATE_KEY") as `0x${string}`; - if (!privateKey) { - throw new Error( - "B2_PRIVATE_KEY not found in environment variables" - ); - } - let accountAddress: Address; - if (this.account) { - accountAddress = this.getAddress(); - } else { - const walletProvider = await initWalletProvider(runtime); - accountAddress = walletProvider.getAddress(); - } - - let output = "# Wallet Balances\n\n"; - output += "## Wallet Address\n\n"; - output += `${accountAddress}\n\n`; - - output += "## Latest Token Balances\n\n"; - for (const [token, address] of Object.entries(TOKEN_ADDRESSES)) { - const decimals = await this.getDecimals(address); - const balance = await this.getTokenBalance( - address, - accountAddress, - ); - output += `${token}: ${formatUnits(balance, decimals)}\n`; - } - output += "Note: These balances can be used at any time.\n\n"; - elizaLogger.debug("walletProvider::get output:", output); - return output; - } catch (error) { - elizaLogger.error("Error in b2 wallet provider:", error); - return null; - } - } - -}; - -export const initWalletProvider = async (runtime: IAgentRuntime) => { - const privateKey = runtime.getSetting("B2_PRIVATE_KEY"); - if (!privateKey) { - throw new Error( - "B2_PRIVATE_KEY not found in environment variables" - ); - } - return new WalletProvider(privateKey as `0x${string}`); -}; - -export const walletProvider: Provider = { - async get( - runtime: IAgentRuntime, - _message: Memory, - _state?: State - ): Promise { - elizaLogger.debug("walletProvider::get"); - const privateKey = runtime.getSetting("B2_PRIVATE_KEY"); - if (!privateKey) { - throw new Error( - "B2_PRIVATE_KEY not found in environment variables" - ); - } - try { - const walletProvider = await initWalletProvider(runtime); - const account = walletProvider.getAccount(); - let output = "# Wallet Balances\n\n"; - output += "## Wallet Address\n\n"; - output += `${account.address}\n\n`; - - output += "## Latest Token Balances\n\n"; - for (const [token, address] of Object.entries(TOKEN_ADDRESSES)) { - const decimals = await walletProvider.getDecimals(address); - const balance = await walletProvider.getTokenBalance( - address, - account.address - ); - output += `${token}: ${formatUnits(balance, decimals)}\n`; - } - output += "Note: These balances can be used at any time.\n\n"; - elizaLogger.debug("walletProvider::get output:", output); - return output; - } catch (error) { - elizaLogger.error("Error in b2 wallet provider:", error); - return null; - } - } -}; \ No newline at end of file diff --git a/packages/plugin-b2/src/templates/index.ts b/packages/plugin-b2/src/templates/index.ts deleted file mode 100644 index 238a506a12298..0000000000000 --- a/packages/plugin-b2/src/templates/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { transferTemplate } from "./transfer"; -import { stakeTemplate } from "./stake"; -import { unstakeTemplate } from "./unstake"; -import {withdrawTemplate} from "./withdraw" - -export { - transferTemplate, - stakeTemplate, - unstakeTemplate, - withdrawTemplate -}; \ No newline at end of file diff --git a/packages/plugin-b2/src/templates/stake.ts b/packages/plugin-b2/src/templates/stake.ts deleted file mode 100644 index 1bb0fb9bbadf3..0000000000000 --- a/packages/plugin-b2/src/templates/stake.ts +++ /dev/null @@ -1,17 +0,0 @@ -export const stakeTemplate = `Respond with a JSON markdown block containing only the extracted values - -Example response for a 10 B2-BTC stake: -\`\`\`json -{ - "amount": "10" -} -\`\`\` - -## Recent Messages - -{{recentMessages}} - -Given the recent messages, extract the following information about the requested stake: -- Amount to stake - -Respond with a JSON markdown block containing only the extracted values.`; \ No newline at end of file diff --git a/packages/plugin-b2/src/templates/transfer.ts b/packages/plugin-b2/src/templates/transfer.ts deleted file mode 100644 index d9e23698940ee..0000000000000 --- a/packages/plugin-b2/src/templates/transfer.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { TOKEN_ADDRESSES } from "../utils/constants"; - -export const transferTemplate = `Respond with a JSON markdown block containing only the extracted values -- Use null for any values that cannot be determined. -- Use address zero for native B2-BTC transfers. - -Example response for a 10 uBTC transfer: -\`\`\`json -{ - "tokenAddress": "0x796e4D53067FF374B89b2Ac101ce0c1f72ccaAc2", - "recipient": "0x4f9e2dc50B4Cd632CC2D24edaBa3Da2a9338832a", - "amount": "10" -} -\`\`\` - -Example response for a 0.1 B2-BTC transfer: -\`\`\`json -{ - "tokenAddress": "0x0000000000000000000000000000000000000000", - "recipient": "0x4f9e2dc50B4Cd632CC2D24edaBa3Da2a9338832a", - "amount": "0.1" -} -\`\`\` - -## Token Addresses - -${Object.entries(TOKEN_ADDRESSES) - .map(([key, value]) => `- ${key}: ${value}`) - .join("\n")} - -## Recent Messages - -{{recentMessages}} - -Given the recent messages, extract the following information about the requested token transfer: -- Token contract address -- Recipient wallet address -- Amount to transfer - -Respond with a JSON markdown block containing only the extracted values.`; \ No newline at end of file diff --git a/packages/plugin-b2/src/templates/unstake.ts b/packages/plugin-b2/src/templates/unstake.ts deleted file mode 100644 index 35f2ee0418e20..0000000000000 --- a/packages/plugin-b2/src/templates/unstake.ts +++ /dev/null @@ -1,18 +0,0 @@ -export const unstakeTemplate = `Respond with a JSON markdown block containing only the extracted values -- Use null for any values that cannot be determined. - -Example response for a 5 B2-BTC unstake: -\`\`\`json -{ - "amount": "5" -} -\`\`\` - -## Recent Messages - -{{recentMessages}} - -Given the recent messages, extract the following information about the requested unstake: -- Amount to unstake - -Respond with a JSON markdown block containing only the extracted values.`; diff --git a/packages/plugin-b2/src/templates/withdraw.ts b/packages/plugin-b2/src/templates/withdraw.ts deleted file mode 100644 index 751df636ef48c..0000000000000 --- a/packages/plugin-b2/src/templates/withdraw.ts +++ /dev/null @@ -1,15 +0,0 @@ -export const withdrawTemplate = `Respond with a JSON markdown block containing only the extracted values -- This action does not require any parameters. - -Example response for a withdraw request: -\`\`\`json -{} -\`\`\` - -## Recent Messages - -{{recentMessages}} - -Given the recent messages, confirm the request for withdrawal. - -Respond with a JSON markdown block containing only an empty object.`; diff --git a/packages/plugin-b2/src/tests/stake.test.ts b/packages/plugin-b2/src/tests/stake.test.ts deleted file mode 100644 index 4d9523adf1563..0000000000000 --- a/packages/plugin-b2/src/tests/stake.test.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { describe, it, expect, beforeEach } from "vitest"; -import { generatePrivateKey } from "viem/accounts"; - -import { StakeAction } from "../actions/stake"; -import { WalletProvider } from "../providers"; -import type { StakeParams } from "../types"; - -describe("Stake Action", () => { - let wp: WalletProvider; - - beforeEach(async () => { - const pk = generatePrivateKey(); - wp = new WalletProvider(pk); - }); - describe("Constructor", () => { - it("should initialize with stake action", () => { - const sa = new StakeAction(wp); - expect(sa).toBeDefined(); - }); - }); - describe("Stake", () => { - let sa: StakeAction; - beforeEach(() => { - sa = new StakeAction(wp); - expect(sa).toBeDefined(); - }); - it("should initialize with stake action", () => { - const sa = new StakeAction(wp); - expect(sa).toBeDefined(); - }); - - it("throws if not enough gas", async () => { - const params = { - amount: "1", - } as StakeParams; - await expect( - sa.stake(params) - ).rejects.toThrow( - "Stake failed: The total cost (gas * gas fee + value) of executing this transaction exceeds the balance of the account." - ); - }); - - }); -}); diff --git a/packages/plugin-b2/src/tests/transfer.test.ts b/packages/plugin-b2/src/tests/transfer.test.ts deleted file mode 100644 index e200a68057bf5..0000000000000 --- a/packages/plugin-b2/src/tests/transfer.test.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { describe, it, expect, beforeEach } from "vitest"; -import { generatePrivateKey } from "viem/accounts"; -import { getEnvVariable } from "@elizaos/core"; - -import { TransferAction } from "../actions/transfer"; -import { WalletProvider } from "../providers"; -import type { TransferParams } from "../types"; -import { TOKEN_ADDRESSES } from "../utils/constants"; - -describe("Transfer Action", () => { - let wp: WalletProvider; - let wp1: WalletProvider; - - beforeEach(async () => { - const pk = generatePrivateKey(); - const pk1 = getEnvVariable("ARTHERA_PRIVATE_KEY") as `0x${string}`; - wp = new WalletProvider(pk); - console.log(wp.getAddress()); - if (pk1) { - wp1 = new WalletProvider(pk1); - } - }); - describe("Constructor", () => { - it("should initialize with transfer action", () => { - const ta = new TransferAction(wp); - - expect(ta).toBeDefined(); - }); - }); - describe("Transfer", () => { - let ta: TransferAction; - let ta1: TransferAction; - let receiverAddress: `0x${string}`; - - beforeEach(() => { - ta = new TransferAction(wp); - if (wp1) { - ta1 = new TransferAction(wp1); - receiverAddress = wp1.getAddress(); - } - else { - receiverAddress = wp.getAddress(); - } - }); - - it("throws if not enough gas", async () => { - const params = { - tokenAddress: TOKEN_ADDRESSES["B2-BTC"], - recipient: receiverAddress, - amount: "1", - } as TransferParams; - await expect( - ta.transfer(params) - ).rejects.toThrow( - "Transfer failed: The total cost (gas * gas fee + value) of executing this transaction exceeds the balance of the account." - ); - }); - - if (wp1) { - console.log("----------------------------------------------"); - it("transfers tokens", async () => { - const params = { - tokenAddress: TOKEN_ADDRESSES["B2-BTC"], - recipient: receiverAddress, - amount: "0.001", - } as TransferParams; - const tx = await ta1.transfer(params); - expect(tx).toBeDefined(); - expect(tx.from).toEqual(wp1.getAddress()); - expect(tx.recipient).toEqual(receiverAddress); - expect(tx.amount).toEqual(1000000000000000n); - }); - } - }); -}); diff --git a/packages/plugin-b2/src/tests/unstake.test.ts b/packages/plugin-b2/src/tests/unstake.test.ts deleted file mode 100644 index 774f31dcd41e0..0000000000000 --- a/packages/plugin-b2/src/tests/unstake.test.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { describe, it, expect, beforeEach } from "vitest"; -import { generatePrivateKey } from "viem/accounts"; - -import { UnstakeAction } from "../actions/unstake"; -import { WalletProvider } from "../providers"; -import type { UnstakeParams } from "../types"; - -describe("Unstake Action", () => { - let wp: WalletProvider; - - beforeEach(async () => { - const pk = generatePrivateKey(); - wp = new WalletProvider(pk); - }); - describe("Constructor", () => { - it("should initialize with unstake action", () => { - const ua = new UnstakeAction(wp); - expect(ua).toBeDefined(); - }); - }); - describe("Unstake", () => { - let ua: UnstakeAction; - - beforeEach(() => { - ua = new UnstakeAction(wp); - expect(ua).toBeDefined(); - }); - it("should initialize with unstake action", () => { - const ua = new UnstakeAction(wp); - expect(ua).toBeDefined(); - }); - - it("throws if not enough gas", async () => { - const params = { - amount: "1", - } as UnstakeParams; - await expect( - ua.unstake(params) - ).rejects.toThrow( - "Unstake failed: The total cost (gas * gas fee + value) of executing this transaction exceeds the balance of the account." - ); - }); - }); -}); diff --git a/packages/plugin-b2/src/tests/wallet.test.ts b/packages/plugin-b2/src/tests/wallet.test.ts deleted file mode 100644 index b597b26c958b0..0000000000000 --- a/packages/plugin-b2/src/tests/wallet.test.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"; -import { initWalletProvider, WalletProvider } from "../providers"; -import { generatePrivateKey } from "viem/accounts"; - -describe("B2 Network Wallet Provider", () => { - //let walletProvider: WalletProvider; - let mockRuntime; - - beforeEach(() => { - vi.clearAllMocks(); - const pk = generatePrivateKey(); - //walletProvider = new WalletProvider(pk); - mockRuntime = { - getSetting: vi.fn(), - }; - mockRuntime.getSetting.mockImplementation((key: string) => { - const settings = { - B2_PRIVATE_KEY: pk, - }; - return settings[key]; - }); - }); - - afterEach(() => { - vi.clearAllTimers(); - }); - - describe("Constructor", () => { - it("new wallet provider", () => { - const pk = generatePrivateKey(); - const ta = new WalletProvider(pk); - expect(ta).toBeDefined(); - }); - it("init wallet provider",async () => { - const ta = await initWalletProvider(mockRuntime); - expect(ta).toBeDefined(); - }); - }); -}); diff --git a/packages/plugin-b2/src/tests/withdraw.test.ts b/packages/plugin-b2/src/tests/withdraw.test.ts deleted file mode 100644 index 98ba61f567b44..0000000000000 --- a/packages/plugin-b2/src/tests/withdraw.test.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { describe, it, expect, beforeEach } from "vitest"; -import { generatePrivateKey } from "viem/accounts"; -import { WithdrawAction } from "../actions/withdraw"; -import { WalletProvider } from "../providers"; -import type { WithdrawParams } from "../types"; - -describe("Withdraw Action", () => { - let wp: WalletProvider; - - beforeEach(async () => { - const pk = generatePrivateKey(); - wp = new WalletProvider(pk); - }); - describe("Constructor", () => { - it("should initialize with withdraw action", () => { - const wa = new WithdrawAction(wp); - expect(wa).toBeDefined(); - }); - }); - describe("Withdraw", () => { - let wa: WithdrawAction; - beforeEach(() => { - wa = new WithdrawAction(wp); - expect(wa).toBeDefined(); - }); - it("should initialize with withdraw action", () => { - wa = new WithdrawAction(wp); - expect(wa).toBeDefined(); - }); - it("throws if not enough gas", async () => { - const params = {} as WithdrawParams; - wa = new WithdrawAction(wp); - await expect( - wa.withdraw(params) - ).rejects.toThrow( - "Withdraw failed: The total cost (gas * gas fee + value) of executing this transaction exceeds the balance of the account." - ); - }); - - }); -}); diff --git a/packages/plugin-b2/src/types/index.ts b/packages/plugin-b2/src/types/index.ts deleted file mode 100644 index 9273b77877d4c..0000000000000 --- a/packages/plugin-b2/src/types/index.ts +++ /dev/null @@ -1,31 +0,0 @@ -import type { - Address, - Hash, -} from "viem"; - -export interface Transaction { - hash: Hash; - from: Address; - tokenAddress: string; - recipient: string; - amount: string | number; -} - -export interface TransferParams { - tokenAddress: string; - recipient: string; - amount: string | number; -} - -export interface StakeParams { - amount: string | number; -} - -export interface UnstakeParams { - amount: string | number; -} - -export interface WithdrawParams { - // Since withdraw doesn't need parameters based on the action implementation, - // we'll keep it as an empty interface for type safety and future extensibility -} diff --git a/packages/plugin-b2/src/utils/chains.ts b/packages/plugin-b2/src/utils/chains.ts deleted file mode 100644 index 0eec893a83532..0000000000000 --- a/packages/plugin-b2/src/utils/chains.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { defineChain } from 'viem' - -export const b2Network = defineChain({ - id: 223, - name: 'B2Network', - network: 'B2Network', - nativeCurrency: { - decimals: 18, - name: 'Bitcoin', - symbol: 'BTC', - }, - blockExplorers: { - default: { - name: 'B2Network', - url: 'https://explorer.bsquared.network/' - } - }, - rpcUrls: { - default: { - http: ['https://rpc.bsquared.network/'], - }, - public: { - http: ['https://rpc.bsquared.network/'], - }, - }, -}) \ No newline at end of file diff --git a/packages/plugin-b2/src/utils/constants.ts b/packages/plugin-b2/src/utils/constants.ts deleted file mode 100644 index 2aa2e63c2ba7d..0000000000000 --- a/packages/plugin-b2/src/utils/constants.ts +++ /dev/null @@ -1,15 +0,0 @@ -import type { Address } from "viem"; - -const TOKEN_ADDRESSES: Record = { - "B2-BTC": "0x0000000000000000000000000000000000000000", - uBTC: "0x796e4D53067FF374B89b2Ac101ce0c1f72ccaAc2", - USDC: "0xE544e8a38aDD9B1ABF21922090445Ba93f74B9E5", - USDT: "0x681202351a488040Fa4FdCc24188AfB582c9DD62", -}; - -const FARM_ADDRESS: Address = "0xd5B5f1CA0fa5636ac54b0a0007BA374A1513346e"; - -export { - TOKEN_ADDRESSES, - FARM_ADDRESS, -}; diff --git a/packages/plugin-b2/src/utils/index.ts b/packages/plugin-b2/src/utils/index.ts deleted file mode 100644 index 51805b02d7af4..0000000000000 --- a/packages/plugin-b2/src/utils/index.ts +++ /dev/null @@ -1,289 +0,0 @@ -import { elizaLogger } from "@elizaos/core"; -import { - type Hash, - type Address, - parseUnits, - encodeFunctionData, -} from "viem"; -import { b2Network } from "./chains"; -import type { WalletProvider } from "../providers"; -import { TOKEN_ADDRESSES } from "./constants"; - -export const getTxReceipt = async (walletProvider: WalletProvider, tx: Hash) => { - const publicClient = walletProvider.getPublicClient(); - const receipt = await publicClient.waitForTransactionReceipt({ - hash: tx, - }); - return receipt; -}; - -export const sendNativeAsset = async ( - walletProvider: WalletProvider, - recipient: Address, - amount: number -) => { - const decimals = await walletProvider.getDecimals(TOKEN_ADDRESSES["B2-BTC"]); - const walletClient = walletProvider.getWalletClient(); - - const args = { - account: walletProvider.getAddress(), - to: recipient, - value: parseUnits(amount.toString(), decimals), - kzg: undefined, - chain: b2Network - }; - const tx = await walletClient.sendTransaction(args); - return tx as Hash; -}; - -export const sendToken = async ( - walletProvider: WalletProvider, - tokenAddress: Address, - recipient: Address, - amount: number -) => { - const decimals = await walletProvider.getDecimals(tokenAddress); - const publicClient = walletProvider.getPublicClient(); - try { - const { result, request } = await publicClient.simulateContract({ - account: walletProvider.getAccount(), - address: tokenAddress, - abi: [ - { - inputs: [ - { - internalType: "address", - name: "dst", - type: "address", - }, - { - internalType: "uint256", - name: "amount", - type: "uint256", - }, - ], - name: "transfer", - outputs: [ - { - internalType: "bool", - name: "", - type: "bool", - }, - ], - stateMutability: "nonpayable", - type: "function", - }, - ], - functionName: "transfer", - args: [recipient, parseUnits(amount.toString(), decimals)], - }); - - if (!result) { - throw new Error("Transfer failed"); - } - - elizaLogger.debug("Request:", request); - const walletClient = walletProvider.getWalletClient(); - const tx = await walletClient.writeContract(request); - elizaLogger.debug("Transaction:", tx); - return tx as Hash; - } catch (error) { - elizaLogger.error("Error simulating contract:", error); - return; - } -}; - -export const approve = async ( - walletProvider: WalletProvider, - tokenAddress: Address, - spender: Address, - amount: number -) => { - try { - const decimals = await walletProvider.getDecimals(tokenAddress); - const publicClient = walletProvider.getPublicClient(); - const { result, request } = await publicClient.simulateContract({ - account: walletProvider.getAccount(), - address: tokenAddress, - abi: [ - { - inputs: [ - { - internalType: "address", - name: "_spender", - type: "address", - }, - { - internalType: "uint256", - name: "_value", - type: "uint256", - }, - ], - name: "approve", - outputs: [ - { - internalType: "bool", - name: "", - type: "bool", - }, - ], - stateMutability: "nonpayable", - type: "function", - }, - ], - functionName: "approve", - args: [spender, parseUnits(amount.toString(), decimals)], - }); - - if (!result) { - throw new Error("Approve failed"); - } - elizaLogger.debug("Request:", request); - const walletClient = walletProvider.getWalletClient(); - const tx = await walletClient.writeContract(request); - elizaLogger.debug("Transaction:", tx); - return tx; - } catch (error) { - elizaLogger.error("Error approving:", error); - return; - } -}; - -export const depositBTC = async ( - walletProvider: WalletProvider, - farmAddress: Address, - amount: string | number -) => { - try { - const decimals = b2Network.nativeCurrency.decimals; - // const publicClient = walletProvider.getPublicClient(); - - const walletClient = walletProvider.getWalletClient(); - const data = encodeFunctionData({ - abi: [ - { - "inputs": [ - - ], - "name": "depositBTC", - "outputs": [ - - ], - "stateMutability": "payable", - "type": "function" - }, - ], - functionName: 'depositBTC', - args: [], - }); - - const args = { - account: walletProvider.getAddress(), - to: farmAddress, - data, - value: parseUnits(amount.toString(), decimals), - kzg: undefined, - chain: b2Network - }; - const txHash = await walletClient.sendTransaction(args); - - elizaLogger.debug("Transaction hash:", txHash); - return txHash; - } catch (error) { - elizaLogger.error("Error depositBTC:", error); - return; - } -}; - -// function unstake(uint256 _pid, uint256 _amount) public {} -export const unstake = async ( - walletProvider: WalletProvider, - farmAddress: Address, - amount: string | number -) => { - try { - const BTC_PID = 0; - const decimals = b2Network.nativeCurrency.decimals; - const publicClient = walletProvider.getPublicClient(); - const { request } = await publicClient.simulateContract({ - account: walletProvider.getAccount(), - address: farmAddress, - abi: [ - { - "inputs": [ - { - "internalType": "uint256", - "name": "_pid", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_amount", - "type": "uint256" - } - ], - "name": "unstake", - "outputs": [ - - ], - "stateMutability": "nonpayable", - "type": "function" - }, - ], - functionName: "unstake", - args: [BigInt(BTC_PID), parseUnits(amount.toString(), decimals)], - }); - elizaLogger.debug("Request:", request); - - const walletClient = walletProvider.getWalletClient(); - const tx = await walletClient.writeContract(request); - elizaLogger.debug("Transaction:", tx); - return tx; - } catch (error) { - elizaLogger.error("Error unstake:", error); - return; - } -}; - -// function withdraw(uint256 _pid) public {} -export const withdraw = async ( - walletProvider: WalletProvider, - farmAddress: Address, -) => { - try { - const BTC_PID = 0; - const publicClient = walletProvider.getPublicClient(); - const { request } = await publicClient.simulateContract({ - account: walletProvider.getAccount(), - address: farmAddress, - abi: [ - { - "inputs": [ - { - "internalType": "uint256", - "name": "_pid", - "type": "uint256" - } - ], - "name": "withdraw", - "outputs": [ - - ], - "stateMutability": "nonpayable", - "type": "function" - }, - ], - functionName: "withdraw", - args: [BigInt(BTC_PID)], - }); - elizaLogger.debug("Request:", request); - - const walletClient = walletProvider.getWalletClient(); - const tx = await walletClient.writeContract(request); - elizaLogger.debug("Transaction:", tx); - return tx; - } catch (error) { - elizaLogger.error("Error withdraw:", error); - return; - } -}; \ No newline at end of file diff --git a/packages/plugin-b2/tsconfig.json b/packages/plugin-b2/tsconfig.json deleted file mode 100644 index e9c2e9f852778..0000000000000 --- a/packages/plugin-b2/tsconfig.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "types": ["node"] - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/plugin-b2/tsup.config.ts b/packages/plugin-b2/tsup.config.ts deleted file mode 100644 index 1a96f24afa1eb..0000000000000 --- a/packages/plugin-b2/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "safe-buffer", - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-binance/README.md b/packages/plugin-binance/README.md deleted file mode 100644 index 6748933720228..0000000000000 --- a/packages/plugin-binance/README.md +++ /dev/null @@ -1,88 +0,0 @@ -# Binance Plugin for Eliza - -This plugin enables Eliza to interact with the Binance cryptocurrency exchange, providing capabilities for checking prices, executing trades, and managing spot wallet balances. - -## Features - -- 📊 Real-time cryptocurrency price checks -- 💱 Spot trading (market and limit orders) -- 💰 Wallet balance inquiries -- ✅ Comprehensive error handling -- 🔒 Secure API integration - -## Prerequisites - -1. **Binance Account**: You need a Binance account to use this plugin -2. **API Keys**: Generate API keys from your Binance account: - - Go to your Binance account settings - - Navigate to API Management - - Create a new API key - - Enable spot trading permissions - - Store your API key and secret securely - -## Configuration - -Set the following environment variables: - -```env -BINANCE_API_KEY=your_api_key -BINANCE_SECRET_KEY=your_secret_key -``` - -## Installation - -Add the plugin to your Eliza configuration: - -```json -{ - "plugins": ["@elizaos/plugin-binance"] -} -``` - -## Available Actions - -The plugin provides the following actions: - -1. **GET_PRICE**: Check cryptocurrency prices - - - Example: "What's the current price of Bitcoin?" - - Example: "Check ETH price in USDT" - -2. **EXECUTE_SPOT_TRADE**: Execute spot trades - - - Example: "Buy 0.1 BTC at market price" - - Example: "Sell 100 USDT worth of ETH" - -3. **GET_SPOT_BALANCE**: Check wallet balances - - Example: "What's my BTC balance?" - - Example: "Show all my wallet balances" - -## Important Notes - -1. **API Rate Limits**: Binance implements rate limiting: - - - 1200 requests per minute for most endpoints - - Some endpoints have specific weight limits - - The plugin handles rate limiting errors appropriately - -2. **Minimum Order Sizes**: Binance enforces minimum order sizes and notional values: - - - Minimum order size varies by trading pair - - Minimum notional value (quantity × price) must be met - - The plugin validates these requirements before order execution - -3. **Error Handling**: The plugin provides detailed error messages for: - - Invalid API credentials - - Insufficient balance - - Invalid trading pairs - - Minimum notional value not met - - Other API-specific errors - -## Service Architecture - -The plugin is organized into specialized services: - -- `PriceService`: Handles price-related operations -- `TradeService`: Manages trading operations -- `AccountService`: Handles balance and account operations -- `BaseService`: Provides common functionality diff --git a/packages/plugin-binance/__tests__/account.test.ts b/packages/plugin-binance/__tests__/account.test.ts deleted file mode 100644 index 7c9aed88ecd5e..0000000000000 --- a/packages/plugin-binance/__tests__/account.test.ts +++ /dev/null @@ -1,130 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { AccountService } from '../src/services/account'; -import { AuthenticationError } from '../src/types/internal/error'; - -// Mock the Binance client -const mockAccount = vi.fn(); -vi.mock('@binance/connector', () => ({ - Spot: vi.fn().mockImplementation(() => ({ - account: mockAccount - })) -})); - -describe('AccountService', () => { - let accountService: AccountService; - const mockApiKey = 'test-api-key'; - const mockSecretKey = 'test-secret-key'; - - beforeEach(() => { - vi.clearAllMocks(); - }); - - describe('initialization', () => { - it('should initialize with API credentials', () => { - accountService = new AccountService({ - apiKey: mockApiKey, - secretKey: mockSecretKey - }); - expect(accountService).toBeInstanceOf(AccountService); - }); - }); - - describe('getBalance', () => { - it('should throw AuthenticationError when credentials are missing', async () => { - accountService = new AccountService(); - await expect(accountService.getBalance({})) - .rejects - .toThrow(AuthenticationError); - }); - - it('should filter non-zero balances', async () => { - accountService = new AccountService({ - apiKey: mockApiKey, - secretKey: mockSecretKey - }); - - const mockAccountInfo = { - balances: [ - { asset: 'BTC', free: '1.0', locked: '0.0' }, - { asset: 'ETH', free: '0.0', locked: '0.0' }, - { asset: 'USDT', free: '100.0', locked: '50.0' } - ] - }; - - mockAccount.mockResolvedValueOnce({ data: mockAccountInfo }); - - const result = await accountService.getBalance({}); - expect(result.balances).toHaveLength(2); // Only BTC and USDT have non-zero balances - expect(result.balances).toEqual( - expect.arrayContaining([ - expect.objectContaining({ asset: 'BTC' }), - expect.objectContaining({ asset: 'USDT' }) - ]) - ); - }); - - it('should filter by asset when specified', async () => { - accountService = new AccountService({ - apiKey: mockApiKey, - secretKey: mockSecretKey - }); - - const mockAccountInfo = { - balances: [ - { asset: 'BTC', free: '1.0', locked: '0.0' }, - { asset: 'ETH', free: '2.0', locked: '0.0' }, - ] - }; - - mockAccount.mockResolvedValueOnce({ data: mockAccountInfo }); - - const result = await accountService.getBalance({ asset: 'BTC' }); - expect(result.balances).toHaveLength(1); - expect(result.balances[0]).toEqual( - expect.objectContaining({ - asset: 'BTC', - free: '1.0', - locked: '0.0' - }) - ); - }); - }); - - describe('checkBalance', () => { - it('should return true when balance is sufficient', async () => { - accountService = new AccountService({ - apiKey: mockApiKey, - secretKey: mockSecretKey - }); - - const mockAccountInfo = { - balances: [ - { asset: 'BTC', free: '1.0', locked: '0.0' } - ] - }; - - mockAccount.mockResolvedValueOnce({ data: mockAccountInfo }); - - const result = await accountService.checkBalance('BTC', 0.5); - expect(result).toBe(true); - }); - - it('should return false when balance is insufficient', async () => { - accountService = new AccountService({ - apiKey: mockApiKey, - secretKey: mockSecretKey - }); - - const mockAccountInfo = { - balances: [ - { asset: 'BTC', free: '0.1', locked: '0.0' } - ] - }; - - mockAccount.mockResolvedValueOnce({ data: mockAccountInfo }); - - const result = await accountService.checkBalance('BTC', 1.0); - expect(result).toBe(false); - }); - }); -}); diff --git a/packages/plugin-binance/__tests__/price.test.ts b/packages/plugin-binance/__tests__/price.test.ts deleted file mode 100644 index 73e306f9dc5e3..0000000000000 --- a/packages/plugin-binance/__tests__/price.test.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { PriceService } from '../src/services/price'; -import { BinanceError } from '../src/types/internal/error'; -import { ERROR_MESSAGES } from '../src/constants/errors'; - -// Mock elizaLogger -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - error: vi.fn() - } -})); - -describe('PriceService', () => { - let service: PriceService; - let mockClient: any; - - beforeEach(() => { - mockClient = { - tickerPrice: vi.fn() - }; - service = new PriceService(); - // @ts-ignore - we're mocking the client - service.client = mockClient; - }); - - describe('getPrice', () => { - const validRequest = { - symbol: 'BTC', - quoteCurrency: 'USDT' - }; - - it('should return price data for valid symbol', async () => { - const mockPrice = '42150.25'; - mockClient.tickerPrice.mockResolvedValueOnce({ - data: { price: mockPrice } - }); - - const result = await service.getPrice(validRequest); - - expect(mockClient.tickerPrice).toHaveBeenCalledWith('BTCUSDT'); - expect(result).toEqual({ - symbol: 'BTCUSDT', - price: mockPrice, - timestamp: expect.any(Number) - }); - }); - - it('should throw error for invalid symbol length', async () => { - const invalidRequest = { - symbol: 'B', // Too short - quoteCurrency: 'USDT' - }; - - await expect(service.getPrice(invalidRequest)) - .rejects - .toThrow(ERROR_MESSAGES.INVALID_SYMBOL); - }); - - it('should handle API errors', async () => { - const apiError = new Error('API Error'); - mockClient.tickerPrice.mockRejectedValueOnce(apiError); - - await expect(service.getPrice(validRequest)) - .rejects - .toBeInstanceOf(BinanceError); - }); - }); - - describe('formatPrice', () => { - it('should format string price correctly', () => { - expect(PriceService.formatPrice('42150.25')).toBe('42,150.25'); - expect(PriceService.formatPrice('0.00012345')).toBe('0.00012345'); - }); - - it('should format number price correctly', () => { - expect(PriceService.formatPrice(42150.25)).toBe('42,150.25'); - expect(PriceService.formatPrice(0.00012345)).toBe('0.00012345'); - }); - - it('should handle large numbers', () => { - expect(PriceService.formatPrice('1234567.89')).toBe('1,234,567.89'); - }); - - it('should handle small decimal numbers', () => { - expect(PriceService.formatPrice('0.00000001')).toBe('0.00000001'); - }); - }); -}); diff --git a/packages/plugin-binance/__tests__/trade.test.ts b/packages/plugin-binance/__tests__/trade.test.ts deleted file mode 100644 index a921efe3c1cd9..0000000000000 --- a/packages/plugin-binance/__tests__/trade.test.ts +++ /dev/null @@ -1,237 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { TradeService } from '../src/services/trade'; -import { AuthenticationError, InvalidSymbolError, MinNotionalError, ApiError } from '../src/types/internal/error'; -import { ORDER_TYPES, TIME_IN_FORCE } from '../src/constants/api'; - -// Mock the Binance client -const mockNewOrder = vi.fn(); -const mockExchangeInfo = vi.fn(); -vi.mock('@binance/connector', () => ({ - Spot: vi.fn().mockImplementation(() => ({ - newOrder: mockNewOrder, - exchangeInfo: mockExchangeInfo - })) -})); - -describe('TradeService', () => { - let tradeService: TradeService; - const mockApiKey = 'test-api-key'; - const mockSecretKey = 'test-secret-key'; - - beforeEach(() => { - vi.clearAllMocks(); - tradeService = new TradeService({ - apiKey: mockApiKey, - secretKey: mockSecretKey - }); - }); - - describe('initialization', () => { - it('should initialize with API credentials', () => { - expect(tradeService).toBeInstanceOf(TradeService); - }); - - it('should throw AuthenticationError when credentials are missing', async () => { - tradeService = new TradeService(); - await expect(tradeService.executeTrade({ - symbol: 'BTCUSDT', - side: 'BUY', - type: ORDER_TYPES.MARKET, - quantity: 1 - })).rejects.toThrow(AuthenticationError); - }); - }); - - describe('executeTrade', () => { - const mockSymbolInfo = { - symbol: 'BTCUSDT', - status: 'TRADING', - baseAsset: 'BTC', - quoteAsset: 'USDT', - filters: [ - { - filterType: 'NOTIONAL', - minNotional: '10.00000000' - } - ] - }; - - const mockExchangeInfoResponse = { - data: { - symbols: [mockSymbolInfo] - } - }; - - beforeEach(() => { - mockExchangeInfo.mockResolvedValue(mockExchangeInfoResponse); - }); - - it('should execute a market order successfully', async () => { - const mockOrderResponse = { - data: { - symbol: 'BTCUSDT', - orderId: 12345, - status: 'FILLED', - executedQty: '1.0', - cummulativeQuoteQty: '50000.0', - price: '50000.0', - type: ORDER_TYPES.MARKET, - side: 'BUY' - } - }; - - mockNewOrder.mockResolvedValueOnce(mockOrderResponse); - - const result = await tradeService.executeTrade({ - symbol: 'BTCUSDT', - side: 'BUY', - type: ORDER_TYPES.MARKET, - quantity: 1 - }); - - expect(result).toEqual({ - symbol: 'BTCUSDT', - orderId: 12345, - status: 'FILLED', - executedQty: '1.0', - cummulativeQuoteQty: '50000.0', - price: '50000.0', - type: ORDER_TYPES.MARKET, - side: 'BUY' - }); - - expect(mockNewOrder).toHaveBeenCalledWith( - 'BTCUSDT', - 'BUY', - ORDER_TYPES.MARKET, - expect.objectContaining({ - quantity: '1' - }) - ); - }); - - it('should execute a limit order successfully', async () => { - const mockOrderResponse = { - data: { - symbol: 'BTCUSDT', - orderId: 12345, - status: 'NEW', - executedQty: '0.0', - cummulativeQuoteQty: '0.0', - price: '50000.0', - type: ORDER_TYPES.LIMIT, - side: 'BUY' - } - }; - - mockNewOrder.mockResolvedValueOnce(mockOrderResponse); - - const result = await tradeService.executeTrade({ - symbol: 'BTCUSDT', - side: 'BUY', - type: ORDER_TYPES.LIMIT, - quantity: 1, - price: 50000, - timeInForce: TIME_IN_FORCE.GTC - }); - - expect(result).toEqual({ - symbol: 'BTCUSDT', - orderId: 12345, - status: 'NEW', - executedQty: '0.0', - cummulativeQuoteQty: '0.0', - price: '50000.0', - type: ORDER_TYPES.LIMIT, - side: 'BUY' - }); - - expect(mockNewOrder).toHaveBeenCalledWith( - 'BTCUSDT', - 'BUY', - ORDER_TYPES.LIMIT, - expect.objectContaining({ - quantity: '1', - price: '50000', - timeInForce: TIME_IN_FORCE.GTC - }) - ); - }); - - it('should throw error for invalid symbol', async () => { - mockExchangeInfo.mockResolvedValueOnce({ - data: { - symbols: [] // No symbols match - } - }); - - await expect(tradeService.executeTrade({ - symbol: 'INVALID', - side: 'BUY', - type: ORDER_TYPES.MARKET, - quantity: 1 - })).rejects.toThrow(InvalidSymbolError); - }); - - it('should throw error for insufficient notional value', async () => { - // Mock successful exchange info response first - mockExchangeInfo.mockResolvedValueOnce(mockExchangeInfoResponse); - - // Mock order response with error - mockNewOrder.mockRejectedValueOnce({ - response: { - data: { - code: -1013, - msg: 'Filter failure: NOTIONAL' - } - } - }); - - await expect(tradeService.executeTrade({ - symbol: 'BTCUSDT', - side: 'BUY', - type: ORDER_TYPES.MARKET, - quantity: 0.0001 // Very small amount - })).rejects.toThrow(MinNotionalError); - }); - - it('should throw error for limit order without price', async () => { - // Mock successful exchange info response - mockExchangeInfo.mockResolvedValueOnce(mockExchangeInfoResponse); - - await expect(tradeService.executeTrade({ - symbol: 'BTCUSDT', - side: 'BUY', - type: ORDER_TYPES.LIMIT, - quantity: 1 - // price is missing - })).rejects.toThrow('Price is required for LIMIT orders'); - }); - - it('should handle API timeout', async () => { - // Mock successful exchange info response first - mockExchangeInfo.mockResolvedValueOnce(mockExchangeInfoResponse); - - // Mock order request to timeout - mockNewOrder.mockImplementationOnce(() => - Promise.reject(new ApiError('Request timed out', -1001)) - ); - - const service = new TradeService({ - apiKey: 'test', - secretKey: 'test', - timeout: 100 // Lower timeout as we're mocking the error - }); - - await expect(() => service.executeTrade({ - symbol: 'BTCUSDT', - side: 'BUY', - type: ORDER_TYPES.MARKET, - quantity: 1 - })).rejects.toMatchObject({ - message: 'Request timed out', - code: -1001 - }); - }); - }); -}); diff --git a/packages/plugin-binance/biome.json b/packages/plugin-binance/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-binance/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-binance/package.json b/packages/plugin-binance/package.json deleted file mode 100644 index e40f1a5a89a77..0000000000000 --- a/packages/plugin-binance/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "@elizaos/plugin-binance", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@binance/connector": "^3.6.0", - "@elizaos/core": "workspace:*" - }, - "devDependencies": { - "@types/node": "^20.0.0", - "tsup": "8.3.5", - "vite-tsconfig-paths": "^5.1.4", - "vitest": "^3.0.2" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "test:watch": "vitest" - } -} diff --git a/packages/plugin-binance/src/actions/priceCheck.ts b/packages/plugin-binance/src/actions/priceCheck.ts deleted file mode 100644 index bf116f37c2476..0000000000000 --- a/packages/plugin-binance/src/actions/priceCheck.ts +++ /dev/null @@ -1,162 +0,0 @@ -import { - type ActionExample, - composeContext, - elizaLogger, - generateObjectDeprecated, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action, -} from "@elizaos/core"; -import { BinanceService } from "../services"; - -const priceCheckTemplate = `Look at ONLY your LAST RESPONSE message in this conversation, where you just said which cryptocurrency price you would check. -Based on ONLY that last message, provide the trading symbol. - -For example: -- If your last message was "I'll check the current Ethereum price..." -> return "ETH" -- If your last message was "I'll check the current Solana price..." -> return "SOL" -- If your last message was "I'll check the current Bitcoin price..." -> return "BTC" - -\`\`\`json -{ - "symbol": "", - "quoteCurrency": "" -} -\`\`\` - -Last part of conversation: -{{recentMessages}}`; - -export const priceCheck: Action = { - name: "GET_PRICE", - similes: [ - "CHECK_PRICE", - "PRICE_CHECK", - "GET_CRYPTO_PRICE", - "CRYPTO_PRICE", - "CHECK_CRYPTO_PRICE", - "PRICE_LOOKUP", - "CURRENT_PRICE", - ], - description: "Get current price information for a cryptocurrency pair", - validate: async () => true, // Public endpoint - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: Record, - callback?: HandlerCallback - ): Promise => { - try { - // Initialize or update state - let localState = state; - localState = !localState - ? await runtime.composeState(message) - : await runtime.updateRecentMessageState(localState); - - const context = composeContext({ - state: localState, - template: priceCheckTemplate, - }); - - const rawContent = await generateObjectDeprecated({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - - if (!rawContent?.symbol) { - throw new Error( - "Could not determine which cryptocurrency to check" - ); - } - - // Ensure the content has the required shape - const content = { - symbol: rawContent.symbol.toString().toUpperCase().trim(), - quoteCurrency: (rawContent.quoteCurrency || "USDT") - .toString() - .toUpperCase() - .trim(), - }; - - if (content.symbol.length < 2 || content.symbol.length > 10) { - throw new Error("Invalid cryptocurrency symbol"); - } - - const binanceService = new BinanceService(); - const priceData = await binanceService.getPrice(content); - - if (callback) { - callback({ - text: `The current ${content.symbol} price is ${BinanceService.formatPrice(priceData.price)} ${content.quoteCurrency}`, - content: priceData, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error in price check:", error); - if (callback) { - const errorMessage = error.message.includes("Invalid API key") - ? "Unable to connect to Binance API" - : error.message.includes("Invalid symbol") - ? "Sorry, could not find price for the cryptocurrency symbol you provided" - : `Sorry, I encountered an error: ${error.message}`; - - callback({ - text: errorMessage, - content: { error: error.message }, - }); - } - return false; - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "What's the current price of Bitcoin?", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll check the current Bitcoin price for you right away.", - action: "GET_PRICE", - }, - }, - { - user: "{{agent}}", - content: { - text: "The current BTC price is 42,150.25 USDT", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Can you check ETH price in EUR?", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll fetch the current Ethereum price in euros for you.", - action: "GET_PRICE", - }, - }, - { - user: "{{agent}}", - content: { - text: "The current ETH price is 2,245.80 EUR", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-binance/src/actions/spotBalance.ts b/packages/plugin-binance/src/actions/spotBalance.ts deleted file mode 100644 index 0003e5ba4edca..0000000000000 --- a/packages/plugin-binance/src/actions/spotBalance.ts +++ /dev/null @@ -1,180 +0,0 @@ -import { - type ActionExample, - composeContext, - elizaLogger, - generateObjectDeprecated, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action, -} from "@elizaos/core"; -import { validateBinanceConfig } from "../environment"; -import { BinanceService } from "../services"; -import type { BalanceCheckRequest } from "../types"; - -const spotBalanceTemplate = `Look at ONLY your LAST RESPONSE message in this conversation, where you just confirmed which cryptocurrency balance to check. -Based on ONLY that last message, extract the cryptocurrency symbol. - -For example: -- If your last message was "I'll fetch your Solana wallet balance..." -> return "SOL" -- If your last message was "I'll check your BTC balance..." -> return "BTC" -- If your last message was "I'll get your ETH balance..." -> return "ETH" - -\`\`\`json -{ - "asset": "" -} -\`\`\` - -Last part of conversation: -{{recentMessages}}`; - -export const spotBalance: Action = { - name: "GET_SPOT_BALANCE", - similes: [ - "CHECK_BALANCE", - "BALANCE_CHECK", - "GET_WALLET_BALANCE", - "WALLET_BALANCE", - "CHECK_WALLET", - "VIEW_BALANCE", - "SHOW_BALANCE", - ], - description: "Get current spot wallet balance for one or all assets", - validate: async (runtime: IAgentRuntime) => { - try { - await validateBinanceConfig(runtime); - return true; - } catch { - return false; - } - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - const balanceContext = composeContext({ - state: currentState, - template: spotBalanceTemplate, - }); - - const content = (await generateObjectDeprecated({ - runtime, - context: balanceContext, - modelClass: ModelClass.SMALL, - })) as BalanceCheckRequest; - - try { - const binanceService = new BinanceService({ - apiKey: runtime.getSetting("BINANCE_API_KEY"), - secretKey: runtime.getSetting("BINANCE_SECRET_KEY"), - }); - - const balanceData = await binanceService.getBalance(content); - - if (content.asset) { - const assetBalance = balanceData.balances[0]; - if (assetBalance) { - if (callback) { - callback({ - text: `${content.asset} Balance:\nAvailable: ${assetBalance.free}\nLocked: ${assetBalance.locked}`, - content: assetBalance, - }); - } - } else { - if (callback) { - callback({ - text: `No balance found for ${content.asset}`, - content: { error: "Asset not found" }, - }); - } - } - } else { - const balanceText = balanceData.balances - .map( - (b) => - `${b.asset}: Available: ${b.free}, Locked: ${b.locked}` - ) - .join("\n"); - - if (callback) { - callback({ - text: `Spot Wallet Balances:\n${balanceText}`, - content: balanceData.balances, - }); - } - } - - return true; - } catch (error) { - elizaLogger.error("Error in balance check:", { - message: error.message, - code: error.code, - }); - if (callback) { - callback({ - text: error.message, - content: { error: error.message }, - }); - } - return false; - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "What's my current Bitcoin balance?", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll check your BTC balance for you.", - action: "GET_SPOT_BALANCE", - }, - }, - { - user: "{{agent}}", - content: { - text: "BTC Balance:\nAvailable: 0.5\nLocked: 0.1", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Show me all my wallet balances", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll fetch all your spot wallet balances.", - action: "GET_SPOT_BALANCE", - }, - }, - { - user: "{{agent}}", - content: { - text: "Spot Wallet Balances:\nBTC: Available: 0.5, Locked: 0.1\nETH: Available: 2.0, Locked: 0.0\nUSDT: Available: 1000.0, Locked: 0.0", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-binance/src/actions/spotTrade.ts b/packages/plugin-binance/src/actions/spotTrade.ts deleted file mode 100644 index d19468bea5a31..0000000000000 --- a/packages/plugin-binance/src/actions/spotTrade.ts +++ /dev/null @@ -1,173 +0,0 @@ -import { - type ActionExample, - composeContext, - elizaLogger, - generateObjectDeprecated, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action, -} from "@elizaos/core"; -import { BinanceService } from "../services"; -import { SpotTradeSchema } from "../types"; - -const spotTradeTemplate = `Look at your LAST RESPONSE in the conversation where you confirmed a trade/swap request. -Based on ONLY that last message, extract the trading details: - -Trading pairs on Binance must include USDT or BUSD or USDC. For example: -- For "swap SOL for USDC" -> use "SOLUSDC" as symbol -- For "swap ETH for USDT" -> use "ETHUSDT" as symbol -- For "buy BTC with USDT" -> use "BTCUSDT" as symbol - -\`\`\`json -{ - "symbol": "", - "side": "SELL", - "type": "MARKET", - "quantity": "" -} -\`\`\` - -Recent conversation: -{{recentMessages}}`; - -export const spotTrade: Action = { - name: "EXECUTE_SPOT_TRADE", - similes: [ - "SPOT_TRADE", - "MARKET_ORDER", - "LIMIT_ORDER", - "BUY_CRYPTO", - "SELL_CRYPTO", - "PLACE_ORDER", - ], - description: "Execute a spot trade on Binance", - validate: async (runtime: IAgentRuntime) => { - return !!( - runtime.getSetting("BINANCE_API_KEY") && - runtime.getSetting("BINANCE_SECRET_KEY") - ); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: Record, - callback?: HandlerCallback - ): Promise => { - let content; - try { - let currentState = state; - if (!currentState) { - currentState = await runtime.composeState(message); - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - const context = composeContext({ - state: currentState, - template: spotTradeTemplate, - }); - - content = await generateObjectDeprecated({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - - // Convert quantity to number if it's a string - if (content && typeof content.quantity === "string") { - content.quantity = Number.parseFloat(content.quantity); - } - - const parseResult = SpotTradeSchema.safeParse(content); - if (!parseResult.success) { - throw new Error( - `Invalid spot trade content: ${JSON.stringify(parseResult.error.errors, null, 2)}` - ); - } - - const binanceService = new BinanceService({ - apiKey: runtime.getSetting("BINANCE_API_KEY"), - secretKey: runtime.getSetting("BINANCE_SECRET_KEY"), - }); - - const tradeResult = await binanceService.executeTrade(content); - - if (callback) { - const orderType = - content.type === "MARKET" - ? "market" - : content.price - ? `limit at ${BinanceService.formatPrice(content.price)}` - : "market"; - - callback({ - text: `Successfully placed a ${orderType} order to ${content.side.toLowerCase()} ${content.quantity} ${content.symbol}\nOrder ID: ${tradeResult.orderId}\nStatus: ${tradeResult.status}`, - content: tradeResult, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error executing trade:", { - content, - message: error.message, - code: error.code, - }); - if (callback) { - callback({ - text: `Error executing trade: ${error.message}`, - content: { error: error.message }, - }); - } - return false; - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Buy 0.1 BTC at market price", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll execute a market order to buy 0.1 BTC now.", - action: "EXECUTE_SPOT_TRADE", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully placed a market order to buy 0.1 BTCUSDT\nOrder ID: 123456789\nStatus: FILLED", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Place a limit order to sell 100 BNB at 250 USDT", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll place a limit order to sell 100 BNB at 250 USDT.", - action: "EXECUTE_SPOT_TRADE", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully placed a limit order to sell 100 BNBUSDT at 250\nOrder ID: 987654321\nStatus: NEW", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-binance/src/constants/api.ts b/packages/plugin-binance/src/constants/api.ts deleted file mode 100644 index b1d62179230b0..0000000000000 --- a/packages/plugin-binance/src/constants/api.ts +++ /dev/null @@ -1,31 +0,0 @@ -export const API_DEFAULTS = { - BASE_URL: "https://api.binance.com", - TIMEOUT: 30000, // 30 seconds - RATE_LIMIT: { - MAX_REQUESTS_PER_MINUTE: 1200, - WEIGHT_PER_REQUEST: 1, - }, -}; - -export const API_ENDPOINTS = { - TICKER: "/api/v3/ticker/price", - ACCOUNT: "/api/v3/account", - ORDER: "/api/v3/order", - EXCHANGE_INFO: "/api/v3/exchangeInfo", -}; - -export const ORDER_TYPES = { - MARKET: "MARKET", - LIMIT: "LIMIT", -} as const; - -export const ORDER_SIDES = { - BUY: "BUY", - SELL: "SELL", -} as const; - -export const TIME_IN_FORCE = { - GTC: "GTC", // Good Till Cancel - IOC: "IOC", // Immediate or Cancel - FOK: "FOK", // Fill or Kill -} as const; diff --git a/packages/plugin-binance/src/constants/defaults.ts b/packages/plugin-binance/src/constants/defaults.ts deleted file mode 100644 index ff34231098cf8..0000000000000 --- a/packages/plugin-binance/src/constants/defaults.ts +++ /dev/null @@ -1,22 +0,0 @@ -export const TRADE_DEFAULTS = { - QUOTE_CURRENCY: "USDT", - TIME_IN_FORCE: "GTC", - ORDER_TYPE: "MARKET", - PRICE_PRECISION: 8, - QUANTITY_PRECISION: 8, -}; - -export const DISPLAY_DEFAULTS = { - PRICE_FORMAT: { - MIN_FRACTION_DIGITS: 2, - MAX_FRACTION_DIGITS: 8, - LOCALE: "en-US", - }, -}; - -export const VALIDATION = { - SYMBOL: { - MIN_LENGTH: 2, - MAX_LENGTH: 10, - }, -}; diff --git a/packages/plugin-binance/src/constants/errors.ts b/packages/plugin-binance/src/constants/errors.ts deleted file mode 100644 index 2558fe9c8f452..0000000000000 --- a/packages/plugin-binance/src/constants/errors.ts +++ /dev/null @@ -1,33 +0,0 @@ -export const ERROR_CODES = { - INVALID_CREDENTIALS: 401, - INVALID_PARAMETERS: 400, - INSUFFICIENT_BALANCE: -1012, - MIN_NOTIONAL_NOT_MET: -1013, - UNKNOWN_ORDER_COMPOSITION: -1111, - PRICE_QTY_EXCEED_HARD_LIMITS: -1021, -} as const; - -export const ERROR_MESSAGES = { - INVALID_CREDENTIALS: - "Invalid API credentials. Please check your API key and secret.", - INVALID_SYMBOL: "Invalid trading pair symbol", - SYMBOL_NOT_FOUND: (symbol: string) => - `Trading pair ${symbol} is not available`, - MIN_NOTIONAL_NOT_MET: (minNotional?: string) => - `Order value is too small. Please increase the quantity to meet the minimum order value requirement.${ - minNotional ? ` Minimum order value is ${minNotional} USDC.` : "" - }`, - LIMIT_ORDER_PRICE_REQUIRED: "Price is required for LIMIT orders", - BALANCE_FETCH_ERROR: (asset?: string) => - asset - ? `Failed to fetch balance for ${asset}` - : "Failed to fetch account balances", - PRICE_FETCH_ERROR: (symbol: string) => - `Failed to fetch price for ${symbol}`, -} as const; - -export const VALIDATION_ERRORS = { - MISSING_API_KEY: "BINANCE_API_KEY is required but not configured", - MISSING_SECRET_KEY: "BINANCE_SECRET_KEY is required but not configured", - INVALID_SYMBOL_LENGTH: "Invalid cryptocurrency symbol length", -} as const; diff --git a/packages/plugin-binance/src/environment.ts b/packages/plugin-binance/src/environment.ts deleted file mode 100644 index 31f88503c86c8..0000000000000 --- a/packages/plugin-binance/src/environment.ts +++ /dev/null @@ -1,32 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -export const binanceEnvSchema = z.object({ - BINANCE_API_KEY: z.string().min(1, "Binance API key is required"), - BINANCE_SECRET_KEY: z.string().min(1, "Binance secret key is required"), -}); - -export type BinanceConfig = z.infer; - -export async function validateBinanceConfig( - runtime: IAgentRuntime -): Promise { - try { - const config = { - BINANCE_API_KEY: runtime.getSetting("BINANCE_API_KEY"), - BINANCE_SECRET_KEY: runtime.getSetting("BINANCE_SECRET_KEY"), - }; - - return binanceEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `Binance configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/plugin-binance/src/index.ts b/packages/plugin-binance/src/index.ts deleted file mode 100644 index e11b6fc48dc06..0000000000000 --- a/packages/plugin-binance/src/index.ts +++ /dev/null @@ -1,15 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { priceCheck } from "./actions/priceCheck"; -import { spotBalance } from "./actions/spotBalance"; -import { spotTrade } from "./actions/spotTrade"; - -// Export the plugin configuration -export const binancePlugin: Plugin = { - name: "binance", - description: "Binance Plugin for Eliza", - actions: [spotTrade, priceCheck, spotBalance], - evaluators: [], - providers: [], -}; - -export default binancePlugin; diff --git a/packages/plugin-binance/src/services/account.ts b/packages/plugin-binance/src/services/account.ts deleted file mode 100644 index b495920fc5136..0000000000000 --- a/packages/plugin-binance/src/services/account.ts +++ /dev/null @@ -1,93 +0,0 @@ -import type { BinanceAccountInfo, BinanceBalance } from "../types/api/account"; -import type { BalanceCheckRequest, BalanceResponse } from "../types/internal/config"; -import { BaseService } from "./base"; - -/** - * Service for handling account-related operations - */ -export class AccountService extends BaseService { - /** - * Get account balance for all assets or a specific asset - */ - async getBalance(request: BalanceCheckRequest): Promise { - try { - this.validateCredentials(); - - const response = await this.client.account(); - const accountInfo = response.data as BinanceAccountInfo; - - let balances = this.filterNonZeroBalances(accountInfo.balances); - - if (request.asset) { - balances = this.filterByAsset(balances, request.asset); - } - - return { - balances, - timestamp: Date.now(), - }; - } catch (error) { - throw this.handleError( - error, - request.asset ? `Asset: ${request.asset}` : "All assets" - ); - } - } - - /** - * Filter out zero balances - */ - private filterNonZeroBalances( - balances: BinanceBalance[] - ): BinanceBalance[] { - return balances.filter( - (balance) => - Number.parseFloat(balance.free) > 0 || Number.parseFloat(balance.locked) > 0 - ); - } - - /** - * Filter balances by asset - */ - private filterByAsset( - balances: BinanceBalance[], - asset: string - ): BinanceBalance[] { - return balances.filter( - (b) => b.asset.toUpperCase() === asset.toUpperCase() - ); - } - - /** - * Get account trading status - */ - async getTradingStatus(): Promise { - try { - this.validateCredentials(); - const response = await this.client.account(); - const accountInfo = response.data as BinanceAccountInfo; - return accountInfo.canTrade; - } catch (error) { - throw this.handleError(error, "Trading status check"); - } - } - - /** - * Check if account has sufficient balance for a trade - */ - async checkBalance(asset: string, required: number): Promise { - try { - const { balances } = await this.getBalance({ asset }); - const balance = balances[0]; - - if (!balance) { - return false; - } - - const available = Number.parseFloat(balance.free); - return available >= required; - } catch (error) { - throw this.handleError(error, `Balance check for ${asset}`); - } - } -} diff --git a/packages/plugin-binance/src/services/base.ts b/packages/plugin-binance/src/services/base.ts deleted file mode 100644 index 25ff02a0ba129..0000000000000 --- a/packages/plugin-binance/src/services/base.ts +++ /dev/null @@ -1,107 +0,0 @@ -import { Spot } from "@binance/connector"; -import { elizaLogger } from "@elizaos/core"; -import { API_DEFAULTS } from "../constants/api"; -import { ERROR_MESSAGES } from "../constants/errors"; -import type { BinanceConfig, ServiceOptions } from "../types/internal/config"; -import { - ApiError, - AuthenticationError, - BinanceError, - InvalidSymbolError, - MinNotionalError, -} from "../types/internal/error"; - -interface BinanceApiError { - response?: { - status?: number; - data?: { - code?: number; - msg?: string; - }; - }; - code?: number; - message?: string; -} - -/** - * Base service class with common functionality - */ -export abstract class BaseService { - protected client: Spot; - protected config: BinanceConfig; - - constructor(config?: BinanceConfig) { - this.config = { - baseURL: API_DEFAULTS.BASE_URL, - timeout: API_DEFAULTS.TIMEOUT, - ...config, - }; - - this.client = new Spot(this.config.apiKey, this.config.secretKey, { - baseURL: this.config.baseURL, - timeout: this.config.timeout, - }); - } - - /** - * Handles common error scenarios and transforms them into appropriate error types - */ - protected handleError(error: unknown, context?: string): never { - if (error instanceof BinanceError) { - throw error; - } - - const apiError = error as BinanceApiError; - const errorResponse = apiError.response?.data; - const errorCode = errorResponse?.code || apiError.code; - const errorMessage = errorResponse?.msg || apiError.message; - - // Handle authentication errors - if (apiError.response?.status === 401) { - throw new AuthenticationError(ERROR_MESSAGES.INVALID_CREDENTIALS); - } - - // Handle minimum notional errors - if (errorCode === -1013 && errorMessage?.includes("NOTIONAL")) { - throw new MinNotionalError(); - } - - // Handle invalid symbol errors - if (errorMessage?.includes("Invalid symbol")) { - throw new InvalidSymbolError(context || "Unknown"); - } - - // Log unexpected errors for debugging - elizaLogger.error("Unexpected API error:", { - context, - code: errorCode, - message: errorMessage, - response: errorResponse, - }); - - throw new ApiError( - errorMessage || "An unexpected error occurred", - errorCode || 500, - errorResponse - ); - } - - /** - * Validates required API credentials - */ - protected validateCredentials(): void { - if (!this.config.apiKey || !this.config.secretKey) { - throw new AuthenticationError("API credentials are required"); - } - } - - /** - * Merges default options with provided options - */ - protected mergeOptions(options?: ServiceOptions): ServiceOptions { - return { - timeout: this.config.timeout, - ...options, - }; - } -} diff --git a/packages/plugin-binance/src/services/index.ts b/packages/plugin-binance/src/services/index.ts deleted file mode 100644 index 13abe994fede8..0000000000000 --- a/packages/plugin-binance/src/services/index.ts +++ /dev/null @@ -1,52 +0,0 @@ -import type { BinanceConfig } from "../types/internal/config"; -import { AccountService } from "./account"; -import { PriceService } from "./price"; -import { TradeService } from "./trade"; - -/** - * Main service facade that coordinates between specialized services - */ -export class BinanceService { - private priceService: PriceService; - private tradeService: TradeService; - private accountService: AccountService; - - constructor(config?: BinanceConfig) { - this.priceService = new PriceService(config); - this.tradeService = new TradeService(config); - this.accountService = new AccountService(config); - } - - /** - * Price-related operations - */ - async getPrice(...args: Parameters) { - return this.priceService.getPrice(...args); - } - - static formatPrice = PriceService.formatPrice; - - /** - * Trading operations - */ - async executeTrade(...args: Parameters) { - return this.tradeService.executeTrade(...args); - } - - /** - * Account operations - */ - async getBalance(...args: Parameters) { - return this.accountService.getBalance(...args); - } - - async getTradingStatus() { - return this.accountService.getTradingStatus(); - } - - async checkBalance(...args: Parameters) { - return this.accountService.checkBalance(...args); - } -} - -export { AccountService, PriceService, TradeService }; diff --git a/packages/plugin-binance/src/services/price.ts b/packages/plugin-binance/src/services/price.ts deleted file mode 100644 index 3f86bd1f6cbf4..0000000000000 --- a/packages/plugin-binance/src/services/price.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { VALIDATION } from "../constants/defaults"; -import { ERROR_MESSAGES } from "../constants/errors"; -import type { BinanceTickerResponse } from "../types/api/price"; -import type { PriceCheckRequest, PriceResponse } from "../types/internal/config"; -import { BinanceError } from "../types/internal/error"; -import { BaseService } from "./base"; - -/** - * Service for handling price-related operations - */ -export class PriceService extends BaseService { - /** - * Get current price for a symbol - */ - async getPrice(request: PriceCheckRequest): Promise { - try { - this.validateSymbol(request.symbol); - - const symbol = `${request.symbol}${request.quoteCurrency}`; - const response = await this.client.tickerPrice(symbol); - const data = response.data as BinanceTickerResponse; - - return { - symbol, - price: data.price, - timestamp: Date.now(), - }; - } catch (error) { - throw this.handleError(error, request.symbol); - } - } - - /** - * Validates symbol format - */ - private validateSymbol(symbol: string): void { - const trimmedSymbol = symbol.trim(); - if ( - trimmedSymbol.length < VALIDATION.SYMBOL.MIN_LENGTH || - trimmedSymbol.length > VALIDATION.SYMBOL.MAX_LENGTH - ) { - throw new BinanceError(ERROR_MESSAGES.INVALID_SYMBOL); - } - } - - /** - * Format price for display - */ - static formatPrice(price: number | string): string { - const numPrice = typeof price === "string" ? Number.parseFloat(price) : price; - return new Intl.NumberFormat("en-US", { - style: "decimal", - minimumFractionDigits: 2, - maximumFractionDigits: 8, - }).format(numPrice); - } -} diff --git a/packages/plugin-binance/src/services/trade.ts b/packages/plugin-binance/src/services/trade.ts deleted file mode 100644 index 1b431abedaec9..0000000000000 --- a/packages/plugin-binance/src/services/trade.ts +++ /dev/null @@ -1,114 +0,0 @@ -import { ORDER_TYPES, TIME_IN_FORCE } from "../constants/api"; -import { ERROR_MESSAGES } from "../constants/errors"; -import type { - BinanceExchangeInfo, - BinanceSymbolFilter, - BinanceSymbolInfo, -} from "../types/api/price"; -import type { - BinanceNewOrderParams, - BinanceOrderResponse, -} from "../types/api/trade"; -import type { SpotTradeRequest, TradeResponse } from "../types/internal/config"; -import { InvalidSymbolError, MinNotionalError } from "../types/internal/error"; -import { BaseService } from "./base"; - -/** - * Service for handling trading operations - */ -export class TradeService extends BaseService { - /** - * Execute a spot trade - */ - async executeTrade(request: SpotTradeRequest): Promise { - try { - this.validateCredentials(); - await this.validateSymbol(request.symbol); - - const orderParams = this.buildOrderParams(request); - const response = await this.client.newOrder( - orderParams.symbol, - orderParams.side, - orderParams.type, - orderParams - ); - - const data = response.data as BinanceOrderResponse; - return { - symbol: data.symbol, - orderId: data.orderId, - status: data.status, - executedQty: data.executedQty, - cummulativeQuoteQty: data.cummulativeQuoteQty, - price: data.price, - type: data.type, - side: data.side, - }; - } catch (error) { - throw this.handleError(error, request.symbol); - } - } - - /** - * Validate trading pair and get symbol information - */ - private async validateSymbol(symbol: string): Promise { - const exchangeInfo = await this.client.exchangeInfo(); - const data = exchangeInfo.data as BinanceExchangeInfo; - - const symbolInfo = data.symbols.find((s) => s.symbol === symbol); - if (!symbolInfo) { - throw new InvalidSymbolError(symbol); - } - - return symbolInfo; - } - - /** - * Build order parameters for the Binance API - */ - private buildOrderParams(request: SpotTradeRequest): BinanceNewOrderParams { - const params: BinanceNewOrderParams = { - symbol: request.symbol.toUpperCase(), - side: request.side, - type: request.type, - quantity: request.quantity.toString(), - }; - - if (request.type === ORDER_TYPES.LIMIT) { - if (!request.price) { - throw new Error(ERROR_MESSAGES.LIMIT_ORDER_PRICE_REQUIRED); - } - params.timeInForce = request.timeInForce || TIME_IN_FORCE.GTC; - params.price = request.price.toString(); - } - - return params; - } - - /** - * Get minimum notional value from symbol filters - */ - private getMinNotional(filters: BinanceSymbolFilter[]): string | undefined { - const notionalFilter = filters.find((f) => f.filterType === "NOTIONAL"); - return notionalFilter?.minNotional; - } - - /** - * Check if order meets minimum notional value - */ - private checkMinNotional( - symbolInfo: BinanceSymbolInfo, - quantity: number, - price?: number - ): void { - const minNotional = this.getMinNotional(symbolInfo.filters); - if (!minNotional) return; - - const notionalValue = price ? quantity * price : quantity; // For market orders, quantity is in quote currency - - if (Number.parseFloat(minNotional) > notionalValue) { - throw new MinNotionalError(minNotional); - } - } -} diff --git a/packages/plugin-binance/src/types.ts b/packages/plugin-binance/src/types.ts deleted file mode 100644 index 659b21aa57a65..0000000000000 --- a/packages/plugin-binance/src/types.ts +++ /dev/null @@ -1,85 +0,0 @@ -// types.ts -import { z } from "zod"; - -// Base configuration types -export interface BinanceConfig { - apiKey?: string; - secretKey?: string; - baseURL?: string; -} - -// Enhanced schemas with better validation -export const PriceCheckSchema = z.object({ - symbol: z.string().min(1).toUpperCase(), - quoteCurrency: z.string().min(1).toUpperCase().default("USDT"), -}); - -export const SpotTradeSchema = z.object({ - symbol: z.string().min(1).toUpperCase(), - side: z.enum(["BUY", "SELL"]), - type: z.enum(["MARKET", "LIMIT"]), - quantity: z.number().positive(), - price: z.number().positive().optional(), - timeInForce: z.enum(["GTC", "IOC", "FOK"]).optional().default("GTC"), -}); - -// Inferred types from schemas -export type PriceCheckRequest = z.infer; -export type SpotTradeRequest = z.infer; - -// Response types -export interface PriceResponse { - symbol: string; - price: string; - timestamp: number; -} - -export interface TradeResponse { - symbol: string; - orderId: number; - status: "NEW" | "PARTIALLY_FILLED" | "FILLED" | "CANCELED" | "REJECTED"; - executedQty: string; - cummulativeQuoteQty: string; - price: string; - type: SpotTradeRequest["type"]; - side: SpotTradeRequest["side"]; -} - -// Error handling types -export class BinanceError extends Error { - constructor( - message: string, - public code?: number, - public details?: unknown - ) { - super(message); - this.name = "BinanceError"; - } -} - -// Constants -export const TRADE_STATUS = { - NEW: "NEW", - PARTIALLY_FILLED: "PARTIALLY_FILLED", - FILLED: "FILLED", - CANCELED: "CANCELED", - REJECTED: "REJECTED", -} as const; - -export type TradeStatus = keyof typeof TRADE_STATUS; - -// Balance types -export interface BalanceCheckRequest { - asset?: string; -} - -export interface AssetBalance { - asset: string; - free: string; - locked: string; -} - -export interface BalanceResponse { - balances: AssetBalance[]; - timestamp: number; -} diff --git a/packages/plugin-binance/src/types/api/account.ts b/packages/plugin-binance/src/types/api/account.ts deleted file mode 100644 index 1f9e88655c379..0000000000000 --- a/packages/plugin-binance/src/types/api/account.ts +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Binance API account information response - */ -export interface BinanceAccountInfo { - makerCommission: number; - takerCommission: number; - buyerCommission: number; - sellerCommission: number; - canTrade: boolean; - canWithdraw: boolean; - canDeposit: boolean; - updateTime: number; - accountType: string; - balances: BinanceBalance[]; - permissions: string[]; -} - -/** - * Balance information for a single asset - */ -export interface BinanceBalance { - asset: string; - free: string; // Available balance - locked: string; // Locked in orders -} - -/** - * Account trade list response - */ -export interface BinanceAccountTrade { - symbol: string; - id: number; - orderId: number; - orderListId: number; - price: string; - qty: string; - quoteQty: string; - commission: string; - commissionAsset: string; - time: number; - isBuyer: boolean; - isMaker: boolean; - isBestMatch: boolean; -} - -/** - * Parameters for account trade list query - */ -export interface BinanceTradeListParams { - symbol: string; - orderId?: number; - startTime?: number; - endTime?: number; - fromId?: number; - limit?: number; -} - -/** - * Account status response - */ -export interface BinanceAccountStatus { - data: string; // "Normal", "Margin", "Futures", etc. -} - -/** - * API trading status response - */ -export interface BinanceApiTradingStatus { - data: { - isLocked: boolean; - plannedRecoverTime: number; - triggerCondition: { - gcr: number; - ifer: number; - ufr: number; - }; - updateTime: number; - }; -} diff --git a/packages/plugin-binance/src/types/api/price.ts b/packages/plugin-binance/src/types/api/price.ts deleted file mode 100644 index 2990bdc2d2f2a..0000000000000 --- a/packages/plugin-binance/src/types/api/price.ts +++ /dev/null @@ -1,81 +0,0 @@ -/** - * Binance API response for ticker price endpoint - */ -export interface BinanceTickerResponse { - symbol: string; - price: string; -} - -/** - * Binance API response for 24hr ticker - */ -export interface BinanceTickerStatistics { - symbol: string; - priceChange: string; - priceChangePercent: string; - weightedAvgPrice: string; - prevClosePrice: string; - lastPrice: string; - lastQty: string; - bidPrice: string; - bidQty: string; - askPrice: string; - askQty: string; - openPrice: string; - highPrice: string; - lowPrice: string; - volume: string; - quoteVolume: string; - openTime: number; - closeTime: number; - firstId: number; - lastId: number; - count: number; -} - -/** - * Exchange information for a symbol - */ -export interface BinanceSymbolInfo { - symbol: string; - status: string; - baseAsset: string; - baseAssetPrecision: number; - quoteAsset: string; - quotePrecision: number; - quoteAssetPrecision: number; - filters: BinanceSymbolFilter[]; -} - -/** - * Symbol filter types - */ -export interface BinanceSymbolFilter { - filterType: string; - minPrice?: string; - maxPrice?: string; - tickSize?: string; - minQty?: string; - maxQty?: string; - stepSize?: string; - minNotional?: string; - limit?: number; - multiplierUp?: string; - multiplierDown?: string; - avgPriceMins?: number; -} - -/** - * Exchange information response - */ -export interface BinanceExchangeInfo { - timezone: string; - serverTime: number; - rateLimits: Array<{ - rateLimitType: string; - interval: string; - intervalNum: number; - limit: number; - }>; - symbols: BinanceSymbolInfo[]; -} diff --git a/packages/plugin-binance/src/types/api/trade.ts b/packages/plugin-binance/src/types/api/trade.ts deleted file mode 100644 index 2ce3346fb7ed9..0000000000000 --- a/packages/plugin-binance/src/types/api/trade.ts +++ /dev/null @@ -1,81 +0,0 @@ -import type { ORDER_SIDES, ORDER_TYPES, TIME_IN_FORCE } from "../../constants/api"; - -export type OrderType = (typeof ORDER_TYPES)[keyof typeof ORDER_TYPES]; -export type OrderSide = (typeof ORDER_SIDES)[keyof typeof ORDER_SIDES]; -export type TimeInForce = (typeof TIME_IN_FORCE)[keyof typeof TIME_IN_FORCE]; - -/** - * Binance API new order response - */ -export interface BinanceOrderResponse { - symbol: string; - orderId: number; - orderListId: number; - clientOrderId: string; - transactTime: number; - price: string; - origQty: string; - executedQty: string; - cummulativeQuoteQty: string; - status: OrderStatus; - timeInForce: TimeInForce; - type: OrderType; - side: OrderSide; - fills?: OrderFill[]; -} - -/** - * Order fill information - */ -export interface OrderFill { - price: string; - qty: string; - commission: string; - commissionAsset: string; - tradeId: number; -} - -/** - * Order status types - */ -export type OrderStatus = - | "NEW" - | "PARTIALLY_FILLED" - | "FILLED" - | "CANCELED" - | "PENDING_CANCEL" - | "REJECTED" - | "EXPIRED"; - -/** - * New order parameters for Binance API - */ -export interface BinanceNewOrderParams { - symbol: string; - side: OrderSide; - type: OrderType; - timeInForce?: TimeInForce; - quantity?: string | number; - quoteOrderQty?: string | number; - price?: string | number; - newClientOrderId?: string; - stopPrice?: string | number; - icebergQty?: string | number; - newOrderRespType?: "ACK" | "RESULT" | "FULL"; -} - -/** - * Order query parameters - */ -export interface BinanceOrderQueryParams { - symbol: string; - orderId?: number; - origClientOrderId?: string; -} - -/** - * Cancel order parameters - */ -export interface BinanceCancelOrderParams extends BinanceOrderQueryParams { - newClientOrderId?: string; -} diff --git a/packages/plugin-binance/src/types/index.ts b/packages/plugin-binance/src/types/index.ts deleted file mode 100644 index 8b2f12fbcb6bf..0000000000000 --- a/packages/plugin-binance/src/types/index.ts +++ /dev/null @@ -1,8 +0,0 @@ -// API Types -export * from "./api/account"; -export * from "./api/price"; -export * from "./api/trade"; - -// Internal Types -export * from "./internal/config"; -export * from "./internal/error"; diff --git a/packages/plugin-binance/src/types/internal/config.ts b/packages/plugin-binance/src/types/internal/config.ts deleted file mode 100644 index f1648c9c9a954..0000000000000 --- a/packages/plugin-binance/src/types/internal/config.ts +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Binance service configuration - */ -export interface BinanceConfig { - apiKey?: string; - secretKey?: string; - baseURL?: string; - timeout?: number; -} - -/** - * Service options that can be passed to any service method - */ -export interface ServiceOptions { - timeout?: number; - recvWindow?: number; -} - -/** - * Price check request parameters - */ -export interface PriceCheckRequest { - symbol: string; - quoteCurrency: string; -} - -/** - * Price response data - */ -export interface PriceResponse { - symbol: string; - price: string; - timestamp: number; -} - -/** - * Spot trade request parameters - */ -export interface SpotTradeRequest { - symbol: string; - side: "BUY" | "SELL"; - type: "MARKET" | "LIMIT"; - quantity: number; - price?: number; - timeInForce?: "GTC" | "IOC" | "FOK"; -} - -/** - * Trade response data - */ -export interface TradeResponse { - symbol: string; - orderId: number; - status: string; - executedQty: string; - cummulativeQuoteQty: string; - price: string; - type: string; - side: string; -} - -/** - * Balance check request parameters - */ -export interface BalanceCheckRequest { - asset?: string; -} - -/** - * Balance response data - */ -export interface BalanceResponse { - balances: Array<{ - asset: string; - free: string; - locked: string; - }>; - timestamp: number; -} diff --git a/packages/plugin-binance/src/types/internal/error.ts b/packages/plugin-binance/src/types/internal/error.ts deleted file mode 100644 index 1709172d1ee0a..0000000000000 --- a/packages/plugin-binance/src/types/internal/error.ts +++ /dev/null @@ -1,105 +0,0 @@ -import { ERROR_CODES } from "../../constants/errors"; - -type ErrorCode = (typeof ERROR_CODES)[keyof typeof ERROR_CODES]; - -/** - * Base error class for Binance-related errors - */ -export class BinanceError extends Error { - public readonly code: ErrorCode | number; - public readonly originalError?: unknown; - - constructor( - message: string, - code: ErrorCode | number = ERROR_CODES.INVALID_PARAMETERS, - originalError?: unknown - ) { - super(message); - this.name = "BinanceError"; - this.code = code; - this.originalError = originalError; - - // Maintains proper stack trace for where error was thrown - if (Error.captureStackTrace) { - Error.captureStackTrace(this, BinanceError); - } - } -} - -/** - * Error thrown when API credentials are invalid or missing - */ -export class AuthenticationError extends BinanceError { - constructor(message = "Invalid API credentials") { - super(message, ERROR_CODES.INVALID_CREDENTIALS); - this.name = "AuthenticationError"; - } -} - -/** - * Error thrown when order validation fails - */ -export class OrderValidationError extends BinanceError { - constructor( - message: string, - code: ErrorCode | number = ERROR_CODES.INVALID_PARAMETERS - ) { - super(message, code); - this.name = "OrderValidationError"; - } -} - -/** - * Error thrown when minimum notional value is not met - */ -export class MinNotionalError extends OrderValidationError { - constructor(minNotional?: string) { - super( - `Order value is too small. ${ - minNotional ? `Minimum order value is ${minNotional} USDC.` : "" - }`, - ERROR_CODES.MIN_NOTIONAL_NOT_MET - ); - this.name = "MinNotionalError"; - } -} - -/** - * Error thrown when insufficient balance - */ -export class InsufficientBalanceError extends OrderValidationError { - constructor(asset: string) { - super( - `Insufficient ${asset} balance`, - ERROR_CODES.INSUFFICIENT_BALANCE - ); - this.name = "InsufficientBalanceError"; - } -} - -/** - * Error thrown when symbol is invalid - */ -export class InvalidSymbolError extends BinanceError { - constructor(symbol: string) { - super( - `Trading pair ${symbol} is not available`, - ERROR_CODES.INVALID_PARAMETERS - ); - this.name = "InvalidSymbolError"; - } -} - -/** - * Error thrown when API request fails - */ -export class ApiError extends BinanceError { - constructor( - message: string, - code: number, - public readonly response?: unknown - ) { - super(message, code); - this.name = "ApiError"; - } -} diff --git a/packages/plugin-binance/tsconfig.json b/packages/plugin-binance/tsconfig.json deleted file mode 100644 index 834c4dce26957..0000000000000 --- a/packages/plugin-binance/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "types": [ - "node" - ] - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/plugin-binance/tsup.config.ts b/packages/plugin-binance/tsup.config.ts deleted file mode 100644 index 5cb9389e71f2e..0000000000000 --- a/packages/plugin-binance/tsup.config.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], - dts: true, - splitting: false, - bundle: true, - minify: false, - external: [ - "@binance/connector", - "events", - "crypto", - "buffer", - "url", - "querystring", - "os", - ], - platform: "node", - target: "node18", -}); diff --git a/packages/plugin-binance/vitest.config.ts b/packages/plugin-binance/vitest.config.ts deleted file mode 100644 index a5e5defb7b0ae..0000000000000 --- a/packages/plugin-binance/vitest.config.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { defineConfig } from 'vitest/config'; -import { resolve } from 'path'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - testTimeout: 10000 - }, - resolve: { - alias: { - '@elizaos/core': resolve(__dirname, '../../packages/core/src/index.ts') - }, - extensions: ['.ts', '.js', '.json'] - } -}); diff --git a/packages/plugin-birdeye/.npmignore b/packages/plugin-birdeye/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-birdeye/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-birdeye/.nvmrc b/packages/plugin-birdeye/.nvmrc deleted file mode 100644 index fa12cf298e4d1..0000000000000 --- a/packages/plugin-birdeye/.nvmrc +++ /dev/null @@ -1 +0,0 @@ -v23.3.0 \ No newline at end of file diff --git a/packages/plugin-birdeye/README.md b/packages/plugin-birdeye/README.md deleted file mode 100644 index 0ac543844f4e4..0000000000000 --- a/packages/plugin-birdeye/README.md +++ /dev/null @@ -1,38 +0,0 @@ -# Eliza Birdeye Plugin - -A powerful plugin for Eliza that integrates with Birdeye's comprehensive DeFi and token analytics API. This plugin provides real-time access to blockchain data, token metrics, and DeFi analytics across multiple networks. - -## Features - -### Provider Featurs - -- **Agent Portfolio Provider** - - - If `BIRDEYE_WALLET_ADDR` is set, this provider will fetch the wallet's portfolio data from Birdeye and be able to respond to questions related to the wallet's holdings. - -### Action Features - -- **Token Search Address** - - - This action will search input message for token addresses and when present will query Birdeye for token information - -- **Token Search Symbol** - - - This action will search input message for token symbols in the format of `$SYMBOL` and when present will query Birdeye for token information. Note that this action currently only supports SOL, SUI, and ETH addresses. - - _Any addresses that look like EVM addresses will be treated as ETH addresses since there is no easy way to distinguish between the other EVM chains that are supported by Birdeye_. - -- **Wallet Search Address** - - - This action will search input message for wallet addresses and when present will query Birdeye for wallet information - -## API Reference - -The plugin provides access to a subset of Birdeye API endpoints through structured interfaces. For detailed API documentation, visit [Birdeye's API Documentation](https://public-api.birdeye.so). - -## License - -See parent project for license information. - -## Contributing - -Contributions are welcome! See parent project for contribution guidelines. diff --git a/packages/plugin-birdeye/biome.json b/packages/plugin-birdeye/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-birdeye/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-birdeye/package.json b/packages/plugin-birdeye/package.json deleted file mode 100644 index da34c8ed207fa..0000000000000 --- a/packages/plugin-birdeye/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "@elizaos/plugin-birdeye", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@coral-xyz/anchor": "0.30.1", - "@elizaos/core": "workspace:*", - "@solana/spl-token": "0.4.9", - "@solana/web3.js": "1.95.8", - "bignumber": "1.1.0", - "bignumber.js": "9.1.2", - "bs58": "6.0.0", - "fomo-sdk-solana": "1.3.2", - "node-cache": "5.1.2", - "pumpdotfun-sdk": "1.3.2", - "tsup": "8.3.5", - "vitest": "2.1.9" - }, - "devDependencies": { - "@biomejs/biome": "1.5.3", - "@types/node": "^22.10.2", - "ts-node": "^10.9.2", - "tsconfig-paths": "^4.2.0", - "typescript": "^5.7.2" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "lint": "biome lint .", - "format": "biome format . --write", - "check": "biome check --apply ." - }, - "peerDependencies": { - "form-data": "4.0.1", - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-birdeye/src/actions/test-all-endpoints.ts b/packages/plugin-birdeye/src/actions/test-all-endpoints.ts deleted file mode 100644 index a1a876360013c..0000000000000 --- a/packages/plugin-birdeye/src/actions/test-all-endpoints.ts +++ /dev/null @@ -1,399 +0,0 @@ -import { - type Action, - type ActionExample, - elizaLogger, - type HandlerCallback, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; -import { BirdeyeProvider } from "../birdeye"; -import { waitFor } from "../utils"; - -// This is a dummy action generated solely to test all Birdeye endpoints and should not be used in production -export const testAllEndpointsAction = { - name: "BIRDEYE_TEST_ALL_ENDPOINTS", - similes: [], - description: "Test all Birdeye endpoints with sample data", - handler: async ( - runtime: IAgentRuntime, - _message: Memory, - _state: State, - _options: Record, - callback?: HandlerCallback - ) => { - try { - elizaLogger.info("Testing all endpoints"); - - await waitFor(1000); - - const birdeyeProvider = new BirdeyeProvider(runtime.cacheManager); - - // Sample data for testing - const sampleParams = { - token: "EKpQGSJtjMFqKZ9KQanSqYXRcF8fBopzLHYxdM65zcjm", - address: "MfDuWeqSHEqTFVYZ7LoexgAK9dxk7cy4DFJWjWMGVWa", - network: "solana", - list_address: "EKpQGSJtjMFqKZ9KQanSqYXRcF8fBopzLHYxdM65zcjm", - address_type: "token", - type: "1D", - tx_type: "all", - sort_type: "desc", - unixtime: 1234567890, - base_address: "EKpQGSJtjMFqKZ9KQanSqYXRcF8fBopzLHYxdM65zcjm", - quote_address: "EKpQGSJtjMFqKZ9KQanSqYXRcF8fBopzLHYxdM65zcjm", - time_to: 1672531199, // Unix timestamp - meme_platform_enabled: true, - time_frame: "1D", - sort_by: undefined, - list_addresses: "EKpQGSJtjMFqKZ9KQanSqYXRcF8fBopzLHYxdM65zcjm", - wallet: "MfDuWeqSHEqTFVYZ7LoexgAK9dxk7cy4DFJWjWMGVWa", - token_address: "EKpQGSJtjMFqKZ9KQanSqYXRcF8fBopzLHYxdM65zcjm", - pair: "samplePair", - before_time: 1672531199, - after_time: 1672331199, - }; - - // Test each fetch function - elizaLogger.info("fetchDefiSupportedNetworks"); - await birdeyeProvider.fetchDefiSupportedNetworks(); - elizaLogger.success("fetchDefiSupportedNetworks: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchDefiPrice"); - await birdeyeProvider.fetchDefiPrice({ ...sampleParams }); - elizaLogger.success("fetchDefiPrice: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchDefiPriceMultiple"); - await birdeyeProvider.fetchDefiPriceMultiple({ ...sampleParams }); - elizaLogger.success("fetchDefiPriceMultiple: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchDefiPriceMultiple_POST"); - await birdeyeProvider.fetchDefiPriceMultiple_POST({ - ...sampleParams, - }); - elizaLogger.success("fetchDefiPriceMultiple_POST: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchDefiPriceHistorical"); - await birdeyeProvider.fetchDefiPriceHistorical({ - ...sampleParams, - address_type: "token", - type: "1D", - }); - elizaLogger.success("fetchDefiPriceHistorical: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchDefiPriceHistoricalByUnixTime"); - await birdeyeProvider.fetchDefiPriceHistoricalByUnixTime({ - address: sampleParams.token, - }); - elizaLogger.success("fetchDefiPriceHistoricalByUnixTime: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchDefiTradesToken"); - await birdeyeProvider.fetchDefiTradesToken({ - address: sampleParams.token, - }); - elizaLogger.success("fetchDefiTradesToken: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchDefiTradesPair"); - await birdeyeProvider.fetchDefiTradesPair({ - address: sampleParams.token, - }); - elizaLogger.success("fetchDefiTradesPair: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchDefiTradesTokenSeekByTime"); - await birdeyeProvider.fetchDefiTradesTokenSeekByTime({ - address: sampleParams.token, - before_time: sampleParams.before_time, - }); - elizaLogger.success("fetchDefiTradesTokenSeekByTime: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchDefiTradesPairSeekByTime"); - await birdeyeProvider.fetchDefiTradesPairSeekByTime({ - address: sampleParams.token, - after_time: sampleParams.after_time, - }); - elizaLogger.success("fetchDefiTradesPairSeekByTime: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchDefiOHLCV"); - await birdeyeProvider.fetchDefiOHLCV({ - ...sampleParams, - type: "1D", - }); - elizaLogger.success("fetchDefiOHLCV: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchDefiOHLCVPair"); - await birdeyeProvider.fetchDefiOHLCVPair({ - ...sampleParams, - type: "1D", - }); - elizaLogger.success("fetchDefiOHLCVPair: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchDefiOHLCVBaseQuote"); - await birdeyeProvider.fetchDefiOHLCVBaseQuote({ - ...sampleParams, - type: "1D", - }); - elizaLogger.success("fetchDefiOHLCVBaseQuote: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchDefiPriceVolume"); - await birdeyeProvider.fetchDefiPriceVolume({ - address: sampleParams.token, - }); - elizaLogger.success("fetchDefiPriceVolume: SUCCESS!"); - await waitFor(500); - - // this endpoint is for enterprise users only - // elizaLogger.info("fetchDefiPriceVolumeMulti_POST"); - // await birdeyeProvider.fetchDefiPriceVolumeMulti_POST({ - // list_address: sampleParams.token, - // }); - // elizaLogger.success("fetchDefiPriceVolumeMulti_POST: SUCCESS!"); - // await waitFor(500); - - elizaLogger.info("fetchTokenList"); - await birdeyeProvider.fetchTokenList({ - ...sampleParams, - sort_by: "mc", - sort_type: "desc", - }); - elizaLogger.success("fetchTokenList: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchTokenSecurityByAddress"); - await birdeyeProvider.fetchTokenSecurityByAddress({ - ...sampleParams, - }); - elizaLogger.success("fetchTokenSecurityByAddress: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchTokenOverview"); - await birdeyeProvider.fetchTokenOverview({ ...sampleParams }); - elizaLogger.success("fetchTokenOverview: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchTokenCreationInfo"); - await birdeyeProvider.fetchTokenCreationInfo({ ...sampleParams }); - elizaLogger.success("fetchTokenCreationInfo: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchTokenTrending"); - await birdeyeProvider.fetchTokenTrending({ - ...sampleParams, - sort_by: "volume24hUSD", - sort_type: "desc", - }); - elizaLogger.success("fetchTokenTrending: SUCCESS!"); - await waitFor(500); - - // this endpoint is for enterprise users only - // elizaLogger.info("fetchTokenListV2_POST"); - // await birdeyeProvider.fetchTokenListV2_POST({}); - // elizaLogger.success("fetchTokenListV2_POST: SUCCESS!"); - // await waitFor(500); - - elizaLogger.info("fetchTokenNewListing"); - await birdeyeProvider.fetchTokenNewListing({ - time_to: new Date().getTime(), - meme_platform_enabled: true, - }); - elizaLogger.success("fetchTokenNewListing: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchTokenTopTraders"); - await birdeyeProvider.fetchTokenTopTraders({ - ...sampleParams, - time_frame: "24h", - sort_type: "asc", - sort_by: "volume", - }); - elizaLogger.success("fetchTokenTopTraders: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchTokenAllMarketsList"); - await birdeyeProvider.fetchTokenAllMarketsList({ - ...sampleParams, - time_frame: "12H", - sort_type: "asc", - sort_by: "volume24h", - }); - elizaLogger.success("fetchTokenAllMarketsList: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchTokenMetadataSingle"); - await birdeyeProvider.fetchTokenMetadataSingle({ ...sampleParams }); - elizaLogger.success("fetchTokenMetadataSingle: SUCCESS!"); - await waitFor(500); - - // this endpoint is for enterprise users only - // elizaLogger.info("fetchTokenMetadataMulti"); - // await birdeyeProvider.fetchTokenMetadataMulti({ ...sampleParams }); - // elizaLogger.success("fetchTokenMetadataMulti: SUCCESS!"); - // await waitFor(500); - - elizaLogger.info("fetchTokenMarketData"); - await birdeyeProvider.fetchTokenMarketData({ ...sampleParams }); - elizaLogger.success("fetchTokenMarketData: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchTokenTradeDataSingle"); - await birdeyeProvider.fetchTokenTradeDataSingle({ - ...sampleParams, - }); - elizaLogger.success("fetchTokenTradeDataSingle: SUCCESS!"); - await waitFor(500); - - // this endpoint is for enterprise users only - // elizaLogger.info("fetchTokenTradeDataMultiple"); - // await birdeyeProvider.fetchTokenTradeDataMultiple({ - // ...sampleParams, - // }); - // elizaLogger.success("fetchTokenTradeDataMultiple: SUCCESS!"); - // await waitFor(500); - - elizaLogger.info("fetchTokenHolders"); - await birdeyeProvider.fetchTokenHolders({ ...sampleParams }); - elizaLogger.success("fetchTokenHolders: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchTokenMintBurn"); - await birdeyeProvider.fetchTokenMintBurn({ - ...sampleParams, - sort_by: "block_time", - sort_type: "desc", - type: "all", - }); - elizaLogger.success("fetchTokenMintBurn: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchWalletSupportedNetworks"); - await birdeyeProvider.fetchWalletSupportedNetworks(); - elizaLogger.success("fetchWalletSupportedNetworks: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchWalletPortfolio"); - await birdeyeProvider.fetchWalletPortfolio({ ...sampleParams }); - elizaLogger.success("fetchWalletPortfolio: SUCCESS!"); - await waitFor(500); - - // elizaLogger.info("fetchWalletPortfolioMultichain"); - // await birdeyeProvider.fetchWalletPortfolioMultichain({ - // ...sampleParams, - // }); - // elizaLogger.success("fetchWalletPortfolioMultichain: SUCCESS!"); - // await waitFor(500); - - elizaLogger.info("fetchWalletTokenBalance"); - await birdeyeProvider.fetchWalletTokenBalance({ ...sampleParams }); - elizaLogger.success("fetchWalletTokenBalance: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchWalletTransactionHistory"); - await birdeyeProvider.fetchWalletTransactionHistory({ - ...sampleParams, - }); - elizaLogger.success("fetchWalletTransactionHistory: SUCCESS!"); - await waitFor(500); - - // elizaLogger.info("fetchWalletTransactionHistoryMultichain"); - // await birdeyeProvider.fetchWalletTransactionHistoryMultichain({ - // ...sampleParams, - // }); - // elizaLogger.success( - // "fetchWalletTransactionHistoryMultichain: SUCCESS!" - // ); - // await waitFor(500); - - elizaLogger.info("fetchWalletTransactionSimulate_POST"); - await birdeyeProvider.fetchWalletTransactionSimulate_POST({ - from: sampleParams.token, - to: sampleParams.token, - data: JSON.stringify({ test: "ok" }), - value: "100000", - }); - elizaLogger.success( - "fetchWalletTransactionSimulate_POST: SUCCESS!" - ); - await waitFor(500); - - elizaLogger.info("fetchTraderGainersLosers"); - await birdeyeProvider.fetchTraderGainersLosers({ - ...sampleParams, - type: "today", - sort_type: "asc", - }); - elizaLogger.success("fetchTraderGainersLosers: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchTraderTransactionsSeek"); - await birdeyeProvider.fetchTraderTransactionsSeek({ - ...sampleParams, - tx_type: "all", - before_time: undefined, - }); - elizaLogger.success("fetchTraderTransactionsSeek: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("fetchPairOverviewSingle"); - await birdeyeProvider.fetchPairOverviewSingle({ ...sampleParams }); - elizaLogger.success("fetchPairOverviewSingle: SUCCESS!"); - await waitFor(500); - - // this endpoint is for enterprise users only - // elizaLogger.info("fetchMultiPairOverview"); - // await birdeyeProvider.fetchMultiPairOverview({ ...sampleParams }); - // elizaLogger.success("fetchMultiPairOverview: SUCCESS!"); - // await waitFor(500); - - // this endpoint is for enterprise users only - // elizaLogger.info("fetchPairOverviewMultiple"); - // await birdeyeProvider.fetchPairOverviewMultiple({ - // ...sampleParams, - // }); - // elizaLogger.success("fetchPairOverviewMultiple: SUCCESS!"); - // await waitFor(500); - - elizaLogger.info("fetchSearchTokenMarketData"); - await birdeyeProvider.fetchSearchTokenMarketData({ - ...sampleParams, - sort_type: "asc", - }); - elizaLogger.success("fetchSearchTokenMarketData: SUCCESS!"); - await waitFor(500); - - elizaLogger.info("All endpoints tested successfully"); - callback?.({ text: "All endpoints tested successfully!" }); - return true; - } catch (error) { - console.error("Error in testAllEndpointsAction:", error.message); - callback?.({ text: `Error: ${error.message}` }); - return false; - } - }, - validate: async (_runtime: IAgentRuntime, message: Memory) => { - // only run if explicitly triggered by user - return message.content.text.includes("BIRDEYE_TEST_ALL_ENDPOINTS"); - }, - examples: [ - [ - { - user: "user", - content: { - text: "I want you to BIRDEYE_TEST_ALL_ENDPOINTS", - action: "BIRDEYE_TEST_ALL_ENDPOINTS", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-birdeye/src/actions/token-search-address.ts b/packages/plugin-birdeye/src/actions/token-search-address.ts deleted file mode 100644 index ccc67f517113e..0000000000000 --- a/packages/plugin-birdeye/src/actions/token-search-address.ts +++ /dev/null @@ -1,290 +0,0 @@ -import { - type Action, - type ActionExample, - elizaLogger, - formatTimestamp, - type HandlerCallback, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; -import { BirdeyeProvider } from "../birdeye"; -import type { - TokenMarketDataResponse, - TokenOverviewResponse, - TokenSecurityResponse, - TokenTradeDataSingleResponse, -} from "../types/api/token"; -import type { BaseAddress } from "../types/shared"; -import { - extractAddresses, - formatPercentChange, - formatPrice, - formatValue, - shortenAddress, -} from "../utils"; - -type TokenAddressSearchResult = { - overview: TokenOverviewResponse; - tradeData: TokenTradeDataSingleResponse; - security: TokenSecurityResponse; - marketData: TokenMarketDataResponse; -}; - -export const tokenSearchAddressAction = { - name: "TOKEN_SEARCH_ADDRESS", - similes: [ - "SEARCH_TOKEN_ADDRESS", - "FIND_TOKEN_ADDRESS", - "LOOKUP_TOKEN_ADDRESS", - "CHECK_TOKEN_ADDRESS", - "GET_TOKEN_BY_ADDRESS", - "TOKEN_ADDRESS_INFO", - "TOKEN_ADDRESS_LOOKUP", - "TOKEN_ADDRESS_SEARCH", - "TOKEN_ADDRESS_CHECK", - "TOKEN_ADDRESS_DETAILS", - "TOKEN_CONTRACT_SEARCH", - "TOKEN_CONTRACT_LOOKUP", - "TOKEN_CONTRACT_INFO", - "TOKEN_CONTRACT_CHECK", - "VERIFY_TOKEN_ADDRESS", - "VALIDATE_TOKEN_ADDRESS", - "GET_TOKEN_INFO", - "TOKEN_INFO", - "TOKEN_REPORT", - "TOKEN_ANALYSIS", - "TOKEN_OVERVIEW", - "TOKEN_SUMMARY", - "TOKEN_INSIGHT", - "TOKEN_DATA", - "TOKEN_STATS", - "TOKEN_METRICS", - "TOKEN_PROFILE", - "TOKEN_REVIEW", - "TOKEN_CHECK", - "TOKEN_LOOKUP", - "TOKEN_FIND", - "TOKEN_DISCOVER", - "TOKEN_EXPLORE", - ], - description: - "Search for detailed token information including security and trade data by address", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State, - _options: Record, - callback?: HandlerCallback - ) => { - try { - const provider = new BirdeyeProvider(runtime.cacheManager); - - // get all contract addresses from the message - const addresses = extractAddresses(message.content.text); - - elizaLogger.info( - `Searching Birdeye provider for ${addresses.length} addresses` - ); - - // for each symbol, do a search in Birdeye. This will return a list of token results that may be amatch to the token symbol. - const results: TokenAddressSearchResult[] = await Promise.all( - addresses.map(async ({ address, chain: addressChain }) => { - // address detection can't distinguish between evm chains, so we currently only do address search on ETH for EVM addresses. Future support will be added for other chains if the user requests it. - const chain = - addressChain === "evm" ? "ethereum" : addressChain; - - const [overview, marketData, security, tradeData] = - await Promise.all([ - provider.fetchTokenOverview( - { - address, - }, - { - headers: { - "x-chain": chain, - }, - } - ), - provider.fetchTokenMarketData( - { - address, - }, - { - headers: { - "x-chain": chain, - }, - } - ), - provider.fetchTokenSecurityByAddress( - { - address, - }, - { - headers: { - "x-chain": chain, - }, - } - ), - provider.fetchTokenTradeDataSingle( - { - address, - }, - { - headers: { - "x-chain": chain, - }, - } - ), - ]); - - return { - overview, - marketData, - security, - tradeData, - }; - }) - ); - - console.log(results); - - const completeResults = `I performed a search for the token addresses you requested and found the following results:\n\n${results - .map( - (result, i) => - `${formatTokenReport(addresses[i], i, result)}` - ) - .join("\n\n")}`; - - callback?.({ text: completeResults }); - return true; - } catch (error) { - console.error("Error in searchTokens handler:", error.message); - callback?.({ text: `Error: ${error.message}` }); - return false; - } - }, - validate: async (_runtime: IAgentRuntime, message: Memory) => { - const addresses = extractAddresses(message.content.text); - return addresses.length > 0; - }, - examples: [ - [ - { - user: "user", - content: { - text: "Search for 0x7fc66500c84a76ad7e9c93437bfc5ac33e2ddae9", - action: "TOKEN_SEARCH_ADDRESS", - }, - }, - { - user: "user", - content: { - text: "Look up contract So11111111111111111111111111111111111111112", - action: "TOKEN_ADDRESS_LOOKUP", - }, - }, - { - user: "user", - content: { - text: "Check this address: 0x1f9840a85d5af5bf1d1762f925bdaddc4201f984", - action: "CHECK_TOKEN_ADDRESS", - }, - }, - { - user: "user", - content: { - text: "Get info for 0x2260fac5e5542a773aa44fbcfedf7c193bc2c599", - action: "TOKEN_ADDRESS_INFO", - }, - }, - { - user: "user", - content: { - text: "Analyze contract 0x514910771af9ca656af840dff83e8264ecf986ca", - action: "TOKEN_CONTRACT_SEARCH", - }, - }, - ], - ] as ActionExample[][], -} as Action; - -// take all the details of the results and present to the user -const formatTokenReport = ( - address: BaseAddress, - _index: number, - result: TokenAddressSearchResult -) => { - let output = ""; - - if (result.overview?.data) { - output += "\n"; - output += "Token Overview:\n"; - output += `📝 Name: ${result.overview.data.name}\n`; - output += result.overview.data.symbol - ? `🔖 Symbol: ${result.overview.data.symbol.toUpperCase()}\n` - : ""; - output += `🔗 Address: ${address.address}\n`; - output += `🔢 Decimals: ${result.overview.data.decimals}\n`; - output += ""; - if (result.overview.data.extensions) { - const ext = result.overview.data.extensions; - output += "🔗 Links & Info:\n"; - if (ext.website) output += ` • Website: ${ext.website}\n`; - if (ext.twitter) output += ` • Twitter: ${ext.twitter}\n`; - if (ext.telegram) output += ` • Telegram: ${ext.telegram}\n`; - if (ext.discord) output += ` • Discord: ${ext.discord}\n`; - if (ext.medium) output += ` • Medium: ${ext.medium}\n`; - if (ext.coingeckoId) - output += ` • CoinGecko ID: ${ext.coingeckoId}\n`; - if (ext.serumV3Usdc) - output += ` • Serum V3 USDC: ${ext.serumV3Usdc}\n`; - if (ext.serumV3Usdt) - output += ` • Serum V3 USDT: ${ext.serumV3Usdt}\n`; - } - output += `💧 Liquidity: ${formatValue(result.overview.data.liquidity)}\n`; - output += `⏰ Last Trade Time: ${formatTimestamp(new Date(result.overview.data.lastTradeHumanTime).getTime() / 1000)}\n`; - output += `💵 Price: ${formatPrice(result.overview.data.price)}\n`; - output += `📜 Description: ${result.overview.data.extensions?.description ?? "N/A"}\n`; - } - - if (result.marketData?.data) { - output += "\n"; - output += "Market Data:\n"; - output += `💧 Liquidity: ${formatValue(result.marketData.data.liquidity)}\n`; - output += `💵 Price: ${formatPrice(result.marketData.data.price)}\n`; - output += `📦 Supply: ${formatValue(result.marketData.data.supply)}\n`; - output += `💰 Market Cap: ${formatValue(result.marketData.data.marketcap)}\n`; - output += `🔄 Circulating Supply: ${formatValue(result.marketData.data.circulating_supply)}\n`; - output += `💰 Circulating Market Cap: ${formatValue(result.marketData.data.circulating_marketcap)}\n`; - } - - if (result.tradeData?.data) { - output += "\n"; - output += "Trade Data:\n"; - output += `👥 Holders: ${result.tradeData.data.holder}\n`; - output += `📊 Unique Wallets (24h): ${result.tradeData.data.unique_wallet_24h}\n`; - output += `📉 Price Change (24h): ${formatPercentChange(result.tradeData.data.price_change_24h_percent)}\n`; - output += `💸 Volume (24h USD): ${formatValue(result.tradeData.data.volume_24h_usd)}\n`; - output += `💵 Current Price: $${formatPrice(result.tradeData.data.price)}\n`; - } - - if (result.security?.data) { - output += "\n"; - output += "Ownership Distribution:\n"; - output += `🏠 Owner Address: ${shortenAddress(result.security.data.ownerAddress)}\n`; - output += `👨‍💼 Creator Address: ${shortenAddress(result.security.data.creatorAddress)}\n`; - output += `📦 Total Supply: ${formatValue(result.security.data.totalSupply)}\n`; - output += result.security.data.proxied - ? `🌿 Mintable: ${result.security.data.mintable ?? "N/A"}\n` - : ""; - output += result.security.data.proxy - ? `🔄 Proxied: ${result.security.data.proxy ?? "N/A"}\n` - : ""; - output += result.security.data.securityChecks - ? `🔍 Security Checks: ${JSON.stringify(result.security.data.securityChecks)}\n` - : ""; - } - - return output ?? `No results found for ${address.address}`; -}; diff --git a/packages/plugin-birdeye/src/actions/token-search-symbol.ts b/packages/plugin-birdeye/src/actions/token-search-symbol.ts deleted file mode 100644 index b45e878bf787e..0000000000000 --- a/packages/plugin-birdeye/src/actions/token-search-symbol.ts +++ /dev/null @@ -1,226 +0,0 @@ -import { - type Action, - type ActionExample, - elizaLogger, - type HandlerCallback, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; -import { BirdeyeProvider } from "../birdeye"; -import type { TokenResult } from "../types/api/search"; -import { - extractSymbols, - formatPercentChange, - formatPrice, - formatValue, -} from "../utils"; - -// "strict" requires a $ prefix and will match $SOL, $ai16z, $BTC, etc. -// "loose" will match $SOL, SOL, SOLANA, etc. and does not require a $ prefix but may interpret any other acronyms as symbols to search for -const SYMBOL_SEARCH_MODE = "strict"; - -export const tokenSearchSymbolAction = { - name: "TOKEN_SEARCH_SYMBOL", - similes: [ - "SEARCH_TOKEN_SYMBOL", - "FIND_TOKEN_SYMBOL", - "LOOKUP_TOKEN_SYMBOL", - "CHECK_TOKEN_SYMBOL", - "GET_TOKEN_BY_SYMBOL", - "SYMBOL_SEARCH", - "SYMBOL_LOOKUP", - "SYMBOL_CHECK", - "TOKEN_SYMBOL_INFO", - "TOKEN_SYMBOL_DETAILS", - "TOKEN_SYMBOL_LOOKUP", - "TOKEN_SYMBOL_SEARCH", - "TOKEN_SYMBOL_CHECK", - "TOKEN_SYMBOL_QUERY", - "TOKEN_SYMBOL_FIND", - "GET_TOKEN_INFO", - "TOKEN_INFO", - "TOKEN_REPORT", - "TOKEN_ANALYSIS", - "TOKEN_OVERVIEW", - "TOKEN_SUMMARY", - "TOKEN_INSIGHT", - "TOKEN_DATA", - "TOKEN_STATS", - "TOKEN_METRICS", - "TOKEN_PROFILE", - "TOKEN_REVIEW", - "TOKEN_CHECK", - "TOKEN_LOOKUP", - "TOKEN_FIND", - "TOKEN_DISCOVER", - "TOKEN_EXPLORE", - ], - description: - "Search for detailed token information including security and trade data by symbol", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State, - _options: Record, - callback?: HandlerCallback - ) => { - try { - const provider = new BirdeyeProvider(runtime.cacheManager); - - // get all symbols from the message that match (i.e. $SOL, $ETH, $BTC, etc.). If you want to match more loosely, use "loose" instead of "strict" and it will match $SOL, SOL, SOLANA, etc. - const symbols = extractSymbols( - message.content.text, - SYMBOL_SEARCH_MODE - ); - - elizaLogger.info( - `Searching Birdeye provider for ${symbols.length} symbols` - ); - - // for each symbol, do a search in Birdeye. This will return a list of token results that may be amatch to the token symbol. - const results = await Promise.all( - symbols.map((symbol) => - provider.fetchSearchTokenMarketData({ - keyword: symbol, - sort_by: "volume_24h_usd", - sort_type: "desc", - chain: "all", - limit: 5, - }) - ) - ); - - // get filter the results to only include the token results and then filter the results to only include the ones that match the symbol - const validResults = results.map((r, i) => - r.data.items - .filter((item) => item.type === "token" && item.result) - .flatMap((item) => - (item.result as TokenResult[]).filter( - (r) => - r.symbol?.toLowerCase() === - symbols[i].toLowerCase() - ) - ) - ) as TokenResult[][]; - - if (validResults.length === 0) { - return true; - } - - const completeResults = `I performed a search for the token symbols you requested and found the following results (for more details search by contract address):\n\n${validResults - .map( - (result, i) => - `${formatTokenSummary(symbols[i], i, result)}` - ) - .join("\n")}`; - - callback?.({ text: completeResults }); - return true; - } catch (error) { - console.error("Error in searchTokens handler:", error.message); - callback?.({ text: `Error: ${error.message}` }); - return false; - } - }, - validate: async (_runtime: IAgentRuntime, message: Memory) => { - const symbols = extractSymbols( - message.content.text, - SYMBOL_SEARCH_MODE - ); - return symbols.length > 0; - }, - examples: [ - [ - { - user: "user", - content: { - text: "Search for $SOL and $ETH", - action: "SEARCH_TOKENS", - }, - }, - { - user: "user", - content: { - text: "Find information about $BTC", - action: "TOKEN_SEARCH", - }, - }, - { - user: "user", - content: { - text: "Look up $WETH token", - action: "LOOKUP_TOKENS", - }, - }, - { - user: "user", - content: { - text: "Tell me about SOL", - action: "CHECK_TOKEN", - }, - }, - { - user: "user", - content: { - text: "Give me details on $ADA", - action: "TOKEN_DETAILS", - }, - }, - { - user: "user", - content: { - text: "What can you tell me about $DOGE?", - action: "TOKEN_INFO", - }, - }, - { - user: "user", - content: { - text: "I need a report on $XRP", - action: "TOKEN_REPORT", - }, - }, - { - user: "user", - content: { - text: "Analyze $BNB for me", - action: "TOKEN_ANALYSIS", - }, - }, - { - user: "user", - content: { - text: "Overview of $LTC", - action: "TOKEN_OVERVIEW", - }, - }, - ], - ] as ActionExample[][], -} as Action; - -const formatTokenSummary = ( - symbol: string, - _index: number, - tokens: TokenResult[] -) => { - return tokens - .map((token, i) => { - let output = ""; - if (i === 0) { - output += `Search Results for ${symbol}:\n\n`; - } - output += `Search Result #${tokens.length > 0 ? i + 1 : ""}:\n`; - output += `🔖 Symbol: $${token.symbol.toUpperCase()}\n`; - output += `🔗 Address: ${token.address}\n`; - output += `🌐 Network: ${token.network.toUpperCase()}\n`; - output += `💵 Price: ${formatPrice(token.price)} (${formatPercentChange(token.price_change_24h_percent)})\n`; - output += `💸 Volume (24h USD): ${formatValue(token.volume_24h_usd)}\n`; - output += token.market_cap - ? `💰 Market Cap: ${formatValue(token.market_cap)}\n` - : ""; - output += token.fdv ? `🌊 FDV: ${formatValue(token.fdv)}\n` : ""; - return output; - }) - .join("\n"); -}; diff --git a/packages/plugin-birdeye/src/actions/wallet-search-address.ts b/packages/plugin-birdeye/src/actions/wallet-search-address.ts deleted file mode 100644 index 5484eff15cead..0000000000000 --- a/packages/plugin-birdeye/src/actions/wallet-search-address.ts +++ /dev/null @@ -1,177 +0,0 @@ -import { - type Action, - type ActionExample, - elizaLogger, - type HandlerCallback, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; -import { BirdeyeProvider } from "../birdeye"; -import type { WalletPortfolioResponse } from "../types/api/wallet"; -import type { BaseAddress } from "../types/shared"; -import { extractAddresses } from "../utils"; - -export const walletSearchAddressAction = { - name: "WALLET_SEARCH_ADDRESS", - similes: [ - "SEARCH_WALLET_ADDRESS", - "FIND_WALLET_ADDRESS", - "LOOKUP_WALLET_ADDRESS", - "CHECK_WALLET_ADDRESS", - "GET_WALLET_BY_ADDRESS", - "WALLET_ADDRESS_INFO", - "WALLET_ADDRESS_LOOKUP", - "WALLET_ADDRESS_SEARCH", - "WALLET_ADDRESS_CHECK", - "WALLET_ADDRESS_DETAILS", - "WALLET_CONTRACT_SEARCH", - "WALLET_CONTRACT_LOOKUP", - "WALLET_CONTRACT_INFO", - "WALLET_CONTRACT_CHECK", - "VERIFY_WALLET_ADDRESS", - "VALIDATE_WALLET_ADDRESS", - "GET_WALLET_INFO", - "WALLET_INFO", - "WALLET_REPORT", - "WALLET_ANALYSIS", - "WALLET_OVERVIEW", - "WALLET_SUMMARY", - "WALLET_INSIGHT", - "WALLET_DATA", - "WALLET_STATS", - "WALLET_METRICS", - "WALLET_PROFILE", - "WALLET_REVIEW", - "WALLET_CHECK", - "WALLET_LOOKUP", - "WALLET_FIND", - "WALLET_DISCOVER", - "WALLET_EXPLORE", - ], - description: - "Search for detailed wallet information including portfolio and transaction data by address", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state: State, - _options: Record, - callback?: HandlerCallback - ) => { - try { - const provider = new BirdeyeProvider(runtime.cacheManager); - - // get all wallet addresses from the message - const addresses = extractAddresses(message.content.text); - - elizaLogger.info( - `Searching Birdeye provider for ${addresses.length} addresses` - ); - - // for each symbol, do a search in Birdeye. This will return a list of token results that may be amatch to the token symbol. - const results: WalletPortfolioResponse[] = await Promise.all( - addresses.map(async ({ address, chain: addressChain }) => { - // address detection can't distinguish between evm chains, so we currently only do address search on ETH for EVM addresses. Future support will be added for other chains if the user requests it. - const chain = - addressChain === "evm" ? "ethereum" : addressChain; - return provider.fetchWalletPortfolio( - { - wallet: address, - }, - { - headers: { - chain: chain, - }, - } - ); - }) - ); - - console.log(results); - - const completeResults = `I performed a search for the wallet addresses you requested and found the following results:\n\n${results - .map( - (result, i) => - `${formatWalletReport(addresses[i], results.length, i, result)}` - ) - .join("\n\n")}`; - - callback?.({ text: completeResults }); - return true; - } catch (error) { - console.error("Error in searchTokens handler:", error.message); - callback?.({ text: `Error: ${error.message}` }); - return false; - } - }, - validate: async (_runtime: IAgentRuntime, message: Memory) => { - const addresses = extractAddresses(message.content.text); - return addresses.length > 0; - }, - examples: [ - [ - { - user: "user", - content: { - text: "Search wallet 0x1234567890abcdef1234567890abcdef12345678", - action: "WALLET_SEARCH_ADDRESS", - }, - }, - { - user: "user", - content: { - text: "Look up wallet address HN7cABqLq46Es1jh92dQQisAq662SmxELLLsHHe4YWrH", - action: "WALLET_ADDRESS_LOOKUP", - }, - }, - { - user: "user", - content: { - text: "Check this address: 0xd8dA6BF26964aF9D7eEd9e03E53415D37aA96045", - action: "CHECK_WALLET_ADDRESS", - }, - }, - { - user: "user", - content: { - text: "Get wallet info for 5yBYpGQRHPz4i5FkVnP9h9VTJBMnwgHRe5L5gw2bwp9q", - action: "WALLET_INFO", - }, - }, - { - user: "user", - content: { - text: "Show me portfolio for 0x3cD751E6b0078Be393132286c442345e5DC49699", - action: "WALLET_OVERVIEW", - }, - }, - ], - ] as ActionExample[][], -} as Action; - -// take all the details of the results and present to the user -const formatWalletReport = ( - address: BaseAddress, - totalResults: number, - index: number, - result: WalletPortfolioResponse -) => { - const tokens = result.data.items.slice(0, 10) || []; - const totalValue = tokens.reduce( - (sum, token) => sum + (token.valueUsd || 0), - 0 - ); - - let header = `Wallet Result ${totalResults > 1 ? `#${index + 1}` : ""}\n`; - header += `👛 Address ${address.address}*\n`; - header += `💰 Total Value: $${totalValue.toLocaleString()}\n`; - header += "🔖 Top Holdings:"; - const tokenList = tokens - .map( - (token) => - `• $${token.symbol.toUpperCase()}: $${token.valueUsd?.toLocaleString()} (${token.uiAmount?.toFixed(4)} tokens)` - ) - .join("\n"); - - return `${header}\n${tokenList}`; -}; diff --git a/packages/plugin-birdeye/src/birdeye.ts b/packages/plugin-birdeye/src/birdeye.ts deleted file mode 100644 index a89e6e453dc6d..0000000000000 --- a/packages/plugin-birdeye/src/birdeye.ts +++ /dev/null @@ -1,803 +0,0 @@ -import { elizaLogger, type ICacheManager, settings } from "@elizaos/core"; -import NodeCache from "node-cache"; -import * as path from "node:path"; -import { - API_BASE_URL, - BIRDEYE_ENDPOINTS, - DEFAULT_MAX_RETRIES, - DEFAULT_SUPPORTED_SYMBOLS, - RETRY_DELAY_MS, -} from "./constants"; -import type { BirdeyeApiParams, BirdeyeApiResponse } from "./types/api/common"; -import type { - BaseQuoteParams, - BaseQuoteResponse, - DefiHistoryPriceParams, - DefiHistoryPriceResponse, - DefiMultiPriceParams, - DefiMultiPriceParamsPOST, - DefiMultiPriceResponse, - DefiNetworksResponse, - DefiPriceParams, - DefiPriceResponse, - DefiTradesTokenParams, - DefiTradesTokenResponse, - HistoricalPriceUnixParams, - HistoricalPriceUnixResponse, - MultiPriceVolumeParams, - MultiPriceVolumeResponse, - OHLCVParams, - OHLCVResponse, - PriceVolumeParams, - PriceVolumeResponse, -} from "./types/api/defi"; -import type { - OHLCVPairParams, - OHLCVPairResponse, - PairOverviewMultiParams, - PairOverviewMultiResponse, - PairOverviewSingleParams, - PairOverviewSingleResponse, -} from "./types/api/pair"; -import type { - TokenMarketSearchParams, - TokenMarketSearchResponse, -} from "./types/api/search"; -import type { - AllMarketsParams, - AllMarketsResponse, - MintBurnParams, - MintBurnResponse, - NewListingParams, - NewListingResponse, - TokenCreationInfoParams, - TokenCreationInfoResponse, - TokenHoldersParams, - TokenHoldersResponse, - TokenListParams, - TokenListResponse, - TokenListV2Response, - TokenMarketDataParams, - TokenMarketDataResponse, - TokenMetadataMultiParams, - TokenMetadataMultiResponse, - TokenMetadataSingleParams, - TokenMetadataSingleResponse, - TokenOverviewParams, - TokenOverviewResponse, - TokenSecurityParams, - TokenSecurityResponse, - TokenTradeDataMultiParams, - TokenTradeDataMultiResponse, - TokenTradeDataSingleParams, - TokenTradeDataSingleResponse, - TokenTrendingParams, - TokenTrendingResponse, - TopTradersParams, - TopTradersResponse, -} from "./types/api/token"; -import type { - GainersLosersParams, - GainersLosersResponse, - TraderTransactionsSeekParams, - TraderTransactionsSeekResponse, -} from "./types/api/trader"; -import type { - WalletPortfolioMultichainParams, - WalletPortfolioMultichainResponse, - WalletPortfolioParams, - WalletPortfolioResponse, - WalletSimulationParams, - WalletSimulationResponse, - WalletTokenBalanceParams, - WalletTokenBalanceResponse, - WalletTransactionHistoryMultichainParams, - WalletTransactionHistoryMultichainResponse, - WalletTransactionHistoryParams, - WalletTransactionHistoryResponse, -} from "./types/api/wallet"; -import { convertToStringParams, waitFor } from "./utils"; - -type FetchParams = T & { - headers?: Record; -}; - -class BaseCachedProvider { - private cache: NodeCache; - - constructor( - private cacheManager: ICacheManager, - private cacheKey, - ttl?: number - ) { - this.cache = new NodeCache({ stdTTL: ttl || 300 }); - } - - private readFsCache(key: string): Promise { - return this.cacheManager.get(path.join(this.cacheKey, key)); - } - - private writeFsCache(key: string, data: T): Promise { - return this.cacheManager.set(path.join(this.cacheKey, key), data, { - expires: Date.now() + 5 * 60 * 1000, - }); - } - - public async readFromCache(key: string): Promise { - // get memory cache first - const val = this.cache.get(key); - if (val) { - return val; - } - - const fsVal = await this.readFsCache(key); - if (fsVal) { - // set to memory cache - this.cache.set(key, fsVal); - } - - return fsVal; - } - - public async writeToCache(key: string, val: T): Promise { - // Set in-memory cache - this.cache.set(key, val); - - // Write to file-based cache - await this.writeFsCache(key, val); - } -} - -export class BirdeyeProvider extends BaseCachedProvider { - private symbolMap: Record; - private maxRetries: number; - - constructor( - cacheManager: ICacheManager, - symbolMap?: Record, - maxRetries?: number - ) { - super(cacheManager, "birdeye/data"); - this.symbolMap = symbolMap || DEFAULT_SUPPORTED_SYMBOLS; - this.maxRetries = maxRetries || DEFAULT_MAX_RETRIES; - } - - /* - * COMMON FETCH FUNCTIONS - */ - private async fetchWithRetry( - url: string, - options: RequestInit = {} - ): Promise { - let attempts = 0; - - // allow the user to override the chain - const chain = - options.headers?.["x-chain"] || settings.BIRDEYE_CHAIN || "solana"; - - while (attempts < this.maxRetries) { - attempts++; - try { - const resp = await fetch(url, { - ...options, - headers: { - Accept: "application/json", - "Content-Type": "application/json", - "x-chain": chain, - "X-API-KEY": settings.BIRDEYE_API_KEY || "", - ...options.headers, - }, - }); - - if (!resp.ok) { - const errorText = await resp.text(); - throw new Error( - `HTTP error! status: ${resp.status}, message: ${errorText}` - ); - } - - const rawData = await resp.json(); - // If the response already has data and success fields, return it - if ( - rawData.data !== undefined && - rawData.success !== undefined - ) { - return rawData as T; - } - // Otherwise wrap the response in the expected format - return { - data: rawData, - success: true, - } as T; - } catch (error) { - if (attempts === this.maxRetries) { - // failed after all - throw error; - } - await waitFor(RETRY_DELAY_MS); - } - } - } - - private async fetchWithCacheAndRetry({ - url, - params, - headers, - method = "GET", - }: { - url: string; - params?: BirdeyeApiParams; - headers?: Record; - method?: "GET" | "POST"; - }): Promise { - const stringParams = convertToStringParams(params); - const fullUrl = `${API_BASE_URL}${url}`; - const cacheKey = - method === "GET" - ? `${url}?${new URLSearchParams(stringParams)}` - : `${url}:${JSON.stringify(params)}`; - - const val = await this.readFromCache(cacheKey); - if (val) return val as T; - - const urlWithParams = - method === "GET" && params - ? `${fullUrl}?${new URLSearchParams(stringParams)}` - : fullUrl; - - elizaLogger.info(`Birdeye fetch: ${urlWithParams}`); - - const data = await this.fetchWithRetry(urlWithParams, { - method, - headers, - ...(method === "POST" && - params && { body: JSON.stringify(params) }), - }); - - await this.writeToCache(cacheKey, data); - return data as T; - } - - /* - * DEFI FETCH FUNCTIONS - */ - - // Get a list of all supported networks. - public async fetchDefiSupportedNetworks() { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.networks, - }); - } - - // Get price update of a token. - public async fetchDefiPrice( - params: DefiPriceParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.price, - params, - headers: options.headers, - }); - } - - // Get price updates of multiple tokens in a single API call. Maximum 100 tokens - public async fetchDefiPriceMultiple( - params: DefiMultiPriceParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.price_multi, - params, - headers: options.headers, - }); - } - - // Get price updates of multiple tokens in a single API call. Maximum 100 tokens - public async fetchDefiPriceMultiple_POST( - params: DefiMultiPriceParamsPOST, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.price_multi_POST, - params, - headers: options.headers, - method: "POST", - }); - } - - // Get historical price line chart of a token. - public async fetchDefiPriceHistorical( - params: DefiHistoryPriceParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.history_price, - params, - headers: options.headers, - }); - } - - // Get historical price by unix timestamp - public async fetchDefiPriceHistoricalByUnixTime( - params: HistoricalPriceUnixParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.historical_price_unix, - params, - headers: options.headers, - }); - } - - // Get list of trades of a certain token. - public async fetchDefiTradesToken( - params: DefiTradesTokenParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.trades_token, - params, - headers: options.headers, - }); - } - - // Get list of trades of a certain pair or market. - public async fetchDefiTradesPair( - params: DefiTradesTokenParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.trades_token, - params, - headers: options.headers, - }); - } - - // Get list of trades of a token with time bound option. - public async fetchDefiTradesTokenSeekByTime( - params: DefiTradesTokenParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.trades_token_seek, - params, - headers: options.headers, - }); - } - - // Get list of trades of a certain pair or market with time bound option. - public async fetchDefiTradesPairSeekByTime( - params: DefiTradesTokenParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.trades_pair_seek, - params, - headers: options.headers, - }); - } - - // Get OHLCV price of a token. - public async fetchDefiOHLCV( - params: OHLCVParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.ohlcv, - params, - headers: options.headers, - }); - } - - // Get OHLCV price of a pair. - public async fetchDefiOHLCVPair( - params: OHLCVPairParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.ohlcv_pair, - params, - headers: options.headers, - }); - } - - // Get OHLCV price of a base-quote pair. - public async fetchDefiOHLCVBaseQuote( - params: BaseQuoteParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.ohlcv_base_quote, - params, - headers: options.headers, - }); - } - - // Get price and volume of a token. - public async fetchDefiPriceVolume( - params: PriceVolumeParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.price_volume, - params, - headers: options.headers, - }); - } - - // Get price and volume updates of maximum 50 tokens - public async fetchDefiPriceVolumeMulti_POST( - params: MultiPriceVolumeParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.price_volume_multi_POST, - params, - headers: options.headers, - method: "POST", - }); - } - - /* - * TOKEN FETCH FUNCTIONS - */ - - // Get token list of any supported chains. - public async fetchTokenList( - params: TokenListParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.list_all, - params, - headers: options.headers, - }); - } - - // Get token security of any supported chains. - public async fetchTokenSecurityByAddress( - params: TokenSecurityParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.security, - params, - headers: options.headers, - }); - } - - // Get overview of a token. - public async fetchTokenOverview( - params: TokenOverviewParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.overview, - params, - headers: options.headers, - }); - } - - // Get creation info of token - public async fetchTokenCreationInfo( - params: TokenCreationInfoParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.creation_info, - params, - headers: options.headers, - }); - } - - // Retrieve a dynamic and up-to-date list of trending tokens based on specified sorting criteria. - public async fetchTokenTrending( - params?: TokenTrendingParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.trending, - params, - headers: options.headers, - }); - } - - // This endpoint facilitates the retrieval of a list of tokens on a specified blockchain network. This upgraded version is exclusive to business and enterprise packages. By simply including the header for the requested blockchain without any query parameters, business and enterprise users can get the full list of tokens on the specified blockchain in the URL returned in the response. This removes the need for the limit response of the previous version and reduces the workload of making multiple calls. - public async fetchTokenListV2_POST( - params: FetchParams> - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.list_all_v2_POST, - params, - headers: params.headers, - method: "POST", - }); - } - - // Get newly listed tokens of any supported chains. - public async fetchTokenNewListing( - params?: NewListingParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.new_listing, - params, - headers: options?.headers, - }); - } - - // Get top traders of given token. - public async fetchTokenTopTraders( - params: TopTradersParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.top_traders, - params, - headers: options.headers, - }); - } - - // The API provides detailed information about the markets for a specific cryptocurrency token on a specified blockchain. Users can retrieve data for one or multiple markets related to a single token. This endpoint requires the specification of a token address and the blockchain to filter results. Additionally, it supports optional query parameters such as offset, limit, and required sorting by liquidity or sort type (ascending or descending) to refine the output. - public async fetchTokenAllMarketsList( - params: AllMarketsParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.all_markets, - params, - headers: options.headers, - }); - } - - // Get metadata of single token - public async fetchTokenMetadataSingle( - params: TokenMetadataSingleParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.metadata_single, - params, - headers: options.headers, - }); - } - - // Get metadata of multiple tokens - public async fetchTokenMetadataMulti( - params: TokenMetadataMultiParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.metadata_multi, - params, - headers: options.headers, - }); - } - - // Get market data of single token - public async fetchTokenMarketData( - params: TokenMarketDataParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.market_data, - params, - headers: options.headers, - }); - } - - // Get trade data of single token - public async fetchTokenTradeDataSingle( - params: TokenTradeDataSingleParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.trade_data_single, - params, - headers: options.headers, - }); - } - - // Get trade data of multiple tokens - public async fetchTokenTradeDataMultiple( - params: TokenTradeDataMultiParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.trade_data_multi, - params, - headers: options.headers, - }); - } - - // Get top holder list of the given token - public async fetchTokenHolders( - params: TokenHoldersParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.holders, - params, - headers: options.headers, - }); - } - - // Get mint/burn transaction list of the given token. Only support solana currently - public async fetchTokenMintBurn( - params: MintBurnParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.token.mint_burn, - params, - headers: options.headers, - }); - } - - /* - * WALLET FETCH FUNCTIONS - */ - public async fetchWalletSupportedNetworks( - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.defi.networks, - headers: options.headers, - }); - } - - public async fetchWalletPortfolio( - params: WalletPortfolioParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.wallet.portfolio, - params, - headers: options.headers, - }); - } - - /** - * @deprecated This endpoint will be decommissioned on Feb 1st, 2025. - */ - public async fetchWalletPortfolioMultichain( - params: WalletPortfolioMultichainParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.wallet.portfolio_multichain, - params, - headers: options.headers, - }); - } - - public async fetchWalletTokenBalance( - params: WalletTokenBalanceParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.wallet.token_balance, - params, - headers: options.headers, - }); - } - - public async fetchWalletTransactionHistory( - params: WalletTransactionHistoryParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.wallet.transaction_history, - params, - headers: options.headers, - }); - } - - /** - * @deprecated This endpoint will be decommissioned on Feb 1st, 2025. - */ - public async fetchWalletTransactionHistoryMultichain( - params: WalletTransactionHistoryMultichainParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry( - { - url: BIRDEYE_ENDPOINTS.wallet.transaction_history_multichain, - params, - headers: options.headers, - } - ); - } - - public async fetchWalletTransactionSimulate_POST( - params: WalletSimulationParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.wallet.transaction_simulation_POST, - params, - headers: options.headers, - method: "POST", - }); - } - - /* - * TRADER FETCH FUNCTIONS - */ - - // The API provides detailed information top gainers/losers - public async fetchTraderGainersLosers( - params: GainersLosersParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.trader.gainers_losers, - params, - headers: options.headers, - }); - } - - // Get list of trades of a trader with time bound option. - public async fetchTraderTransactionsSeek( - params: TraderTransactionsSeekParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.trader.trades_seek, - params, - headers: options.headers, - }); - } - - /* - * PAIR FETCH FUNCTIONS - */ - public async fetchPairOverviewSingle( - params: PairOverviewSingleParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.pair.overview_single, - params, - headers: options.headers, - }); - } - - // Get overview of multiple pairs - public async fetchMultiPairOverview( - params: PairOverviewMultiParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.pair.overview_multi, - params, - headers: options.headers, - }); - } - - public async fetchPairOverviewMultiple( - params: PairOverviewMultiParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.pair.overview_multi, - params, - headers: options.headers, - }); - } - - /* - * SEARCH FETCH FUNCTIONS - */ - public async fetchSearchTokenMarketData( - params: TokenMarketSearchParams, - options: { headers?: Record } = {} - ) { - return this.fetchWithCacheAndRetry({ - url: BIRDEYE_ENDPOINTS.search.token_market, - params, - headers: options.headers, - }); - } -} diff --git a/packages/plugin-birdeye/src/constants.ts b/packages/plugin-birdeye/src/constants.ts deleted file mode 100644 index e9b74ad185639..0000000000000 --- a/packages/plugin-birdeye/src/constants.ts +++ /dev/null @@ -1,71 +0,0 @@ -export const DEFAULT_MAX_RETRIES = 3; - -export const DEFAULT_SUPPORTED_SYMBOLS = { - SOL: "So11111111111111111111111111111111111111112", - BTC: "qfnqNqs3nCAHjnyCgLRDbBtq4p2MtHZxw8YjSyYhPoL", - ETH: "7vfCXTUXx5WJV5JADk17DUJ4ksgau7utNKj4b963voxs", - Example: "2weMjPLLybRMMva1fM3U31goWWrCpF59CHWNhnCJ9Vyh", -}; - -export const API_BASE_URL = "https://public-api.birdeye.so"; - -export const RETRY_DELAY_MS = 2_000; - -export const BIRDEYE_ENDPOINTS = { - defi: { - networks: "/defi/networks", // https://docs.birdeye.so/reference/get_defi-networks - price: "/defi/price", // https://docs.birdeye.so/reference/get_defi-price - price_multi: "/defi/multi_price", // https://docs.birdeye.so/reference/get_defi-multi-price - price_multi_POST: "/defi/multi_price", // https://docs.birdeye.so/reference/post_defi-multi-price - history_price: "/defi/history_price", // https://docs.birdeye.so/reference/get_defi-history-price - historical_price_unix: "/defi/historical_price_unix", // https://docs.birdeye.so/reference/get_defi-historical-price-unix - trades_token: "/defi/txs/token", // https://docs.birdeye.so/reference/get_defi-txs-token - trades_pair: "/defi/txs/pair", // https://docs.birdeye.so/reference/get_defi-txs-pair - trades_token_seek: "/defi/txs/token/seek_by_time", // https://docs.birdeye.so/reference/get_defi-txs-token-seek-by-time - trades_pair_seek: "/defi/txs/pair/seek_by_time", // https://docs.birdeye.so/reference/get_defi-txs-pair-seek-by-time - ohlcv: "/defi/ohlcv", // https://docs.birdeye.so/reference/get_defi-ohlcv - ohlcv_pair: "/defi/ohlcv/pair", // https://docs.birdeye.so/reference/get_defi-ohlcv-pair - ohlcv_base_quote: "/defi/ohlcv/base_quote", // https://docs.birdeye.so/reference/get_defi-ohlcv-base-quote - price_volume: "/defi/price_volume/single", // https://docs.birdeye.so/reference/get_defi-price-volume-single - price_volume_multi: "/defi/price_volume/multi", // https://docs.birdeye.so/reference/get_defi-price-volume-multi - price_volume_multi_POST: "/defi/price_volume/multi", // https://docs.birdeye.so/reference/post_defi-price-volume-multi - }, - token: { - list_all: "/defi/tokenlist", // https://docs.birdeye.so/reference/get_defi-tokenlist - security: "/defi/token_security", // https://docs.birdeye.so/reference/get_defi-token-security - overview: "/defi/token_overview", // https://docs.birdeye.so/reference/get_defi-token-overview - creation_info: "/defi/token_creation_info", // https://docs.birdeye.so/reference/get_defi-token-creation-info - trending: "/defi/token_trending", // https://docs.birdeye.so/reference/get_defi-token-trending - list_all_v2_POST: "/defi/v2/tokens/all", // https://docs.birdeye.so/reference/post_defi-v2-tokens-all - new_listing: "/defi/v2/tokens/new_listing", // https://docs.birdeye.so/reference/get_defi-v2-tokens-new-listing - top_traders: "/defi/v2/tokens/top_traders", // https://docs.birdeye.so/reference/get_defi-v2-tokens-top-traders - all_markets: "/defi/v2/markets", // https://docs.birdeye.so/reference/get_defi-v2-markets - metadata_single: "/defi/v3/token/meta-data/single", // https://docs.birdeye.so/reference/get_defi-v3-token-meta-data-single - metadata_multi: "/defi/v3/token/meta-data/multiple", // https://docs.birdeye.so/reference/get_defi-v3-token-meta-data-multiple - market_data: "/defi/v3/token/market-data", // https://docs.birdeye.so/reference/get_defi-v3-token-market-data - trade_data_single: "/defi/v3/token/trade-data/single", // https://docs.birdeye.so/reference/get_defi-v3-token-trade-data-single - trade_data_multi: "/defi/v3/token/trade-data/multiple", // https://docs.birdeye.so/reference/get_defi-v3-token-trade-data-multiple - holders: "/defi/v3/token/holder", // https://docs.birdeye.so/reference/get_defi-v3-token-holder - mint_burn: "/defi/v3/token/mint-burn-txs", // https://docs.birdeye.so/reference/get_defi-v3-token-mint-burn-txs - }, - wallet: { - networks: "/v1/wallet/list_supported_chain", // https://docs.birdeye.so/reference/get_v1-wallet-list-supported-chain - portfolio: "/v1/wallet/token_list", // https://docs.birdeye.so/reference/get_v1-wallet-token-list - portfolio_multichain: "/v1/wallet/multichain_token_list", // https://docs.birdeye.so/reference/get_v1-wallet-multichain-token-list - token_balance: "/v1/wallet/token_balance", // https://docs.birdeye.so/reference/get_v1-wallet-token-balance - transaction_history: "/v1/wallet/tx_list", // https://docs.birdeye.so/reference/get_v1-wallet-tx-list - transaction_history_multichain: "/v1/wallet/multichain_tx_list", // https://docs.birdeye.so/reference/get_v1-wallet-multichain-tx-list - transaction_simulation_POST: "/v1/wallet/simulate", // https://docs.birdeye.so/reference/post_v1-wallet-simulate - }, - trader: { - gainers_losers: "/trader/gainers-losers", // https://docs.birdeye.so/reference/get_trader-gainers-losers - trades_seek: "/trader/txs/seek_by_time", // https://docs.birdeye.so/reference/get_trader-txs-seek-by-time - }, - pair: { - overview_multi: "/defi/v3/pair/overview/multiple", // https://docs.birdeye.so/reference/get_defi-v3-pair-overview-multiple - overview_single: "/defi/v3/pair/overview/single", // https://docs.birdeye.so/reference/get_defi-v3-pair-overview-single - }, - search: { - token_market: "/defi/v3/search", // https://docs.birdeye.so/reference/get_defi-v3-search - }, -}; diff --git a/packages/plugin-birdeye/src/index.ts b/packages/plugin-birdeye/src/index.ts deleted file mode 100644 index 7ee67c2665b1f..0000000000000 --- a/packages/plugin-birdeye/src/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { tokenSearchAddressAction } from "./actions/token-search-address"; -import { tokenSearchSymbolAction } from "./actions/token-search-symbol"; -import { walletSearchAddressAction } from "./actions/wallet-search-address"; -import { agentPortfolioProvider } from "./providers/agent-portfolio-provider"; - -export const birdeyePlugin: Plugin = { - name: "birdeye", - description: "Birdeye Plugin for token data and analytics", - actions: [ - tokenSearchSymbolAction, - tokenSearchAddressAction, - walletSearchAddressAction, - // testAllEndpointsAction, // this action can be used to optionally test all endpoints - ], - evaluators: [], - providers: [agentPortfolioProvider], -}; - -export default birdeyePlugin; diff --git a/packages/plugin-birdeye/src/providers/agent-portfolio-provider.ts b/packages/plugin-birdeye/src/providers/agent-portfolio-provider.ts deleted file mode 100644 index d2d842ff851d8..0000000000000 --- a/packages/plugin-birdeye/src/providers/agent-portfolio-provider.ts +++ /dev/null @@ -1,52 +0,0 @@ -import type { IAgentRuntime, Memory, Provider, State } from "@elizaos/core"; -import { BirdeyeProvider } from "../birdeye"; -import { extractChain, formatPortfolio } from "../utils"; - -/** - * Agent portfolio data provider that queries Birdeye API for the agent's wallet address. - * When a wallet address is set, this provider fetches portfolio data to give the agent - * context about the agent's holdings when responding to queries. - * - * The provider: - * - Validates the agent's wallet address - * - Fetches current portfolio data from Birdeye including token balances and metadata - * - Makes this portfolio context available to the agent for responding to user queries - * about their holdings, token values, etc. - */ -export const agentPortfolioProvider: Provider = { - get: async ( - runtime: IAgentRuntime, - _message: Memory, - _state?: State - ): Promise => { - try { - const provider = new BirdeyeProvider(runtime.cacheManager); - const walletAddr = runtime.getSetting("BIRDEYE_WALLET_ADDR"); - - if (!walletAddr) { - console.warn("No Birdeye wallet was specified"); - return ""; - } - - const chain = extractChain(walletAddr); - - const resp = await provider.fetchWalletPortfolio( - { - wallet: walletAddr, - }, - { - headers: { - chain, - }, - } - ); - - const portfolioText = formatPortfolio(resp); - - return `This is your wallet address: ${walletAddr}\n\nThis is your portfolio: [${portfolioText}]`; - } catch (error) { - console.error("Error fetching token data:", error); - return "Unable to fetch token information. Please try again later."; - } - }, -}; diff --git a/packages/plugin-birdeye/src/tests/birdeye.test.ts b/packages/plugin-birdeye/src/tests/birdeye.test.ts deleted file mode 100644 index 5df51c7f561f0..0000000000000 --- a/packages/plugin-birdeye/src/tests/birdeye.test.ts +++ /dev/null @@ -1,510 +0,0 @@ -import type { ICacheManager } from "@elizaos/core"; -import { afterEach, beforeEach, describe, expect, it, type Mock, vi } from "vitest"; -import { BirdeyeProvider } from "../birdeye"; -import { - API_BASE_URL, - BIRDEYE_ENDPOINTS, - DEFAULT_SUPPORTED_SYMBOLS, -} from "../constants"; -import { convertToStringParams } from "../utils"; - -describe("BirdeyeProvider", () => { - let cacheManager: ICacheManager; - let provider: BirdeyeProvider; - - beforeEach(() => { - cacheManager = { - get: vi.fn(), - set: vi.fn(), - } as unknown as ICacheManager; - provider = new BirdeyeProvider(cacheManager); - global.fetch = vi.fn(); - - vi.mock("@elizaos/core", () => ({ - settings: { - get: vi.fn().mockImplementation((key) => { - if (key === "BIRDEYE_API_KEY") - return process.env.BIRDEYE_API_KEY || "test-api-key"; - if (key === "BIRDEYE_CHAIN") return "solana"; - return undefined; - }), - }, - ICacheManager: vi.fn(), - })); - }); - - afterEach(() => { - vi.clearAllMocks(); - }); - - const mockSuccessResponse = (data: any) => { - (fetch as Mock).mockResolvedValue({ - ok: true, - json: async () => ({ data, success: true }), - }); - }; - - const expectFetchCall = ( - endpoint: string, - params?: any, - method = "GET" - ) => { - const url = `${API_BASE_URL}${endpoint}${ - params && method === "GET" - ? `?${new URLSearchParams(convertToStringParams(params))}` - : "" - }`; - - expect(fetch).toHaveBeenCalledWith(url, expect.anything()); - }; - - describe("Defi Endpoints", () => { - it("should fetch supported networks", async () => { - const mockData = { chains: ["solana", "ethereum"] }; - mockSuccessResponse(mockData); - const result = await provider.fetchDefiSupportedNetworks(); - expect(result.data).toEqual(mockData); - expectFetchCall(BIRDEYE_ENDPOINTS.defi.networks); - }); - - it("should fetch price", async () => { - const mockData = { value: 100 }; - mockSuccessResponse(mockData); - const result = await provider.fetchDefiPrice({ - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }); - expect(result.data).toEqual(mockData); - expectFetchCall(BIRDEYE_ENDPOINTS.defi.price, { - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }); - }); - - it("should fetch multiple prices", async () => { - const mockData = { prices: {} }; - mockSuccessResponse(mockData); - const result = await provider.fetchDefiPriceMultiple({ - list_address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }); - expect(result.data).toEqual(mockData); - expectFetchCall(BIRDEYE_ENDPOINTS.defi.price_multi, { - list_address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }); - }); - - it("should fetch multiple prices via POST", async () => { - const mockData = { prices: {} }; - mockSuccessResponse(mockData); - const result = await provider.fetchDefiPriceMultiple_POST({ - list_address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }); - expect(result.data).toEqual(mockData); - expectFetchCall( - BIRDEYE_ENDPOINTS.defi.price_multi_POST, - { list_address: DEFAULT_SUPPORTED_SYMBOLS.SOL }, - "POST" - ); - }); - - it("should fetch historical price", async () => { - const mockData = { items: [] }; - mockSuccessResponse(mockData); - const result = await provider.fetchDefiPriceHistorical({ - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - type: "1H", - address_type: "token", - }); - expect(result.data).toEqual(mockData); - expectFetchCall(BIRDEYE_ENDPOINTS.defi.history_price, { - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - type: "1H", - address_type: "token", - }); - }); - }); - - describe("Token Endpoints", () => { - it("should fetch token list", async () => { - const mockData = { tokens: [] }; - mockSuccessResponse(mockData); - const result = await provider.fetchTokenList({}); - expect(result.data).toEqual(mockData); - expectFetchCall(BIRDEYE_ENDPOINTS.token.list_all, {}); - }); - - it("should fetch token security", async () => { - const mockData = { - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - totalSupply: 1000000, - mintable: false, - proxied: false, - ownerAddress: "owner123", - creatorAddress: "creator123", - securityChecks: { - honeypot: false, - trading_cooldown: false, - transfer_pausable: false, - is_blacklisted: false, - is_whitelisted: false, - is_proxy: false, - is_mintable: false, - can_take_back_ownership: false, - hidden_owner: false, - anti_whale_modifiable: false, - is_anti_whale: false, - trading_pausable: false, - can_be_blacklisted: false, - is_true_token: true, - is_airdrop_scam: false, - slippage_modifiable: false, - is_honeypot: false, - transfer_pausable_time: false, - is_wrapped: false, - }, - }; - mockSuccessResponse(mockData); - const result = await provider.fetchTokenSecurityByAddress({ - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }); - expect(result.data).toEqual(mockData); - expectFetchCall(BIRDEYE_ENDPOINTS.token.security, { - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }); - }); - - it("should fetch token overview", async () => { - const mockData = { - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - decimals: 9, - symbol: "SOL", - name: "Solana", - extensions: { - coingeckoId: "solana", - website: "https://solana.com", - telegram: "solana", - twitter: "solana", - description: "Solana blockchain token", - }, - logoURI: "https://example.com/sol.png", - liquidity: 1000000, - price: 100, - priceChange24hPercent: 5, - uniqueWallet24h: 1000, - }; - mockSuccessResponse(mockData); - const result = await provider.fetchTokenOverview({ - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }); - expect(result.data).toEqual(mockData); - expectFetchCall(BIRDEYE_ENDPOINTS.token.overview, { - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }); - }); - - it("should fetch token trending", async () => { - const mockData = { - updateUnixTime: 1234567890, - updateTime: "2024-01-01T00:00:00Z", - tokens: [], - total: 0, - }; - mockSuccessResponse(mockData); - const result = await provider.fetchTokenTrending(); - expect(result.data).toEqual(mockData); - expectFetchCall(BIRDEYE_ENDPOINTS.token.trending); - }); - }); - - describe("Wallet Endpoints", () => { - it("should fetch wallet portfolio", async () => { - const mockData = { - wallet: "test-wallet", - totalUsd: 1000, - items: [ - { - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - name: "Solana", - symbol: "SOL", - decimals: 9, - balance: "1000000000", - uiAmount: 1, - chainId: "solana", - logoURI: "https://example.com/sol.png", - priceUsd: 100, - valueUsd: 100, - }, - ], - }; - mockSuccessResponse(mockData); - const result = await provider.fetchWalletPortfolio({ - wallet: "test-wallet", - }); - expect(result.data).toEqual(mockData); - expectFetchCall(BIRDEYE_ENDPOINTS.wallet.portfolio, { - wallet: "test-wallet", - }); - }); - - it("should fetch wallet token balance", async () => { - const mockData = { - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - name: "Solana", - symbol: "SOL", - decimals: 9, - balance: 1000000000, - uiAmount: 1, - chainId: "solana", - priceUsd: 100, - valueUsd: 100, - }; - mockSuccessResponse(mockData); - const result = await provider.fetchWalletTokenBalance({ - wallet: "test-wallet", - token_address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }); - expect(result.data).toEqual(mockData); - expectFetchCall(BIRDEYE_ENDPOINTS.wallet.token_balance, { - wallet: "test-wallet", - token_address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }); - }); - }); - - describe("Pair Endpoints", () => { - it("should fetch pair overview", async () => { - const mockData = { - address: "pair-address", - name: "SOL/USDC", - base: { - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - decimals: 9, - icon: "https://example.com/sol.png", - symbol: "SOL", - }, - quote: { - address: "EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v", - decimals: 6, - icon: "https://example.com/usdc.png", - symbol: "USDC", - }, - created_at: "2024-01-01T00:00:00Z", - source: "Raydium", - liquidity: 1000000, - liquidity_change_percentage_24h: 5, - price: 100, - volume_24h: 1000000, - volume_24h_change_percentage_24h: 10, - trade_24h: 1000, - trade_24h_change_percent: 15, - unique_wallet_24h: 500, - unique_wallet_24h_change_percent: 20, - }; - mockSuccessResponse(mockData); - const result = await provider.fetchPairOverviewSingle({ - address: "pair-address", - }); - expect(result.data).toEqual(mockData); - expectFetchCall(BIRDEYE_ENDPOINTS.pair.overview_single, { - address: "pair-address", - }); - }); - - it("should fetch multiple pair overview", async () => { - const mockData = { - "pair-1": { - address: "pair-1", - name: "SOL/USDC", - base: { - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - decimals: 9, - icon: "https://example.com/sol.png", - symbol: "SOL", - }, - quote: { - address: "EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v", - decimals: 6, - icon: "https://example.com/usdc.png", - symbol: "USDC", - }, - created_at: "2024-01-01T00:00:00Z", - source: "Raydium", - liquidity: 1000000, - liquidity_change_percentage_24h: 5, - price: 100, - volume_24h: 1000000, - volume_24h_change_percentage_24h: 10, - trade_24h: 1000, - trade_24h_change_percent: 15, - unique_wallet_24h: 500, - unique_wallet_24h_change_percent: 20, - }, - "pair-2": { - address: "pair-2", - name: "BTC/USDC", - base: { - address: DEFAULT_SUPPORTED_SYMBOLS.BTC, - decimals: 8, - icon: "https://example.com/btc.png", - symbol: "BTC", - }, - quote: { - address: "EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v", - decimals: 6, - icon: "https://example.com/usdc.png", - symbol: "USDC", - }, - created_at: "2024-01-01T00:00:00Z", - source: "Raydium", - liquidity: 2000000, - liquidity_change_percentage_24h: 3, - price: 50000, - volume_24h: 2000000, - volume_24h_change_percentage_24h: 8, - trade_24h: 500, - trade_24h_change_percent: 12, - unique_wallet_24h: 300, - unique_wallet_24h_change_percent: 15, - }, - }; - mockSuccessResponse(mockData); - const result = await provider.fetchMultiPairOverview({ - list_address: "pair-1,pair-2", - }); - expect(result.data).toEqual(mockData); - expectFetchCall(BIRDEYE_ENDPOINTS.pair.overview_multi, { - list_address: "pair-1,pair-2", - }); - }); - }); - - describe("Search Endpoints", () => { - it("should fetch token market search", async () => { - const mockData = { - items: [ - { - type: "token", - result: [ - { - name: "Solana", - symbol: "SOL", - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - fdv: 1000000000, - market_cap: 500000000, - liquidity: 1000000, - volume_24h_change_percent: 10, - price: 100, - price_change_24h_percent: 5, - buy_24h: 500, - buy_24h_change_percent: 15, - sell_24h: 300, - sell_24h_change_percent: -10, - trade_24h: 800, - trade_24h_change_percent: 8, - unique_wallet_24h: 1000, - unique_view_24h_change_percent: 20, - last_trade_human_time: "2024-01-01T00:00:00Z", - last_trade_unix_time: 1704067200, - creation_time: "2020-01-01T00:00:00Z", - volume_24h_usd: 1000000, - logo_uri: "https://example.com/sol.png", - }, - ], - }, - ], - }; - mockSuccessResponse(mockData); - const result = await provider.fetchSearchTokenMarketData({ - keyword: "test", - }); - expect(result.data).toEqual(mockData); - expectFetchCall(BIRDEYE_ENDPOINTS.search.token_market, { - keyword: "test", - }); - }); - }); - - describe("Caching", () => { - beforeEach(() => { - // Reset the provider with a fresh cache manager for each test - cacheManager = { - get: vi.fn(), - set: vi.fn(), - } as unknown as ICacheManager; - provider = new BirdeyeProvider(cacheManager); - }); - - it("should use file system cache when available", async () => { - const mockResponse = { data: { value: 100 }, success: true }; - (cacheManager.get as Mock).mockResolvedValue(mockResponse); - - const result = await provider.fetchDefiPrice({ - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }); - - expect(result).toEqual(mockResponse); - expect(fetch).not.toHaveBeenCalled(); - expect(cacheManager.get).toHaveBeenCalled(); - }); - - it("should fetch and cache when cache misses", async () => { - const mockResponse = { data: { value: 100 }, success: true }; - (cacheManager.get as Mock).mockResolvedValue(null); - (fetch as Mock).mockResolvedValue({ - ok: true, - json: async () => mockResponse, - }); - - const result = await provider.fetchDefiPrice({ - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }); - - expect(result).toEqual(mockResponse); - expect(fetch).toHaveBeenCalledTimes(1); - expect(cacheManager.set).toHaveBeenCalled(); - }); - }); - - describe("Error Handling", () => { - it("should retry on failure", async () => { - (fetch as Mock) - .mockRejectedValueOnce(new Error("Network error")) - .mockRejectedValueOnce(new Error("Network error")) - .mockResolvedValueOnce({ - ok: true, - json: async () => ({ data: { value: 100 }, success: true }), - }); - - const result = await provider.fetchDefiPrice({ - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }); - - expect(result).toEqual({ data: { value: 100 }, success: true }); - expect(fetch).toHaveBeenCalledTimes(3); - }); - - it("should throw after max retries", async () => { - (fetch as Mock).mockRejectedValue(new Error("Network error")); - - await expect( - provider.fetchDefiPrice({ - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }) - ).rejects.toThrow("Network error"); - expect(fetch).toHaveBeenCalledTimes(3); // Default max retries - }); - - it("should handle non-200 responses", async () => { - (fetch as Mock).mockResolvedValue({ - ok: false, - status: 404, - text: async () => "Not found", - }); - - await expect( - provider.fetchDefiPrice({ - address: DEFAULT_SUPPORTED_SYMBOLS.SOL, - }) - ).rejects.toThrow("HTTP error! status: 404, message: Not found"); - expect(fetch).toHaveBeenCalledTimes(3); // Should still retry on HTTP errors - }); - }); -}); diff --git a/packages/plugin-birdeye/src/types/api/common.ts b/packages/plugin-birdeye/src/types/api/common.ts deleted file mode 100644 index f220b648498de..0000000000000 --- a/packages/plugin-birdeye/src/types/api/common.ts +++ /dev/null @@ -1,301 +0,0 @@ -import type { - BaseQuoteParams, - DefiHistoryPriceParams, - DefiMultiPriceParams, - DefiPriceParams, - HistoricalPriceUnixParams, - MultiPriceVolumeParams, - OHLCVParams, - PriceVolumeParams, -} from "./defi"; -import type { - OHLCVPairParams, - PairOverviewMultiParams, - PairOverviewSingleParams, - PairTradesParams, -} from "./pair"; -import type { TokenMarketSearchParams } from "./search"; -import type { - AllMarketsParams, - MintBurnParams, - NewListingParams, - TokenCreationInfoParams, - TokenHoldersParams, - TokenListV2Params, - TokenMarketDataParams, - TokenMetadataMultiParams, - TokenMetadataSingleParams, - TokenOverviewParams, - TokenSecurityParams, - TokenTradeDataMultiParams, - TokenTradeDataSingleParams, - TokenTradesParams, - TopTradersParams, -} from "./token"; -import type { GainersLosersParams, TraderTransactionsSeekParams } from "./trader"; -import type { - WalletPortfolioMultichainParams, - WalletPortfolioParams, - WalletSimulationParams, - WalletTokenBalanceParams, - WalletTransactionHistoryMultichainParams, - WalletTransactionHistoryParams, -} from "./wallet"; - -export type BirdeyeApiParams = - | DefiPriceParams - | DefiMultiPriceParams - | DefiHistoryPriceParams - | HistoricalPriceUnixParams - | OHLCVParams - | PriceVolumeParams - | MultiPriceVolumeParams - | PairTradesParams - | OHLCVPairParams - | PairOverviewMultiParams - | PairOverviewSingleParams - | TokenMarketSearchParams - | TokenTradesParams - | TokenSecurityParams - | TokenOverviewParams - | TokenCreationInfoParams - | TokenListV2Params - | TokenMetadataMultiParams - | TokenTradeDataMultiParams - | GainersLosersParams - | TraderTransactionsSeekParams - | WalletPortfolioParams - | WalletTokenBalanceParams - | WalletTransactionHistoryParams - | BaseQuoteParams - | TokenHoldersParams - | MintBurnParams - | TopTradersParams - | AllMarketsParams - | NewListingParams - | TokenMetadataSingleParams - | TokenMarketDataParams - | TokenTradeDataSingleParams - | WalletPortfolioMultichainParams - | WalletTransactionHistoryMultichainParams - | WalletSimulationParams - | Record; - -export interface BirdeyeApiResponseWrapper { - data: T; - success: boolean; -} - -export type BirdeyeApiResponse = BirdeyeApiResponseWrapper; - -export type TimeInterval = - | "1m" - | "3m" - | "5m" - | "15m" - | "30m" - | "1H" - | "2H" - | "4H" - | "6H" - | "8H" - | "12H" - | "1D" - | "3D" - | "1W" - | "1M" - | "30m" - | "1h" - | "2h" - | "4h" - | "6h" - | "8h" - | "12h" - | "24h"; - -export interface TokenTradeData { - address: string; - holder: number; - market: number; - last_trade_unix_time: number; - last_trade_human_time: string; - price: number; - history_30m_price: number; - price_change_30m_percent: number; - history_1h_price: number; - price_change_1h_percent: number; - history_2h_price: number; - price_change_2h_percent: number; - history_4h_price: number; - price_change_4h_percent: number; - history_6h_price: number; - price_change_6h_percent: number; - history_8h_price: number; - price_change_8h_percent: number; - history_12h_price: number; - price_change_12h_percent: number; - history_24h_price: number; - price_change_24h_percent: number; - unique_wallet_30m: number; - unique_wallet_history_30m: number; - unique_wallet_30m_change_percent: number | null; - unique_wallet_1h: number; - unique_wallet_history_1h: number; - unique_wallet_1h_change_percent: number | null; - unique_wallet_2h: number; - unique_wallet_history_2h: number; - unique_wallet_2h_change_percent: number | null; - unique_wallet_4h: number; - unique_wallet_history_4h: number; - unique_wallet_4h_change_percent: number | null; - unique_wallet_8h: number; - unique_wallet_history_8h: number; - unique_wallet_8h_change_percent: number | null; - unique_wallet_24h: number; - unique_wallet_history_24h: number; - unique_wallet_24h_change_percent: number | null; - trade_30m: number; - trade_history_30m: number; - trade_30m_change_percent: number; - sell_30m: number; - sell_history_30m: number; - sell_30m_change_percent: number; - buy_30m: number; - buy_history_30m: number; - buy_30m_change_percent: number; - volume_30m: number; - volume_30m_usd: number; - volume_history_30m: number; - volume_history_30m_usd: number; - volume_30m_change_percent: number; - volume_buy_30m: number; - volume_buy_30m_usd: number; - volume_buy_history_30m: number; - volume_buy_history_30m_usd: number; - volume_buy_30m_change_percent: number; - volume_sell_30m: number; - volume_sell_30m_usd: number; - volume_sell_history_30m: number; - volume_sell_history_30m_usd: number; - volume_sell_30m_change_percent: number; - trade_1h: number; - trade_history_1h: number; - trade_1h_change_percent: number; - sell_1h: number; - sell_history_1h: number; - sell_1h_change_percent: number; - buy_1h: number; - buy_history_1h: number; - buy_1h_change_percent: number; - volume_1h: number; - volume_1h_usd: number; - volume_history_1h: number; - volume_history_1h_usd: number; - volume_1h_change_percent: number; - volume_buy_1h: number; - volume_buy_1h_usd: number; - volume_buy_history_1h: number; - volume_buy_history_1h_usd: number; - volume_buy_1h_change_percent: number; - volume_sell_1h: number; - volume_sell_1h_usd: number; - volume_sell_history_1h: number; - volume_sell_history_1h_usd: number; - volume_sell_1h_change_percent: number; - trade_2h: number; - trade_history_2h: number; - trade_2h_change_percent: number; - sell_2h: number; - sell_history_2h: number; - sell_2h_change_percent: number; - buy_2h: number; - buy_history_2h: number; - buy_2h_change_percent: number; - volume_2h: number; - volume_2h_usd: number; - volume_history_2h: number; - volume_history_2h_usd: number; - volume_2h_change_percent: number; - volume_buy_2h: number; - volume_buy_2h_usd: number; - volume_buy_history_2h: number; - volume_buy_history_2h_usd: number; - volume_buy_2h_change_percent: number; - volume_sell_2h: number; - volume_sell_2h_usd: number; - volume_sell_history_2h: number; - volume_sell_history_2h_usd: number; - volume_sell_2h_change_percent: number; - trade_4h: number; - trade_history_4h: number; - trade_4h_change_percent: number; - sell_4h: number; - sell_history_4h: number; - sell_4h_change_percent: number; - buy_4h: number; - buy_history_4h: number; - buy_4h_change_percent: number; - volume_4h: number; - volume_4h_usd: number; - volume_history_4h: number; - volume_history_4h_usd: number; - volume_4h_change_percent: number; - volume_buy_4h: number; - volume_buy_4h_usd: number; - volume_buy_history_4h: number; - volume_buy_history_4h_usd: number; - volume_buy_4h_change_percent: number; - volume_sell_4h: number; - volume_sell_4h_usd: number; - volume_sell_history_4h: number; - volume_sell_history_4h_usd: number; - volume_sell_4h_change_percent: number; - trade_8h: number; - trade_history_8h: number; - trade_8h_change_percent: number; - sell_8h: number; - sell_history_8h: number; - sell_8h_change_percent: number; - buy_8h: number; - buy_history_8h: number; - buy_8h_change_percent: number; - volume_8h: number; - volume_8h_usd: number; - volume_history_8h: number; - volume_history_8h_usd: number; - volume_8h_change_percent: number; - volume_buy_8h: number; - volume_buy_8h_usd: number; - volume_buy_history_8h: number; - volume_buy_history_8h_usd: number; - volume_buy_8h_change_percent: number; - volume_sell_8h: number; - volume_sell_8h_usd: number; - volume_sell_history_8h: number; - volume_sell_history_8h_usd: number; - volume_sell_8h_change_percent: number; - trade_24h: number; - trade_history_24h: number; - trade_24h_change_percent: number; - sell_24h: number; - sell_history_24h: number; - sell_24h_change_percent: number; - buy_24h: number; - buy_history_24h: number; - buy_24h_change_percent: number; - volume_24h: number; - volume_24h_usd: number; - volume_history_24h: number; - volume_history_24h_usd: number; - volume_24h_change_percent: number; - volume_buy_24h: number; - volume_buy_24h_usd: number; - volume_buy_history_24h: number; - volume_buy_history_24h_usd: number; - volume_buy_24h_change_percent: number; - volume_sell_24h: number; - volume_sell_24h_usd: number; - volume_sell_history_24h: number; - volume_sell_history_24h_usd: number; - volume_sell_24h_change_percent: number; -} diff --git a/packages/plugin-birdeye/src/types/api/defi.ts b/packages/plugin-birdeye/src/types/api/defi.ts deleted file mode 100644 index ca505ae1d8e79..0000000000000 --- a/packages/plugin-birdeye/src/types/api/defi.ts +++ /dev/null @@ -1,219 +0,0 @@ -import type { TimeInterval } from "./common"; - -// Network Types -export interface DefiNetworksResponse { - success: boolean; - data: { - chains: string[]; - }; -} - -// Price Types -export interface DefiPriceParams { - address: string; - check_liquidity?: number; - include_liquidity?: boolean; -} - -export interface DefiPriceResponse { - success: boolean; - data: { - value: number; - updateUnixTime?: number; - updateHumanTime?: string; - liquidity?: number; - }; -} - -// Multi Price Types -export interface DefiMultiPriceParams { - list_address: string; - check_liquidity?: number; - include_liquidity?: boolean; -} - -export interface DefiMultiPriceResponse { - success: boolean; - data: { - [address: string]: { - value?: number; - updateUnixTime?: number; - updateHumanTime?: string; - priceChange24h?: number; - }; - }; -} - -// Multi Price Types POST -export interface DefiMultiPriceParamsPOST { - check_liquidity?: number; - include_liquidity?: boolean; - list_address: string; -} - -// History Price Types -export interface DefiHistoryPriceParams { - address: string; - address_type: "token" | "pair"; - type: TimeInterval; - time_from?: number; - time_to?: number; -} - -export interface DefiHistoryPriceResponse { - success: boolean; - data: { - items: Array<{ - unixTime?: number; - value?: number; - }>; - }; -} - -// Historical Price Unix Types -export interface HistoricalPriceUnixParams { - address: string; - unixtime?: number; -} - -export interface HistoricalPriceUnixResponse { - success: boolean; - data: { - value?: number; - updateUnixTime?: number; - priceChange24h?: string; - }; -} - -// OHLCV Types -export interface OHLCVParams { - address: string; - type?: TimeInterval; - time_from?: number; - time_to?: number; -} - -export interface OHLCVResponse { - success: boolean; - data: { - items: Array<{ - unixTime?: number; - address?: string; - type?: TimeInterval; - o?: number; - h?: number; - l?: number; - c?: number; - v?: number; - }>; - }; -} - -// Price Volume Types -export interface PriceVolumeParams { - address: string; - type?: TimeInterval; -} - -export interface PriceVolumeResponse { - success: boolean; - data: { - price?: number; - updateUnixTime?: number; - updateHumanTime?: string; - volumeUSD?: number; - volumeChangePercent?: number; - priceChangePercent?: number; - }; -} - -// Multi Price Volume Types -export interface MultiPriceVolumeParams { - list_address: string; - type?: TimeInterval; -} - -export interface MultiPriceVolumeResponse { - success: boolean; - data: { - [address: string]: { - price?: number; - updateUnixTime?: number; - updateHumanTime?: string; - volumeUSD?: number; - volumeChangePercent?: number; - priceChangePercent?: number; - }; - }; -} - -// Base Quote Types -export interface BaseQuoteParams { - base_address: string; - quote_address: string; - type?: TimeInterval; - time_from?: number; - time_to?: number; -} - -export interface BaseQuoteResponse { - success: boolean; - data: { - unixTime?: number; - vBase?: number; - vQuote?: number; - o?: number; - h?: number; - l?: number; - c?: number; - }; -} - -// Token Trades Types -export interface DefiTradesTokenParams { - address: string; - limit?: number; - offset?: number; - tx_type?: "swap" | "add" | "remove" | "all"; - sort_type?: "asc" | "desc"; - before_time?: number; - after_time?: number; -} - -export interface DefiTradesTokenInfo { - symbol: string; - decimals: number; - address: string; - amount: number; - uiAmount: number; - price: number | null; - nearestPrice: number | null; - changeAmount: number; - uiChangeAmount: number; - feeInfo?: any | null; -} - -export interface DefiTradesTokenResponse { - success: boolean; - data: { - items: Array<{ - quote?: DefiTradesTokenInfo; - base?: DefiTradesTokenInfo; - basePrice?: number | null; - quotePrice?: number | null; - txHash?: string; - source?: string; - blockUnixTime?: number; - txType?: string; - owner?: string; - side?: string; - alias?: string | null; - pricePair?: number; - from?: DefiTradesTokenInfo; - to?: DefiTradesTokenInfo; - tokenPrice?: number | null; - poolId?: string; - }>; - hasNext?: boolean; - }; -} diff --git a/packages/plugin-birdeye/src/types/api/pair.ts b/packages/plugin-birdeye/src/types/api/pair.ts deleted file mode 100644 index d678f53ace521..0000000000000 --- a/packages/plugin-birdeye/src/types/api/pair.ts +++ /dev/null @@ -1,199 +0,0 @@ -import type { TimeInterval } from "./common"; - -// Pair Trades Types -export interface PairTradesParams { - pair: string; - limit?: number; - offset?: number; -} - -export interface PairTradesResponse { - success: boolean; - data: { - items: Array<{ - signature?: string; - blockNumber?: number; - unixTime?: number; - type?: "buy" | "sell"; - tokenAddress?: string; - tokenAmount?: number; - tokenAmountUI?: number; - tokenSymbol?: string; - tokenDecimals?: number; - priceUsd?: number; - volumeUsd?: number; - maker?: string; - taker?: string; - txType?: string; - poolAddress?: string; - poolName?: string; - dex?: string; - }>; - }; -} - -// OHLCV Pair Types -export interface OHLCVPairParams { - address: string; - type?: TimeInterval; - time_from?: number; - time_to?: number; -} - -export interface OHLCVPairResponse { - success: boolean; - data: { - items: Array<{ - unixTime?: number; - address?: string; - type?: TimeInterval; - o?: number; - h?: number; - l?: number; - c?: number; - v?: number; - }>; - }; -} - -// Pair Overview Types -export interface PairOverviewMultiParams { - list_address: string; - before_time?: number; -} - -export interface PairOverviewSingleParams { - address: string; -} - -interface PairOverviewData { - address: string; - name: string; - base: { - address: string; - decimals: number; - icon: string; - symbol: string; - }; - quote: { - address: string; - decimals: number; - icon: string; - symbol: string; - }; - created_at: string; - source: string; - liquidity: number; - liquidity_change_percentage_24h: number | null; - price: number; - volume_24h: number; - volume_24h_change_percentage_24h: number | null; - trade_24h: number; - trade_24h_change_percent: number; - unique_wallet_24h: number; - unique_wallet_24h_change_percent: number | null; - - // Time-based metrics - trade_30m: number; - trade_1h: number; - trade_2h: number; - trade_4h: number; - trade_8h: number; - trade_12h: number; - - trade_30m_change_percent: number; - trade_1h_change_percent: number; - trade_2h_change_percent: number; - trade_4h_change_percent: number; - trade_8h_change_percent: number; - trade_12h_change_percent: number; - - volume_30m: number; - volume_1h: number; - volume_2h: number; - volume_4h: number; - volume_8h: number; - volume_12h: number; - - volume_30m_quote: number; - volume_1h_quote: number; - volume_2h_quote: number; - volume_4h_quote: number; - volume_8h_quote: number; - volume_12h_quote: number; - - volume_30m_base: number; - volume_1h_base: number; - volume_2h_base: number; - volume_4h_base: number; - volume_8h_base: number; - volume_12h_base: number; -} - -export interface PairOverviewSingleResponse { - success: boolean; - data: { - address?: string; - name?: string; - base?: { - address?: string; - decimals?: number; - icon?: string; - symbol?: string; - }; - quote?: { - address?: string; - decimals?: number; - icon?: string; - symbol?: string; - }; - created_at?: string; - source?: string; - liquidity?: number; - liquidity_change_percentage_24h?: number | null; - price?: number; - volume_24h?: number; - volume_24h_change_percentage_24h?: number | null; - trade_24h?: number; - trade_24h_change_percent?: number; - unique_wallet_24h?: number; - unique_wallet_24h_change_percent?: number | null; - trade_30m?: number; - trade_1h?: number; - trade_2h?: number; - trade_4h?: number; - trade_8h?: number; - trade_12h?: number; - trade_30m_change_percent?: number; - trade_1h_change_percent?: number; - trade_2h_change_percent?: number; - trade_4h_change_percent?: number; - trade_8h_change_percent?: number; - trade_12h_change_percent?: number; - volume_30m?: number; - volume_1h?: number; - volume_2h?: number; - volume_4h?: number; - volume_8h?: number; - volume_12h?: number; - volume_30m_quote?: number; - volume_1h_quote?: number; - volume_2h_quote?: number; - volume_4h_quote?: number; - volume_8h_quote?: number; - volume_12h_quote?: number; - volume_30m_base?: number; - volume_1h_base?: number; - volume_2h_base?: number; - volume_4h_base?: number; - volume_8h_base?: number; - volume_12h_base?: number; - }; -} - -export interface PairOverviewMultiResponse { - success: boolean; - data: { - [pair: string]: PairOverviewData; - }; -} diff --git a/packages/plugin-birdeye/src/types/api/search.ts b/packages/plugin-birdeye/src/types/api/search.ts deleted file mode 100644 index 0d1043ca321da..0000000000000 --- a/packages/plugin-birdeye/src/types/api/search.ts +++ /dev/null @@ -1,85 +0,0 @@ -import type { BirdeyeSupportedChain } from "../shared"; - -// Search Types -export interface TokenMarketSearchParams { - chain?: BirdeyeSupportedChain | "all"; - keyword?: string; - target?: "token" | "market" | "all"; - sort_by?: - | "fdv" - | "marketcap" - | "liquidity" - | "price" - | "price_change_24h_percent" - | "trade_24h" - | "trade_24h_change_percent" - | "buy_24h" - | "buy_24h_change_percent" - | "sell_24h" - | "sell_24h_change_percent" - | "unique_wallet_24h" - | "unique_view_24h_change_percent" - | "last_trade_unix_time" - | "volume_24h_usd" - | "volume_24h_change_percent"; - sort_type?: "asc" | "desc"; - verify_token?: boolean; - markets?: string; - offset?: number; - limit?: number; -} - -export interface TokenMarketSearchResponse { - success: boolean; - data: { - items: Array<{ - type?: "token" | "market"; - result?: Array; - }>; - }; -} - -export interface TokenResult { - name?: string; - symbol?: string; - address?: string; - network?: string; - fdv?: number; - market_cap?: number; - liquidity?: number; - volume_24h_change_percent?: number; - price?: number; - price_change_24h_percent?: number; - buy_24h?: number; - buy_24h_change_percent?: number; - sell_24h?: number; - sell_24h_change_percent?: number; - trade_24h?: number; - trade_24h_change_percent?: number; - unique_wallet_24h?: number; - unique_view_24h_change_percent?: number; - last_trade_human_time?: string; - last_trade_unix_time?: number; - creation_time?: string; - volume_24h_usd?: number; - logo_uri?: string; -} - -export interface MarketResult { - name: string; - address: string; - liquidity: number; - source: string; - trade_24h: number; - trade_24h_change_percent: number; - unique_wallet_24h: number; - unique_wallet_24h_change_percent: number; - last_trade_human_time: string; - last_trade_unix_time: number; - base_mint: string; - quote_mint: string; - amount_base: number; - amout_quote: number; // Note: typo in API response - creation_time: string; - volume_24h_usd: number; -} diff --git a/packages/plugin-birdeye/src/types/api/token.ts b/packages/plugin-birdeye/src/types/api/token.ts deleted file mode 100644 index 179892e5f896e..0000000000000 --- a/packages/plugin-birdeye/src/types/api/token.ts +++ /dev/null @@ -1,634 +0,0 @@ -import type { TimeInterval, TokenTradeData } from "./common"; - -// Token Trades Types -export interface TokenTradesParams { - address: string; - limit?: number; - offset?: number; - type?: "buy" | "sell" | "all"; -} - -export interface TokenTradesResponse { - success: boolean; - data: { - items: Array<{ - signature?: string; - blockNumber?: number; - unixTime?: number; - type?: "buy" | "sell"; - tokenAddress?: string; - tokenAmount?: number; - tokenAmountUI?: number; - tokenSymbol?: string; - tokenDecimals?: number; - priceUsd?: number; - volumeUsd?: number; - maker?: string; - taker?: string; - txType?: string; - poolAddress?: string; - poolName?: string; - dex?: string; - }>; - }; -} - -export interface TokenListParams { - sort_by?: "mc" | "v24hUSD" | "v24hChangePercent"; - sort_type?: "asc" | "desc"; - offset?: number; - limit?: number; - min_liquidity?: number; -} - -// Token List Types -export interface TokenListResponse { - success: boolean; - data: { - tokens: Array<{ - address?: string; - symbol?: string; - name?: string; - decimals?: number; - logoURI?: string; - coingeckoId?: string; - volume24h?: number; - priceChange24h?: number; - price?: number; - }>; - }; -} - -// Token Security Types -export interface TokenSecurityParams { - address: string; -} - -export interface TokenSecurityResponse { - success: boolean; - data: { - address?: string; - totalSupply?: number; - mintable?: boolean; - proxied?: boolean; - proxy?: string; - ownerAddress?: string; - creatorAddress?: string; - securityChecks?: { - honeypot?: boolean; - trading_cooldown?: boolean; - transfer_pausable?: boolean; - is_blacklisted?: boolean; - is_whitelisted?: boolean; - is_proxy?: boolean; - is_mintable?: boolean; - can_take_back_ownership?: boolean; - hidden_owner?: boolean; - anti_whale_modifiable?: boolean; - is_anti_whale?: boolean; - trading_pausable?: boolean; - can_be_blacklisted?: boolean; - is_true_token?: boolean; - is_airdrop_scam?: boolean; - slippage_modifiable?: boolean; - is_honeypot?: boolean; - transfer_pausable_time?: boolean; - is_wrapped?: boolean; - }; - }; -} - -// Token Overview Types -export interface TokenOverviewParams { - address: string; -} - -export interface TokenOverviewResponse { - success: boolean; - data: { - address?: string; - decimals?: number; - symbol?: string; - name?: string; - extensions?: { - coingeckoId?: string; - serumV3Usdc?: string; - serumV3Usdt?: string; - website?: string; - telegram?: string | null; - twitter?: string; - description?: string; - discord?: string; - medium?: string; - }; - logoURI?: string; - liquidity?: number; - lastTradeUnixTime?: number; - lastTradeHumanTime?: string; - price?: number; - history30mPrice?: number; - priceChange30mPercent?: number; - history1hPrice?: number; - priceChange1hPercent?: number; - history2hPrice?: number; - priceChange2hPercent?: number; - history4hPrice?: number; - priceChange4hPercent?: number; - history6hPrice?: number; - priceChange6hPercent?: number; - history8hPrice?: number; - priceChange8hPercent?: number; - history12hPrice?: number; - priceChange12hPercent?: number; - history24hPrice?: number; - priceChange24hPercent?: number; - uniqueWallet30m?: number; - uniqueWalletHistory30m?: number; - uniqueWallet30mChangePercent?: number; - uniqueWallet1h?: number; - uniqueWalletHistory1h?: number; - uniqueWallet1hChangePercent?: number; - uniqueWallet2h?: number; - uniqueWalletHistory2h?: number; - uniqueWallet2hChangePercent?: number; - uniqueWallet4h?: number; - uniqueWalletHistory4h?: number; - uniqueWallet4hChangePercent?: number; - uniqueWallet8h?: number; - uniqueWalletHistory8h?: number; - uniqueWallet8hChangePercent?: number; - uniqueWallet24h?: number; - uniqueWalletHistory24h?: number; - uniqueWallet24hChangePercent?: number; - supply?: number; - mc?: number; - circulatingSupply?: number; - realMc?: number; - holder?: number; - trade30m?: number; - tradeHistory30m?: number; - trade30mChangePercent?: number; - sell30m?: number; - sellHistory30m?: number; - sell30mChangePercent?: number; - buy30m?: number; - buyHistory30m?: number; - buy30mChangePercent?: number; - v30m?: number; - v30mUSD?: number; - vHistory30m?: number; - vHistory30mUSD?: number; - v30mChangePercent?: number; - vBuy30m?: number; - vBuy30mUSD?: number; - vBuyHistory30m?: number; - vBuyHistory30mUSD?: number; - vBuy30mChangePercent?: number; - vSell30m?: number; - vSell30mUSD?: number; - vSellHistory30m?: number; - vSellHistory30mUSD?: number; - vSell30mChangePercent?: number; - trade1h?: number; - tradeHistory1h?: number; - trade1hChangePercent?: number; - sell1h?: number; - sellHistory1h?: number; - sell1hChangePercent?: number; - buy1h?: number; - buyHistory1h?: number; - buy1hChangePercent?: number; - v1h?: number; - v1hUSD?: number; - vHistory1h?: number; - vHistory1hUSD?: number; - v1hChangePercent?: number; - vBuy1h?: number; - vBuy1hUSD?: number; - vBuyHistory1h?: number; - vBuyHistory1hUSD?: number; - vBuy1hChangePercent?: number; - vSell1h?: number; - vSell1hUSD?: number; - vSellHistory1h?: number; - vSellHistory1hUSD?: number; - vSell1hChangePercent?: number; - trade2h?: number; - tradeHistory2h?: number; - trade2hChangePercent?: number; - sell2h?: number; - sellHistory2h?: number; - sell2hChangePercent?: number; - buy2h?: number; - buyHistory2h?: number; - buy2hChangePercent?: number; - v2h?: number; - v2hUSD?: number; - vHistory2h?: number; - vHistory2hUSD?: number; - v2hChangePercent?: number; - vBuy2h?: number; - vBuy2hUSD?: number; - vBuyHistory2h?: number; - vBuyHistory2hUSD?: number; - vBuy2hChangePercent?: number; - vSell2h?: number; - vSell2hUSD?: number; - vSellHistory2h?: number; - vSellHistory2hUSD?: number; - vSell2hChangePercent?: number; - trade4h?: number; - tradeHistory4h?: number; - trade4hChangePercent?: number; - sell4h?: number; - sellHistory4h?: number; - sell4hChangePercent?: number; - buy4h?: number; - buyHistory4h?: number; - buy4hChangePercent?: number; - v4h?: number; - v4hUSD?: number; - vHistory4h?: number; - vHistory4hUSD?: number; - v4hChangePercent?: number; - vBuy4h?: number; - vBuy4hUSD?: number; - vBuyHistory4h?: number; - vBuyHistory4hUSD?: number; - vBuy4hChangePercent?: number; - vSell4h?: number; - vSell4hUSD?: number; - vSellHistory4h?: number; - vSellHistory4hUSD?: number; - vSell4hChangePercent?: number; - trade8h?: number; - tradeHistory8h?: number; - trade8hChangePercent?: number; - sell8h?: number; - sellHistory8h?: number; - sell8hChangePercent?: number; - buy8h?: number; - buyHistory8h?: number; - buy8hChangePercent?: number; - v8h?: number; - v8hUSD?: number; - vHistory8h?: number; - vHistory8hUSD?: number; - v8hChangePercent?: number; - vBuy8h?: number; - vBuy8hUSD?: number; - vBuyHistory8h?: number; - vBuyHistory8hUSD?: number; - vBuy8hChangePercent?: number; - vSell8h?: number; - vSell8hUSD?: number; - vSellHistory8h?: number; - vSellHistory8hUSD?: number; - vSell8hChangePercent?: number; - trade24h?: number; - tradeHistory24h?: number; - trade24hChangePercent?: number; - sell24h?: number; - sellHistory24h?: number; - sell24hChangePercent?: number; - buy24h?: number; - buyHistory24h?: number; - buy24hChangePercent?: number; - v24h?: number; - v24hUSD?: number; - vHistory24h?: number; - vHistory24hUSD?: number; - v24hChangePercent?: number; - vBuy24h?: number; - vBuy24hUSD?: number; - vBuyHistory24h?: number; - vBuyHistory24hUSD?: number; - vBuy24hChangePercent?: number; - vSell24h?: number; - vSell24hUSD?: number; - vSellHistory24h?: number; - vSellHistory24hUSD?: number; - vSell24hChangePercent?: number; - watch?: null; - numberMarkets?: number; - }; -} - -// Token Creation Info Types -export interface TokenCreationInfoParams { - address: string; -} - -export interface TokenCreationInfoResponse { - success: boolean; - data: { - txHash?: string; - slot?: number; - tokenAddress?: string; - decimals?: number; - owner?: string; - blockUnixTime?: number; - blockHumanTime?: string; - }; -} - -export interface TokenTrendingParams { - sort_by?: "rank" | "volume24hUSD" | "liquidity"; - sort_type?: "asc" | "desc"; - offset?: number; - limit?: number; -} - -// Token Trending Types -export interface TokenTrendingResponse { - success: boolean; - data: { - updateUnixTime?: number; - updateTime?: string; - tokens: Array<{ - address?: string; - symbol?: string; - name?: string; - decimals?: number; - liquidity?: number; - logoURI?: string; - volume24hUSD?: number; - rank?: number; - price?: number; - }>; - total?: number; - }; -} - -// Token List V2 Types -export interface TokenListV2Params { - offset?: number; - limit?: number; - sortBy?: string; - sortOrder?: "asc" | "desc"; -} - -// this endpoint is for enterprise only and the response is not documented -export interface TokenListV2Response { - success: boolean; - data: any; -} - -export interface TokenMetadataMultiParams { - list_addresses: string; -} - -export interface TokenMetadataMultiResponse { - success: boolean; - data: { - [address: string]: { - address?: string; - symbol?: string; - name?: string; - decimals?: number; - extensions?: { - coingecko_id?: string; - website?: string; - twitter?: string; - discord?: string; - medium?: string; - }; - logo_uri?: string; - }; - }; -} - -export interface TokenTradeDataMultiParams { - list_addresses: string; -} - -export interface TokenTradeDataMultiResponse { - success: boolean; - data: { - [address: string]: TokenTradeData; - }; -} - -// Token Metadata Single Types -export interface TokenMetadataSingleParams { - address: string; -} - -export interface TokenMetadataSingleResponse { - success: boolean; - data: { - address?: string; - symbol?: string; - name?: string; - decimals?: number; - extensions?: { - coingecko_id?: string; - website?: string; - twitter?: string; - discord?: string; - medium?: string; - }; - logo_uri?: string; - }; -} - -// Token Market Data Types -export interface TokenMarketDataParams { - address: string; -} - -export interface TokenMarketDataResponse { - success: boolean; - data: { - address?: string; - liquidity?: number; - price?: number; - supply?: number; - marketcap?: number; - circulating_supply?: number; - circulating_marketcap?: number; - }; -} - -// Token Trade Data Single Types -export interface TokenTradeDataSingleParams { - address: string; -} - -export interface TokenTradeDataSingleResponse { - success: boolean; - data: TokenTradeData; -} - -// Token Market Stats Types -export interface TokenMarketStatsResponse { - success: boolean; - data: { - address: string; - liquidity: number; - price: number; - supply: number; - marketcap: number; - circulating_supply: number; - circulating_marketcap: number; - }; -} - -// Token Holders Types -export interface TokenHoldersParams { - address: string; - offset?: number; - limit?: number; -} - -export interface TokenHoldersResponse { - success: boolean; - data: { - items: Array<{ - amount?: string; - decimals?: number; - mint?: string; - owner?: string; - token_account?: string; - ui_amount?: number; - }>; - }; -} - -// Token Mint Burn Types -export interface MintBurnParams { - address: string; - sort_by: "block_time"; - sort_type: "asc" | "desc"; - type: "mint" | "burn" | "all"; - after_time?: number; - before_time?: number; - offset?: number; - limit?: number; -} - -export interface MintBurnResponse { - success: boolean; - data: { - items: Array<{ - amount?: string; - block_human_time?: string; - block_time?: number; - common_type?: "mint" | "burn"; - decimals?: number; - mint?: string; - program_id?: string; - slot?: number; - tx_hash?: string; - ui_amount?: number; - ui_amount_string?: string; - }>; - }; -} - -// New Listing Types -export interface NewListingParams { - time_to: number; - meme_platform_enabled: boolean; - limit?: number; -} - -export interface NewListingResponse { - success: boolean; - data: { - items: Array<{ - address: string; - symbol: string; - name: string; - decimals: number; - source: string; - liquidityAddedAt: string; - logoURI: string | null; - liquidity: number; - }>; - }; -} - -// Top Traders Types -export interface TopTradersParams { - address: string; - time_frame?: TimeInterval; - sort_type?: "asc" | "desc"; - sort_by?: "volume" | "trade"; - offset?: number; - limit?: number; -} - -export interface TopTradersResponse { - success: boolean; - data: { - items: Array<{ - trader: string; - volume24h: number; - trades24h: number; - profit24h: number; - }>; - total: number; - }; -} - -// All Markets Types -export interface AllMarketsParams { - address: string; - time_frame: TimeInterval; - sort_type: "asc" | "desc"; - sort_by: "volume24h" | "liquidity"; - offset?: number; - limit?: number; -} - -export interface AllMarketsResponse { - success: boolean; - data: { - items: Array<{ - address: string; - base: { - address: string; - decimals: number; - symbol: string; - icon?: string; - }; - quote: { - address: string; - decimals: number; - symbol: string; - icon?: string; - }; - createdAt: string; - liquidity: number; - name: string; - price: number | null; - source: string; - trade24h: number; - trade24hChangePercent: number; - uniqueWallet24h: number; - uniqueWallet24hChangePercent: number; - volume24h: number; - }>; - total: number; - }; -} - -// Token Volume By Owner Types -export interface TokenVolumeByOwnerResponse { - success: boolean; - data: { - items: Array<{ - tokenAddress: string; - owner: string; - tags: string[]; - type: string; - volume: number; - trade: number; - tradeBuy: number; - tradeSell: number; - volumeBuy: number; - volumeSell: number; - }>; - }; -} diff --git a/packages/plugin-birdeye/src/types/api/trader.ts b/packages/plugin-birdeye/src/types/api/trader.ts deleted file mode 100644 index c338f2fad4f0c..0000000000000 --- a/packages/plugin-birdeye/src/types/api/trader.ts +++ /dev/null @@ -1,75 +0,0 @@ -// Trader Gainers Losers Types -export interface GainersLosersParams { - type: "yesterday" | "today" | "1W"; - sort_by: "PnL"; - sort_type: "asc" | "desc"; - offset?: number; - limit?: number; -} - -export interface GainersLosersResponse { - success: boolean; - data: { - items: Array<{ - network?: string; - address?: string; - pnl?: number; - trade_count?: number; - volume?: number; - }>; - }; -} - -// Trader Transactions Seek Types -export interface TraderTransactionsSeekParams { - address: string; - offset?: number; - limit?: number; - tx_type?: "swap" | "add" | "remove" | "all"; - before_time?: number; - after_time?: number; -} - -export interface TraderTransactionsSeekResponse { - success: boolean; - data: { - items: Array<{ - quote?: { - symbol?: string; - decimals?: number; - address?: string; - amount?: number; - type?: string; - type_swap?: "from" | "to"; - ui_amount?: number; - price?: number | null; - nearest_price?: number; - change_amount?: number; - ui_change_amount?: number; - }; - base?: { - symbol?: string; - decimals?: number; - address?: string; - amount?: number; - type?: string; - type_swap?: "from" | "to"; - fee_info?: any | null; - ui_amount?: number; - price?: number | null; - nearest_price?: number; - change_amount?: number; - ui_change_amount?: number; - }; - base_price?: number | null; - quote_price?: number | null; - tx_hash?: string; - source?: string; - block_unix_time?: number; - tx_type?: string; - address?: string; - owner?: string; - }>; - hasNext?: boolean; - }; -} diff --git a/packages/plugin-birdeye/src/types/api/wallet.ts b/packages/plugin-birdeye/src/types/api/wallet.ts deleted file mode 100644 index ff3c06b823243..0000000000000 --- a/packages/plugin-birdeye/src/types/api/wallet.ts +++ /dev/null @@ -1,180 +0,0 @@ -// Wallet Portfolio Types -export interface WalletPortfolioParams { - wallet: string; -} - -export interface WalletPortfolioResponse { - success: boolean; - data: { - wallet?: string; - totalUsd?: number; - items: Array<{ - address?: string; - name?: string; - symbol?: string; - decimals?: number; - balance?: string; - uiAmount?: number; - chainId?: string; - logoURI?: string; - priceUsd?: number; - valueUsd?: number; - }>; - }; -} - -// Wallet Token Balance Types -export interface WalletTokenBalanceParams { - wallet: string; - token_address: string; -} - -export interface WalletTokenBalanceResponse { - success: boolean; - data: { - address?: string; - name?: string; - symbol?: string; - decimals?: number; - balance?: number; - uiAmount?: number; - chainId?: string; - priceUsd?: number; - valueUsd?: number; - }; -} - -// Wallet Transaction History Types -export interface WalletTransactionHistoryParams { - wallet: string; - limit?: number; - before?: string; -} - -export interface WalletTransactionHistoryResponse { - success: boolean; - data: { - [chain: string]: Array<{ - txHash?: string; - blockNumber?: number; - blockTime?: string; - status?: boolean; - from?: string; - to?: string; - gasUsed?: number; - gasPrice?: number; - fee?: string; - feeUsd?: number; - value?: string; - contractLabel?: { - address?: string; - name?: string; - metadata?: Record; - }; - mainAction?: string; - balanceChange?: Array<{ - name?: string; - symbol?: string; - logoURI?: string; - address?: string; - amount?: number; - decimals?: number; - }>; - }>; - }; -} - -// Wallet Networks Types -export interface WalletNetworksResponse { - success: boolean; - data: { - chains?: string[]; - }; -} - -// Wallet Portfolio Multichain Types -export interface WalletPortfolioMultichainParams { - wallet: string; -} - -export interface WalletPortfolioMultichainResponse { - success: boolean; - data: { - items: Array<{ - chain?: string; - address?: string; - symbol?: string; - name?: string; - decimals?: number; - price?: number; - priceChange24h?: number; - value?: number; - amount?: number; - }>; - total?: number; - totalValue?: number; - }; -} - -// Wallet Transaction History Multichain Types -export interface WalletTransactionHistoryMultichainParams { - wallet: string; -} - -export interface WalletTransactionHistoryMultichainResponse { - success: boolean; - data: { - [chain: string]: Array<{ - txHash?: string; - blockNumber?: number; - blockTime?: string; - status?: boolean; - from?: string; - to?: string; - gasUsed?: number; - gasPrice?: number; - fee?: string; - feeUsd?: number; - value?: string; - contractLabel?: { - address?: string; - name?: string; - metadata?: Record; - }; - mainAction?: string; - balanceChange?: Array<{ - name?: string; - symbol?: string; - logoURI?: string; - address?: string; - amount?: number; - decimals?: number; - }>; - }>; - }; -} - -// Wallet Transaction Simulation Types -export interface WalletSimulationParams { - from?: string; - to?: string; - data?: string; - value?: string; -} - -export interface WalletSimulationResponse { - success: boolean; - data: { - balanceChange: Array<{ - index?: number; - before?: number; - after?: number; - address?: string; - name?: string; - symbol?: string; - logoURI?: string; - decimals?: number; - }>; - gasUsed?: number; - }; -} diff --git a/packages/plugin-birdeye/src/types/shared.ts b/packages/plugin-birdeye/src/types/shared.ts deleted file mode 100644 index 811745c44a43d..0000000000000 --- a/packages/plugin-birdeye/src/types/shared.ts +++ /dev/null @@ -1,23 +0,0 @@ -import type { BIRDEYE_SUPPORTED_CHAINS } from "../utils"; - -// Types -export type BirdeyeSupportedChain = (typeof BIRDEYE_SUPPORTED_CHAINS)[number]; - -export interface BaseAddress { - type?: "wallet" | "token" | "contract"; - symbol?: string; - address: string; - chain: BirdeyeSupportedChain; -} - -export interface WalletAddress extends BaseAddress { - type: "wallet"; -} - -export interface TokenAddress extends BaseAddress { - type: "token"; -} - -export interface ContractAddress extends BaseAddress { - type: "contract"; -} diff --git a/packages/plugin-birdeye/src/utils.ts b/packages/plugin-birdeye/src/utils.ts deleted file mode 100644 index a7588d85e68ed..0000000000000 --- a/packages/plugin-birdeye/src/utils.ts +++ /dev/null @@ -1,613 +0,0 @@ -import { elizaLogger } from "@elizaos/core"; -import type { BirdeyeApiParams } from "./types/api/common"; -import type { TokenMarketSearchResponse, TokenResult } from "./types/api/search"; -import type { TokenMetadataSingleResponse } from "./types/api/token"; -import type { WalletPortfolioResponse } from "./types/api/wallet"; -import type { BaseAddress, BirdeyeSupportedChain } from "./types/shared"; - -// Constants -export const BASE_URL = "https://public-api.birdeye.so"; - -export const BIRDEYE_SUPPORTED_CHAINS = [ - "solana", - "ethereum", - "arbitrum", - "avalanche", - "bsc", - "optimism", - "polygon", - "base", - "zksync", - "sui", - "solana", - "evm", // EVM-compatible chains but we don't know the chain -] as const; - -// Chain abbreviations and alternative names mapping -export const CHAIN_ALIASES: Record = { - // Solana - sol: "solana", - - // Ethereum - eth: "ethereum", - ether: "ethereum", - - // Arbitrum - arb: "arbitrum", - arbitrumone: "arbitrum", - - // Avalanche - avax: "avalanche", - - // BSC - bnb: "bsc", - binance: "bsc", - "binance smart chain": "bsc", - - // Optimism - op: "optimism", - opti: "optimism", - - // Polygon - matic: "polygon", - poly: "polygon", - - // Base - // no common abbreviations - - // zkSync - zks: "zksync", - zk: "zksync", - - // Sui - // no common abbreviations -} as const; - -export class BirdeyeApiError extends Error { - constructor( - public status: number, - message: string - ) { - super(message); - this.name = "BirdeyeApiError"; - } -} - -export interface ApiResponse { - success: boolean; - data: T; - error?: string; -} - -// Time-related types and constants -export const TIME_UNITS = { - second: 1, - minute: 60, - hour: 3600, - day: 86400, - week: 604800, - month: 2592000, -} as const; - -export const TIMEFRAME_KEYWORDS = { - "1m": 60, - "3m": 180, - "5m": 300, - "15m": 900, - "30m": 1800, - "1h": 3600, - "2h": 7200, - "4h": 14400, - "6h": 21600, - "12h": 43200, - "1d": 86400, - "1w": 604800, -} as const; - -export type TimeUnit = keyof typeof TIME_UNITS; -export type Timeframe = keyof typeof TIMEFRAME_KEYWORDS; - -// Helper functions -export const extractChain = (text: string): BirdeyeSupportedChain => { - // Check for SUI address (0x followed by 64 hex chars) - if (text.match(/0x[a-fA-F0-9]{64}/)) { - return "sui"; - } - // Check for EVM address (0x followed by 40 hex chars) - if (text.match(/0x[a-fA-F0-9]{40}/)) { - return "ethereum"; - } - // Default to solana - return "solana"; -}; - -export const extractAddresses = (text: string): BaseAddress[] => { - const addresses: BaseAddress[] = []; - - // EVM-compatible chains (Ethereum, Arbitrum, Avalanche, BSC, Optimism, Polygon, Base, zkSync) - const evmAddresses = text.match(/0x[a-fA-F0-9]{40}/g); - if (evmAddresses) { - addresses.push( - ...evmAddresses.map((address) => ({ - address, - chain: "evm" as BirdeyeSupportedChain, // we don't yet know the chain but can assume it's EVM-compatible - })) - ); - } - - // Solana addresses (base58 strings) - const solAddresses = text.match(/[1-9A-HJ-NP-Za-km-z]{32,44}/g); - if (solAddresses) { - addresses.push( - ...solAddresses.map((address) => ({ - address, - chain: "solana" as BirdeyeSupportedChain, - })) - ); - } - - // Sui addresses (0x followed by 64 hex chars) - const suiAddresses = text.match(/0x[a-fA-F0-9]{64}/g); - if (suiAddresses) { - addresses.push( - ...suiAddresses.map((address) => ({ - address, - chain: "sui" as BirdeyeSupportedChain, - })) - ); - } - - return addresses; -}; - -// Time extraction and analysis -export const extractTimeframe = (text: string): Timeframe => { - // First, check for explicit timeframe mentions - const timeframe = Object.keys(TIMEFRAME_KEYWORDS).find((tf) => - text.toLowerCase().includes(tf.toLowerCase()) - ); - if (timeframe) return timeframe as Timeframe; - - // Check for semantic timeframe hints - const semanticMap = { - "short term": "15m", - "medium term": "1h", - "long term": "1d", - intraday: "1h", - daily: "1d", - weekly: "1w", - detailed: "5m", - quick: "15m", - overview: "1d", - } as const; - - for (const [hint, tf] of Object.entries(semanticMap)) { - if (text.toLowerCase().includes(hint)) { - return tf as Timeframe; - } - } - - // Analyze for time-related words - if (text.match(/minute|min|minutes/i)) return "15m"; - if (text.match(/hour|hourly|hours/i)) return "1h"; - if (text.match(/day|daily|24h/i)) return "1d"; - if (text.match(/week|weekly/i)) return "1w"; - - // Default based on context - if (text.match(/trade|trades|trading|recent/i)) return "15m"; - if (text.match(/trend|analysis|analyze/i)) return "1h"; - if (text.match(/history|historical|long|performance/i)) return "1d"; - - return "1h"; // Default timeframe -}; - -export const extractTimeRange = ( - text: string -): { start: number; end: number } => { - const now = Math.floor(Date.now() / 1000); - - // Check for specific date ranges - const dateRangeMatch = text.match( - /from\s+(\d{4}-\d{2}-\d{2})\s+to\s+(\d{4}-\d{2}-\d{2})/i - ); - if (dateRangeMatch) { - const start = new Date(dateRangeMatch[1]).getTime() / 1000; - const end = new Date(dateRangeMatch[2]).getTime() / 1000; - return { start, end }; - } - - // Check for relative time expressions - const timeRegex = /(\d+)\s*(second|minute|hour|day|week|month)s?\s*ago/i; - const match = text.match(timeRegex); - if (match) { - const amount = Number.parseInt(match[1]); - const unit = match[2].toLowerCase() as TimeUnit; - const start = now - amount * TIME_UNITS[unit]; - return { start, end: now }; - } - - // Check for semantic time ranges - const semanticRanges: Record = { - today: TIME_UNITS.day, - "this week": TIME_UNITS.week, - "this month": TIME_UNITS.month, - recent: TIME_UNITS.hour * 4, - latest: TIME_UNITS.hour, - "last hour": TIME_UNITS.hour, - "last day": TIME_UNITS.day, - "last week": TIME_UNITS.week, - "last month": TIME_UNITS.month, - }; - - for (const [range, duration] of Object.entries(semanticRanges)) { - if (text.toLowerCase().includes(range)) { - return { start: now - duration, end: now }; - } - } - - // Analyze context for appropriate default range - if (text.match(/trend|analysis|performance/i)) { - return { start: now - TIME_UNITS.week, end: now }; // 1 week for analysis - } - if (text.match(/trade|trades|trading|recent/i)) { - return { start: now - TIME_UNITS.day, end: now }; // 1 day for trading - } - if (text.match(/history|historical|long term/i)) { - return { start: now - TIME_UNITS.month, end: now }; // 1 month for history - } - - // Default to last 24 hours - return { start: now - TIME_UNITS.day, end: now }; -}; - -export const extractLimit = (text: string): number => { - // Check for explicit limit mentions - const limitMatch = text.match( - /\b(show|display|get|fetch|limit)\s+(\d+)\b/i - ); - if (limitMatch) { - const limit = Number.parseInt(limitMatch[2]); - return Math.min(Math.max(limit, 1), 100); // Clamp between 1 and 100 - } - - // Check for semantic limit hints - if (text.match(/\b(all|everything|full|complete)\b/i)) return 100; - if (text.match(/\b(brief|quick|summary|overview)\b/i)) return 5; - if (text.match(/\b(detailed|comprehensive)\b/i)) return 50; - - // Default based on context - if (text.match(/\b(trade|trades|trading)\b/i)) return 10; - if (text.match(/\b(analysis|analyze|trend)\b/i)) return 24; - if (text.match(/\b(history|historical)\b/i)) return 50; - - return 10; // Default limit -}; - -// Formatting helpers -export const formatValue = (value?: number): string => { - if (!value) return "N/A"; - if (value && value >= 1_000_000_000) { - return `$${(value / 1_000_000_000).toFixed(2)}B`; - } - if (value >= 1_000_000) { - return `$${(value / 1_000_000).toFixed(2)}M`; - } - if (value >= 1_000) { - return `$${(value / 1_000).toFixed(2)}K`; - } - return `$${value.toFixed(2)}`; -}; - -export const formatPercentChange = (change?: number): string => { - if (change === undefined) return "N/A"; - const symbol = change >= 0 ? "↑" : "↓"; - return `${symbol} ${Math.abs(change).toFixed(2)}%`; -}; - -export const shortenAddress = (address?: string): string => { - if (!address || address.length <= 12) return address || "Unknown"; - return `${address.slice(0, 6)}...${address.slice(-4)}`; -}; - -export const formatTimestamp = (timestamp?: number): string => { - return timestamp ? new Date(timestamp * 1000).toLocaleString() : "N/A"; -}; - -export const formatPrice = (price?: number): string => { - return price - ? price < 0.01 - ? price.toExponential(2) - : price.toFixed(2) - : "N/A"; -}; - -// API helpers -export async function makeApiRequest( - url: string, - options: { - apiKey: string; - chain?: BirdeyeSupportedChain; - method?: "GET" | "POST"; - body?: any; - } -): Promise { - const { apiKey, chain = "solana", method = "GET", body } = options; - - try { - const response = await fetch(url, { - method, - headers: { - "X-API-KEY": apiKey, - "x-chain": chain, - ...(body && { "Content-Type": "application/json" }), - }, - ...(body && { body: JSON.stringify(body) }), - }); - - if (!response.ok) { - if (response.status === 404) { - throw new BirdeyeApiError(404, "Resource not found"); - } - if (response.status === 429) { - throw new BirdeyeApiError(429, "Rate limit exceeded"); - } - throw new BirdeyeApiError( - response.status, - `HTTP error! status: ${response.status}` - ); - } - - const responseJson: T = await response.json(); - - return responseJson; - } catch (error) { - if (error instanceof BirdeyeApiError) { - elizaLogger.error(`API Error (${error.status}):`, error.message); - } else { - elizaLogger.error("Error making API request:", error); - } - throw error; - } -} - -// Formatting helpers -export const formatTokenInfo = ( - token: TokenResult, - metadata?: TokenMetadataSingleResponse -): string => { - const priceFormatted = - token.price != null - ? token.price < 0.01 - ? token.price.toExponential(2) - : token.price.toFixed(2) - : "N/A"; - - const volume = - token.volume_24h_usd != null - ? `$${(token.volume_24h_usd / 1_000_000).toFixed(2)}M` - : "N/A"; - - const liquidity = - token.liquidity != null - ? `$${(token.liquidity / 1_000_000).toFixed(2)}M` - : "N/A"; - - const fdv = - token.fdv != null ? `$${(token.fdv / 1_000_000).toFixed(2)}M` : "N/A"; - - const priceChange = - token.price_change_24h_percent != null - ? `${token.price_change_24h_percent > 0 ? "+" : ""}${token.price_change_24h_percent.toFixed(2)}%` - : "N/A"; - - const trades = token.trade_24h != null ? token.trade_24h.toString() : "N/A"; - - const age = token.creation_time - ? `${Math.floor((Date.now() - new Date(token.creation_time).getTime()) / (1000 * 60 * 60 * 24))}d` - : "N/A"; - - let output = - `🪙 ${token.name} @ ${token.symbol}\n` + - `💰 USD: $${priceFormatted} (${priceChange})\n` + - `💎 FDV: ${fdv}\n` + - `💦 MCap: ${token.market_cap ? `$${(token.market_cap / 1_000_000).toFixed(2)}M` : "N/A"}\n` + - `💦 Liq: ${liquidity}\n` + - `📊 Vol: ${volume}\n` + - `🕰️ Age: ${age}\n` + - `🔄 Trades: ${trades}\n` + - `🔗 Address: ${token.address}`; - - // Add metadata if available - if (metadata?.success) { - const { extensions } = metadata.data; - const links: string[] = []; - - if (extensions.website) - links.push(`🌐 [Website](${extensions.website})`); - if (extensions.twitter) - links.push(`🐦 [Twitter](${extensions.twitter})`); - if (extensions.discord) - links.push(`💬 [Discord](${extensions.discord})`); - if (extensions.medium) links.push(`📝 [Medium](${extensions.medium})`); - if (extensions.coingecko_id) - links.push( - `🦎 [CoinGecko](https://www.coingecko.com/en/coins/${extensions.coingecko_id})` - ); - - if (links.length > 0) { - output += "\n\n📱 Social Links:\n" + links.join("\n"); - } - } - - return output; -}; - -// Extract symbols from text -export const extractSymbols = ( - text: string, - // loose mode will try to extract more symbols but may include false positives - // strict mode will only extract symbols that are clearly formatted as a symbol using $SOL format - mode: "strict" | "loose" = "loose" -): string[] => { - const symbols = new Set(); - - // Match patterns - this may - const patterns = - mode === "strict" - ? [ - // $SYMBOL format - /\$([A-Z0-9]{2,10})\b/gi, - // $SYMBOL format with lowercase - /\$([a-z0-9]{2,10})\b/gi, - ] - : [ - // $SYMBOL format - /\$([A-Z0-9]{2,10})\b/gi, - // After articles (a/an) - /\b(?:a|an)\s+([A-Z0-9]{2,10})\b/gi, - // // Standalone caps - /\b[A-Z0-9]{2,10}\b/g, - // // Quoted symbols - /["']([A-Z0-9]{2,10})["']/gi, - // // Common price patterns - /\b([A-Z0-9]{2,10})\/USD\b/gi, - /\b([A-Z0-9]{2,10})-USD\b/gi, - ]; - - // Extract all matches - patterns.forEach((pattern) => { - const matches = text.matchAll(pattern); - for (const match of matches) { - const symbol = (match[1] || match[0]).toUpperCase(); - symbols.add(symbol); - } - }); - - return Array.from(symbols); -}; - -export const formatMetadataResponse = ( - data: TokenMetadataSingleResponse, - chain: BirdeyeSupportedChain -): string => { - const tokenData = data.data; - const chainName = chain.charAt(0).toUpperCase() + chain.slice(1); - const chainExplorer = (() => { - switch (chain) { - case "solana": - return `https://solscan.io/token/${tokenData.address}`; - case "ethereum": - return `https://etherscan.io/token/${tokenData.address}`; - case "arbitrum": - return `https://arbiscan.io/token/${tokenData.address}`; - case "avalanche": - return `https://snowtrace.io/token/${tokenData.address}`; - case "bsc": - return `https://bscscan.com/token/${tokenData.address}`; - case "optimism": - return `https://optimistic.etherscan.io/token/${tokenData.address}`; - case "polygon": - return `https://polygonscan.com/token/${tokenData.address}`; - case "base": - return `https://basescan.org/token/${tokenData.address}`; - case "zksync": - return `https://explorer.zksync.io/address/${tokenData.address}`; - case "sui": - return `https://suiscan.xyz/mainnet/object/${tokenData.address}`; - default: - return null; - } - })(); - - let response = `Token Metadata for ${tokenData.name} (${tokenData.symbol}) on ${chainName}\n\n`; - - // Basic Information - response += "📝 Basic Information\n"; - response += `• Name: ${tokenData.name}\n`; - response += `• Symbol: ${tokenData.symbol}\n`; - response += `• Address: ${tokenData.address}\n`; - response += `• Decimals: ${tokenData.decimals}\n`; - if (chainExplorer) { - response += `• Explorer: [View on ${chainName} Explorer](${chainExplorer})\n`; - } - - // Social Links - response += "\n🔗 Social Links & Extensions\n"; - response += formatSocialLinks(tokenData) + "\n"; - - // Logo - if (tokenData.logo_uri) { - response += "\n🖼️ Logo\n"; - response += tokenData.logo_uri; - } - - return response; -}; - -const formatSocialLinks = ( - data: TokenMetadataSingleResponse["data"] -): string => { - const links: string[] = []; - const { extensions } = data; - - if (!extensions) { - return "No social links available"; - } - - if (extensions.website) { - links.push(`🌐 [Website](${extensions.website})`); - } - if (extensions.twitter) { - links.push(`🐦 [Twitter](${extensions.twitter})`); - } - if (extensions.discord) { - links.push(`💬 [Discord](${extensions.discord})`); - } - if (extensions.medium) { - links.push(`📝 [Medium](${extensions.medium})`); - } - if (extensions.coingecko_id) { - links.push( - `🦎 [CoinGecko](https://www.coingecko.com/en/coins/${extensions.coingecko_id})` - ); - } - - return links.length > 0 ? links.join("\n") : "No social links available"; -}; - -export const waitFor = (ms: number) => - new Promise((resolve) => setTimeout(resolve, ms)); - -export const formatPortfolio = (response: WalletPortfolioResponse) => { - const { items } = response.data; - if (!items?.length) return "No tokens found in portfolio"; - - return items - .map((item) => { - const value = item?.priceUsd?.toFixed(2); - const amount = item?.uiAmount?.toFixed(4); - return ( - `• ${item.symbol || "Unknown Token"}: ${amount} tokens` + - `${value !== "0.00" ? ` (Value: $${value || "unknown"})` : ""}` - ); - }) - .join("\n"); -}; - -export const convertToStringParams = (params: BirdeyeApiParams) => { - return Object.entries(params || {}).reduce( - (acc, [key, value]) => ({ - ...acc, - [key]: value?.toString() || "", - }), - {} as Record - ); -}; - -export const getTokenResultFromSearchResponse = ( - response: TokenMarketSearchResponse -): TokenResult[] | undefined => { - return response.data.items - .filter((item) => item.type === "token") - .flatMap((item) => item.result); -}; diff --git a/packages/plugin-birdeye/tsconfig.json b/packages/plugin-birdeye/tsconfig.json deleted file mode 100644 index 73993deaaf7cb..0000000000000 --- a/packages/plugin-birdeye/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/plugin-birdeye/tsup.config.ts b/packages/plugin-birdeye/tsup.config.ts deleted file mode 100644 index dd25475bb630f..0000000000000 --- a/packages/plugin-birdeye/tsup.config.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "safe-buffer", - "base-x", - "bs58", - "borsh", - "@solana/buffer-layout", - "stream", - "buffer", - "querystring", - "amqplib", - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-bittensor/.npmignore b/packages/plugin-bittensor/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-bittensor/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-bittensor/README.md b/packages/plugin-bittensor/README.md deleted file mode 100644 index 47c93d7b50ca4..0000000000000 --- a/packages/plugin-bittensor/README.md +++ /dev/null @@ -1,24 +0,0 @@ -# @elizaos/plugin-bittensor - -A plugin that integrates BitMind's API into ElizaOS agents, enabling access to AI services and digital assets powered by the Bittensor network. - -## Description -The Bittensor plugin enables agents to interact with BitMind's API to access a range of AI capabilities on Bittensor's decentralized network, including inference, media generation, and deepfake detection services. Currently, the plugin offers image detection functionality to determine if images are AI-generated, with additional capabilities planned for future releases through the BitMind API. - -## Installation - -```bash -pnpm install @elizaos/plugin-bittensor -``` - -## Features - -### SN34 - Deepfake Detection -The plugin currently implements BitMind's SN34 subnet for AI-generated image detection. This subnet provides: -- Real-time analysis of image authenticity -- Confidence scoring for AI influence detection -- Detailed response formatting with: - - Binary classification (AI vs Natural image) - - Percentage-based AI influence rating - - Risk assessment based on confidence levels - - Visual indicators for quick interpretation (🤖, 📸, ⚠️, ⚡, ✅) \ No newline at end of file diff --git a/packages/plugin-bittensor/__tests__/actions/sn34.test.ts b/packages/plugin-bittensor/__tests__/actions/sn34.test.ts deleted file mode 100644 index 96f81118104cf..0000000000000 --- a/packages/plugin-bittensor/__tests__/actions/sn34.test.ts +++ /dev/null @@ -1,125 +0,0 @@ -import { describe, expect, it, vi, beforeEach } from 'vitest'; -import { analyzeImage, analysisHistory } from '../../src/actions/sn34'; -import { elizaLogger } from '@elizaos/core'; - -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - info: vi.fn(), - error: vi.fn(), - debug: vi.fn(), - } -})); - -describe('sn34', () => { - let mockRuntime; - let mockMessage; - let mockState; - let mockCallback; - - beforeEach(() => { - mockRuntime = { - character: { - settings: { - secrets: { - BITMIND: 'test-api-key' - } - } - } - }; - - mockMessage = { - content: { - text: 'analyze this image: https://example.com/image.jpg' - } - }; - - mockState = {}; - mockCallback = vi.fn(); - - // Reset all mocks - vi.clearAllMocks(); - }); - - describe('analyzeImage', () => { - describe('validation', () => { - it('should validate when image URL is present', async () => { - const result = await analyzeImage.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - expect(elizaLogger.info).toHaveBeenCalledWith('🔍 BitMind: Validating analysis request...'); - }); - - it('should fail validation when no image URL is present', async () => { - mockMessage.content.text = 'analyze this image'; - const result = await analyzeImage.validate(mockRuntime, mockMessage); - expect(result).toBe(false); - expect(elizaLogger.error).toHaveBeenCalledWith('❌ BitMind: No image URL found in request'); - }); - - it('should fail validation when API credentials are missing', async () => { - mockRuntime.character.settings.secrets.BITMIND = undefined; - const result = await analyzeImage.validate(mockRuntime, mockMessage); - expect(result).toBe(false); - expect(elizaLogger.error).toHaveBeenCalledWith('❌ BitMind: API credentials not configured'); - }); - }); - - describe('action properties', () => { - it('should have correct action properties', () => { - expect(analyzeImage.name).toBe('DETECT_IMAGE'); - expect(analyzeImage.similes).toEqual([ - 'ANALYZE_IMAGE', - 'VERIFY_IMAGE', - 'BITMIND_DETECTION', - 'AI_DETECTION', - 'REAL_OR_FAKE' - ]); - expect(analyzeImage.examples).toBeDefined(); - expect(Array.isArray(analyzeImage.examples)).toBe(true); - }); - - it('should have valid examples', () => { - analyzeImage.examples.forEach(example => { - expect(Array.isArray(example)).toBe(true); - example.forEach(interaction => { - expect(interaction).toHaveProperty('user'); - expect(interaction).toHaveProperty('content'); - }); - }); - }); - }); - }); - - describe('analysisHistory', () => { - describe('validation', () => { - it('should validate successfully', async () => { - const result = await analysisHistory.validate(mockRuntime); - expect(result).toBe(true); - }); - }); - - describe('action properties', () => { - it('should have correct action properties', () => { - expect(analysisHistory.name).toBe('IMAGE_REPORT'); - expect(analysisHistory.similes).toEqual([ - 'SHOW_DETECTIONS', - 'IMAGE_HISTORY', - 'PAST_ANALYSES', - 'DETECTION_HISTORY' - ]); - expect(analysisHistory.description).toBe('Display history of AI image analysis results'); - expect(analysisHistory.examples).toBeDefined(); - expect(Array.isArray(analysisHistory.examples)).toBe(true); - }); - - it('should have valid examples', () => { - analysisHistory.examples.forEach(example => { - expect(Array.isArray(example)).toBe(true); - example.forEach(interaction => { - expect(interaction).toHaveProperty('user'); - expect(interaction).toHaveProperty('content'); - }); - }); - }); - }); - }); -}); diff --git a/packages/plugin-bittensor/__tests__/evaluators/fact.test.ts b/packages/plugin-bittensor/__tests__/evaluators/fact.test.ts deleted file mode 100644 index 4d430beb71355..0000000000000 --- a/packages/plugin-bittensor/__tests__/evaluators/fact.test.ts +++ /dev/null @@ -1,113 +0,0 @@ -import { describe, expect, it, vi, beforeEach } from 'vitest'; -import { factEvaluator } from '../../src/evaluators/fact'; -import { composeContext, generateObjectArray, MemoryManager } from '@elizaos/core'; - -vi.mock('@elizaos/core', () => ({ - composeContext: vi.fn(), - generateObjectArray: vi.fn(), - MemoryManager: vi.fn().mockImplementation((config: any) => ({ - getMemoriesByEvaluator: vi.fn().mockResolvedValue([]), - addMemory: vi.fn().mockResolvedValue(true), - addEmbeddingToMemory: vi.fn().mockResolvedValue({ - id: 'test-memory-id', - content: { - text: 'Test memory content' - } - }), - createMemory: vi.fn().mockResolvedValue({ - id: 'test-memory-id', - content: { - text: 'Test memory content' - } - }) - })), - ModelClass: { - SMALL: 'small' - } -})); - -describe('factEvaluator', () => { - let mockRuntime; - let mockMessage; - - beforeEach(() => { - mockRuntime = { - character: { - settings: {} - }, - messageManager: { - countMemories: vi.fn().mockResolvedValue(5) - }, - composeState: vi.fn().mockResolvedValue({ - agentId: 'test-agent', - roomId: 'test-room' - }), - getConversationLength: vi.fn().mockReturnValue(10) - }; - - mockMessage = { - content: { - text: 'I live in New York and work as a software engineer.' - }, - roomId: 'test-room' - }; - - // Reset all mocks - vi.clearAllMocks(); - }); - - describe('validation', () => { - it('should validate successfully', async () => { - const result = await factEvaluator.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - expect(mockRuntime.messageManager.countMemories).toHaveBeenCalledWith('test-room'); - expect(mockRuntime.getConversationLength).toHaveBeenCalled(); - }); - }); - - describe('evaluator properties', () => { - it('should have correct evaluator properties', () => { - expect(factEvaluator.name).toBe('GET_FACTS'); - expect(factEvaluator.similes).toContain('GET_CLAIMS'); - expect(factEvaluator.description).toBeDefined(); - expect(factEvaluator.description).toContain('Extract factual information'); - expect(factEvaluator.examples).toBeDefined(); - expect(Array.isArray(factEvaluator.examples)).toBe(true); - }); - - it('should have valid examples', () => { - factEvaluator.examples.forEach(example => { - expect(example).toBeDefined(); - // Will add more specific example validations based on the example structure - }); - }); - }); - - describe('fact extraction', () => { - it('should handle fact extraction', async () => { - const mockFacts = [ - { - claim: 'User lives in New York', - type: 'fact', - in_bio: false, - already_known: false - }, - { - claim: 'User works as a software engineer', - type: 'fact', - in_bio: false, - already_known: false - } - ]; - - vi.mocked(composeContext).mockReturnValue('mock-context'); - vi.mocked(generateObjectArray).mockResolvedValue(mockFacts); - - const result = await factEvaluator.handler(mockRuntime, mockMessage); - - expect(composeContext).toHaveBeenCalled(); - expect(generateObjectArray).toHaveBeenCalled(); - expect(result).toBeDefined(); - }); - }); -}); diff --git a/packages/plugin-bittensor/biome.json b/packages/plugin-bittensor/biome.json deleted file mode 100644 index edf6799928be5..0000000000000 --- a/packages/plugin-bittensor/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": true - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "style": { - "noNonNullAssertion": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "complexity": { - "noUselessCatch": "error" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 80 - }, - "javascript": { - "formatter": { - "quoteStyle": "double", - "trailingComma": "es5", - "semicolons": "always" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-bittensor/package.json b/packages/plugin-bittensor/package.json deleted file mode 100644 index 09fb8852b1088..0000000000000 --- a/packages/plugin-bittensor/package.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "name": "@elizaos/plugin-bittensor", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - ".": { - "types": "./dist/index.d.ts", - "import": "./dist/index.js", - "default": "./dist/index.js" - }, - "./package.json": "./package.json" - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/client-twitter": "workspace:*", - "@elizaos/core": "workspace:*", - "tsup": "8.3.5" - }, - "devDependencies": { - "@biomejs/biome": "1.5.3" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "lint": "biome lint .", - "format": "biome format . --write", - "check": "biome check --apply ." - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-bittensor/src/actions/index.ts b/packages/plugin-bittensor/src/actions/index.ts deleted file mode 100644 index 405d83e2f416c..0000000000000 --- a/packages/plugin-bittensor/src/actions/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./sn34.ts"; \ No newline at end of file diff --git a/packages/plugin-bittensor/src/actions/sn34.ts b/packages/plugin-bittensor/src/actions/sn34.ts deleted file mode 100644 index 13b5fb6aeecf6..0000000000000 --- a/packages/plugin-bittensor/src/actions/sn34.ts +++ /dev/null @@ -1,331 +0,0 @@ -import { - ActionExample, - IAgentRuntime, - Memory, - State, - HandlerCallback, - type Action, -} from "@elizaos/core"; -import { elizaLogger } from "@elizaos/core"; - -interface AIImageDetectionResult { - isAIGenerated: boolean; - confidenceScore: number; -} - -interface AIImageAnalysisMemory extends Memory { - content: { - text: string; - imageUrl: string; - isAIGenerated: boolean; - confidenceScore: number; - imageSource: 'tweet' | 'url'; - actionType: string; - }; -} - -export const formatAnalysisHistory = (analyses: AIImageAnalysisMemory[]) => { - const analysisStrings = analyses - .reverse() - .map((analysis: AIImageAnalysisMemory) => { - const { isAIGenerated, confidenceScore } = analysis.content; - const scorePercentage = Number(confidenceScore); - return `Image Analysis: ${isAIGenerated ? 'AI Generated' : 'Natural'} (${(scorePercentage * 100).toFixed(2)}% confidence)`; - }); - return analysisStrings.join("\n"); -}; - -const validateAnalysisRequest = async (runtime: IAgentRuntime, message: Memory): Promise => { - elizaLogger.info("🔍 BitMind: Validating analysis request..."); - - const urlMatch = message?.content?.text?.match(/https?:\/\/[^\s]+/); - const imageUrls = message?.content?.imageUrls as string[] | undefined; - - if (!urlMatch && (!imageUrls || imageUrls.length === 0)) { - elizaLogger.error("❌ BitMind: No image URL found in request"); - return false; - } - - if (!runtime?.character?.settings?.secrets?.BITMIND) { - elizaLogger.error("❌ BitMind: API credentials not configured"); - return false; - } - - elizaLogger.info("✅ BitMind: Request validation successful"); - return true; -}; - -const extractImageUrl = (message: Memory): { url: string; isTweet: boolean } => { - const urlMatch = message.content.text.match(/https?:\/\/[^\s]+/); - const imageUrls = message.content.imageUrls as string[] | undefined; - const isTweet = Boolean(imageUrls && imageUrls.length > 0); - - if (isTweet && imageUrls) { - return { url: imageUrls[0], isTweet }; - } - if (urlMatch) { - return { url: urlMatch[0], isTweet }; - } - throw new Error("No valid image URL found in request"); -}; - -const analyzeImageWithBitMind = async (imageUrl: string, apiKey: string): Promise => { - try { - const response = await fetch("https://subnet-api.bitmindlabs.ai/detect-image", { - method: "POST", - headers: { - "Authorization": `Bearer ${apiKey}`, - "Content-Type": "application/json" - }, - body: JSON.stringify({ image: imageUrl }) - }); - - if (!response.ok) { - const errorMessage = `BitMind API error (${response.status}): ${response.statusText}`; - elizaLogger.error(`❌ ${errorMessage}`); - if (response.status === 500) { - throw new Error("BitMind service is currently experiencing issues. Please try again later."); - } - throw new Error(errorMessage); - } - - const result = await response.json(); - return { - isAIGenerated: result.isAI, - confidenceScore: result.confidence - }; - } catch (error) { - if (error.message.includes('BitMind service')) { - throw error; // Re-throw our custom error - } - elizaLogger.error('❌ BitMind API request failed:', error); - throw new Error('Failed to connect to BitMind service. Please check your connection and try again.'); - } -}; - -const generateAnalysisReport = (result: AIImageDetectionResult): string => { - const confidencePercent = (result.confidenceScore * 100).toFixed(2); - const confidenceValue = parseFloat(confidencePercent); - - return `🔍 Trinity Matrix Deepfake Analysis -Powered by BitMind Subnet (SN34) on Bittensor - -${result.isAIGenerated ? '🤖 AI Generated' : '📸 Natural Image'} -${confidencePercent}% AI Influence Rating -${confidenceValue > 75 - ? "⚠️ High synthetic probability detected. Approach with caution." - : confidenceValue > 40 - ? "⚡ Moderate AI patterns present. Verification recommended." - : "✅ Low synthetic markers. Likely authentic content."} - -—————————————————`; -}; - -export const analyzeImage: Action = { - name: "DETECT_IMAGE", - similes: ["ANALYZE_IMAGE", "VERIFY_IMAGE", "BITMIND_DETECTION", "AI_DETECTION", "REAL_OR_FAKE"], - validate: validateAnalysisRequest, - description: "Analyze an image to determine if it was AI-generated using BitMind API", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - options: any, - callback: HandlerCallback - ): Promise => { - if (state['isAnalysisInProgress']) return; - state['isAnalysisInProgress'] = true; - - elizaLogger.info("🤖 BitMind: Initiating image analysis..."); - - if (!runtime.character?.settings?.secrets?.BITMIND) { - throw new Error("BitMind API credentials not configured"); - } - - try { - const { url: imageUrl, isTweet } = extractImageUrl(message); - elizaLogger.info(`📸 BitMind: Processing image: ${imageUrl}`); - - const result = await analyzeImageWithBitMind(imageUrl, runtime.character.settings.secrets.BITMIND); - - elizaLogger.info(`✅ BitMind: Analysis complete`, { - isAIGenerated: result.isAIGenerated, - confidenceScore: result.confidenceScore, - source: isTweet ? 'tweet' : 'message' - }); - - const analysisMemory: AIImageAnalysisMemory = { - ...message, - content: { - text: `Image Analysis: ${result.isAIGenerated ? 'AI Generated' : 'Natural'} (${(result.confidenceScore * 100).toFixed(2)}% confidence)`, - imageUrl: imageUrl, - isAIGenerated: result.isAIGenerated, - confidenceScore: result.confidenceScore, - imageSource: isTweet ? 'tweet' : 'url', - actionType: "DETECT_IMAGE" - }, - createdAt: Date.now(), - }; - - elizaLogger.info("Saving analysis memory:", { - roomId: message.roomId, - analysisMemory - }); - - await runtime.messageManager.createMemory(analysisMemory); - - elizaLogger.info("Analysis memory saved"); - - callback({ - text: generateAnalysisReport(result), - isAIGenerated: result.isAIGenerated, - confidenceScore: result.confidenceScore - }); - - } catch (error) { - elizaLogger.error(`❌ BitMind: Analysis error:`, error); - throw new Error(`Image analysis failed: ${error.message}`); - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { text: "analyze this image: https://example.com/image.jpg" } - }, - { - user: "{{agentName}}", - content: { - text: "I'll analyze that image for you...", - action: "DETECT_IMAGE" - } - } - ], - [ - { - user: "{{user1}}", - content: { text: "is this image AI generated?" } - }, - { - user: "{{agentName}}", - content: { - text: "Let me check if that image is AI generated...", - action: "DETECT_IMAGE" - } - } - ] - ] as ActionExample[][], -} as Action; - -const generateConfidenceBar = (confidence: number): string => { - const barLength = 20; - const filledBars = Math.round(confidence * barLength); - const emptyBars = barLength - filledBars; - return `[${'█'.repeat(filledBars)}${'░'.repeat(emptyBars)}]`; -}; - -export const analysisHistory: Action = { - name: "IMAGE_REPORT", - similes: ["SHOW_DETECTIONS", "IMAGE_HISTORY", "PAST_ANALYSES", "DETECTION_HISTORY"], - validate: async (runtime: IAgentRuntime): Promise => { - return true; - }, - description: "Display history of AI image analysis results", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - options: any, - callback: HandlerCallback - ): Promise => { - elizaLogger.info("📊 BitMind: Generating analysis history..."); - try { - const limit = options?.limit || 10; - - // Get all rooms the agent is in - const rooms = await runtime.databaseAdapter.getRoomsForParticipant(runtime.agentId); - elizaLogger.info(`📊 BitMind: Found ${rooms.length} rooms`); - - // Get memories from each room and combine them - const allMemories = await runtime.messageManager.getMemoriesByRoomIds({ - roomIds: rooms, - limit: limit * 5 - }) as AIImageAnalysisMemory[]; - - elizaLogger.info(`📊 BitMind: Retrieved ${allMemories.length} memories`); - - const imageAnalyses = allMemories.filter( - mem => mem.content.actionType === 'DETECT_IMAGE' - ); - - elizaLogger.info(`📊 BitMind: Found ${imageAnalyses.length} image analyses`); - - if (!imageAnalyses || imageAnalyses.length === 0) { - callback({ - text: "No image analyses found.", - }); - return; - } - const statistics = imageAnalyses.reduce((acc, analysis) => { - acc.total++; - if (analysis.content.isAIGenerated) acc.aiCount++; - acc.avgConfidence += analysis.content.confidenceScore; - return acc; - }, { total: 0, aiCount: 0, avgConfidence: 0 }); - - const reportText = `🔍 Trinity Matrix Analysis Report -━━━━━━━━━━━━━━━━━━━━━━━━━━━ - -📊 Recent Analyses (${imageAnalyses.length}) -${formatAnalysisHistory(imageAnalyses)} - -📈 Statistical Overview -┌─────────────────────────────────┐ -│ 🔍 Total Analyzed : ${statistics.total.toString().padEnd(12)} │ -│ 🤖 AI Generated : ${statistics.aiCount.toString().padEnd(12)} │ -│ 📸 Natural : ${(statistics.total - statistics.aiCount).toString().padEnd(12)} │ -│ ⚡ AI Detection Rate: ${((statistics.aiCount / statistics.total) * 100).toFixed(1)}% │ -└─────────────────────────────────┘ - -🎯 Confidence Metrics -Average Confidence: ${((statistics.avgConfidence / statistics.total) * 100).toFixed(1)}% -${generateConfidenceBar(statistics.avgConfidence / statistics.total)} - -Powered by BitMind Subnet (SN34) on Bittensor`; - - callback({ text: reportText }); - - } catch (error) { - elizaLogger.error(`❌ BitMind: History generation error:`, error); - throw new Error(`Failed to generate analysis history: ${error.message}`); - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { text: "show me recent image analyses" } - }, - { - user: "{{agentName}}", - content: { - text: "Here's your image analysis report...", - action: "IMAGE_REPORT" - } - } - ], - [ - { - user: "{{user1}}", - content: { text: "what images have you checked recently?" } - }, - { - user: "{{agentName}}", - content: { - text: "Let me show you the recent image detection history...", - action: "IMAGE_REPORT" - } - } - ] - ] as ActionExample[][], -} as Action; \ No newline at end of file diff --git a/packages/plugin-bittensor/src/evaluators/fact.ts b/packages/plugin-bittensor/src/evaluators/fact.ts deleted file mode 100644 index fcade349b4dfc..0000000000000 --- a/packages/plugin-bittensor/src/evaluators/fact.ts +++ /dev/null @@ -1,247 +0,0 @@ -import { composeContext } from "@elizaos/core"; -import { generateObjectArray } from "@elizaos/core"; -import { MemoryManager } from "@elizaos/core"; -import { - type ActionExample, - type IAgentRuntime, - type Memory, - ModelClass, - type Evaluator, -} from "@elizaos/core"; - -export const formatFacts = (facts: Memory[]) => { - const messageStrings = facts - .reverse() - .map((fact: Memory) => fact.content.text); - const finalMessageStrings = messageStrings.join("\n"); - return finalMessageStrings; -}; - -const factsTemplate = - // {{actors}} - `TASK: Extract Claims from the conversation as an array of claims in JSON format. - -# START OF EXAMPLES -These are an examples of the expected output of this task: -{{evaluationExamples}} -# END OF EXAMPLES - -# INSTRUCTIONS - -Extract any claims from the conversation that are not already present in the list of known facts above: -- Try not to include already-known facts. If you think a fact is already known, but you're not sure, respond with already_known: true. -- If the fact is already in the user's description, set in_bio to true -- If we've already extracted this fact, set already_known to true -- Set the claim type to 'status', 'fact' or 'opinion' -- For true facts about the world or the character that do not change, set the claim type to 'fact' -- For facts that are true but change over time, set the claim type to 'status' -- For non-facts, set the type to 'opinion' -- 'opinion' inlcudes non-factual opinions and also includes the character's thoughts, feelings, judgments or recommendations -- Include any factual detail, including where the user lives, works, or goes to school, what they do for a living, their hobbies, and any other relevant information - -Recent Messages: -{{recentMessages}} - -Response should be a JSON object array inside a JSON markdown block. Correct response format: -\`\`\`json -[ - {"claim": string, "type": enum, in_bio: boolean, already_known: boolean }, - {"claim": string, "type": enum, in_bio: boolean, already_known: boolean }, - ... -] -\`\`\``; - -async function handler(runtime: IAgentRuntime, message: Memory) { - const state = await runtime.composeState(message); - - const { agentId, roomId } = state; - - const context = composeContext({ - state, - template: runtime.character.templates?.factsTemplate || factsTemplate, - }); - - const facts = await generateObjectArray({ - runtime, - context, - modelClass: ModelClass.LARGE, - }); - - const factsManager = new MemoryManager({ - runtime, - tableName: "facts", - }); - - if (!facts) { - return []; - } - - // If the fact is known or corrupted, remove it - const filteredFacts = facts - .filter((fact) => { - return ( - !fact.already_known && - fact.type === "fact" && - !fact.in_bio && - fact.claim && - fact.claim.trim() !== "" - ); - }) - .map((fact) => fact.claim); - - if (!agentId) { - return filteredFacts; - } - - for (const fact of filteredFacts) { - const factMemory = await factsManager.addEmbeddingToMemory({ - userId: agentId, - agentId, - content: { text: fact }, - roomId, - createdAt: Date.now(), - }); - - await factsManager.createMemory(factMemory, true); - - await new Promise((resolve) => setTimeout(resolve, 250)); - } - return filteredFacts; -} - -export const factEvaluator: Evaluator = { - name: "GET_FACTS", - similes: [ - "GET_CLAIMS", - "EXTRACT_CLAIMS", - "EXTRACT_FACTS", - "EXTRACT_CLAIM", - "EXTRACT_INFORMATION", - ], - validate: async ( - runtime: IAgentRuntime, - - message: Memory - ): Promise => { - const messageCount = (await runtime.messageManager.countMemories( - message.roomId - )) as number; - - const reflectionCount = Math.ceil(runtime.getConversationLength() / 2); - - return messageCount % reflectionCount === 0; - }, - description: - "Extract factual information about the people in the conversation, the current events in the world, and anything else that might be important to remember.", - handler, - examples: [ - { - context: `Actors in the scene: -{{user1}}: Programmer and moderator of the local story club. -{{user2}}: New member of the club. Likes to write and read. - -Facts about the actors: -None`, - messages: [ - { - user: "{{user1}}", - content: { text: "So where are you from" }, - }, - { - user: "{{user2}}", - content: { text: "I'm from the city" }, - }, - { - user: "{{user1}}", - content: { text: "Which city?" }, - }, - { - user: "{{user2}}", - content: { text: "Oakland" }, - }, - { - user: "{{user1}}", - content: { - text: "Oh, I've never been there, but I know it's in California", - }, - }, - ] as ActionExample[], - outcome: `{ "claim": "{{user2}} is from Oakland", "type": "fact", "in_bio": false, "already_known": false },`, - }, - { - context: `Actors in the scene: -{{user1}}: Athelete and cyclist. Worked out every day for a year to prepare for a marathon. -{{user2}}: Likes to go to the beach and shop. - -Facts about the actors: -{{user1}} and {{user2}} are talking about the marathon -{{user1}} and {{user2}} have just started dating`, - messages: [ - { - user: "{{user1}}", - content: { - text: "I finally completed the marathon this year!", - }, - }, - { - user: "{{user2}}", - content: { text: "Wow! How long did it take?" }, - }, - { - user: "{{user1}}", - content: { text: "A little over three hours." }, - }, - { - user: "{{user1}}", - content: { text: "I'm so proud of myself." }, - }, - ] as ActionExample[], - outcome: `Claims: -json\`\`\` -[ - { "claim": "Alex just completed a marathon in just under 4 hours.", "type": "fact", "in_bio": false, "already_known": false }, - { "claim": "Alex worked out 2 hours a day at the gym for a year.", "type": "fact", "in_bio": true, "already_known": false }, - { "claim": "Alex is really proud of himself.", "type": "opinion", "in_bio": false, "already_known": false } -] -\`\`\` -`, - }, - { - context: `Actors in the scene: -{{user1}}: Likes to play poker and go to the park. Friends with Eva. -{{user2}}: Also likes to play poker. Likes to write and read. - -Facts about the actors: -Mike and Eva won a regional poker tournament about six months ago -Mike is married to Alex -Eva studied Philosophy before switching to Computer Science`, - messages: [ - { - user: "{{user1}}", - content: { - text: "Remember when we won the regional poker tournament last spring", - }, - }, - { - user: "{{user2}}", - content: { - text: "That was one of the best days of my life", - }, - }, - { - user: "{{user1}}", - content: { - text: "It really put our poker club on the map", - }, - }, - ] as ActionExample[], - outcome: `Claims: -json\`\`\` -[ - { "claim": "Mike and Eva won the regional poker tournament last spring", "type": "fact", "in_bio": false, "already_known": true }, - { "claim": "Winning the regional poker tournament put the poker club on the map", "type": "opinion", "in_bio": false, "already_known": false } -] -\`\`\``, - }, - ], -}; diff --git a/packages/plugin-bittensor/src/evaluators/index.ts b/packages/plugin-bittensor/src/evaluators/index.ts deleted file mode 100644 index 7d864701f3c26..0000000000000 --- a/packages/plugin-bittensor/src/evaluators/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./fact.ts"; \ No newline at end of file diff --git a/packages/plugin-bittensor/src/index.ts b/packages/plugin-bittensor/src/index.ts deleted file mode 100644 index 476a3d89b5011..0000000000000 --- a/packages/plugin-bittensor/src/index.ts +++ /dev/null @@ -1,24 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { TwitterClientInterface } from "@elizaos/client-twitter"; - -import { analyzeImage, analysisHistory } from "./actions/sn34.ts"; -import { factEvaluator } from "./evaluators/fact.ts"; -import { timeProvider } from "./providers/time.ts"; - -export * as actions from "./actions/index.ts"; -export * as evaluators from "./evaluators/index.ts"; -export * as providers from "./providers/index.ts"; - - -export const bittensorPlugin: Plugin = { - name: "bittensor", - description: "Utilize the BitMind API to access a range of digital commodities, including inference, media generation, and deepfake detection, on Bittensor's decentralized AI network.", - actions: [ - analyzeImage, - analysisHistory - ], - evaluators: [factEvaluator], - providers: [timeProvider], - clients: [TwitterClientInterface] -}; -export default bittensorPlugin; \ No newline at end of file diff --git a/packages/plugin-bittensor/src/providers/index.ts b/packages/plugin-bittensor/src/providers/index.ts deleted file mode 100644 index a287e836eeb8d..0000000000000 --- a/packages/plugin-bittensor/src/providers/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./time.ts"; \ No newline at end of file diff --git a/packages/plugin-bittensor/src/providers/time.ts b/packages/plugin-bittensor/src/providers/time.ts deleted file mode 100644 index 079f4334ca34c..0000000000000 --- a/packages/plugin-bittensor/src/providers/time.ts +++ /dev/null @@ -1,19 +0,0 @@ -import type { IAgentRuntime, Memory, Provider, State } from "@elizaos/core"; - -const timeProvider: Provider = { - get: async (_runtime: IAgentRuntime, _message: Memory, _state?: State) => { - const currentDate = new Date(); - - // Get UTC time since bots will be communicating with users around the global - const options = { - timeZone: "UTC", - dateStyle: "full" as const, - timeStyle: "long" as const, - }; - const humanReadable = new Intl.DateTimeFormat("en-US", options).format( - currentDate - ); - return `The current date and time is ${humanReadable}. Please use this as your reference for any time-based operations or responses.`; - }, -}; -export { timeProvider }; diff --git a/packages/plugin-bittensor/tsconfig.json b/packages/plugin-bittensor/tsconfig.json deleted file mode 100644 index 834c4dce26957..0000000000000 --- a/packages/plugin-bittensor/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "types": [ - "node" - ] - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/plugin-bittensor/tsup.config.ts b/packages/plugin-bittensor/tsup.config.ts deleted file mode 100644 index e42bf4efeae22..0000000000000 --- a/packages/plugin-bittensor/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-bittensor/vitest.config.ts b/packages/plugin-bittensor/vitest.config.ts deleted file mode 100644 index adbf725538008..0000000000000 --- a/packages/plugin-bittensor/vitest.config.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - }, -}); diff --git a/packages/plugin-bnb/README.md b/packages/plugin-bnb/README.md deleted file mode 100644 index 6b66f199f8a41..0000000000000 --- a/packages/plugin-bnb/README.md +++ /dev/null @@ -1,144 +0,0 @@ -# `@ai16z/plugin-bnb` - -This plugin enables interaction with the BNB Chain ecosystem, providing support for BNB Smart Chain, opBNB, and BNB Greenfield networks. - ---- - -## Configuration - -### Default Setup - -By default, **plugin-bnb** is not enabled. To use it, simply add your private key and/or public key to the `.env` file. If private key is not provided, some actions will be disabled. - -**Security Note:** Your private key grants full access to your associated funds. Store it securely and never share it with anyone. Do not commit or upload your `.env` file to version control systems like Git. - -```env -BNB_PRIVATE_KEY=your-private-key-here -BNB_PUBLIC_KEY=your-public-key-here -``` - -### Custom RPC URLs - -By default, the RPC URL is inferred from the `viem/chains` config. To use custom RPC URLs, add the following to your `.env` file: - -```env -BSC_PROVIDER_URL=https://your-custom-bsc-rpc-url -OPBNB_PROVIDER_URL=https://your-custom-opbnb-rpc-url -``` - -## Provider - -The **Wallet Provider** initializes with BSC as the default. It: - -- Provides the **context** of the currently connected address and its balance. -- Creates **Public** and **Wallet clients** to interact with the supported chains. - ---- - -## Actions - -### Get Balance - -Get the balance of an address on BSC. Just specify the: - -- **Chain** -- **Address** -- **Token** - -**Example usage:** - -```bash -Get the USDC balance of 0x1234567890 on BSC. -``` - -### Transfer - -Transfer tokens from one address to another on BSC/opBNB. Just specify the: - -- **Chain** -- **Token** -- **Amount** -- **Recipient Address** -- **Data**(Optional) - -**Example usage:** - -```bash -Transfer 1 BNB to 0xRecipient on BSC. -``` - -### Swap - -Swap tokens from one address to another on BSC. Just specify the: - -- **Chain**(Only BSC is supported for now) -- **Input Token** -- **Output Token** -- **Amount** -- **Slippage**(Optional) - -**Example usage:** - -```bash -Swap 1 BNB to USDC on BSC. -``` - -### Bridge - -Bridge tokens from one chain to another on BSC/opBNB. Just specify the: - -- **From Chain** -- **To Chain** -- **From Token** -- **To Token** -- **Amount** -- **Recipient Address**(Optional) - -**Example usage:** - -```bash -Bridge 1 BNB from BSC to opBNB. -``` - -### Stake - -Perform staking operations on BSC through [Lista Dao](https://lista.org/liquid-staking/BNB). User will receive sliBNB(0xB0b84D294e0C75A6abe60171b70edEb2EFd14A1B) as staking credit. Just specify the: - -- **Chain**(Only BSC is supported for now) -- **Action** -- **Amount** - -**Example usage:** - -```bash -Deposit 1 BNB to Lista Dao. -``` - -### Faucet - -Request testnet tokens from the faucet. You could request any of the supported tokens(BNB, BTC, BUSD, DAI, ETH, USDC). Just specify the: - -- **Token**(Optional) -- **Recipient Address** - -The faucet is rate-limited. One claim is allowed per IP address within a 24-hour period. And the recipient address must maintain a minimum balance of 0.002 BNB on BSC Mainnet to qualify. - -**Example usage:** - -```bash -Get some testnet USDC from the faucet. -``` - ---- - -## Contribution - -The plugin contains tests. Whether you're using **TDD** or not, please make sure to run the tests before submitting a PR. - -### Running Tests - -Navigate to the `plugin-bnb` directory and run: - -```bash -pnpm test -``` diff --git a/packages/plugin-bnb/biome.json b/packages/plugin-bnb/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-bnb/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-bnb/package.json b/packages/plugin-bnb/package.json deleted file mode 100644 index efe33b2b7e562..0000000000000 --- a/packages/plugin-bnb/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "@elizaos/plugin-bnb", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "@lifi/data-types": "5.15.5", - "@lifi/sdk": "3.4.1", - "@lifi/types": "16.3.0", - "@web3-name-sdk/core": "^0.3.2", - "@openzeppelin/contracts": "^5.1.0", - "@types/node": "^22.10.5", - "solc": "^0.8.28", - "tsup": "8.3.5", - "viem": "2.21.53", - "ws": "^8.18.0" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-bnb/src/actions/bridge.ts b/packages/plugin-bnb/src/actions/bridge.ts deleted file mode 100644 index 6fb2ae5f25c19..0000000000000 --- a/packages/plugin-bnb/src/actions/bridge.ts +++ /dev/null @@ -1,488 +0,0 @@ -import { - composeContext, - elizaLogger, - generateObjectDeprecated, - type HandlerCallback, - ModelClass, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; -import { parseEther, getContract, parseUnits, erc20Abi } from "viem"; - -import { - bnbWalletProvider, - initWalletProvider, - type WalletProvider, -} from "../providers/wallet"; -import { bridgeTemplate } from "../templates"; -import { - L1StandardBridgeAbi, - L2StandardBridgeAbi, - type BridgeParams, - type BridgeResponse, -} from "../types"; - -export { bridgeTemplate }; - -// Exported for tests -export class BridgeAction { - private readonly L1_BRIDGE_ADDRESS = - "0xF05F0e4362859c3331Cb9395CBC201E3Fa6757Ea" as const; - private readonly L2_BRIDGE_ADDRESS = - "0x4000698e3De52120DE28181BaACda82B21568416" as const; - private readonly LEGACY_ERC20_ETH = - "0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000" as const; - - constructor(private walletProvider: WalletProvider) {} - - async bridge(params: BridgeParams): Promise { - elizaLogger.debug("Bridge params:", params); - await this.validateAndNormalizeParams(params); - elizaLogger.debug("Normalized bridge params:", params); - - const fromAddress = this.walletProvider.getAddress(); - - this.walletProvider.switchChain(params.fromChain); - const walletClient = this.walletProvider.getWalletClient( - params.fromChain - ); - const publicClient = this.walletProvider.getPublicClient( - params.fromChain - ); - - const nativeToken = - this.walletProvider.chains[params.fromChain].nativeCurrency.symbol; - - const resp: BridgeResponse = { - fromChain: params.fromChain, - toChain: params.toChain, - txHash: "0x", - recipient: params.toAddress ?? fromAddress, - amount: params.amount, - fromToken: params.fromToken ?? nativeToken, - toToken: params.toToken ?? nativeToken, - }; - - const account = this.walletProvider.getAccount(); - const chain = this.walletProvider.getChainConfigs(params.fromChain); - - const selfBridge = !params.toAddress || params.toAddress === fromAddress; - const nativeTokenBridge = - !params.fromToken || params.fromToken === nativeToken; - - let amount: bigint; - if (nativeTokenBridge) { - amount = parseEther(params.amount); - } else { - const decimals = await publicClient.readContract({ - address: params.fromToken!, - abi: erc20Abi, - functionName: "decimals", - }); - amount = parseUnits(params.amount, decimals); - } - - if (params.fromChain === "bsc" && params.toChain === "opBNB") { - // from L1 to L2 - const l1BridgeContract = getContract({ - address: this.L1_BRIDGE_ADDRESS, - abi: L1StandardBridgeAbi, - client: { - public: publicClient, - wallet: walletClient, - }, - }); - - // check ERC20 allowance - if (!nativeTokenBridge) { - const allowance = await this.walletProvider.checkERC20Allowance( - params.fromChain, - params.fromToken!, - fromAddress, - this.L1_BRIDGE_ADDRESS - ); - if (allowance < amount) { - elizaLogger.log( - `Increasing ERC20 allowance for L1 bridge. ${amount - allowance} more needed` - ); - const txHash = await this.walletProvider.approveERC20( - params.fromChain, - params.fromToken!, - this.L1_BRIDGE_ADDRESS, - amount - ); - await publicClient.waitForTransactionReceipt({ - hash: txHash, - }); - } - } - - if (selfBridge && nativeTokenBridge) { - const args = [1, "0x"] as const; - await l1BridgeContract.simulate.depositETH(args, { - value: amount, - }); - resp.txHash = await l1BridgeContract.write.depositETH(args, { - account, - chain, - value: amount, - }); - } else if (selfBridge && !nativeTokenBridge) { - const args = [ - params.fromToken!, - params.toToken!, - amount, - 1, - "0x", - ] as const; - await l1BridgeContract.simulate.depositERC20(args, { - account, - }); - resp.txHash = await l1BridgeContract.write.depositERC20(args, { - account, - chain, - }); - } else if (!selfBridge && nativeTokenBridge) { - const args = [params.toAddress!, 1, "0x"] as const; - await l1BridgeContract.simulate.depositETHTo(args, { - value: amount, - }); - resp.txHash = await l1BridgeContract.write.depositETHTo(args, { - account, - chain, - value: amount, - }); - } else { - const args = [ - params.fromToken!, - params.toToken!, - params.toAddress!, - amount, - 1, - "0x", - ] as const; - await l1BridgeContract.simulate.depositERC20To(args, { - account, - }); - resp.txHash = await l1BridgeContract.write.depositERC20To( - args, - { - account, - chain, - } - ); - } - } else if (params.fromChain === "opBNB" && params.toChain === "bsc") { - // from L2 to L1 - const l2BridgeContract = getContract({ - address: this.L2_BRIDGE_ADDRESS, - abi: L2StandardBridgeAbi, - client: { - public: publicClient, - wallet: walletClient, - }, - }); - - const delegationFee = await publicClient.readContract({ - address: this.L2_BRIDGE_ADDRESS, - abi: L2StandardBridgeAbi, - functionName: "delegationFee", - }); - - // check ERC20 allowance - if (!nativeTokenBridge) { - const allowance = await this.walletProvider.checkERC20Allowance( - params.fromChain, - params.fromToken!, - fromAddress, - this.L2_BRIDGE_ADDRESS - ); - if (allowance < amount) { - elizaLogger.log( - `Increasing ERC20 allowance for L2 bridge. ${amount - allowance} more needed` - ); - const txHash = await this.walletProvider.approveERC20( - params.fromChain, - params.fromToken!, - this.L2_BRIDGE_ADDRESS, - amount - ); - await publicClient.waitForTransactionReceipt({ - hash: txHash, - }); - } - } - - if (selfBridge && nativeTokenBridge) { - const args = [this.LEGACY_ERC20_ETH, amount, 1, "0x"] as const; - const value = amount + delegationFee; - await l2BridgeContract.simulate.withdraw(args, { value }); - resp.txHash = await l2BridgeContract.write.withdraw(args, { - account, - chain, - value, - }); - } else if (selfBridge && !nativeTokenBridge) { - const args = [params.fromToken!, amount, 1, "0x"] as const; - const value = delegationFee; - await l2BridgeContract.simulate.withdraw(args, { - account, - value, - }); - resp.txHash = await l2BridgeContract.write.withdraw(args, { - account, - chain, - value, - }); - } else if (!selfBridge && nativeTokenBridge) { - const args = [ - this.LEGACY_ERC20_ETH, - params.toAddress!, - amount, - 1, - "0x", - ] as const; - const value = amount + delegationFee; - await l2BridgeContract.simulate.withdrawTo(args, { value }); - resp.txHash = await l2BridgeContract.write.withdrawTo(args, { - account, - chain, - value, - }); - } else { - const args = [ - params.fromToken!, - params.toAddress!, - amount, - 1, - "0x", - ] as const; - const value = delegationFee; - await l2BridgeContract.simulate.withdrawTo(args, { - account, - value, - }); - resp.txHash = await l2BridgeContract.write.withdrawTo(args, { - account, - chain, - value, - }); - } - } else { - throw new Error("Unsupported bridge direction"); - } - - if (!resp.txHash || resp.txHash === "0x") { - throw new Error("Get transaction hash failed"); - } - - // wait for the transaction to be confirmed - await publicClient.waitForTransactionReceipt({ - hash: resp.txHash, - }); - - return resp; - } - - async validateAndNormalizeParams(params: BridgeParams) { - if (!params.toAddress) { - params.toAddress = this.walletProvider.getAddress(); - } else { - params.toAddress = await this.walletProvider.formatAddress( - params.toAddress - ); - } - - if (params.fromChain === "bsc" && params.toChain === "opBNB") { - if (params.fromToken && !params.toToken) { - throw new Error( - "token address on opBNB is required when bridging ERC20 from BSC to opBNB" - ); - } - } - } -} - -// NOTE: The bridge action only supports bridge funds between BSC and opBNB for now. We may adding stargate support later. -export const bridgeAction = { - name: "bridge", - description: "Bridge tokens between BSC and opBNB", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: Record, - callback?: HandlerCallback - ) => { - elizaLogger.log("Starting bridge action..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - state.walletInfo = await bnbWalletProvider.get(runtime, message, currentState); - - // Compose bridge context - const bridgeContext = composeContext({ - state: currentState, - template: bridgeTemplate, - }); - const content = await generateObjectDeprecated({ - runtime, - context: bridgeContext, - modelClass: ModelClass.LARGE, - }); - - const walletProvider = initWalletProvider(runtime); - const action = new BridgeAction(walletProvider); - const paramOptions: BridgeParams = { - fromChain: content.fromChain, - toChain: content.toChain, - fromToken: content.fromToken, - toToken: content.toToken, - amount: content.amount, - toAddress: content.toAddress, - }; - try { - const bridgeResp = await action.bridge(paramOptions); - callback?.({ - text: `Successfully bridged ${bridgeResp.amount} ${bridgeResp.fromToken} from ${bridgeResp.fromChain} to ${bridgeResp.toChain}\nTransaction Hash: ${bridgeResp.txHash}`, - content: { ...bridgeResp }, - }); - return true; - } catch (error) { - elizaLogger.error("Error during token bridge:", error.message); - callback?.({ - text: `Bridge failed: ${error.message}`, - content: { error: error.message }, - }); - return false; - } - }, - template: bridgeTemplate, - validate: async (runtime: IAgentRuntime) => { - const privateKey = runtime.getSetting("BNB_PRIVATE_KEY"); - return typeof privateKey === "string" && privateKey.startsWith("0x"); - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Deposit 1 BNB from BSC to opBNB", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you bridge 1 BNB from BSC to opBNB", - action: "BRIDGE", - content: { - fromChain: "bsc", - toChain: "opBNB", - fromToken: undefined, - toToken: undefined, - amount: 1, - }, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Transfer 1 BNB from BSC to address 0x1234 on opBNB", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you bridge 1 BNB from BSC to address 0x1234 on opBNB", - action: "BRIDGE", - content: { - fromChain: "bsc", - toChain: "opBNB", - fromToken: undefined, - toToken: undefined, - amount: 1, - toAddress: "0x1234", - }, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Deposit 1 0x123 token from BSC to address 0x456 on opBNB. The corresponding token address on opBNB is 0x789", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you bridge 1 0x123 token from BSC to address 0x456 on opBNB", - action: "BRIDGE", - content: { - fromChain: "bsc", - toChain: "opBNB", - fromToken: "0x123", - toToken: "0x789", - amount: 1, - toAddress: "0x456", - }, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Withdraw 1 BNB from opBNB to BSC", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you bridge 1 BNB from opBNB to BSC", - action: "BRIDGE", - content: { - fromChain: "opBNB", - toChain: "bsc", - fromToken: undefined, - toToken: undefined, - amount: 1, - }, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Withdraw 1 0x1234 token from opBNB to address 0x5678 on BSC", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you bridge 1 0x1234 token from opBNB to address 0x5678 on BSC", - action: "BRIDGE", - content: { - fromChain: "opBNB", - toChain: "bsc", - fromToken: "0x1234", - toToken: undefined, - amount: 1, - toAddress: "0x5678", - }, - }, - }, - ], - ], - similes: ["BRIDGE", "TOKEN_BRIDGE", "DEPOSIT", "WITHDRAW"], -}; diff --git a/packages/plugin-bnb/src/actions/deploy.ts b/packages/plugin-bnb/src/actions/deploy.ts deleted file mode 100644 index fdf2a6069df1a..0000000000000 --- a/packages/plugin-bnb/src/actions/deploy.ts +++ /dev/null @@ -1,332 +0,0 @@ -import { - composeContext, - elizaLogger, - generateObjectDeprecated, - type HandlerCallback, - ModelClass, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; -import solc from "solc"; -import { type Abi, type Address, parseUnits } from "viem"; -import { - bnbWalletProvider, - initWalletProvider, - type WalletProvider, -} from "../providers/wallet"; -import { ercContractTemplate } from "../templates"; -import type { - IDeployERC1155Params, - IDeployERC721Params, - IDeployERC20Params, - SupportedChain, -} from "../types"; -import { compileSolidity } from "../utils/contracts"; - -export { ercContractTemplate }; - -export class DeployAction { - constructor(private walletProvider: WalletProvider) {} - - async compileSolidity(contractName: string, source: string) { - const solName = `${contractName}.sol`; - const input = { - language: "Solidity", - sources: { - [solName]: { - content: source, - }, - }, - settings: { - outputSelection: { - "*": { - "*": ["*"], - }, - }, - }, - }; - elizaLogger.debug("Compiling contract..."); - const output = JSON.parse(solc.compile(JSON.stringify(input))); - - // check compile error - if (output.errors) { - const hasError = output.errors.some( - (error) => error.type === "Error" - ); - if (hasError) { - elizaLogger.error( - `Compilation errors: ${JSON.stringify(output.errors, null, 2)}` - ); - } - } - - const contract = output.contracts[solName][contractName]; - - if (!contract) { - elizaLogger.error("Compilation result is empty"); - } - - elizaLogger.debug("Contract compiled successfully"); - return { - abi: contract.abi as Abi, - bytecode: contract.evm.bytecode.object, - }; - } - - async deployERC20(deployTokenParams: IDeployERC20Params) { - elizaLogger.debug("deployTokenParams", deployTokenParams); - - const { name, symbol, decimals, totalSupply, chain } = - deployTokenParams; - if (!name || name === "") { - throw new Error("Token name is required"); - } - if (!symbol || symbol === "") { - throw new Error("Token symbol is required"); - } - if (!decimals || decimals === 0) { - throw new Error("Token decimals is required"); - } - if (!totalSupply || totalSupply === "") { - throw new Error("Token total supply is required"); - } - - try { - const totalSupplyWithDecimals = parseUnits(totalSupply, decimals); - const args = [name, symbol, decimals, totalSupplyWithDecimals]; - const contractAddress = await this.deployContract( - chain, - "ERC20Contract", - args - ); - - return { - address: contractAddress, - }; - } catch (error) { - elizaLogger.error("Depoly ERC20 failed:", error.message); - throw error; - } - } - - async deployERC721(deployNftParams: IDeployERC721Params) { - elizaLogger.debug("deployNftParams", deployNftParams); - - const { baseURI, name, symbol, chain } = deployNftParams; - if (!name || name === "") { - throw new Error("Token name is required"); - } - if (!symbol || symbol === "") { - throw new Error("Token symbol is required"); - } - if (!baseURI || baseURI === "") { - throw new Error("Token baseURI is required"); - } - try { - const args = [name, symbol, baseURI]; - const contractAddress = await this.deployContract( - chain, - "ERC721Contract", - args - ); - - return { - address: contractAddress, - }; - } catch (error) { - elizaLogger.error("Depoly ERC721 failed:", error.message); - throw error; - } - } - - async deployERC1155(deploy1155Params: IDeployERC1155Params) { - elizaLogger.debug("deploy1155Params", deploy1155Params); - - const { baseURI, name, chain } = deploy1155Params; - if (!name || name === "") { - throw new Error("Token name is required"); - } - if (!baseURI || baseURI === "") { - throw new Error("Token baseURI is required"); - } - try { - const args = [name, baseURI]; - const contractAddress = await this.deployContract( - chain, - "ERC1155Contract", - args - ); - - return { - address: contractAddress, - }; - } catch (error) { - elizaLogger.error("Depoly ERC1155 failed:", error.message); - throw error; - } - } - - async deployContract( - chain: SupportedChain, - contractName: string, - args: any[] - ): Promise
    { - const { abi, bytecode } = await compileSolidity(contractName); - if (!bytecode) { - throw new Error("Bytecode is empty after compilation"); - } - - this.walletProvider.switchChain(chain); - - const chainConfig = this.walletProvider.getChainConfigs(chain); - const walletClient = this.walletProvider.getWalletClient(chain); - const hash = await walletClient.deployContract({ - account: this.walletProvider.getAccount(), - abi, - bytecode, - args, - chain: chainConfig, - }); - - elizaLogger.debug("Waiting for deployment transaction...", hash); - const publicClient = this.walletProvider.getPublicClient(chain); - const receipt = await publicClient.waitForTransactionReceipt({ - hash, - }); - elizaLogger.debug("Contract deployed successfully!"); - - return receipt.contractAddress; - } -} - -export const deployAction = { - name: "DEPLOY_TOKEN", - description: - "Deploy token contracts (ERC20/721/1155) based on user specifications", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: Record, - callback?: HandlerCallback - ) => { - elizaLogger.log("Starting deploy action..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - state.walletInfo = await bnbWalletProvider.get(runtime, message, currentState); - - // Compose context - const context = composeContext({ - state: currentState, - template: ercContractTemplate, - }); - const content = await generateObjectDeprecated({ - runtime, - context: context, - modelClass: ModelClass.LARGE, - }); - - const walletProvider = initWalletProvider(runtime); - const action = new DeployAction(walletProvider); - try { - const contractType = content.contractType; - let result: any; - switch (contractType.toLocaleLowerCase()) { - case "erc20": - result = await action.deployERC20({ - chain: content.chain, - decimals: content.decimals, - symbol: content.symbol, - name: content.name, - totalSupply: content.totalSupply, - }); - break; - case "erc721": - result = await action.deployERC721({ - chain: content.chain, - name: content.name, - symbol: content.symbol, - baseURI: content.baseURI, - }); - break; - case "erc1155": - result = await action.deployERC1155({ - chain: content.chain, - name: content.name, - baseURI: content.baseURI, - }); - break; - } - - if (result) { - callback?.({ - text: `Successfully create contract - ${result?.address}`, - content: { ...result }, - }); - } else { - callback?.({ - text: "Unsuccessfully create contract", - content: { ...result }, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error during deploy:", error.message); - callback?.({ - text: `Deploy failed: ${error.message}`, - content: { error: error.message }, - }); - return false; - } - }, - template: ercContractTemplate, - validate: async (_runtime: IAgentRuntime) => { - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "deploy an ERC20 token with name 'MyToken', symbol 'MTK', decimals 18, total supply 10000", - action: "DEPLOY_TOKEN", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Deploy an ERC721 NFT contract with name 'MyNFT', symbol 'MNFT', baseURI 'https://my-nft-base-uri.com'", - action: "DEPLOY_TOKEN", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Deploy an ERC1155 contract with name 'My1155', baseURI 'https://my-1155-base-uri.com'", - action: "DEPLOY_TOKEN", - }, - }, - ], - ], - similes: [ - "DEPLOY_ERC20", - "DEPLOY_ERC721", - "DEPLOY_ERC1155", - "CREATE_TOKEN", - "CREATE_NFT", - "CREATE_1155", - ], -}; diff --git a/packages/plugin-bnb/src/actions/faucet.ts b/packages/plugin-bnb/src/actions/faucet.ts deleted file mode 100644 index 72377535d4ad7..0000000000000 --- a/packages/plugin-bnb/src/actions/faucet.ts +++ /dev/null @@ -1,252 +0,0 @@ -import { - composeContext, - elizaLogger, - generateObjectDeprecated, - type HandlerCallback, - ModelClass, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; -import type { Hex } from "viem"; -import WebSocket, { type ClientOptions } from "ws"; - -import { faucetTemplate } from "../templates"; -import type { FaucetResponse, FaucetParams } from "../types"; -import { - bnbWalletProvider, - initWalletProvider, - type WalletProvider, -} from "../providers/wallet"; - -export { faucetTemplate }; - -// Exported for tests -export class FaucetAction { - private readonly SUPPORTED_TOKENS: string[] = [ - "BNB", - "BTC", - "BUSD", - "DAI", - "ETH", - "USDC", - ] as const; - private readonly FAUCET_URL = "wss://testnet.bnbchain.org/faucet-smart/api"; - - constructor(private walletProvider: WalletProvider) {} - - async faucet(params: FaucetParams): Promise { - elizaLogger.debug("Faucet params:", params); - await this.validateAndNormalizeParams(params); - elizaLogger.debug("Normalized faucet params:", params); - - // After validation, we know these values exist - if (!params.token || !params.toAddress) { - throw new Error("Token and address are required for faucet"); - } - - const resp: FaucetResponse = { - token: params.token, - recipient: params.toAddress, - txHash: "0x", - }; - - const options: ClientOptions = { - headers: { - Connection: "Upgrade", - Upgrade: "websocket", - }, - }; - - const ws = new WebSocket(this.FAUCET_URL, options); - - try { - // Wait for connection - await new Promise((resolve, reject) => { - ws.once("open", () => resolve()); - ws.once("error", reject); - }); - - // Send the message - const message = { - tier: 0, - url: params.toAddress, - symbol: params.token, - captcha: "noCaptchaToken", - }; - ws.send(JSON.stringify(message)); - - // Wait for response with transaction hash - const txHash = await new Promise((resolve, reject) => { - const timeout = setTimeout(() => { - ws.close(); - reject(new Error("Faucet request timeout")); - }, 15000); - - ws.on("message", (data) => { - const response = JSON.parse(data.toString()); - - // First response: funding request accepted - if (response.success) { - return; - } - - // Second response: transaction details - if (response.requests?.length > 0) { - const txHash = response.requests[0].tx.hash; - if (txHash) { - clearTimeout(timeout); - resolve(txHash as Hex); - } - } - - // Handle error case - if (response.error) { - clearTimeout(timeout); - reject(new Error(response.error)); - } - }); - - ws.on("error", (error) => { - clearTimeout(timeout); - reject( - new Error(`WebSocket error occurred: ${error.message}`) - ); - }); - }); - - resp.txHash = txHash; - return resp; - } finally { - ws.close(); - } - } - - async validateAndNormalizeParams(params: FaucetParams): Promise { - if (!params.toAddress) { - params.toAddress = this.walletProvider.getAddress(); - } else { - params.toAddress = await this.walletProvider.formatAddress( - params.toAddress - ); - } - - if (!params.token) { - params.token = "BNB"; - } - if (!this.SUPPORTED_TOKENS.includes(params.token)) { - throw new Error("Unsupported token"); - } - } -} - -export const faucetAction = { - name: "faucet", - description: "Get test tokens from the faucet", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: Record, - callback?: HandlerCallback - ) => { - elizaLogger.log("Starting faucet action..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - state.walletInfo = await bnbWalletProvider.get( - runtime, - message, - currentState - ); - - // Compose faucet context - const faucetContext = composeContext({ - state: currentState, - template: faucetTemplate, - }); - const content = await generateObjectDeprecated({ - runtime, - context: faucetContext, - modelClass: ModelClass.LARGE, - }); - - const walletProvider = initWalletProvider(runtime); - const action = new FaucetAction(walletProvider); - const paramOptions: FaucetParams = { - token: content.token, - toAddress: content.toAddress, - }; - try { - const faucetResp = await action.faucet(paramOptions); - callback?.({ - text: `Successfully transferred ${faucetResp.token} to ${faucetResp.recipient}\nTransaction Hash: ${faucetResp.txHash}`, - content: { - hash: faucetResp.txHash, - recipient: faucetResp.recipient, - chain: content.chain, - }, - }); - - return true; - } catch (error) { - elizaLogger.error("Error during faucet:", error.message); - callback?.({ - text: `Get test tokens failed: ${error.message}`, - content: { error: error.message }, - }); - return false; - } - }, - template: faucetTemplate, - validate: async (_runtime: IAgentRuntime) => { - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Get some USDC from the faucet", - }, - }, - { - user: "{{agent}}", - content: { - text: "Sure, I'll request some USDC from the faucet on BSC Testnet now.", - action: "FAUCET", - content: { - token: "USDC", - toAddress: "{{walletAddress}}", - }, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Get some test tokens from the faucet on BSC Testnet", - }, - }, - { - user: "{{agent}}", - content: { - text: "Of course, getting tBNB from the faucet on BSC Testnet now.", - action: "FAUCET", - content: { - token: "BNB", - toAddress: "{{walletAddress}}", - }, - }, - }, - ], - ], - similes: ["FAUCET", "GET_TEST_TOKENS"], -}; diff --git a/packages/plugin-bnb/src/actions/getBalance.ts b/packages/plugin-bnb/src/actions/getBalance.ts deleted file mode 100644 index d71bdf0004693..0000000000000 --- a/packages/plugin-bnb/src/actions/getBalance.ts +++ /dev/null @@ -1,289 +0,0 @@ -import { - composeContext, - elizaLogger, - generateObjectDeprecated, - type HandlerCallback, - ModelClass, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; -import { getToken } from "@lifi/sdk"; - -import { - bnbWalletProvider, - initWalletProvider, - type WalletProvider, -} from "../providers/wallet"; -import { getBalanceTemplate } from "../templates"; -import type { - GetBalanceParams, - GetBalanceResponse, - SupportedChain, -} from "../types"; -import { type Address, erc20Abi, formatEther, formatUnits } from "viem"; - -export { getBalanceTemplate }; - -export class GetBalanceAction { - constructor(private walletProvider: WalletProvider) {} - - async getBalance(params: GetBalanceParams): Promise { - elizaLogger.debug("Get balance params:", params); - await this.validateAndNormalizeParams(params); - elizaLogger.debug("Normalized get balance params:", params); - - const { chain, address, token } = params; - if (!address) { - throw new Error("Address is required for getting balance"); - } - - this.walletProvider.switchChain(chain); - const nativeSymbol = - this.walletProvider.getChainConfigs(chain).nativeCurrency.symbol; - const chainId = this.walletProvider.getChainConfigs(chain).id; - - let queryNativeToken = false; - if ( - !token || - token === "" || - token.toLowerCase() === "bnb" || - token.toLowerCase() === "tbnb" - ) { - queryNativeToken = true; - } - - const resp: GetBalanceResponse = { - chain, - address, - }; - - // If ERC20 token is requested - if (!queryNativeToken) { - let amount: string; - if (token.startsWith("0x")) { - amount = await this.getERC20TokenBalance( - chain, - address, - token as `0x${string}` - ); - } else { - if (chainId !== 56) { - throw new Error( - "Only BSC mainnet is supported for querying balance by token symbol" - ); - } - - this.walletProvider.configureLiFiSdk(chain); - const tokenInfo = await getToken(chainId, token); - amount = await this.getERC20TokenBalance( - chain, - address, - tokenInfo.address as `0x${string}` - ); - } - - resp.balance = { token, amount }; - } else { - // If native token is requested - const nativeBalanceWei = await this.walletProvider - .getPublicClient(chain) - .getBalance({ address }); - resp.balance = { - token: nativeSymbol, - amount: formatEther(nativeBalanceWei), - }; - } - - return resp; - } - - async getERC20TokenBalance( - chain: SupportedChain, - address: Address, - tokenAddress: Address - ): Promise { - const publicClient = this.walletProvider.getPublicClient(chain); - - const balance = await publicClient.readContract({ - address: tokenAddress, - abi: erc20Abi, - functionName: "balanceOf", - args: [address], - }); - - const decimals = await publicClient.readContract({ - address: tokenAddress, - abi: erc20Abi, - functionName: "decimals", - }); - - return formatUnits(balance, decimals); - } - - async validateAndNormalizeParams(params: GetBalanceParams): Promise { - if (!params.address) { - params.address = this.walletProvider.getAddress(); - } else { - params.address = await this.walletProvider.formatAddress( - params.address - ); - } - } -} - -export const getBalanceAction = { - name: "getBalance", - description: "Get balance of a token or all tokens for the given address", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: Record, - callback?: HandlerCallback - ) => { - elizaLogger.log("Starting getBalance action..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - state.walletInfo = await bnbWalletProvider.get( - runtime, - message, - currentState - ); - - // Compose swap context - const getBalanceContext = composeContext({ - state: currentState, - template: getBalanceTemplate, - }); - const content = await generateObjectDeprecated({ - runtime, - context: getBalanceContext, - modelClass: ModelClass.LARGE, - }); - - const walletProvider = initWalletProvider(runtime); - const action = new GetBalanceAction(walletProvider); - const getBalanceOptions: GetBalanceParams = { - chain: content.chain, - address: content.address, - token: content.token, - }; - try { - const getBalanceResp = await action.getBalance(getBalanceOptions); - if (callback) { - let text = `No balance found for ${getBalanceOptions.address} on ${getBalanceOptions.chain}`; - if (getBalanceResp.balance) { - text = `Balance of ${getBalanceResp.address} on ${getBalanceResp.chain}:\n${ - getBalanceResp.balance.token - }: ${getBalanceResp.balance.amount}`; - } - callback({ - text, - content: { ...getBalanceResp }, - }); - } - return true; - } catch (error) { - elizaLogger.error("Error during get balance:", error.message); - callback?.({ - text: `Get balance failed: ${error.message}`, - content: { error: error.message }, - }); - return false; - } - }, - template: getBalanceTemplate, - validate: async (_runtime: IAgentRuntime) => { - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Check my balance of USDC", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you check your balance of USDC", - action: "GET_BALANCE", - content: { - chain: "bsc", - address: "{{walletAddress}}", - token: "USDC", - }, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Check my balance of token 0x1234", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you check your balance of token 0x1234", - action: "GET_BALANCE", - content: { - chain: "bsc", - address: "{{walletAddress}}", - token: "0x1234", - }, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Get USDC balance of 0x1234", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you check USDC balance of 0x1234", - action: "GET_BALANCE", - content: { - chain: "bsc", - address: "0x1234", - token: "USDC", - }, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Check my wallet balance on BSC", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you check your wallet balance on BSC", - action: "GET_BALANCE", - content: { - chain: "bsc", - address: "{{walletAddress}}", - token: undefined, - }, - }, - }, - ], - ], - similes: ["GET_BALANCE", "CHECK_BALANCE"], -}; diff --git a/packages/plugin-bnb/src/actions/stake.ts b/packages/plugin-bnb/src/actions/stake.ts deleted file mode 100644 index 9b611d3a15637..0000000000000 --- a/packages/plugin-bnb/src/actions/stake.ts +++ /dev/null @@ -1,389 +0,0 @@ -import { - composeContext, - elizaLogger, - generateObjectDeprecated, - type HandlerCallback, - ModelClass, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; -import { type Address, formatEther, parseEther, erc20Abi } from "viem"; - -import { - bnbWalletProvider, - initWalletProvider, - type WalletProvider, -} from "../providers/wallet"; -import { stakeTemplate } from "../templates"; -import { ListaDaoAbi, type StakeParams, type StakeResponse } from "../types"; - -export { stakeTemplate }; - -// Exported for tests -export class StakeAction { - private readonly LISTA_DAO = - "0x1adB950d8bB3dA4bE104211D5AB038628e477fE6" as const; - private readonly SLIS_BNB = - "0xB0b84D294e0C75A6abe60171b70edEb2EFd14A1B" as const; - - constructor(private walletProvider: WalletProvider) {} - - async stake(params: StakeParams): Promise { - elizaLogger.debug("Stake params:", params); - this.validateStakeParams(params); - elizaLogger.debug("Normalized stake params:", params); - - this.walletProvider.switchChain("bsc"); // only BSC is supported - - const actions = { - deposit: async () => { - if (!params.amount) { - throw new Error("Amount is required for deposit"); - } - return await this.doDeposit(params.amount); - }, - withdraw: async () => await this.doWithdraw(params.amount), - claim: async () => await this.doClaim(), - }; - const resp = await actions[params.action](); - return { response: resp }; - } - - validateStakeParams(params: StakeParams) { - if (params.chain !== "bsc") { - throw new Error("Only BSC mainnet is supported"); - } - - if (params.action === "deposit" && !params.amount) { - throw new Error("Amount is required for deposit"); - } - - if (params.action === "withdraw" && !params.amount) { - throw new Error("Amount is required for withdraw"); - } - } - - async doDeposit(amount: string): Promise { - const publicClient = this.walletProvider.getPublicClient("bsc"); - const walletClient = this.walletProvider.getWalletClient("bsc"); - const account = walletClient.account; - if (!account) { - throw new Error("Wallet account not found"); - } - - const { request } = await publicClient.simulateContract({ - account: this.walletProvider.getAccount(), - address: this.LISTA_DAO, - abi: ListaDaoAbi, - functionName: "deposit", - value: parseEther(amount), - }); - const txHash = await walletClient.writeContract(request); - await publicClient.waitForTransactionReceipt({ - hash: txHash, - }); - - const slisBNBBalance = await publicClient.readContract({ - address: this.SLIS_BNB, - abi: erc20Abi, - functionName: "balanceOf", - args: [account.address], - }); - - return `Successfully do deposit. ${formatEther(slisBNBBalance)} slisBNB held. \nTransaction Hash: ${txHash}`; - } - - async doWithdraw(amount?: string): Promise { - const publicClient = this.walletProvider.getPublicClient("bsc"); - const walletClient = this.walletProvider.getWalletClient("bsc"); - const account = walletClient.account; - if (!account) { - throw new Error("Wallet account not found"); - } - - // If amount is not provided, withdraw all slisBNB - let amountToWithdraw: bigint; - if (!amount) { - amountToWithdraw = await publicClient.readContract({ - address: this.SLIS_BNB, - abi: erc20Abi, - functionName: "balanceOf", - args: [account.address], - }); - } else { - amountToWithdraw = parseEther(amount); - } - - // check slisBNB allowance - const allowance = await this.walletProvider.checkERC20Allowance( - "bsc", - this.SLIS_BNB, - account.address, - this.LISTA_DAO - ); - if (allowance < amountToWithdraw) { - elizaLogger.log( - `Increasing slisBNB allowance for Lista DAO. ${amountToWithdraw - allowance} more needed` - ); - const txHash = await this.walletProvider.approveERC20( - "bsc", - this.SLIS_BNB, - this.LISTA_DAO, - amountToWithdraw - ); - await publicClient.waitForTransactionReceipt({ - hash: txHash, - }); - } - - const { request } = await publicClient.simulateContract({ - account: this.walletProvider.getAccount(), - address: this.LISTA_DAO, - abi: ListaDaoAbi, - functionName: "requestWithdraw", - args: [amountToWithdraw], - }); - const txHash = await walletClient.writeContract(request); - await publicClient.waitForTransactionReceipt({ - hash: txHash, - }); - - const slisBNBBalance = await publicClient.readContract({ - address: this.SLIS_BNB, - abi: erc20Abi, - functionName: "balanceOf", - args: [account.address], - }); - - return `Successfully do withdraw. ${formatEther(slisBNBBalance)} slisBNB left. \nTransaction Hash: ${txHash}`; - } - - async doClaim(): Promise { - const publicClient = this.walletProvider.getPublicClient("bsc"); - const walletClient = this.walletProvider.getWalletClient("bsc"); - const account = walletClient.account; - if (!account) { - throw new Error("Wallet account not found"); - } - - const requests = await publicClient.readContract({ - address: this.LISTA_DAO, - abi: ListaDaoAbi, - functionName: "getUserWithdrawalRequests", - args: [account.address], - }); - - let totalClaimed = 0n; - for (let idx = 0; idx < requests.length; idx++) { - const [isClaimable, amount] = await publicClient.readContract({ - address: this.LISTA_DAO, - abi: ListaDaoAbi, - functionName: "getUserRequestStatus", - args: [account.address, BigInt(idx)], - }); - - if (isClaimable) { - const { request } = await publicClient.simulateContract({ - account: this.walletProvider.getAccount(), - address: this.LISTA_DAO, - abi: ListaDaoAbi, - functionName: "claimWithdraw", - args: [BigInt(idx)], - }); - - const txHash = await walletClient.writeContract(request); - await publicClient.waitForTransactionReceipt({ - hash: txHash, - }); - - totalClaimed += amount; - } else { - break; - } - } - - return `Successfully do claim. ${formatEther(totalClaimed)} BNB claimed.`; - } -} - -export const stakeAction = { - name: "stake", - description: "Stake related actions through Lista DAO", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: Record, - callback?: HandlerCallback - ) => { - elizaLogger.log("Starting stake action..."); - - // Validate stake - if (!(message.content.source === "direct")) { - callback?.({ - text: "I can't do that for you.", - content: { error: "Stake not allowed" }, - }); - return false; - } - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - state.walletInfo = await bnbWalletProvider.get( - runtime, - message, - currentState - ); - - // Compose stake context - const stakeContext = composeContext({ - state: currentState, - template: stakeTemplate, - }); - const content = await generateObjectDeprecated({ - runtime, - context: stakeContext, - modelClass: ModelClass.LARGE, - }); - - const walletProvider = initWalletProvider(runtime); - const action = new StakeAction(walletProvider); - const paramOptions: StakeParams = { - chain: content.chain, - action: content.action, - amount: content.amount, - }; - try { - const stakeResp = await action.stake(paramOptions); - callback?.({ - text: stakeResp.response, - content: { ...stakeResp }, - }); - - return true; - } catch (error) { - elizaLogger.error("Error during stake:", error.message); - callback?.({ - text: `Stake failed: ${error.message}`, - content: { error: error.message }, - }); - return false; - } - }, - template: stakeTemplate, - validate: async (runtime: IAgentRuntime) => { - const privateKey = runtime.getSetting("BNB_PRIVATE_KEY"); - return typeof privateKey === "string" && privateKey.startsWith("0x"); - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Stake 1 BNB on BSC", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you stake 1 BNB to Lista DAO on BSC", - action: "STAKE", - content: { - action: "deposit", - amount: "1", - }, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Deposit 1 BNB to Lista DAO", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you deposit 1 BNB to Lista DAO on BSC", - action: "STAKE", - content: { - action: "deposit", - amount: "1", - }, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Undelegate 1 slisBNB on BSC", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you undelegate 1 slisBNB from Lista DAO on BSC", - action: "STAKE", - content: { - action: "withdraw", - amount: "1", - }, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Withdraw 1 slisBNB from Lista DAO", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you withdraw 1 slisBNB from Lista DAO on BSC", - action: "STAKE", - content: { - action: "withdraw", - amount: "1", - }, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Claim unlocked BNB from Lista DAO", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you claim unlocked BNB from Lista DAO on BSC", - action: "STAKE", - content: { - action: "claim", - }, - }, - }, - ], - ], - similes: [ - "DELEGATE", - "STAKE", - "DEPOSIT", - "UNDELEGATE", - "UNSTAKE", - "WITHDRAW", - "CLAIM", - ], -}; diff --git a/packages/plugin-bnb/src/actions/swap.ts b/packages/plugin-bnb/src/actions/swap.ts deleted file mode 100644 index a8a416eeb28a7..0000000000000 --- a/packages/plugin-bnb/src/actions/swap.ts +++ /dev/null @@ -1,196 +0,0 @@ -import { - composeContext, - elizaLogger, - generateObjectDeprecated, - type HandlerCallback, - ModelClass, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; -import { executeRoute, getRoutes } from "@lifi/sdk"; -import { parseEther } from "viem"; - -import { - bnbWalletProvider, - initWalletProvider, - type WalletProvider, -} from "../providers/wallet"; -import { swapTemplate } from "../templates"; -import type { SwapParams, SwapResponse } from "../types"; - -export { swapTemplate }; - -export class SwapAction { - constructor(private walletProvider: WalletProvider) {} - - async swap(params: SwapParams): Promise { - elizaLogger.debug("Swap params:", params); - this.validateAndNormalizeParams(params); - elizaLogger.debug("Normalized swap params:", params); - - const fromAddress = this.walletProvider.getAddress(); - const chainId = this.walletProvider.getChainConfigs(params.chain).id; - - this.walletProvider.configureLiFiSdk(params.chain); - - const resp: SwapResponse = { - chain: params.chain, - txHash: "0x", - fromToken: params.fromToken, - toToken: params.toToken, - amount: params.amount, - }; - - const routes = await getRoutes({ - fromChainId: chainId, - toChainId: chainId, - fromTokenAddress: params.fromToken, - toTokenAddress: params.toToken, - fromAmount: parseEther(params.amount).toString(), - fromAddress: fromAddress, - options: { - slippage: params.slippage, - order: "RECOMMENDED", - }, - }); - - if (!routes.routes.length) throw new Error("No routes found"); - - const execution = await executeRoute(routes.routes[0]); - const process = - execution.steps[0]?.execution?.process[ - execution.steps[0]?.execution?.process.length - 1 - ]; - - if (!process?.status || process.status === "FAILED") { - throw new Error("Transaction failed"); - } - - resp.txHash = process.txHash as `0x${string}`; - - return resp; - } - - validateAndNormalizeParams(params: SwapParams): void { - if (params.chain !== "bsc") { - throw new Error("Only BSC mainnet is supported"); - } - } -} - -export const swapAction = { - name: "swap", - description: "Swap tokens on the same chain", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: Record, - callback?: HandlerCallback - ) => { - elizaLogger.log("Starting swap action..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - state.walletInfo = await bnbWalletProvider.get( - runtime, - message, - currentState - ); - - // Compose swap context - const swapContext = composeContext({ - state: currentState, - template: swapTemplate, - }); - const content = await generateObjectDeprecated({ - runtime, - context: swapContext, - modelClass: ModelClass.LARGE, - }); - - const walletProvider = initWalletProvider(runtime); - const action = new SwapAction(walletProvider); - const swapOptions: SwapParams = { - chain: content.chain, - fromToken: content.inputToken, - toToken: content.outputToken, - amount: content.amount, - slippage: content.slippage, - }; - try { - const swapResp = await action.swap(swapOptions); - callback?.({ - text: `Successfully swap ${swapResp.amount} ${swapResp.fromToken} tokens to ${swapResp.toToken}\nTransaction Hash: ${swapResp.txHash}`, - content: { ...swapResp }, - }); - return true; - } catch (error) { - elizaLogger.error("Error during swap:", error.message); - callback?.({ - text: `Swap failed: ${error.message}`, - content: { error: error.message }, - }); - return false; - } - }, - template: swapTemplate, - validate: async (runtime: IAgentRuntime) => { - const privateKey = runtime.getSetting("BNB_PRIVATE_KEY"); - return typeof privateKey === "string" && privateKey.startsWith("0x"); - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Swap 1 BNB for USDC on BSC", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you swap 1 BNB for USDC on BSC", - action: "SWAP", - content: { - chain: "bsc", - inputToken: "BNB", - outputToken: "USDC", - amount: "1", - slippage: undefined, - }, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Buy some token of 0x1234 using 1 USDC on BSC. The slippage should be no more than 5%", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you swap 1 USDC for token 0x1234 on BSC", - action: "SWAP", - content: { - chain: "bsc", - inputToken: "USDC", - outputToken: "0x1234", - amount: "1", - slippage: 0.05, - }, - }, - }, - ], - ], - similes: ["SWAP", "TOKEN_SWAP", "EXCHANGE_TOKENS", "TRADE_TOKENS"], -}; diff --git a/packages/plugin-bnb/src/actions/transfer.ts b/packages/plugin-bnb/src/actions/transfer.ts deleted file mode 100644 index 0c90cc5829131..0000000000000 --- a/packages/plugin-bnb/src/actions/transfer.ts +++ /dev/null @@ -1,270 +0,0 @@ -import { - composeContext, - elizaLogger, - generateObjectDeprecated, - type HandlerCallback, - ModelClass, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; -import { - formatEther, - formatUnits, - parseEther, - parseUnits, - erc20Abi, - type Hex, -} from "viem"; - -import { - bnbWalletProvider, - initWalletProvider, - type WalletProvider, -} from "../providers/wallet"; -import { transferTemplate } from "../templates"; -import type { TransferParams, TransferResponse } from "../types"; - -export { transferTemplate }; - -// Exported for tests -export class TransferAction { - private readonly TRANSFER_GAS = 21000n; - private readonly DEFAULT_GAS_PRICE = 3000000000n as const; // 3 Gwei - - constructor(private walletProvider: WalletProvider) {} - - async transfer(params: TransferParams): Promise { - elizaLogger.debug("Transfer params:", params); - this.validateAndNormalizeParams(params); - elizaLogger.debug("Normalized transfer params:", params); - - const fromAddress = this.walletProvider.getAddress(); - - this.walletProvider.switchChain(params.chain); - - const nativeToken = - this.walletProvider.chains[params.chain].nativeCurrency.symbol; - - const resp: TransferResponse = { - chain: params.chain, - txHash: "0x", - recipient: params.toAddress, - amount: "", - token: params.token ?? nativeToken, - }; - - if (!params.token || params.token === nativeToken) { - // Native token transfer - const options: { gas?: bigint; gasPrice?: bigint; data?: Hex } = { - data: params.data, - }; - let value: bigint; - if (!params.amount) { - // Transfer all balance minus gas - const publicClient = this.walletProvider.getPublicClient( - params.chain - ); - const balance = await publicClient.getBalance({ - address: fromAddress, - }); - - value = balance - this.DEFAULT_GAS_PRICE * 21000n; - options.gas = this.TRANSFER_GAS; - options.gasPrice = this.DEFAULT_GAS_PRICE; - } else { - value = parseEther(params.amount); - } - - resp.amount = formatEther(value); - resp.txHash = await this.walletProvider.transfer( - params.chain, - params.toAddress, - value, - options - ); - } else { - // ERC20 token transfer - let tokenAddress = params.token; - if (!params.token.startsWith("0x")) { - tokenAddress = await this.walletProvider.getTokenAddress( - params.chain, - params.token - ); - } - - const publicClient = this.walletProvider.getPublicClient( - params.chain - ); - const decimals = await publicClient.readContract({ - address: tokenAddress as `0x${string}`, - abi: erc20Abi, - functionName: "decimals", - }); - - let value: bigint; - if (!params.amount) { - value = await publicClient.readContract({ - address: tokenAddress as `0x${string}`, - abi: erc20Abi, - functionName: "balanceOf", - args: [fromAddress], - }); - } else { - value = parseUnits(params.amount, decimals); - } - - resp.amount = formatUnits(value, decimals); - resp.txHash = await this.walletProvider.transferERC20( - params.chain, - tokenAddress as `0x${string}`, - params.toAddress, - value - ); - } - - if (!resp.txHash || resp.txHash === "0x") { - throw new Error("Get transaction hash failed"); - } - - // wait for the transaction to be confirmed - const publicClient = this.walletProvider.getPublicClient(params.chain); - await publicClient.waitForTransactionReceipt({ - hash: resp.txHash, - }); - - return resp; - } - - async validateAndNormalizeParams(params: TransferParams): Promise { - if (!params.toAddress) { - throw new Error("To address is required"); - } - params.toAddress = await this.walletProvider.formatAddress( - params.toAddress - ); - } -} - -export const transferAction = { - name: "transfer", - description: "Transfer tokens between addresses on the same chain", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: Record, - callback?: HandlerCallback - ) => { - elizaLogger.log("Starting transfer action..."); - - // Validate transfer - if (!(message.content.source === "direct")) { - callback?.({ - text: "I can't do that for you.", - content: { error: "Transfer not allowed" }, - }); - return false; - } - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - state.walletInfo = await bnbWalletProvider.get( - runtime, - message, - currentState - ); - - // Compose transfer context - const transferContext = composeContext({ - state: currentState, - template: transferTemplate, - }); - const content = await generateObjectDeprecated({ - runtime, - context: transferContext, - modelClass: ModelClass.LARGE, - }); - - const walletProvider = initWalletProvider(runtime); - const action = new TransferAction(walletProvider); - const paramOptions: TransferParams = { - chain: content.chain, - token: content.token, - amount: content.amount, - toAddress: content.toAddress, - data: content.data, - }; - try { - const transferResp = await action.transfer(paramOptions); - callback?.({ - text: `Successfully transferred ${transferResp.amount} ${transferResp.token} to ${transferResp.recipient}\nTransaction Hash: ${transferResp.txHash}`, - content: { ...transferResp }, - }); - - return true; - } catch (error) { - elizaLogger.error("Error during transfer:", error.message); - callback?.({ - text: `Transfer failed: ${error.message}`, - content: { error: error.message }, - }); - return false; - } - }, - template: transferTemplate, - validate: async (runtime: IAgentRuntime) => { - const privateKey = runtime.getSetting("BNB_PRIVATE_KEY"); - return typeof privateKey === "string" && privateKey.startsWith("0x"); - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Transfer 1 BNB to 0x742d35Cc6634C0532925a3b844Bc454e4438f44e", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you transfer 1 BNB to 0x742d35Cc6634C0532925a3b844Bc454e4438f44e on BSC", - action: "TRANSFER", - content: { - chain: "bsc", - token: "BNB", - amount: "1", - toAddress: "0x742d35Cc6634C0532925a3b844Bc454e4438f44e", - }, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Transfer 1 token of 0x1234 to 0x742d35Cc6634C0532925a3b844Bc454e4438f44e", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll help you transfer 1 token of 0x1234 to 0x742d35Cc6634C0532925a3b844Bc454e4438f44e on BSC", - action: "TRANSFER", - content: { - chain: "bsc", - token: "0x1234", - amount: "1", - toAddress: "0x742d35Cc6634C0532925a3b844Bc454e4438f44e", - }, - }, - }, - ], - ], - similes: ["TRANSFER", "SEND_TOKENS", "TOKEN_TRANSFER", "MOVE_TOKENS"], -}; diff --git a/packages/plugin-bnb/src/contracts/Erc1155Contract.sol b/packages/plugin-bnb/src/contracts/Erc1155Contract.sol deleted file mode 100644 index 549b700d17b7d..0000000000000 --- a/packages/plugin-bnb/src/contracts/Erc1155Contract.sol +++ /dev/null @@ -1,39 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.20; - -import "@openzeppelin/contracts/token/ERC1155/ERC1155.sol"; -import "@openzeppelin/contracts/access/Ownable.sol"; -import "@openzeppelin/contracts/token/ERC1155/extensions/ERC1155URIStorage.sol"; - -contract ERC1155Contract is ERC1155, Ownable { - string public name; - - constructor( - string memory _name, - string memory _baseURI - ) ERC1155(_baseURI) Ownable(msg.sender) { - name = _name; - } - - function mint( - address to, - uint256 id, - uint256 amount, - bytes memory data - ) public onlyOwner { - _mint(to, id, amount, data); - } - - function mintBatch( - address to, - uint256[] memory ids, - uint256[] memory amounts, - bytes memory data - ) public onlyOwner { - _mintBatch(to, ids, amounts, data); - } - - function setURI(string memory newuri) public onlyOwner { - _setURI(newuri); - } -} diff --git a/packages/plugin-bnb/src/contracts/Erc20Contract.sol b/packages/plugin-bnb/src/contracts/Erc20Contract.sol deleted file mode 100644 index 12ddf39ff5d09..0000000000000 --- a/packages/plugin-bnb/src/contracts/Erc20Contract.sol +++ /dev/null @@ -1,23 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.20; - -import "@openzeppelin/contracts/token/ERC20/ERC20.sol"; -import "@openzeppelin/contracts/access/Ownable.sol"; - -contract ERC20Contract is ERC20, Ownable { - uint8 private immutable _decimals; - - constructor( - string memory name, - string memory symbol, - uint8 decimalsValue, - uint256 initialSupply - ) ERC20(name, symbol) Ownable(msg.sender) { - _decimals = decimalsValue; - _mint(msg.sender, initialSupply); - } - - function decimals() public view override returns (uint8) { - return _decimals; - } -} diff --git a/packages/plugin-bnb/src/contracts/Erc721Contract.sol b/packages/plugin-bnb/src/contracts/Erc721Contract.sol deleted file mode 100644 index d1f50a9b2fe84..0000000000000 --- a/packages/plugin-bnb/src/contracts/Erc721Contract.sol +++ /dev/null @@ -1,69 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.20; - -import "@openzeppelin/contracts/token/ERC721/ERC721.sol"; -import "@openzeppelin/contracts/access/Ownable.sol"; -import "@openzeppelin/contracts/token/ERC721/extensions/ERC721URIStorage.sol"; -import "@openzeppelin/contracts/token/ERC721/extensions/ERC721Enumerable.sol"; - -contract ERC721Contract is ERC721, ERC721URIStorage, ERC721Enumerable, Ownable { - string private _baseTokenURI; - - constructor( - string memory name, - string memory symbol, - string memory baseURI - ) ERC721(name, symbol) Ownable(msg.sender) { - _baseTokenURI = baseURI; - } - - function _baseURI() internal view override returns (string memory) { - return _baseTokenURI; - } - - function mint(address to, uint256 tokenId) public onlyOwner { - _safeMint(to, tokenId); - } - - function mintBatch(address to, uint256[] memory tokenIds) public onlyOwner { - for (uint i = 0; i < tokenIds.length; i++) { - _safeMint(to, tokenIds[i]); - } - } - - function setBaseURI(string memory newBaseURI) public onlyOwner { - _baseTokenURI = newBaseURI; - } - - function tokenURI( - uint256 tokenId - ) public view override(ERC721, ERC721URIStorage) returns (string memory) { - return super.tokenURI(tokenId); - } - - function supportsInterface( - bytes4 interfaceId - ) - public - view - override(ERC721, ERC721Enumerable, ERC721URIStorage) - returns (bool) - { - return super.supportsInterface(interfaceId); - } - - function _update( - address to, - uint256 tokenId, - address auth - ) internal override(ERC721, ERC721Enumerable) returns (address) { - return super._update(to, tokenId, auth); - } - - function _increaseBalance( - address account, - uint128 value - ) internal override(ERC721, ERC721Enumerable) { - super._increaseBalance(account, value); - } -} diff --git a/packages/plugin-bnb/src/index.ts b/packages/plugin-bnb/src/index.ts deleted file mode 100644 index e9b486f7ce2b3..0000000000000 --- a/packages/plugin-bnb/src/index.ts +++ /dev/null @@ -1,34 +0,0 @@ -export * from "./actions/swap"; -export * from "./actions/transfer"; -export * from "./providers/wallet"; -export * from "./types"; - -import type { Plugin } from "@elizaos/core"; -import { swapAction } from "./actions/swap"; -import { transferAction } from "./actions/transfer"; -import { bnbWalletProvider } from "./providers/wallet"; -import { getBalanceAction } from "./actions/getBalance"; -import { bridgeAction } from "./actions/bridge"; -import { stakeAction } from "./actions/stake"; -import { faucetAction } from "./actions/faucet"; -import { deployAction } from "./actions/deploy"; - -export const bnbPlugin: Plugin = { - name: "bnb", - description: - "BNB Smart Chain (BSC) and opBNB integration plugin supporting transfers, swaps, staking, bridging, and token deployments", - providers: [bnbWalletProvider], - evaluators: [], - services: [], - actions: [ - getBalanceAction, - transferAction, - swapAction, - bridgeAction, - stakeAction, - faucetAction, - deployAction, - ], -}; - -export default bnbPlugin; diff --git a/packages/plugin-bnb/src/providers/wallet.ts b/packages/plugin-bnb/src/providers/wallet.ts deleted file mode 100644 index 3ba7c6db36f0f..0000000000000 --- a/packages/plugin-bnb/src/providers/wallet.ts +++ /dev/null @@ -1,352 +0,0 @@ -import type { - IAgentRuntime, - Provider, - Memory, - State, -} from "@elizaos/core"; -import { EVM, createConfig, getToken } from "@lifi/sdk"; -import type { - Address, - WalletClient, - PublicClient, - Chain, - HttpTransport, - Account, - PrivateKeyAccount, - Hex, -} from "viem"; -import { - createPublicClient, - createWalletClient, - formatUnits, - http, - erc20Abi, -} from "viem"; -import { privateKeyToAccount } from "viem/accounts"; -import * as viemChains from "viem/chains"; -import { createWeb3Name } from "@web3-name-sdk/core"; - -import type { SupportedChain } from "../types"; - -export class WalletProvider { - private currentChain: SupportedChain = "bsc"; - chains: Record = { bsc: viemChains.bsc }; - account: PrivateKeyAccount; - - constructor(privateKey: `0x${string}`, chains?: Record) { - this.setAccount(privateKey); - this.setChains(chains); - - if (chains && Object.keys(chains).length > 0) { - this.setCurrentChain(Object.keys(chains)[0] as SupportedChain); - } - } - - getAccount(): PrivateKeyAccount { - return this.account; - } - - getAddress(): Address { - return this.account.address; - } - - getCurrentChain(): Chain { - return this.chains[this.currentChain]; - } - - getPublicClient( - chainName: SupportedChain - ): PublicClient { - const transport = this.createHttpTransport(chainName); - - const publicClient = createPublicClient({ - chain: this.chains[chainName], - transport, - }); - return publicClient; - } - - getWalletClient(chainName: SupportedChain): WalletClient { - const transport = this.createHttpTransport(chainName); - - const walletClient = createWalletClient({ - chain: this.chains[chainName], - transport, - account: this.account, - }); - - return walletClient; - } - - getChainConfigs(chainName: SupportedChain): Chain { - const chain = viemChains[chainName]; - - if (!chain?.id) { - throw new Error("Invalid chain name"); - } - - return chain; - } - - configureLiFiSdk(chainName: SupportedChain) { - const chains = Object.values(this.chains); - const walletClient = this.getWalletClient(chainName); - - createConfig({ - integrator: "eliza", - providers: [ - EVM({ - getWalletClient: async () => walletClient, - switchChain: async (chainId) => - createWalletClient({ - account: this.account, - chain: chains.find( - (chain) => chain.id === chainId - ) as Chain, - transport: http(), - }), - }), - ], - }); - } - - async formatAddress(address: string): Promise
    { - if (!address || address.length === 0) { - throw new Error("Empty address"); - } - - if (address.startsWith("0x") && address.length === 42) { - return address as Address; - } - - const resolvedAddress = await this.resolveWeb3Name(address); - if (resolvedAddress) { - return resolvedAddress as Address; - } - throw new Error("Invalid address"); - } - - async resolveWeb3Name(name: string): Promise { - const nameService = createWeb3Name(); - return await nameService.getAddress(name); - } - - async checkERC20Allowance( - chain: SupportedChain, - token: Address, - owner: Address, - spender: Address, - ): Promise { - const publicClient = this.getPublicClient(chain); - return await publicClient.readContract({ - address: token, - abi: erc20Abi, - functionName: "allowance", - args: [owner, spender], - }); - } - - async approveERC20( - chain: SupportedChain, - token: Address, - spender: Address, - amount: bigint - ): Promise { - const publicClient = this.getPublicClient(chain); - const walletClient = this.getWalletClient(chain); - const { request } = await publicClient.simulateContract({ - account: this.account, - address: token, - abi: erc20Abi, - functionName: "approve", - args: [spender, amount], - }); - - return await walletClient.writeContract(request); - } - - async transfer( - chain: SupportedChain, - toAddress: Address, - amount: bigint, - options?: { - gas?: bigint; - gasPrice?: bigint; - data?: Hex; - } - ): Promise { - const walletClient = this.getWalletClient(chain); - return await walletClient.sendTransaction({ - account: this.account, - to: toAddress, - value: amount, - chain: this.getChainConfigs(chain), - ...options, - }); - } - - async transferERC20( - chain: SupportedChain, - tokenAddress: Address, - toAddress: Address, - amount: bigint, - options?: { - gas?: bigint; - gasPrice?: bigint; - } - ): Promise { - const publicClient = this.getPublicClient(chain); - const walletClient = this.getWalletClient(chain); - const { request } = await publicClient.simulateContract({ - account: this.account, - address: tokenAddress as `0x${string}`, - abi: erc20Abi, - functionName: "transfer", - args: [toAddress as `0x${string}`, amount], - ...options, - }); - - return await walletClient.writeContract(request); - } - - async getBalance(): Promise { - const client = this.getPublicClient(this.currentChain); - const balance = await client.getBalance({ - address: this.account.address, - }); - return formatUnits(balance, 18); - } - - async getTokenAddress( - chainName: SupportedChain, - tokenSymbol: string - ): Promise { - const token = await getToken( - this.getChainConfigs(chainName).id, - tokenSymbol - ); - return token.address; - } - - addChain(chain: Record) { - this.setChains(chain); - } - - switchChain(chainName: SupportedChain, customRpcUrl?: string) { - if (!this.chains[chainName]) { - const chain = WalletProvider.genChainFromName( - chainName, - customRpcUrl - ); - this.addChain({ [chainName]: chain }); - } - this.setCurrentChain(chainName); - } - - private setAccount = (pk: `0x${string}`) => { - this.account = privateKeyToAccount(pk); - }; - - private setChains = (chains?: Record) => { - if (!chains) { - return; - } - for (const chain of Object.keys(chains)) { - this.chains[chain] = chains[chain]; - } - }; - - private setCurrentChain = (chain: SupportedChain) => { - this.currentChain = chain; - }; - - private createHttpTransport = (chainName: SupportedChain) => { - const chain = this.chains[chainName]; - - if (chain.rpcUrls.custom) { - return http(chain.rpcUrls.custom.http[0]); - } - return http(chain.rpcUrls.default.http[0]); - }; - - static genChainFromName( - chainName: string, - customRpcUrl?: string | null - ): Chain { - const baseChain = viemChains[chainName]; - - if (!baseChain?.id) { - throw new Error("Invalid chain name"); - } - - const viemChain: Chain = customRpcUrl - ? { - ...baseChain, - rpcUrls: { - ...baseChain.rpcUrls, - custom: { - http: [customRpcUrl], - }, - }, - } - : baseChain; - - return viemChain; - } -} - -const genChainsFromRuntime = ( - runtime: IAgentRuntime -): Record => { - const chainNames = ["bsc", "bscTestnet", "opBNB", "opBNBTestnet"]; - const chains = {}; - - for (const chainName of chainNames) { - const chain = WalletProvider.genChainFromName(chainName); - chains[chainName] = chain; - } - - const mainnet_rpcurl = runtime.getSetting("BSC_PROVIDER_URL"); - if (mainnet_rpcurl) { - const chain = WalletProvider.genChainFromName("bsc", mainnet_rpcurl); - chains["bsc"] = chain; - } - - const opbnb_rpcurl = runtime.getSetting("OPBNB_PROVIDER_URL"); - if (opbnb_rpcurl) { - const chain = WalletProvider.genChainFromName("opBNB", opbnb_rpcurl); - chains["opBNB"] = chain; - } - - return chains; -}; - -export const initWalletProvider = (runtime: IAgentRuntime) => { - const privateKey = runtime.getSetting("BNB_PRIVATE_KEY"); - if (!privateKey) { - throw new Error("BNB_PRIVATE_KEY is missing"); - } - - const chains = genChainsFromRuntime(runtime); - - return new WalletProvider(privateKey as `0x${string}`, chains); -}; - -export const bnbWalletProvider: Provider = { - async get( - runtime: IAgentRuntime, - _message: Memory, - _state?: State - ): Promise { - try { - const walletProvider = initWalletProvider(runtime); - const address = walletProvider.getAddress(); - const balance = await walletProvider.getBalance(); - const chain = walletProvider.getCurrentChain(); - return `BNB chain Wallet Address: ${address}\nBalance: ${balance} ${chain.nativeCurrency.symbol}\nChain ID: ${chain.id}, Name: ${chain.name}`; - } catch (error) { - console.error("Error in BNB chain wallet provider:", error); - return null; - } - }, -}; diff --git a/packages/plugin-bnb/src/templates/index.ts b/packages/plugin-bnb/src/templates/index.ts deleted file mode 100644 index 6ca24c3702235..0000000000000 --- a/packages/plugin-bnb/src/templates/index.ts +++ /dev/null @@ -1,184 +0,0 @@ -export const getBalanceTemplate = `Given the recent messages and wallet information below: - -{{recentMessages}} - -{{walletInfo}} - -Extract the following information about the requested check balance: -- Chain to execute on. Must be one of ["bsc", "bscTestnet", "opBNB", "opBNBTestnet"]. Default is "bsc". -- Address to check balance for. Optional, must be a valid Ethereum address starting with "0x" or a web3 domain name. If not provided, use the BNB chain Wallet Address. -- Token symbol or address. Could be a token symbol or address. If the address is provided, it must be a valid Ethereum address starting with "0x". Default is "BNB". -If any field is not provided, use the default value. If no default value is specified, use null. - -Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined: - -\`\`\`json -{ - "chain": SUPPORTED_CHAINS, - "address": string | null, - "token": string -} -\`\`\` -`; - -export const transferTemplate = `Given the recent messages and wallet information below: - -{{recentMessages}} - -{{walletInfo}} - -Extract the following information about the requested transfer: -- Chain to execute on. Must be one of ["bsc", "bscTestnet", "opBNB", "opBNBTestnet"]. Default is "bsc". -- Token symbol or address(string starting with "0x"). Optional. -- Amount to transfer. Optional. Must be a string representing the amount in ether (only number without coin symbol, e.g., "0.1"). -- Recipient address. Must be a valid Ethereum address starting with "0x" or a web3 domain name. -- Data. Optional, data to be included in the transaction. -If any field is not provided, use the default value. If no default value is specified, use null. - -Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined: - -\`\`\`json -{ - "chain": SUPPORTED_CHAINS, - "token": string | null, - "amount": string | null, - "toAddress": string, - "data": string | null -} -\`\`\` -`; - -export const swapTemplate = `Given the recent messages and wallet information below: - -{{recentMessages}} - -{{walletInfo}} - -Extract the following information about the requested token swap: -- Chain to execute on. Must be one of ["bsc", "bscTestnet", "opBNB", "opBNBTestnet"]. Default is "bsc". -- Input token symbol or address(string starting with "0x"). -- Output token symbol or address(string starting with "0x"). -- Amount to swap. Must be a string representing the amount in ether (only number without coin symbol, e.g., "0.1"). -- Slippage. Optional, expressed as decimal proportion, 0.03 represents 3%. -If any field is not provided, use the default value. If no default value is specified, use null. - -Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined: - -\`\`\`json -{ - "chain": SUPPORTED_CHAINS, - "inputToken": string | null, - "outputToken": string | null, - "amount": string | null, - "slippage": number | null -} -\`\`\` -`; - -export const bridgeTemplate = `Given the recent messages and wallet information below: - -{{recentMessages}} - -{{walletInfo}} - -Extract the following information about the requested token bridge: -- From chain. Must be one of ["bsc", "opBNB"]. -- To chain. Must be one of ["bsc", "opBNB"]. -- From token address. Optional, must be a valid Ethereum address starting with "0x". -- To token address. Optional, must be a valid Ethereum address starting with "0x". -- Amount to bridge. Must be a string representing the amount in ether (only number without coin symbol, e.g., "0.1"). -- To address. Optional, must be a valid Ethereum address starting with "0x" or a web3 domain name. - -Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined: - -\`\`\`json -{ - "fromChain": "bsc" | "opBNB", - "toChain": "bsc" | "opBNB", - "fromToken": string | null, - "toToken": string | null, - "amount": string, - "toAddress": string | null -} -\`\`\` -`; - -export const stakeTemplate = `Given the recent messages and wallet information below: - -{{recentMessages}} - -{{walletInfo}} - -Extract the following information about the requested stake action: -- Chain to execute on. Must be one of ["bsc", "bscTestnet", "opBNB", "opBNBTestnet"]. Default is "bsc". -- Action to execute. Must be one of ["deposit", "withdraw", "claim"]. -- Amount to execute. Optional, must be a string representing the amount in ether (only number without coin symbol, e.g., "0.1"). If the action is "deposit" or "withdraw", amount is required. -If any field is not provided, use the default value. If no default value is specified, use null. - -Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined: - -\`\`\`json -{ - "chain": SUPPORTED_CHAINS, - "action": "deposit" | "withdraw" | "claim", - "amount": string | null, -} -\`\`\` -`; - -export const faucetTemplate = `Given the recent messages and wallet information below: - -{{recentMessages}} - -{{walletInfo}} - -Extract the following information about the requested faucet request: -- Token. Token to request. Could be one of ["BNB", "BTC", "BUSD", "DAI", "ETH", "USDC"]. Optional. -- Recipient address. Optional, must be a valid Ethereum address starting with "0x" or a web3 domain name. If not provided, use the BNB chain Wallet Address. -If any field is not provided, use the default value. If no default value is specified, use null. - -Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined: - -\`\`\`json -{ - "token": string | null, - "toAddress": string | null -} -\`\`\` -`; - -export const ercContractTemplate = `Given the recent messages and wallet information below: - -{{recentMessages}} - -{{walletInfo}} - -When user wants to deploy any type of token contract (ERC20/721/1155), this will trigger the DEPLOY_TOKEN action. - -Extract the following details for deploying a token contract: -- Chain to execute on. Must be one of ["bsc", "bscTestnet", "opBNB", "opBNBTestnet"]. Default is "bsc". -- contractType: The type of token contract to deploy - - For ERC20: Extract name, symbol, decimals, totalSupply - - For ERC721: Extract name, symbol, baseURI - - For ERC1155: Extract name, baseURI -- name: The name of the token. -- symbol: The token symbol (only for ERC20/721). -- decimals: Token decimals (only for ERC20). Default is 18. -- totalSupply: Total supply with decimals (only for ERC20). Default is "1000000000000000000". -- baseURI: Base URI for token metadata (only for ERC721/1155). -If any field is not provided, use the default value. If no default value is provided, use empty string. - -Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined: - -\`\`\`json -{ - "chain": SUPPORTED_CHAINS, - "contractType": "ERC20" | "ERC721" | "ERC1155", - "name": string, - "symbol": string | null, - "decimals": number | null, - "totalSupply": string | null, - "baseURI": string | null -} -\`\`\` -`; diff --git a/packages/plugin-bnb/src/tests/getBalance.test.ts b/packages/plugin-bnb/src/tests/getBalance.test.ts deleted file mode 100644 index 020d0108f4994..0000000000000 --- a/packages/plugin-bnb/src/tests/getBalance.test.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { describe, it, beforeEach, expect } from "vitest"; -import { - generatePrivateKey, - Account, - privateKeyToAccount, -} from "viem/accounts"; - -import { GetBalanceAction } from "../actions/getBalance"; -import { WalletProvider } from "../providers/wallet"; -import { GetBalanceParams } from "../types"; - -describe("GetBalance Action", () => { - let account: Account; - let wp: WalletProvider; - let ga: GetBalanceAction; - - beforeEach(async () => { - const pk = generatePrivateKey(); - account = privateKeyToAccount(pk); - wp = new WalletProvider(pk); - ga = new GetBalanceAction(wp); - }); - - describe("Get Balance", () => { - it("get BNB balance", async () => { - const input: GetBalanceParams = { - chain: "bsc", - address: account.address, - token: "BNB", - }; - const resp = await ga.getBalance(input); - expect(resp.balance).toBeDefined(); - expect(typeof resp.balance).toBe("object"); - }); - - it("get USDC balance", async () => { - const input: GetBalanceParams = { - chain: "bsc", - address: account.address, - token: "USDC", - }; - const resp = await ga.getBalance(input); - expect(resp.balance).toBeDefined(); - expect(typeof resp.balance).toBe("object"); - }); - - it("get balance by token contract address", async () => { - const input: GetBalanceParams = { - chain: "bsc", - address: account.address, - token: "0x55d398326f99059ff775485246999027b3197955", - }; - const resp = await ga.getBalance(input); - expect(resp.balance).toBeDefined(); - expect(typeof resp.balance).toBe("object"); - }); - }); -}); diff --git a/packages/plugin-bnb/src/tests/wallet.test.ts b/packages/plugin-bnb/src/tests/wallet.test.ts deleted file mode 100644 index 04570e24e6388..0000000000000 --- a/packages/plugin-bnb/src/tests/wallet.test.ts +++ /dev/null @@ -1,96 +0,0 @@ -import { describe, it, expect, beforeAll } from "vitest"; -import { - Account, - generatePrivateKey, - privateKeyToAccount, -} from "viem/accounts"; -import { bsc, opBNB } from "viem/chains"; - -import { WalletProvider } from "../providers/wallet"; - -const customRpcUrls = { - bsc: "custom-rpc.bsc.io", - opBNB: "custom-rpc.opBNB.io", -}; - -describe("Wallet provider", () => { - let pk: `0x${string}`; - let account: Account; - let walletProvider: WalletProvider; - - beforeAll(() => { - pk = generatePrivateKey(); - account = privateKeyToAccount(pk); - walletProvider = new WalletProvider(pk); - }); - - describe("Constructor", () => { - it("get address", () => { - const expectedAddress = account.address; - - expect(walletProvider.getAddress()).toEqual(expectedAddress); - }); - it("get current chain", () => { - expect(walletProvider.getCurrentChain().id).toEqual(bsc.id); - }); - it("get chain configs", () => { - expect(walletProvider.getChainConfigs("bsc").id).toEqual(bsc.id); - expect(walletProvider.getChainConfigs("opBNB").id).toEqual( - opBNB.id - ); - }); - }); - describe("Clients", () => { - it("generates public client", () => { - const client = walletProvider.getPublicClient("bsc"); - expect(client.chain.id).toEqual(bsc.id); - expect(client.transport.url).toEqual(bsc.rpcUrls.default.http[0]); - }); - it("generates public client with custom rpcurl", () => { - const chain = WalletProvider.genChainFromName( - "bsc", - customRpcUrls.bsc - ); - const wp = new WalletProvider(pk, { ["bsc"]: chain }); - - const client = wp.getPublicClient("bsc"); - expect(client.chain.id).toEqual(bsc.id); - expect(client.chain.rpcUrls.default.http[0]).toEqual( - bsc.rpcUrls.default.http[0] - ); - expect(client.chain.rpcUrls.custom.http[0]).toEqual( - customRpcUrls.bsc - ); - expect(client.transport.url).toEqual(customRpcUrls.bsc); - }); - it("generates wallet client", () => { - const expectedAddress = account.address; - - const client = walletProvider.getWalletClient("bsc"); - - expect(client.account?.address).toEqual(expectedAddress); - expect(client.transport.url).toEqual(bsc.rpcUrls.default.http[0]); - }); - it("generates wallet client with custom rpcurl", () => { - const account = privateKeyToAccount(pk); - const expectedAddress = account.address; - const chain = WalletProvider.genChainFromName( - "bsc", - customRpcUrls.bsc - ); - const wp = new WalletProvider(pk, { ["bsc"]: chain }); - - const client = wp.getWalletClient("bsc"); - - expect(client.account?.address).toEqual(expectedAddress); - expect(client.chain?.id).toEqual(bsc.id); - expect(client.chain?.rpcUrls.default.http[0]).toEqual( - bsc.rpcUrls.default.http[0] - ); - expect(client.chain?.rpcUrls.custom.http[0]).toEqual( - customRpcUrls.bsc - ); - expect(client.transport.url).toEqual(customRpcUrls.bsc); - }); - }); -}); diff --git a/packages/plugin-bnb/src/types/index.ts b/packages/plugin-bnb/src/types/index.ts deleted file mode 100644 index 8dc0f00e0ff42..0000000000000 --- a/packages/plugin-bnb/src/types/index.ts +++ /dev/null @@ -1,2537 +0,0 @@ -import type { Address, Hash } from "viem"; - -export type SupportedChain = "bsc" | "bscTestnet" | "opBNB" | "opBNBTestnet"; -export type StakeAction = "deposit" | "withdraw" | "claim"; - -// Action parameters -export interface GetBalanceParams { - chain: SupportedChain; - address?: Address; - token: string; -} - -export interface TransferParams { - chain: SupportedChain; - token?: string; - amount?: string; - toAddress: Address; - data?: `0x${string}`; -} - -export interface SwapParams { - chain: SupportedChain; - fromToken: string; - toToken: string; - amount: string; - slippage?: number; -} - -export interface BridgeParams { - fromChain: SupportedChain; - toChain: SupportedChain; - fromToken?: Address; - toToken?: Address; - amount: string; - toAddress?: Address; -} - -export interface StakeParams { - chain: SupportedChain; - action: StakeAction; - amount?: string; -} - -export interface FaucetParams { - token?: string; - toAddress?: Address; -} - -// Action return types -export interface GetBalanceResponse { - chain: SupportedChain; - address: Address; - balance?: { token: string; amount: string }; -} - -export interface TransferResponse { - chain: SupportedChain; - txHash: Hash; - recipient: Address; - amount: string; - token: string; - data?: `0x${string}`; -} - -export interface SwapResponse { - chain: SupportedChain; - txHash: Hash; - fromToken: string; - toToken: string; - amount: string; -} - -export interface BridgeResponse { - fromChain: SupportedChain; - toChain: SupportedChain; - txHash: Hash; - recipient: Address; - fromToken: string; - toToken: string; - amount: string; -} - -export interface StakeResponse { - response: string; -} - -export interface FaucetResponse { - token: string; - recipient: Address; - txHash: Hash; -} - -export interface IDeployERC20Params { - chain: SupportedChain; - name: string; - symbol: string; - decimals: number; - totalSupply: string; -} - -export interface IDeployERC721Params { - chain: SupportedChain; - name: string; - symbol: string; - baseURI: string; -} - -export interface IDeployERC1155Params { - chain: SupportedChain; - name: string; - baseURI: string; -} - -// Contract ABIs -export const L1StandardBridgeAbi = [ - { - type: "constructor", - inputs: [], - stateMutability: "nonpayable", - }, - { - type: "receive", - stateMutability: "payable", - }, - { - type: "function", - name: "MESSENGER", - inputs: [], - outputs: [ - { - name: "", - type: "address", - internalType: "contract CrossDomainMessenger", - }, - ], - stateMutability: "view", - }, - { - type: "function", - name: "OTHER_BRIDGE", - inputs: [], - outputs: [ - { - name: "", - type: "address", - internalType: "contract StandardBridge", - }, - ], - stateMutability: "view", - }, - { - type: "function", - name: "bridgeERC20", - inputs: [ - { - name: "_localToken", - type: "address", - internalType: "address", - }, - { - name: "_remoteToken", - type: "address", - internalType: "address", - }, - { - name: "_amount", - type: "uint256", - internalType: "uint256", - }, - { - name: "_minGasLimit", - type: "uint32", - internalType: "uint32", - }, - { - name: "_extraData", - type: "bytes", - internalType: "bytes", - }, - ], - outputs: [], - stateMutability: "nonpayable", - }, - { - type: "function", - name: "bridgeERC20To", - inputs: [ - { - name: "_localToken", - type: "address", - internalType: "address", - }, - { - name: "_remoteToken", - type: "address", - internalType: "address", - }, - { - name: "_to", - type: "address", - internalType: "address", - }, - { - name: "_amount", - type: "uint256", - internalType: "uint256", - }, - { - name: "_minGasLimit", - type: "uint32", - internalType: "uint32", - }, - { - name: "_extraData", - type: "bytes", - internalType: "bytes", - }, - ], - outputs: [], - stateMutability: "nonpayable", - }, - { - type: "function", - name: "bridgeETH", - inputs: [ - { - name: "_minGasLimit", - type: "uint32", - internalType: "uint32", - }, - { - name: "_extraData", - type: "bytes", - internalType: "bytes", - }, - ], - outputs: [], - stateMutability: "payable", - }, - { - type: "function", - name: "bridgeETHTo", - inputs: [ - { - name: "_to", - type: "address", - internalType: "address", - }, - { - name: "_minGasLimit", - type: "uint32", - internalType: "uint32", - }, - { - name: "_extraData", - type: "bytes", - internalType: "bytes", - }, - ], - outputs: [], - stateMutability: "payable", - }, - { - type: "function", - name: "depositERC20", - inputs: [ - { - name: "_l1Token", - type: "address", - internalType: "address", - }, - { - name: "_l2Token", - type: "address", - internalType: "address", - }, - { - name: "_amount", - type: "uint256", - internalType: "uint256", - }, - { - name: "_minGasLimit", - type: "uint32", - internalType: "uint32", - }, - { - name: "_extraData", - type: "bytes", - internalType: "bytes", - }, - ], - outputs: [], - stateMutability: "nonpayable", - }, - { - type: "function", - name: "depositERC20To", - inputs: [ - { - name: "_l1Token", - type: "address", - internalType: "address", - }, - { - name: "_l2Token", - type: "address", - internalType: "address", - }, - { - name: "_to", - type: "address", - internalType: "address", - }, - { - name: "_amount", - type: "uint256", - internalType: "uint256", - }, - { - name: "_minGasLimit", - type: "uint32", - internalType: "uint32", - }, - { - name: "_extraData", - type: "bytes", - internalType: "bytes", - }, - ], - outputs: [], - stateMutability: "nonpayable", - }, - { - type: "function", - name: "depositETH", - inputs: [ - { - name: "_minGasLimit", - type: "uint32", - internalType: "uint32", - }, - { - name: "_extraData", - type: "bytes", - internalType: "bytes", - }, - ], - outputs: [], - stateMutability: "payable", - }, - { - type: "function", - name: "depositETHTo", - inputs: [ - { - name: "_to", - type: "address", - internalType: "address", - }, - { - name: "_minGasLimit", - type: "uint32", - internalType: "uint32", - }, - { - name: "_extraData", - type: "bytes", - internalType: "bytes", - }, - ], - outputs: [], - stateMutability: "payable", - }, - { - type: "function", - name: "deposits", - inputs: [ - { - name: "", - type: "address", - internalType: "address", - }, - { - name: "", - type: "address", - internalType: "address", - }, - ], - outputs: [ - { - name: "", - type: "uint256", - internalType: "uint256", - }, - ], - stateMutability: "view", - }, - { - type: "function", - name: "finalizeBridgeERC20", - inputs: [ - { - name: "_localToken", - type: "address", - internalType: "address", - }, - { - name: "_remoteToken", - type: "address", - internalType: "address", - }, - { - name: "_from", - type: "address", - internalType: "address", - }, - { - name: "_to", - type: "address", - internalType: "address", - }, - { - name: "_amount", - type: "uint256", - internalType: "uint256", - }, - { - name: "_extraData", - type: "bytes", - internalType: "bytes", - }, - ], - outputs: [], - stateMutability: "nonpayable", - }, - { - type: "function", - name: "finalizeBridgeETH", - inputs: [ - { - name: "_from", - type: "address", - internalType: "address", - }, - { - name: "_to", - type: "address", - internalType: "address", - }, - { - name: "_amount", - type: "uint256", - internalType: "uint256", - }, - { - name: "_extraData", - type: "bytes", - internalType: "bytes", - }, - ], - outputs: [], - stateMutability: "payable", - }, - { - type: "function", - name: "finalizeERC20Withdrawal", - inputs: [ - { - name: "_l1Token", - type: "address", - internalType: "address", - }, - { - name: "_l2Token", - type: "address", - internalType: "address", - }, - { - name: "_from", - type: "address", - internalType: "address", - }, - { - name: "_to", - type: "address", - internalType: "address", - }, - { - name: "_amount", - type: "uint256", - internalType: "uint256", - }, - { - name: "_extraData", - type: "bytes", - internalType: "bytes", - }, - ], - outputs: [], - stateMutability: "nonpayable", - }, - { - type: "function", - name: "finalizeETHWithdrawal", - inputs: [ - { - name: "_from", - type: "address", - internalType: "address", - }, - { - name: "_to", - type: "address", - internalType: "address", - }, - { - name: "_amount", - type: "uint256", - internalType: "uint256", - }, - { - name: "_extraData", - type: "bytes", - internalType: "bytes", - }, - ], - outputs: [], - stateMutability: "payable", - }, - { - type: "function", - name: "initialize", - inputs: [ - { - name: "_messenger", - type: "address", - internalType: "contract CrossDomainMessenger", - }, - { - name: "_superchainConfig", - type: "address", - internalType: "contract SuperchainConfig", - }, - { - name: "_systemConfig", - type: "address", - internalType: "contract SystemConfig", - }, - ], - outputs: [], - stateMutability: "nonpayable", - }, - { - type: "function", - name: "l2TokenBridge", - inputs: [], - outputs: [ - { - name: "", - type: "address", - internalType: "address", - }, - ], - stateMutability: "view", - }, - { - type: "function", - name: "messenger", - inputs: [], - outputs: [ - { - name: "", - type: "address", - internalType: "contract CrossDomainMessenger", - }, - ], - stateMutability: "view", - }, - { - type: "function", - name: "otherBridge", - inputs: [], - outputs: [ - { - name: "", - type: "address", - internalType: "contract StandardBridge", - }, - ], - stateMutability: "view", - }, - { - type: "function", - name: "paused", - inputs: [], - outputs: [ - { - name: "", - type: "bool", - internalType: "bool", - }, - ], - stateMutability: "view", - }, - { - type: "function", - name: "superchainConfig", - inputs: [], - outputs: [ - { - name: "", - type: "address", - internalType: "contract SuperchainConfig", - }, - ], - stateMutability: "view", - }, - { - type: "function", - name: "systemConfig", - inputs: [], - outputs: [ - { - name: "", - type: "address", - internalType: "contract SystemConfig", - }, - ], - stateMutability: "view", - }, - { - type: "function", - name: "version", - inputs: [], - outputs: [ - { - name: "", - type: "string", - internalType: "string", - }, - ], - stateMutability: "view", - }, - { - type: "event", - name: "ERC20BridgeFinalized", - inputs: [ - { - name: "localToken", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "remoteToken", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "from", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "to", - type: "address", - indexed: false, - internalType: "address", - }, - { - name: "amount", - type: "uint256", - indexed: false, - internalType: "uint256", - }, - { - name: "extraData", - type: "bytes", - indexed: false, - internalType: "bytes", - }, - ], - anonymous: false, - }, - { - type: "event", - name: "ERC20BridgeInitiated", - inputs: [ - { - name: "localToken", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "remoteToken", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "from", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "to", - type: "address", - indexed: false, - internalType: "address", - }, - { - name: "amount", - type: "uint256", - indexed: false, - internalType: "uint256", - }, - { - name: "extraData", - type: "bytes", - indexed: false, - internalType: "bytes", - }, - ], - anonymous: false, - }, - { - type: "event", - name: "ERC20DepositInitiated", - inputs: [ - { - name: "l1Token", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "l2Token", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "from", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "to", - type: "address", - indexed: false, - internalType: "address", - }, - { - name: "amount", - type: "uint256", - indexed: false, - internalType: "uint256", - }, - { - name: "extraData", - type: "bytes", - indexed: false, - internalType: "bytes", - }, - ], - anonymous: false, - }, - { - type: "event", - name: "ERC20WithdrawalFinalized", - inputs: [ - { - name: "l1Token", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "l2Token", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "from", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "to", - type: "address", - indexed: false, - internalType: "address", - }, - { - name: "amount", - type: "uint256", - indexed: false, - internalType: "uint256", - }, - { - name: "extraData", - type: "bytes", - indexed: false, - internalType: "bytes", - }, - ], - anonymous: false, - }, - { - type: "event", - name: "ETHBridgeFinalized", - inputs: [ - { - name: "from", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "to", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "amount", - type: "uint256", - indexed: false, - internalType: "uint256", - }, - { - name: "extraData", - type: "bytes", - indexed: false, - internalType: "bytes", - }, - ], - anonymous: false, - }, - { - type: "event", - name: "ETHBridgeInitiated", - inputs: [ - { - name: "from", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "to", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "amount", - type: "uint256", - indexed: false, - internalType: "uint256", - }, - { - name: "extraData", - type: "bytes", - indexed: false, - internalType: "bytes", - }, - ], - anonymous: false, - }, - { - type: "event", - name: "ETHDepositInitiated", - inputs: [ - { - name: "from", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "to", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "amount", - type: "uint256", - indexed: false, - internalType: "uint256", - }, - { - name: "extraData", - type: "bytes", - indexed: false, - internalType: "bytes", - }, - ], - anonymous: false, - }, - { - type: "event", - name: "ETHWithdrawalFinalized", - inputs: [ - { - name: "from", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "to", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "amount", - type: "uint256", - indexed: false, - internalType: "uint256", - }, - { - name: "extraData", - type: "bytes", - indexed: false, - internalType: "bytes", - }, - ], - anonymous: false, - }, - { - type: "event", - name: "Initialized", - inputs: [ - { - name: "version", - type: "uint8", - indexed: false, - internalType: "uint8", - }, - ], - anonymous: false, - }, -] as const; - -export const L2StandardBridgeAbi = [ - { - type: "constructor", - inputs: [ - { - name: "_owner", - type: "address", - internalType: "address payable", - }, - { - name: "_delegationFee", - type: "uint256", - internalType: "uint256", - }, - ], - stateMutability: "nonpayable", - }, - { - name: "AddressEmptyCode", - type: "error", - inputs: [{ name: "target", type: "address", internalType: "address" }], - }, - { - name: "AddressInsufficientBalance", - type: "error", - inputs: [{ name: "account", type: "address", internalType: "address" }], - }, - { name: "FailedInnerCall", type: "error", inputs: [] }, - { - name: "OwnableInvalidOwner", - type: "error", - inputs: [{ name: "owner", type: "address", internalType: "address" }], - }, - { - name: "OwnableUnauthorizedAccount", - type: "error", - inputs: [{ name: "account", type: "address", internalType: "address" }], - }, - { - name: "SafeERC20FailedOperation", - type: "error", - inputs: [{ name: "token", type: "address", internalType: "address" }], - }, - { - name: "OwnershipTransferred", - type: "event", - inputs: [ - { - name: "previousOwner", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "newOwner", - type: "address", - indexed: true, - internalType: "address", - }, - ], - anonymous: false, - signature: - "0x8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e0", - }, - { - name: "SetDelegationFee", - type: "event", - inputs: [ - { - name: "_delegationFee", - type: "uint256", - indexed: false, - internalType: "uint256", - }, - ], - anonymous: false, - signature: - "0x0322f3257c2afe5fe8da7ab561f0d3384148487412fe2751678f2188731c0815", - }, - { - name: "WithdrawTo", - type: "event", - inputs: [ - { - name: "from", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "l2Token", - type: "address", - indexed: true, - internalType: "address", - }, - { - name: "to", - type: "address", - indexed: false, - internalType: "address", - }, - { - name: "amount", - type: "uint256", - indexed: false, - internalType: "uint256", - }, - { - name: "minGasLimit", - type: "uint32", - indexed: false, - internalType: "uint32", - }, - { - name: "extraData", - type: "bytes", - indexed: false, - internalType: "bytes", - }, - ], - anonymous: false, - signature: - "0x56f66275d9ebc94b7d6895aa0d96a3783550d0183ba106408d387d19f2e877f1", - }, - { - name: "L2_STANDARD_BRIDGE", - type: "function", - inputs: [], - outputs: [ - { - name: "", - type: "address", - value: "0x4200000000000000000000000000000000000010", - internalType: "contract IL2StandardBridge", - }, - ], - constant: true, - signature: "0x21d12763", - stateMutability: "view", - }, - { - name: "L2_STANDARD_BRIDGE_ADDRESS", - type: "function", - inputs: [], - outputs: [ - { - name: "", - type: "address", - value: "0x4200000000000000000000000000000000000010", - internalType: "address", - }, - ], - constant: true, - signature: "0x2cb7cb06", - stateMutability: "view", - }, - { - name: "delegationFee", - type: "function", - inputs: [], - outputs: [ - { - name: "", - type: "uint256", - value: "2000000000000000", - internalType: "uint256", - }, - ], - constant: true, - signature: "0xc5f0a58f", - stateMutability: "view", - }, - { - name: "owner", - type: "function", - inputs: [], - outputs: [ - { - name: "", - type: "address", - value: "0xCe4750fDc02A07Eb0d99cA798CD5c170D8F8410A", - internalType: "address", - }, - ], - constant: true, - signature: "0x8da5cb5b", - stateMutability: "view", - }, - { - name: "renounceOwnership", - type: "function", - inputs: [], - outputs: [], - signature: "0x715018a6", - stateMutability: "nonpayable", - }, - { - name: "setDelegationFee", - type: "function", - inputs: [ - { - name: "_delegationFee", - type: "uint256", - internalType: "uint256", - }, - ], - outputs: [], - signature: "0x55bfc81c", - stateMutability: "nonpayable", - }, - { - name: "transferOwnership", - type: "function", - inputs: [ - { name: "newOwner", type: "address", internalType: "address" }, - ], - outputs: [], - signature: "0xf2fde38b", - stateMutability: "nonpayable", - }, - { - name: "withdraw", - type: "function", - inputs: [ - { name: "_l2Token", type: "address", internalType: "address" }, - { name: "_amount", type: "uint256", internalType: "uint256" }, - { name: "_minGasLimit", type: "uint32", internalType: "uint32" }, - { name: "_extraData", type: "bytes", internalType: "bytes" }, - ], - outputs: [], - payable: true, - signature: "0x32b7006d", - stateMutability: "payable", - }, - { - name: "withdrawFee", - type: "function", - inputs: [ - { name: "_recipient", type: "address", internalType: "address" }, - ], - outputs: [], - signature: "0x1ac3ddeb", - stateMutability: "nonpayable", - }, - { - name: "withdrawFeeToL1", - type: "function", - inputs: [ - { name: "_recipient", type: "address", internalType: "address" }, - { name: "_minGasLimit", type: "uint32", internalType: "uint32" }, - { name: "_extraData", type: "bytes", internalType: "bytes" }, - ], - outputs: [], - signature: "0x244cafe0", - stateMutability: "nonpayable", - }, - { - name: "withdrawTo", - type: "function", - inputs: [ - { name: "_l2Token", type: "address", internalType: "address" }, - { name: "_to", type: "address", internalType: "address" }, - { name: "_amount", type: "uint256", internalType: "uint256" }, - { name: "_minGasLimit", type: "uint32", internalType: "uint32" }, - { name: "_extraData", type: "bytes", internalType: "bytes" }, - ], - outputs: [], - payable: true, - signature: "0xa3a79548", - stateMutability: "payable", - }, -] as const; - -export const ListaDaoAbi = [ - { inputs: [], stateMutability: "nonpayable", type: "constructor" }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "address", - name: "_account", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "_amount", - type: "uint256", - }, - ], - name: "ClaimAllWithdrawals", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "_uuid", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "_amount", - type: "uint256", - }, - ], - name: "ClaimUndelegated", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "address", - name: "_validator", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "_uuid", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "_amount", - type: "uint256", - }, - ], - name: "ClaimUndelegatedFrom", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "address", - name: "_account", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "_idx", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "_amount", - type: "uint256", - }, - ], - name: "ClaimWithdrawal", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "_amount", - type: "uint256", - }, - ], - name: "Delegate", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "address", - name: "_validator", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "_amount", - type: "uint256", - }, - { - indexed: false, - internalType: "bool", - name: "_delegateVotePower", - type: "bool", - }, - ], - name: "DelegateTo", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "address", - name: "_delegateTo", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "_votesChange", - type: "uint256", - }, - ], - name: "DelegateVoteTo", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "address", - name: "_src", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "_amount", - type: "uint256", - }, - ], - name: "Deposit", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "address", - name: "_address", - type: "address", - }, - ], - name: "DisableValidator", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint8", - name: "version", - type: "uint8", - }, - ], - name: "Initialized", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "address", - name: "account", - type: "address", - }, - ], - name: "Paused", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "address", - name: "_address", - type: "address", - }, - ], - name: "ProposeManager", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "address", - name: "_src", - type: "address", - }, - { - indexed: false, - internalType: "address", - name: "_dest", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "_amount", - type: "uint256", - }, - ], - name: "ReDelegate", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "_rewardsId", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "_amount", - type: "uint256", - }, - ], - name: "Redelegate", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "address", - name: "_address", - type: "address", - }, - ], - name: "RemoveValidator", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "address", - name: "_account", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "_amountInSlisBnb", - type: "uint256", - }, - ], - name: "RequestWithdraw", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "_amount", - type: "uint256", - }, - ], - name: "RewardsCompounded", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "bytes32", - name: "role", - type: "bytes32", - }, - { - indexed: true, - internalType: "bytes32", - name: "previousAdminRole", - type: "bytes32", - }, - { - indexed: true, - internalType: "bytes32", - name: "newAdminRole", - type: "bytes32", - }, - ], - name: "RoleAdminChanged", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "bytes32", - name: "role", - type: "bytes32", - }, - { - indexed: true, - internalType: "address", - name: "account", - type: "address", - }, - { - indexed: true, - internalType: "address", - name: "sender", - type: "address", - }, - ], - name: "RoleGranted", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "bytes32", - name: "role", - type: "bytes32", - }, - { - indexed: true, - internalType: "address", - name: "account", - type: "address", - }, - { - indexed: true, - internalType: "address", - name: "sender", - type: "address", - }, - ], - name: "RoleRevoked", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "_annualRate", - type: "uint256", - }, - ], - name: "SetAnnualRate", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "address", - name: "_address", - type: "address", - }, - ], - name: "SetBSCValidator", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "address", - name: "_address", - type: "address", - }, - ], - name: "SetManager", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "_minBnb", - type: "uint256", - }, - ], - name: "SetMinBnb", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "address", - name: "_address", - type: "address", - }, - ], - name: "SetRedirectAddress", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "_amount", - type: "uint256", - }, - ], - name: "SetReserveAmount", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "address", - name: "_address", - type: "address", - }, - ], - name: "SetRevenuePool", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "_synFee", - type: "uint256", - }, - ], - name: "SetSynFee", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "address", - name: "_validator", - type: "address", - }, - { - indexed: false, - internalType: "address", - name: "_credit", - type: "address", - }, - { - indexed: false, - internalType: "bool", - name: "toRemove", - type: "bool", - }, - ], - name: "SyncCreditContract", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "_nextUndelegatedRequestIndex", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "_bnbAmount", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "_shares", - type: "uint256", - }, - ], - name: "Undelegate", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "address", - name: "_operator", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "_bnbAmount", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "_shares", - type: "uint256", - }, - ], - name: "UndelegateFrom", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "_amount", - type: "uint256", - }, - ], - name: "UndelegateReserve", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "address", - name: "account", - type: "address", - }, - ], - name: "Unpaused", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "address", - name: "_address", - type: "address", - }, - ], - name: "WhitelistValidator", - type: "event", - }, - { - inputs: [], - name: "BOT", - outputs: [{ internalType: "bytes32", name: "", type: "bytes32" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "DEFAULT_ADMIN_ROLE", - outputs: [{ internalType: "bytes32", name: "", type: "bytes32" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "GUARDIAN", - outputs: [{ internalType: "bytes32", name: "", type: "bytes32" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "TEN_DECIMALS", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "acceptNewManager", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [], - name: "amountToDelegate", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "annualRate", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "uint256", name: "_bnbAmount", type: "uint256" }, - ], - name: "binarySearchCoveredMaxIndex", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_validator", type: "address" }, - ], - name: "claimUndelegated", - outputs: [ - { internalType: "uint256", name: "_uuid", type: "uint256" }, - { internalType: "uint256", name: "_amount", type: "uint256" }, - ], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [{ internalType: "uint256", name: "_idx", type: "uint256" }], - name: "claimWithdraw", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_user", type: "address" }, - { internalType: "uint256", name: "_idx", type: "uint256" }, - ], - name: "claimWithdrawFor", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [], - name: "compoundRewards", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_operator", type: "address" }, - { internalType: "uint256", name: "_bnbAmount", type: "uint256" }, - ], - name: "convertBnbToShares", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [{ internalType: "uint256", name: "_amount", type: "uint256" }], - name: "convertBnbToSnBnb", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_operator", type: "address" }, - { internalType: "uint256", name: "_shares", type: "uint256" }, - ], - name: "convertSharesToBnb", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "uint256", - name: "_amountInSlisBnb", - type: "uint256", - }, - ], - name: "convertSnBnbToBnb", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [{ internalType: "uint256", name: "", type: "uint256" }], - name: "creditContracts", - outputs: [{ internalType: "address", name: "", type: "address" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [{ internalType: "address", name: "", type: "address" }], - name: "creditStates", - outputs: [{ internalType: "bool", name: "", type: "bool" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_validator", type: "address" }, - { internalType: "uint256", name: "_amount", type: "uint256" }, - ], - name: "delegateTo", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [], - name: "delegateVotePower", - outputs: [{ internalType: "bool", name: "", type: "bool" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_delegateTo", type: "address" }, - ], - name: "delegateVoteTo", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [], - name: "deposit", - outputs: [], - stateMutability: "payable", - type: "function", - }, - { - inputs: [], - name: "depositReserve", - outputs: [], - stateMutability: "payable", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_address", type: "address" }, - ], - name: "disableValidator", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [], - name: "getAmountToUndelegate", - outputs: [ - { - internalType: "uint256", - name: "_amountToUndelegate", - type: "uint256", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [{ internalType: "uint256", name: "_uuid", type: "uint256" }], - name: "getBotUndelegateRequest", - outputs: [ - { - components: [ - { - internalType: "uint256", - name: "startTime", - type: "uint256", - }, - { - internalType: "uint256", - name: "endTime", - type: "uint256", - }, - { - internalType: "uint256", - name: "amount", - type: "uint256", - }, - { - internalType: "uint256", - name: "amountInSnBnb", - type: "uint256", - }, - ], - internalType: "struct IStakeManager.BotUndelegateRequest", - name: "", - type: "tuple", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_validator", type: "address" }, - ], - name: "getClaimableAmount", - outputs: [ - { internalType: "uint256", name: "_amount", type: "uint256" }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "getContracts", - outputs: [ - { internalType: "address", name: "_manager", type: "address" }, - { internalType: "address", name: "_slisBnb", type: "address" }, - { internalType: "address", name: "_bscValidator", type: "address" }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_validator", type: "address" }, - ], - name: "getDelegated", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [{ internalType: "uint256", name: "_amount", type: "uint256" }], - name: "getRedelegateFee", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [{ internalType: "bytes32", name: "role", type: "bytes32" }], - name: "getRoleAdmin", - outputs: [{ internalType: "bytes32", name: "", type: "bytes32" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "getSlisBnbWithdrawLimit", - outputs: [ - { - internalType: "uint256", - name: "_slisBnbWithdrawLimit", - type: "uint256", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "getTotalBnbInValidators", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "getTotalPooledBnb", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_user", type: "address" }, - { internalType: "uint256", name: "_idx", type: "uint256" }, - ], - name: "getUserRequestStatus", - outputs: [ - { internalType: "bool", name: "_isClaimable", type: "bool" }, - { internalType: "uint256", name: "_amount", type: "uint256" }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_address", type: "address" }, - ], - name: "getUserWithdrawalRequests", - outputs: [ - { - components: [ - { internalType: "uint256", name: "uuid", type: "uint256" }, - { - internalType: "uint256", - name: "amountInSnBnb", - type: "uint256", - }, - { - internalType: "uint256", - name: "startTime", - type: "uint256", - }, - ], - internalType: "struct IStakeManager.WithdrawalRequest[]", - name: "", - type: "tuple[]", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "bytes32", name: "role", type: "bytes32" }, - { internalType: "address", name: "account", type: "address" }, - ], - name: "grantRole", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { internalType: "bytes32", name: "role", type: "bytes32" }, - { internalType: "address", name: "account", type: "address" }, - ], - name: "hasRole", - outputs: [{ internalType: "bool", name: "", type: "bool" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_slisBnb", type: "address" }, - { internalType: "address", name: "_admin", type: "address" }, - { internalType: "address", name: "_manager", type: "address" }, - { internalType: "address", name: "_bot", type: "address" }, - { internalType: "uint256", name: "_synFee", type: "uint256" }, - { internalType: "address", name: "_revenuePool", type: "address" }, - { internalType: "address", name: "_validator", type: "address" }, - ], - name: "initialize", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [], - name: "minBnb", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "nextConfirmedRequestUUID", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "pause", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [], - name: "paused", - outputs: [{ internalType: "bool", name: "", type: "bool" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "placeholder", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_address", type: "address" }, - ], - name: "proposeNewManager", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "srcValidator", type: "address" }, - { internalType: "address", name: "dstValidator", type: "address" }, - { internalType: "uint256", name: "_amount", type: "uint256" }, - ], - name: "redelegate", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [], - name: "redirectAddress", - outputs: [{ internalType: "address", name: "", type: "address" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_address", type: "address" }, - ], - name: "removeValidator", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { internalType: "bytes32", name: "role", type: "bytes32" }, - { internalType: "address", name: "account", type: "address" }, - ], - name: "renounceRole", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [{ internalType: "uint256", name: "", type: "uint256" }], - name: "requestIndexMap", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "requestUUID", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "uint256", - name: "_amountInSlisBnb", - type: "uint256", - }, - ], - name: "requestWithdraw", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [], - name: "reserveAmount", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "revenuePool", - outputs: [{ internalType: "address", name: "", type: "address" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_address", type: "address" }, - ], - name: "revokeBotRole", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { internalType: "bytes32", name: "role", type: "bytes32" }, - { internalType: "address", name: "account", type: "address" }, - ], - name: "revokeRole", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { internalType: "uint256", name: "_annualRate", type: "uint256" }, - ], - name: "setAnnualRate", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_address", type: "address" }, - ], - name: "setBSCValidator", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_address", type: "address" }, - ], - name: "setBotRole", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [{ internalType: "uint256", name: "_amount", type: "uint256" }], - name: "setMinBnb", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_address", type: "address" }, - ], - name: "setRedirectAddress", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [{ internalType: "uint256", name: "amount", type: "uint256" }], - name: "setReserveAmount", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_address", type: "address" }, - ], - name: "setRevenuePool", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [{ internalType: "uint256", name: "_synFee", type: "uint256" }], - name: "setSynFee", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { internalType: "bytes4", name: "interfaceId", type: "bytes4" }, - ], - name: "supportsInterface", - outputs: [{ internalType: "bool", name: "", type: "bool" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "synFee", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "togglePause", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [], - name: "toggleVote", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [], - name: "totalDelegated", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "totalReserveAmount", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "unbondingBnb", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "undelegate", - outputs: [ - { internalType: "uint256", name: "_uuid", type: "uint256" }, - { internalType: "uint256", name: "_amount", type: "uint256" }, - ], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_operator", type: "address" }, - { internalType: "uint256", name: "_amount", type: "uint256" }, - ], - name: "undelegateFrom", - outputs: [ - { - internalType: "uint256", - name: "_actualBnbAmount", - type: "uint256", - }, - ], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [], - name: "undelegatedQuota", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [{ internalType: "address", name: "", type: "address" }], - name: "validators", - outputs: [{ internalType: "bool", name: "", type: "bool" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_address", type: "address" }, - ], - name: "whitelistValidator", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [{ internalType: "uint256", name: "amount", type: "uint256" }], - name: "withdrawReserve", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { stateMutability: "payable", type: "receive" }, -] as const; diff --git a/packages/plugin-bnb/src/utils/contracts.ts b/packages/plugin-bnb/src/utils/contracts.ts deleted file mode 100644 index 9ee41074b1164..0000000000000 --- a/packages/plugin-bnb/src/utils/contracts.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { elizaLogger } from "@elizaos/core"; -import fs from "node:fs"; -import { createRequire } from "node:module"; -import path from "node:path"; -import { fileURLToPath } from "node:url"; -import solc from "solc"; - -const require = createRequire(import.meta.url); -const __filename = fileURLToPath(import.meta.url); -const __dirname = path.dirname(__filename); -const baseDir = path.resolve(__dirname, "../../plugin-bnb/src/contracts"); - -function getContractSource(contractPath: string) { - return fs.readFileSync(contractPath, "utf8"); -} - -function findImports(importPath: string) { - try { - if (importPath.startsWith("@openzeppelin/")) { - const modPath = require.resolve(importPath); - return { contents: fs.readFileSync(modPath, "utf8") }; - } - - const localPath = path.resolve("./contracts", importPath); - if (fs.existsSync(localPath)) { - return { contents: fs.readFileSync(localPath, "utf8") }; - } - return { error: "File not found" }; - } catch { - return { error: `File not found: ${importPath}` }; - } -} - -export async function compileSolidity(contractFileName: string) { - const contractPath = path.join(baseDir, `${contractFileName}.sol`); - const source = getContractSource(contractPath); - - const input = { - language: "Solidity", - sources: { - [contractFileName]: { - content: source, - }, - }, - settings: { - optimizer: { - enabled: true, - runs: 200, - }, - outputSelection: { - "*": { - "*": ["*"], - }, - }, - }, - }; - - elizaLogger.debug("Compiling contract..."); - - try { - const output = JSON.parse( - solc.compile(JSON.stringify(input), { import: findImports }) - ); - - if (output.errors) { - const hasError = output.errors.some( - (error) => error.type === "Error" - ); - if (hasError) { - throw new Error( - `Compilation errors: ${JSON.stringify(output.errors, null, 2)}` - ); - } - elizaLogger.warn("Compilation warnings:", output.errors); - } - - const contractName = path.basename(contractFileName, ".sol"); - const contract = output.contracts[contractFileName][contractName]; - - if (!contract) { - throw new Error("Contract compilation result is empty"); - } - - elizaLogger.debug("Contract compiled successfully"); - return { - abi: contract.abi, - bytecode: contract.evm.bytecode.object, - }; - } catch (error) { - elizaLogger.error("Compilation failed:", error.message); - throw error; - } -} diff --git a/packages/plugin-bnb/tsconfig.json b/packages/plugin-bnb/tsconfig.json deleted file mode 100644 index 8d95aebdba7c7..0000000000000 --- a/packages/plugin-bnb/tsconfig.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "./src", - "typeRoots": [ - "./node_modules/@types", - "./src/types" - ], - "declaration": true, - "strictNullChecks": true - }, - "include": [ - "src" - ] -} \ No newline at end of file diff --git a/packages/plugin-bnb/tsup.config.ts b/packages/plugin-bnb/tsup.config.ts deleted file mode 100644 index a68ccd636adf1..0000000000000 --- a/packages/plugin-bnb/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "viem", - "@lifi/sdk", - ], -}); diff --git a/packages/plugin-bootstrap/package.json b/packages/plugin-bootstrap/package.json index a6ba30f04ffe6..9febda46f1f66 100644 --- a/packages/plugin-bootstrap/package.json +++ b/packages/plugin-bootstrap/package.json @@ -1,6 +1,6 @@ { "name": "@elizaos/plugin-bootstrap", - "version": "0.25.6-alpha.1", + "version": "0.25.8", "type": "module", "main": "dist/index.js", "module": "dist/index.js", @@ -22,6 +22,9 @@ "@elizaos/core": "workspace:*", "tsup": "8.3.5" }, + "devDependencies": { + "@types/node": "^22.10.5" + }, "scripts": { "build": "tsup --format esm --dts", "dev": "tsup --format esm --dts --watch", @@ -29,5 +32,8 @@ }, "peerDependencies": { "whatwg-url": "7.1.0" + }, + "publishConfig": { + "access": "public" } } diff --git a/packages/plugin-bootstrap/src/providers/facts.ts b/packages/plugin-bootstrap/src/providers/facts.ts index 874ca6d21c6d7..a46a7f5f0f5dd 100644 --- a/packages/plugin-bootstrap/src/providers/facts.ts +++ b/packages/plugin-bootstrap/src/providers/facts.ts @@ -16,22 +16,20 @@ const factsProvider: Provider = { actors: state?.actorsData, }); - const _embedding = await embed(runtime, recentMessages); + const embedding = await embed(runtime, recentMessages); const memoryManager = new MemoryManager({ runtime, tableName: "facts", }); - const relevantFacts = []; - // await memoryManager.searchMemoriesByEmbedding( - // embedding, - // { - // roomId: message.roomId, - // count: 10, - // agentId: runtime.agentId, - // } - // ); + const relevantFacts = await memoryManager.searchMemoriesByEmbedding( + embedding, + { + roomId: message.roomId, + count: 10, + } + ); const recentFactsData = await memoryManager.getMemories({ roomId: message.roomId, diff --git a/packages/plugin-chainbase/README.md b/packages/plugin-chainbase/README.md deleted file mode 100644 index 1439da3fe4b3b..0000000000000 --- a/packages/plugin-chainbase/README.md +++ /dev/null @@ -1,66 +0,0 @@ -# Chainbase Plugin for Eliza - -The Chainbase Plugin for Eliza bridges the gap between on-chain data and AI agents, enabling natural language interactions with blockchain data across multiple networks. This plugin leverages Chainbase's comprehensive blockchain data infrastructure to provide real-time insights and analytics. - -## Description - -This plugin serves as a powerful interface between Eliza AI agents and blockchain data, allowing users to query and analyze on-chain information using natural language. It transforms complex blockchain queries into actionable insights without requiring deep technical knowledge. - -## Key Features - -- **Multi-chain Data Access**: Access comprehensive data across multiple blockchain networks -- **Natural Language Processing**: Convert natural language queries into blockchain data analytics and insights -- **Real-time Data**: Get up-to-date blockchain information and analytics - -## Supported Networks - -- Ethereum -- Polygon -- BNB Smart Chain (BSC) -- Avalanche -- Arbitrum One -- Optimism -- Base -- zkSync -- Merlin - -## Usage Examples - -### On-chain Data Queries - -```plaintext -Query: "query onchain data: This address 0x8308964da9ed5d2e8012023d7c7ef02f9e6438c7 which tokens on Ethereum are held" -``` - -This query will return the token holdings for the specified Ethereum address. - -```plaintext -Query: "query onchain data: List the top 10 Ethereum blocks by total gas used in the last 24 hours" -``` - -This query will analyze and return gas usage statistics for recent Ethereum blocks. - -```plaintext -Query: "query onchain data: The address 0x8308964da9ed5d2e8012023d7c7ef02f9e6438c7 last 10 Ethereum token transfer" -``` - -This query will fetch the most recent 10 token transfer events for the specified Ethereum address, including both incoming and outgoing transfers. - -## Components - -- **Actions**: Pre-configured blockchain data retrieval and analysis actions -- **Providers**: Data providers for different blockchain networks -- **Evaluators**: Analysis tools for blockchain data interpretation -- **Services**: Specialized services for data processing and transformation - -## Getting Started - -To use this plugin, you'll need a Chainbase API key: - -1. Visit [Chainbase Platform](https://console.chainbase.com) to create an account -2. Once logged in, you can obtain a free API key from your dashboard -3. Set your API key as the `CHAINBASE_API_KEY` environment variable - -For development and testing purposes, you can use the API key "demo" to test the basic functionality. - -For more detailed information about the available APIs and endpoints, please refer to the [Chainbase API Documentation](https://docs.chainbase.com/api-reference/overview). diff --git a/packages/plugin-chainbase/__tests__/actions/queryData.test.ts b/packages/plugin-chainbase/__tests__/actions/queryData.test.ts deleted file mode 100644 index 6c34b97d97b10..0000000000000 --- a/packages/plugin-chainbase/__tests__/actions/queryData.test.ts +++ /dev/null @@ -1,146 +0,0 @@ -import { describe, expect, it, vi, beforeEach } from 'vitest'; -import { queryBlockChainData } from '../../src/actions/queryData'; -import { generateSQL, executeQuery } from '../../src/libs/chainbase'; -import { ModelClass, generateText } from '@elizaos/core'; - -// Mock external dependencies -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - log: vi.fn(), - error: vi.fn() - }, - generateText: vi.fn(), - ModelClass: { - SMALL: 'small', - LARGE: 'large' - } -})); - -vi.mock('../../src/libs/chainbase', () => ({ - generateSQL: vi.fn(), - executeQuery: vi.fn() -})); - -describe('queryBlockChainData', () => { - let mockRuntime; - let mockMessage; - let mockCallback; - - beforeEach(() => { - mockRuntime = { - character: { - settings: { - secrets: { - CHAINBASE_API_KEY: 'test-api-key' - } - } - } - }; - - mockMessage = { - content: { - text: 'query onchain data: Get the latest block number' - } - }; - - mockCallback = vi.fn(); - - // Reset all mocks - vi.clearAllMocks(); - }); - - describe('validation', () => { - it('should validate successfully when API key is present', async () => { - const result = await queryBlockChainData.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - - it('should fail validation when API key is missing', async () => { - const runtimeWithoutKey = { - character: { - settings: { - secrets: {} - } - } - }; - const result = await queryBlockChainData.validate(runtimeWithoutKey, mockMessage); - expect(result).toBe(false); - }); - }); - - describe('handler', () => { - it('should handle valid query and return formatted response', async () => { - const mockSQL = 'SELECT block_number FROM ethereum.blocks ORDER BY block_number DESC LIMIT 1'; - const mockQueryResult = { - columns: ['block_number'], - data: [[12345678]], - totalRows: 1 - }; - const mockFormattedResponse = 'The latest block number is 12345678'; - - vi.mocked(generateSQL).mockResolvedValue(mockSQL); - vi.mocked(executeQuery).mockResolvedValue(mockQueryResult); - vi.mocked(generateText).mockResolvedValue(mockFormattedResponse); - - await queryBlockChainData.handler(mockRuntime, mockMessage, undefined, undefined, mockCallback); - - expect(generateSQL).toHaveBeenCalledWith('Get the latest block number'); - expect(executeQuery).toHaveBeenCalledWith(mockSQL); - expect(generateText).toHaveBeenCalled(); - expect(mockCallback).toHaveBeenCalledWith({ - text: mockFormattedResponse - }); - }); - - it('should handle missing query prefix', async () => { - const messageWithoutPrefix = { - content: { - text: 'Get the latest block number' - } - }; - - await queryBlockChainData.handler(mockRuntime, messageWithoutPrefix, undefined, undefined, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Please use the format: query onchain data:') - }); - expect(generateSQL).not.toHaveBeenCalled(); - }); - - it('should handle empty query', async () => { - const messageWithEmptyQuery = { - content: { - text: 'query onchain data: ' - } - }; - - await queryBlockChainData.handler(mockRuntime, messageWithEmptyQuery, undefined, undefined, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Please provide a specific query') - }); - expect(generateSQL).not.toHaveBeenCalled(); - }); - - it('should handle API errors gracefully', async () => { - vi.mocked(generateSQL).mockRejectedValue(new Error('API Error')); - - await queryBlockChainData.handler(mockRuntime, mockMessage, undefined, undefined, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('An error occurred') - }); - }); - }); - - describe('action properties', () => { - it('should have correct action properties', () => { - expect(queryBlockChainData.name).toBe('QUERY_BLOCKCHAIN_DATA'); - expect(queryBlockChainData.description).toBeDefined(); - expect(queryBlockChainData.similes).toBeDefined(); - expect(Array.isArray(queryBlockChainData.similes)).toBe(true); - expect(queryBlockChainData.examples).toBeDefined(); - expect(Array.isArray(queryBlockChainData.examples)).toBe(true); - }); - }); -}); diff --git a/packages/plugin-chainbase/__tests__/actions/retrieveTokenBalance.test.ts b/packages/plugin-chainbase/__tests__/actions/retrieveTokenBalance.test.ts deleted file mode 100644 index c460867ebe55e..0000000000000 --- a/packages/plugin-chainbase/__tests__/actions/retrieveTokenBalance.test.ts +++ /dev/null @@ -1,213 +0,0 @@ -import { describe, expect, it, vi, beforeEach } from 'vitest'; -import { retrieveTokenBalance } from '../../src/actions/retrieveTokenBalance'; -import { getTokenBalances } from '../../src/libs/chainbase'; -import { ModelClass, composeContext, generateObject, generateText } from '@elizaos/core'; - -// Mock external dependencies -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - log: vi.fn(), - error: vi.fn() - }, - composeContext: vi.fn(), - generateObject: vi.fn(), - generateText: vi.fn(), - ModelClass: { - SMALL: 'small', - LARGE: 'large' - } -})); - -vi.mock('../../src/libs/chainbase', () => ({ - getTokenBalances: vi.fn() -})); - -describe('retrieveTokenBalance', () => { - let mockRuntime; - let mockMessage; - let mockState; - let mockCallback; - - beforeEach(() => { - mockRuntime = { - character: { - settings: { - secrets: { - CHAINBASE_API_KEY: 'test-api-key' - } - } - }, - composeState: vi.fn().mockResolvedValue({ - agentId: 'test-agent', - roomId: 'test-room' - }), - updateRecentMessageState: vi.fn().mockImplementation(state => Promise.resolve(state)) - }; - - mockMessage = { - content: { - text: 'Get token balances for address 0x123' - } - }; - - mockState = { - agentId: 'test-agent', - roomId: 'test-room' - }; - - mockCallback = vi.fn(); - - // Reset all mocks - vi.clearAllMocks(); - }); - - describe('validation', () => { - it('should validate successfully when API key is present', async () => { - const result = await retrieveTokenBalance.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - - it('should fail validation when API key is missing', async () => { - const runtimeWithoutKey = { - character: { - settings: { - secrets: {} - } - } - }; - const result = await retrieveTokenBalance.validate(runtimeWithoutKey, mockMessage); - expect(result).toBe(false); - }); - }); - - describe('handler', () => { - it('should handle valid token balance request', async () => { - const mockQueryParams = { - object: { - chain_id: '1', - address: '0x1234567890123456789012345678901234567890', - contract_address: '0x4567890123456789012345678901234567890123' - } - }; - - const mockTokens = [{ - name: 'Test Token', - symbol: 'TEST', - balance: '0x0de0b6b3a7640000', // 1 ETH in hex - decimals: 18, - contract_address: '0x456' - }]; - - const mockFormattedResponse = 'Test Token balance: 1.0 TEST'; - - vi.mocked(composeContext).mockReturnValue('mock-context'); - vi.mocked(generateObject).mockResolvedValue(mockQueryParams); - vi.mocked(getTokenBalances).mockResolvedValue(mockTokens); - vi.mocked(generateText).mockResolvedValue(mockFormattedResponse); - - await retrieveTokenBalance.handler(mockRuntime, mockMessage, mockState, undefined, mockCallback); - - expect(composeContext).toHaveBeenCalled(); - expect(generateObject).toHaveBeenCalled(); - expect(getTokenBalances).toHaveBeenCalledWith({ - chain_id: 1, - address: '0x1234567890123456789012345678901234567890', - contract_address: '0x4567890123456789012345678901234567890123' - }); - expect(mockCallback).toHaveBeenCalledWith({ - text: mockFormattedResponse - }); - }); - - it('should handle invalid query parameters', async () => { - const mockInvalidQueryParams = { - object: { - // Missing required fields - } - }; - - vi.mocked(composeContext).mockReturnValue('mock-context'); - vi.mocked(generateObject).mockResolvedValue(mockInvalidQueryParams); - - await retrieveTokenBalance.handler(mockRuntime, mockMessage, mockState, undefined, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith( - { - text: 'Invalid query params. Please check the inputs.' - }, - [] - ); - expect(getTokenBalances).not.toHaveBeenCalled(); - }); - - it('should handle API errors gracefully', async () => { - const mockQueryParams = { - object: { - chain_id: '1', - address: '0x1234567890123456789012345678901234567890', - contract_address: '0x4567890123456789012345678901234567890123' - } - }; - - vi.mocked(composeContext).mockReturnValue('mock-context'); - vi.mocked(generateObject).mockResolvedValue(mockQueryParams); - vi.mocked(getTokenBalances).mockRejectedValue(new Error('API Error')); - - await retrieveTokenBalance.handler(mockRuntime, mockMessage, mockState, undefined, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith({ - text: '❌ An error occurred while retrieving token balances. Please try again later.' - }); - }); - - it('should correctly format token balances', async () => { - const mockQueryParams = { - object: { - chain_id: '1', - address: '0x1234567890123456789012345678901234567890' - } - }; - - const mockTokens = [ - { - name: 'Token1', - symbol: 'TK1', - balance: '0x0de0b6b3a7640000', // 1 ETH in hex - decimals: 18, - contract_address: '0x456' - }, - { - name: 'Token2', - symbol: 'TK2', - balance: '0x0de0b6b3a7640000', // 1 ETH in hex - decimals: 6, - contract_address: '0x789' - } - ]; - - const mockFormattedResponse = 'Token balances: 1.0 TK1, 1000000.0 TK2'; - - vi.mocked(composeContext).mockReturnValue('mock-context'); - vi.mocked(generateObject).mockResolvedValue(mockQueryParams); - vi.mocked(getTokenBalances).mockResolvedValue(mockTokens); - vi.mocked(generateText).mockResolvedValue(mockFormattedResponse); - - await retrieveTokenBalance.handler(mockRuntime, mockMessage, mockState, undefined, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith({ - text: mockFormattedResponse - }); - }); - }); - - describe('action properties', () => { - it('should have correct action properties', () => { - expect(retrieveTokenBalance.name).toBe('RETRIEVE_TOKEN_BALANCE'); - expect(retrieveTokenBalance.description).toBeDefined(); - expect(retrieveTokenBalance.similes).toBeDefined(); - expect(Array.isArray(retrieveTokenBalance.similes)).toBe(true); - expect(retrieveTokenBalance.examples).toBeDefined(); - expect(Array.isArray(retrieveTokenBalance.examples)).toBe(true); - }); - }); -}); diff --git a/packages/plugin-chainbase/__tests__/libs/chainbase.test.ts b/packages/plugin-chainbase/__tests__/libs/chainbase.test.ts deleted file mode 100644 index 066652d7d9ff9..0000000000000 --- a/packages/plugin-chainbase/__tests__/libs/chainbase.test.ts +++ /dev/null @@ -1,220 +0,0 @@ -import { describe, expect, it, vi, beforeEach } from 'vitest'; -import { generateSQL, executeQuery, getTokenBalances } from '../../src/libs/chainbase'; -import { CHAINBASE_API_URL_ENDPOINT } from '../../src/constants'; - -// Mock fetch -const mockFetch = vi.fn(); -global.fetch = mockFetch; - -// Mock environment variables -process.env.CHAINBASE_API_KEY = 'test-api-key'; - -describe('chainbase library', () => { - beforeEach(() => { - mockFetch.mockReset(); - }); - - describe('generateSQL', () => { - it('should generate SQL from natural language prompt', async () => { - const mockResponse = { - sql: 'SELECT block_number FROM ethereum.blocks LIMIT 1' - }; - mockFetch.mockResolvedValueOnce({ - json: () => Promise.resolve(mockResponse) - }); - - const result = await generateSQL('Get the latest block number'); - - expect(mockFetch).toHaveBeenCalledWith( - `${CHAINBASE_API_URL_ENDPOINT}/api/v1/text2sql`, - expect.objectContaining({ - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: expect.any(String) - }) - ); - expect(result).toBe(mockResponse.sql); - }); - - it('should handle API errors', async () => { - mockFetch.mockRejectedValueOnce(new Error('API Error')); - - await expect(generateSQL('Invalid query')).rejects.toThrow('API Error'); - }); - }); - - describe('executeQuery', () => { - it('should execute SQL query and return results', async () => { - const mockExecuteResponse = { - data: [{ - executionId: 'test-execution-id' - }] - }; - - const mockPollResponse = { - data: { - status: 'FINISHED', - columns: ['block_number'], - data: [[12345678]], - total_row_count: 1 - } - }; - - vi.mocked(fetch) - .mockResolvedValueOnce({ - json: () => Promise.resolve(mockExecuteResponse) - } as Response) - .mockResolvedValueOnce({ - json: () => Promise.resolve(mockPollResponse) - } as Response); - - const result = await executeQuery('SELECT block_number FROM ethereum.blocks LIMIT 1'); - expect(result).toEqual({ - columns: ['block_number'], - data: [[12345678]], - totalRows: 1 - }); - }); - - it('should handle missing execution ID', async () => { - const mockExecuteResponse = { - data: [{}] // No executionId - }; - - mockFetch.mockResolvedValueOnce({ - json: () => Promise.resolve(mockExecuteResponse) - }); - - await expect(executeQuery('SELECT * FROM invalid.table')) - .rejects.toThrow('Failed to get execution_id'); - }); - - it('should handle query execution errors', async () => { - const mockExecuteResponse = { - data: [{ - executionId: 'test-execution-id' - }] - }; - - const mockPollResponse = { - data: { - status: 'FAILED', - message: 'Query execution failed' - } - }; - - vi.mocked(fetch) - .mockResolvedValueOnce({ - json: () => Promise.resolve(mockExecuteResponse) - } as Response) - .mockResolvedValueOnce({ - json: () => Promise.resolve(mockPollResponse) - } as Response); - - await expect(executeQuery('SELECT * FROM invalid.table')) - .rejects.toThrow('Query execution failed'); - }); - - it('should handle timeout after max retries', async () => { - // Mock a shorter MAX_RETRIES value for testing - const originalMaxRetries = process.env.MAX_RETRIES; - process.env.MAX_RETRIES = '2'; - - const mockExecuteResponse = { - data: [{ - executionId: 'test-execution-id' - }] - }; - - const mockPollResponse = { - data: { - status: 'RUNNING' - } - }; - - vi.mocked(fetch) - .mockResolvedValueOnce({ - json: () => Promise.resolve(mockExecuteResponse) - } as Response) - .mockResolvedValue({ - json: () => Promise.resolve(mockPollResponse) - } as Response); - - // Mock setTimeout to resolve immediately - vi.spyOn(global, 'setTimeout').mockImplementation((callback: any) => { - callback(); - return 0 as any; - }); - - await expect(executeQuery('SELECT * FROM large.table')) - .rejects.toThrow('Query timeout after 180 seconds'); - - // Restore original MAX_RETRIES value - process.env.MAX_RETRIES = originalMaxRetries; - }); - }); - - describe('getTokenBalances', () => { - it('should retrieve token balances for an address', async () => { - const mockResponse = { - data: [{ - name: 'Test Token', - symbol: 'TEST', - balance: '0x0de0b6b3a7640000', - decimals: 18, - contract_address: '0x123' - }] - }; - - vi.mocked(fetch).mockResolvedValueOnce({ - json: () => Promise.resolve(mockResponse) - } as Response); - - const result = await getTokenBalances({ - chain_id: 1, - address: '0x123', - contract_address: '0x456' - }); - - expect(fetch).toHaveBeenCalledWith( - expect.stringContaining('/v1/account/tokens'), - expect.objectContaining({ - headers: { - 'x-api-key': 'test-api-key' - }, - method: 'GET' - }) - ); - - expect(result).toEqual(mockResponse.data); - }); - - it('should handle API errors', async () => { - mockFetch.mockRejectedValueOnce(new Error('API Error')); - - await expect(getTokenBalances({ - chain_id: 1, - address: '0x123' - })).rejects.toThrow('API Error'); - }); - - it('should handle empty response', async () => { - const mockResponse = { - data: [] - }; - - mockFetch.mockResolvedValueOnce({ - json: () => Promise.resolve(mockResponse) - }); - - const result = await getTokenBalances({ - chain_id: 1, - address: '0x123' - }); - - expect(result).toEqual([]); - }); - }); -}); diff --git a/packages/plugin-chainbase/biome.json b/packages/plugin-chainbase/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-chainbase/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-chainbase/package.json b/packages/plugin-chainbase/package.json deleted file mode 100644 index 513c70dcccb9d..0000000000000 --- a/packages/plugin-chainbase/package.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "name": "@elizaos/plugin-chainbase", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*" - }, - "devDependencies": { - "vitest": "^3.0.5", - "@biomejs/biome": "1.9.4" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - } -} diff --git a/packages/plugin-chainbase/src/actions/queryData.ts b/packages/plugin-chainbase/src/actions/queryData.ts deleted file mode 100644 index 4cada48548b48..0000000000000 --- a/packages/plugin-chainbase/src/actions/queryData.ts +++ /dev/null @@ -1,231 +0,0 @@ -import { - type Action, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - generateText, - ModelClass, -} from "@elizaos/core"; -import { generateSQL, executeQuery } from "../libs/chainbase"; -import { responsePrompt } from "../templates"; - -const QUERY_PREFIX = "query onchain data:"; - -export const queryBlockChainData: Action = { - name: "QUERY_BLOCKCHAIN_DATA", - similes: ["ANALYZE_BLOCKCHAIN", "GET_CHAIN_DATA", "QUERY_ONCHAIN_DATA"], - description: - "Query blockchain data using natural language starting with 'query onchain data:'", - - validate: async (runtime: IAgentRuntime, _message: Memory) => { - elizaLogger.log("Validating runtime for QUERY_BLOCKCHAIN_DATA..."); - return !!( - runtime.character.settings.secrets?.CHAINBASE_API_KEY || - process.env.CHAINBASE_API_KEY - ); - }, - - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options?: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - try { - const messageText = message.content.text.toLowerCase(); - - if (!messageText.includes(QUERY_PREFIX)) { - callback({ - text: `Please use the format: ${QUERY_PREFIX} `, - }); - return; - } - - const queryText = message.content.text - .slice( - message.content.text.toLowerCase().indexOf(QUERY_PREFIX) + - QUERY_PREFIX.length - ) - .trim(); - - if (!queryText) { - callback({ - text: `Please provide a specific query after '${QUERY_PREFIX}'`, - }); - return; - } - - // Generate SQL from natural language - const sql = await generateSQL(queryText); - - // Execute query on Chainbase - const result = await executeQuery(sql); - - // Use generateText to format the response - const formattedResponse = await generateText({ - runtime, - context: responsePrompt( - { - sql, - columns: result.columns, - data: result.data, - totalRows: result.totalRows, - }, - queryText - ), - modelClass: ModelClass.SMALL, - }); - - callback({ - text: formattedResponse, - }); - } catch (error) { - elizaLogger.error("Error in queryChainbase action:", error); - callback({ - text: "An error occurred while querying the blockchain. Please try again later.", - }); - return ; - } - }, - - examples: [ - [ - { - user: "user", - content: { - text: "query onchain data: Calculate the average gas used per block on Ethereum in the last 100 blocks", - action: "QUERY_BLOCKCHAIN_DATA", - }, - }, - { - user: "assistant", - content: { - text: "📊 Query Results...", - }, - }, - ], - [ - { - user: "user", - content: { - text: "query onchain data: Show me the top 10 active Ethereum addresses by transaction count in the last 1000 blocks", - action: "QUERY_BLOCKCHAIN_DATA", - }, - }, - { - user: "assistant", - content: { - text: "📊 Query Results...", - }, - }, - ], - [ - { - user: "user", - content: { - text: "query onchain data: List Ethereum transactions with value greater than 1 ETH in the last 1000 blocks", - action: "QUERY_BLOCKCHAIN_DATA", - }, - }, - { - user: "assistant", - content: { - text: "📊 Query Results...", - }, - }, - ], - [ - { - user: "user", - content: { - text: "query onchain data: Calculate the total ETH transaction fees collected in the last 100 Ethereum blocks", - action: "QUERY_BLOCKCHAIN_DATA", - }, - }, - { - user: "assistant", - content: { - text: "📊 Query Results...", - }, - }, - ], - [ - { - user: "user", - content: { - text: "query onchain data: Show me the distribution of ETH transaction values in the last 1000 Ethereum transactions", - action: "QUERY_BLOCKCHAIN_DATA", - }, - }, - { - user: "assistant", - content: { - text: "📊 Query Results...", - }, - }, - ], - [ - { - user: "user", - content: { - text: "query onchain data: Find Ethereum blocks that have more than 200 transactions in the last 24 hours", - action: "QUERY_BLOCKCHAIN_DATA", - }, - }, - { - user: "assistant", - content: { - text: "📊 Query Results...", - }, - }, - ], - [ - { - user: "user", - content: { - text: "query onchain data: What's the average gas price trend on Ethereum mainnet in the last 1000 blocks", - action: "QUERY_BLOCKCHAIN_DATA", - }, - }, - { - user: "assistant", - content: { - text: "📊 Query Results...", - }, - }, - ], - [ - { - user: "user", - content: { - text: "query onchain data: Show me Ethereum addresses that have both sent and received ETH in the last 100 blocks", - action: "QUERY_BLOCKCHAIN_DATA", - }, - }, - { - user: "assistant", - content: { - text: "📊 Query Results...", - }, - }, - ], - [ - { - user: "user", - content: { - text: "query onchain data: List the top 10 Ethereum blocks by total gas used in the last 24 hours", - action: "QUERY_BLOCKCHAIN_DATA", - }, - }, - { - user: "assistant", - content: { - text: "📊 Query Results...", - }, - }, - ], - ], -}; diff --git a/packages/plugin-chainbase/src/actions/retrieveTokenBalance.ts b/packages/plugin-chainbase/src/actions/retrieveTokenBalance.ts deleted file mode 100644 index fab9a800f467b..0000000000000 --- a/packages/plugin-chainbase/src/actions/retrieveTokenBalance.ts +++ /dev/null @@ -1,144 +0,0 @@ -import { - type Action, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - ModelClass, - composeContext, - generateObject, - generateText, -} from "@elizaos/core"; -import Big from "big.js"; -import { getTokenBalances } from "../libs/chainbase"; -import { - retrieveTokenBalanceTemplate, - formatTokenBalancePrompt, -} from "../templates"; -import { - RetrieveTokenBalanceReqSchema, - isRetrieveTokenBalanceReq, -} from "../types"; - -export const retrieveTokenBalance: Action = { - name: "RETRIEVE_TOKEN_BALANCE", - similes: [ - "RETRIEVE_ALL_TOKENS", - "FETCH_ERC20_TOKENS", - "RETRIEVE_ERC20_TOKENS_BALANCE", - "RETRIEVE_TOKEN_BALANCE_LIST", - ], - description: - "Retrieve all token balances for all ERC20 tokens for a specified address.", - - validate: async (runtime: IAgentRuntime, _message: Memory) => { - elizaLogger.log("Validating runtime for RETRIEVE_TOKEN_BALANCE..."); - return !!( - runtime.character.settings.secrets?.CHAINBASE_API_KEY || - process.env.CHAINBASE_API_KEY - ); - }, - - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state?: State, - _options?: { [key: string]: unknown }, - callback?: HandlerCallback, - ) => { - try { - elizaLogger.log("Composing state for message:", message); - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - const context = composeContext({ - state: currentState, - template: retrieveTokenBalanceTemplate, - }); - - const queryParams = await generateObject({ - runtime, - context, - modelClass: ModelClass.LARGE, - schema: RetrieveTokenBalanceReqSchema, - }); - - if (!isRetrieveTokenBalanceReq(queryParams.object)) { - callback( - { - text: "Invalid query params. Please check the inputs.", - }, - [], - ); - return; - } - - const { contract_address, address, chain_id } = queryParams.object; - - elizaLogger.log("Querying token balances:", { - chain_id, - address, - contract_address, - }); - const tokens = await getTokenBalances({ - chain_id: Number(chain_id), - address, - contract_address, - }); - - // Convert hex balance to decimal and adjust for token decimals - const processedTokens = tokens.map((token) => ({ - ...token, - balance: token.balance - ? new Big(Number.parseInt(token.balance, 16).toString()) - .div(new Big(10).pow(token.decimals)) - .toFixed(18) - : "0", - })); - - if (processedTokens.length > 0) { - const formattedResponse = await generateText({ - runtime, - context: formatTokenBalancePrompt(processedTokens, address), - modelClass: ModelClass.SMALL, - }); - - callback({ - text: formattedResponse, - }); - } else { - callback({ - text: `💫 No token balances found for address ${address}`, - }); - } - } catch (error) { - elizaLogger.error("Error in retrieveTokenBalance:", error.message); - callback({ - text: "❌ An error occurred while retrieving token balances. Please try again later.", - }); - } - }, - - examples: [ - [ - { - user: "user", - content: { - text: "Retrieve Ethereum token balances of address 0x7719fD6A5a951746c8c26E3DFd143f6b96Db6412", - action: "RETRIEVE_TOKEN_BALANCE", - }, - }, - { - user: "assistant", - content: { - text: "Sure! there're 20.25 USDT in address 0x7719fD6A5a951746c8c26E3DFd143f6b96Db6412", - }, - }, - ], - ], -}; diff --git a/packages/plugin-chainbase/src/constants.ts b/packages/plugin-chainbase/src/constants.ts deleted file mode 100644 index c57baba388bff..0000000000000 --- a/packages/plugin-chainbase/src/constants.ts +++ /dev/null @@ -1 +0,0 @@ -export const CHAINBASE_API_URL_ENDPOINT = "https://api.chainbase.com"; diff --git a/packages/plugin-chainbase/src/index.ts b/packages/plugin-chainbase/src/index.ts deleted file mode 100644 index 8311cb8e41d7f..0000000000000 --- a/packages/plugin-chainbase/src/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { retrieveTokenBalance } from "./actions/retrieveTokenBalance"; -import { queryBlockChainData } from "./actions/queryData"; - -export const chainbasePlugin: Plugin = { - name: "chainbase", - description: "Chainbase Plugin for Eliza", - actions: [retrieveTokenBalance, queryBlockChainData], - providers: [], - services: [], -}; diff --git a/packages/plugin-chainbase/src/libs/chainbase.ts b/packages/plugin-chainbase/src/libs/chainbase.ts deleted file mode 100644 index 5c56dc3d76bd7..0000000000000 --- a/packages/plugin-chainbase/src/libs/chainbase.ts +++ /dev/null @@ -1,195 +0,0 @@ -import { elizaLogger } from "@elizaos/core"; -import { CHAINBASE_API_URL_ENDPOINT } from "../constants"; - -export interface TokenWithBalance { - name: string; - symbol: string; - balance: string; - decimals: number; - contract_address: string; -} - -export interface TokenBalanceParams { - chain_id: number; - address: string; - contract_address?: string; -} - -export async function generateSQL(prompt: string): Promise { - try { - const response = await fetch( - `${CHAINBASE_API_URL_ENDPOINT}/api/v1/text2sql`, - { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify({ - assistant_id: "6b86c502-d203-4f6b-baf6-f406c23a9421", - input: { - messages: [ - { - type: "human", - content: prompt, - }, - ], - }, - }), - } - ); - - const data = await response.json(); - elizaLogger.log("Generated SQL:", data.sql); - return data.sql; - } catch (error) { - elizaLogger.error("Error generating SQL:", error); - throw error; - } -} - -const POLL_INTERVAL = 1000; // 1 second -const MAX_RETRIES = 180; // Maximum number of retries (3 minute) - -// Add new utility function -function getChainbaseApiKey(): string { - const apiKey = process.env.CHAINBASE_API_KEY; - if (!apiKey) { - throw new Error( - "CHAINBASE_API_KEY is not set in environment variables" - ); - } - return apiKey; -} - -export async function executeQuery(sql: string): Promise<{ - columns: string[]; - data: unknown[]; - totalRows: number; -}> { - try { - const apiKey = getChainbaseApiKey(); - - // Process SQL line breaks and semicolons - const processedSql = sql - .replace(/\n/g, " ") // Replace line breaks with spaces - .replace(/;/g, "") // Remove semicolons - .trim(); - - // 1. Execute query - elizaLogger.log("Executing Chainbase query:", processedSql); - const executeResponse = await fetch( - `${CHAINBASE_API_URL_ENDPOINT}/api/v1/query/execute`, - { - method: "POST", - headers: { - "Content-Type": "application/json", - "X-API-KEY": apiKey, - }, - body: JSON.stringify({ sql: processedSql }), - } - ); - - const executeData = await executeResponse.json(); - elizaLogger.log("Execute response:", executeData); - const executionId = executeData.data[0].executionId; - - if (!executionId) { - throw new Error("Failed to get execution_id from query execution"); - } - - // 2. Poll for results - let retries = 0; - while (retries < MAX_RETRIES) { - elizaLogger.log( - `Polling results (attempt ${retries + 1}/${MAX_RETRIES})...` - ); - const resultResponse = await fetch( - `${CHAINBASE_API_URL_ENDPOINT}/api/v1/execution/${executionId}/results`, - { - method: "GET", - headers: { - "Content-Type": "application/json", - "X-API-KEY": apiKey, - }, - } - ); - - const response = await resultResponse.json(); - elizaLogger.log("Poll response:", response); - - // If query fails, return error immediately - if (response.data.status === "FAILED") { - throw new Error( - response.data.message || "Query failed with unknown error" - ); - } - - // If query completes, return results - if (response.data.status === "FINISHED") { - elizaLogger.log("Query succeeded:", response.data); - return { - columns: response.data.columns, - data: response.data.data, - totalRows: response.data.total_row_count, - }; - } - - // Wait specified interval before polling again - await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL)); - retries++; - } - - throw new Error("Query timeout after 180 seconds"); - } catch (error) { - elizaLogger.error("Error executing Chainbase query:", error); - throw error; - } -} - -export async function getTokenBalances( - params: TokenBalanceParams -): Promise { - try { - const apiKey = getChainbaseApiKey(); - - elizaLogger.log("Fetching token balances:", params); - - const response = await fetch( - `${CHAINBASE_API_URL_ENDPOINT}/v1/account/tokens?chain_id=${params.chain_id}&address=${params.address}&limit=100`, - { - method: "GET", - headers: { - "x-api-key": apiKey, - }, - } - ); - - const { data } = (await response.json()) as { - data?: TokenWithBalance[]; - }; - - if (!data) { - throw new Error("No data returned from Chainbase API"); - } - - elizaLogger.log("Token balances retrieved:", data); - - // Filter out tokens without name and symbol - return data.filter( - (token) => !(token.name.length === 0 && token.symbol.length === 0) - ); - } catch (error) { - elizaLogger.error("Error fetching token balances:", error); - throw error; - } -} - -export function formatTokenBalance(token: TokenWithBalance): string { - // Handle balance in hex format - const balanceValue = token.balance.startsWith("0x") - ? Number.parseInt(token.balance, 16) - : Number.parseFloat(token.balance); - - const balance = balanceValue / (10 ** token.decimals); - return `${balance.toLocaleString(undefined, { maximumFractionDigits: 4 })} ${token.symbol} (${token.name})`; -} diff --git a/packages/plugin-chainbase/src/templates/index.ts b/packages/plugin-chainbase/src/templates/index.ts deleted file mode 100644 index f021df2bba104..0000000000000 --- a/packages/plugin-chainbase/src/templates/index.ts +++ /dev/null @@ -1,118 +0,0 @@ -export const responsePrompt = ( - result: any, - query: string -) => `You are a blockchain data analyst. Your task is to analyze and present the query results from Chainbase in a clear and informative way. - -SQL Query:""" -${result.sql} -""" - -Query Results:""" -${JSON.stringify( - { - columns: result.columns, - data: result.data, - totalRows: result.totalRows, - }, - null, - 2 -)} -""" - -User's Query:""" -${query} -""" - -Instructions: -1. **Format the Response**: Present the results in a structured format with emojis for better readability: - - 📊 Query Results (with user's original query) - - 🔍 SQL Query used - - 📋 Data in table format - - 📈 Brief analysis of the results - -2. **Data Presentation**: - - Format numbers with appropriate commas and decimals - - Present data in a clean table format using | for columns - - Highlight key metrics and trends - -3. **Analysis**: - - Provide a brief, clear analysis of what the data shows - - Focus on the most relevant insights related to the user's query - - Use simple, non-technical language when possible - -Format your response following this example: -📊 Query Results for: [user's query] - -🔍 SQL Query: -[SQL query used] - -📋 Data: -| Column1 | Column2 | -| Value1 | Value2 | - -📈 Analysis: [Brief analysis of the results] - -Remember: -- Keep the response concise and focused -- Format numbers for readability -- Highlight key insights -- Relate the analysis back to the user's original query`; - -export const retrieveTokenBalanceTemplate = ` -Extract query parameters for fetching all erc20 token balance for a wallet address: -- **address** (string, required): The address of the wallet to which the api queries. -- **chain_id** (string, optional): Specify The chain on which token bases. -- **contract_address** (string, optional): Specify one token contract address to check of. - -Supported chains and their chain IDs: -- Ethereum (chain_id: "1") -- Polygon (chain_id: "137") -- BSC (chain_id: "56") -- Avalanche (chain_id: "43114") -- Arbitrum One (chain_id: "42161") -- Optimism (chain_id: "10") -- Base (chain_id: "8453") -- zkSync (chain_id: "324") -- Merlin (chain_id: "4200") - -Provide the details in the following JSON format: -\`\`\`json -{ - "address": "", - "chain_id"?: "", - "contract_address"?: "", -} -\`\`\` - -Example for reading the balance of an ERC20 token: -\`\`\`json -{ - "address": "0xaC21F9e3550E525e568aC47Bc08095e7606c8B3F", - "chain_id": "1", - "contract_address"?: "0xdac17f958d2ee523a2206206994597c13d831ec7", -} -\`\`\` - -Example for reading the balance of all ERC20 tokens on evm mainnet: -\`\`\`json -{ - "address": "0xaC21F9e3550E525e568aC47Bc08095e7606c8B3F", -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} -`; - -export const formatTokenBalancePrompt = (tokens: any[], address: string) => ` -You are a blockchain data analyst. Format the following token balance data in a clear and readable way. -Show the token balances for address ${address}. - -Token data: -${JSON.stringify(tokens, null, 2)} - -Format the response to be concise but informative. Include: -- A brief summary line -- List each token with its balance, symbol and USD value if available -- Use appropriate emojis and formatting -`; diff --git a/packages/plugin-chainbase/src/types.ts b/packages/plugin-chainbase/src/types.ts deleted file mode 100644 index 5816b30ea5ab4..0000000000000 --- a/packages/plugin-chainbase/src/types.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { z } from "zod"; - -export interface RetrieveTokenBalanceReq { - address: `0x${string}`; - chain_id?: string; - contract_address?: `0x${string}`; -} - -export const RetrieveTokenBalanceReqSchema = z.object({ - address: z.string().regex(/^0x[a-fA-F0-9]{40}$/), - chain_id: z.string().optional(), - contract_address: z.string().regex(/^0x[a-fA-F0-9]{40}$/).optional(), -}); - -export const isRetrieveTokenBalanceReq = ( - obj: unknown -): obj is RetrieveTokenBalanceReq => { - return RetrieveTokenBalanceReqSchema.safeParse(obj).success; -}; - -export interface TokenWithBalance { - balance: string; - symbol: string; - name: string; - contract_address: string; - decimals: number; -} diff --git a/packages/plugin-chainbase/tsconfig.json b/packages/plugin-chainbase/tsconfig.json deleted file mode 100644 index 65ec37c9e63d3..0000000000000 --- a/packages/plugin-chainbase/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "./src" - }, - "include": [ - "src" - ] -} diff --git a/packages/plugin-chainbase/tsup.config.ts b/packages/plugin-chainbase/tsup.config.ts deleted file mode 100644 index 1a55f7a745f6d..0000000000000 --- a/packages/plugin-chainbase/tsup.config.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [], -}); diff --git a/packages/plugin-chainbase/vitest.config.ts b/packages/plugin-chainbase/vitest.config.ts deleted file mode 100644 index 2af7cfb6cd955..0000000000000 --- a/packages/plugin-chainbase/vitest.config.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node' - } -}); diff --git a/packages/plugin-coinbase/.npmignore b/packages/plugin-coinbase/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-coinbase/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-coinbase/README.md b/packages/plugin-coinbase/README.md deleted file mode 100644 index 840d82716ef2c..0000000000000 --- a/packages/plugin-coinbase/README.md +++ /dev/null @@ -1,213 +0,0 @@ -# @elizaos/plugin-coinbase - -A comprehensive Coinbase integration plugin for ElizaOS that provides access to Coinbase's various APIs and services. - -## Features - -- **Commerce Integration**: Create and manage payment charges using Coinbase Commerce -- **Trading**: Execute trades and swaps between different assets -- **Token Contract Management**: Deploy and interact with ERC20, ERC721, and ERC1155 smart contracts -- **Mass Payments**: Process bulk transfers and payments to multiple addresses -- **Advanced Trading**: Access to Coinbase Advanced Trading API features -- **Webhook Management**: Create and manage webhooks for various blockchain events - -## Installation - -```bash -npm install @elizaos/plugin-coinbase -``` - -## Configuration - -The plugin requires several environment variables to be set: - -```env -COINBASE_API_KEY=your_api_key -COINBASE_PRIVATE_KEY=your_private_key -COINBASE_COMMERCE_KEY=your_commerce_key -COINBASE_NOTIFICATION_URI=your_webhook_notification_uri -``` - -## Usage - -```typescript -import { plugins } from "@elizaos/plugin-coinbase"; - -// Register all plugins -const { - coinbaseMassPaymentsPlugin, - coinbaseCommercePlugin, - tradePlugin, - tokenContractPlugin, - webhookPlugin, - advancedTradePlugin, -} = plugins; - -// Register individual plugins as needed -runtime.registerPlugin(coinbaseCommercePlugin); -runtime.registerPlugin(tradePlugin); -// etc... -``` - -## Available Plugins - -### Commerce Plugin - -- Create charges with fixed or dynamic pricing -- Support for multiple currencies (USD, EUR, USDC) -- Charge status tracking and management - -### Trade Plugin - -- Execute basic trades between assets -- Support for market and limit orders -- Transaction logging and tracking - -### Token Contract Plugin - -- Deploy ERC20, ERC721, and ERC1155 contracts -- Interact with deployed contracts -- Read contract data and balances - -### Mass Payments Plugin - -- Process bulk transfers to multiple addresses -- Support for various assets and networks -- Transaction logging and CSV export - -### Advanced Trade Plugin - -- Access to advanced trading features -- Support for complex order types -- Detailed trade history and tracking - -### Webhook Plugin - -- Create and manage blockchain event webhooks -- Support for various event types and filters -- Webhook status tracking and logging - -## Supported Networks - -- Base (Mainnet & Sepolia) -- Ethereum (Mainnet & Holesky) -- Polygon Mainnet -- Solana (Mainnet & Devnet) -- Arbitrum Mainnet -- And more... - -## CSV Logging - -The plugin automatically logs various operations to CSV files: - -- `trades.csv`: Trading operations -- `transactions.csv`: Mass payment transactions -- `webhooks.csv`: Webhook configurations -- `advanced_trades.csv`: Advanced trading operations - -## Dependencies - -- `@elizaos/core`: Core ElizaOS functionality -- `coinbase-api`: Coinbase API integration -- `coinbase-advanced-sdk`: Coinbase Advanced Trading SDK -- Additional type definitions and utilities - -## Future Enhancements - -1. **Advanced Trading Features** - - - Real-time market data streaming - - Advanced order types (OCO, trailing stop) - - Portfolio rebalancing automation - - Custom trading strategies implementation - - Multi-exchange arbitrage support - -2. **Enhanced Commerce Integration** - - - Subscription payment handling - - Multi-currency checkout optimization - - Advanced refund management - - Custom payment flow templates - - Automated invoice generation - -3. **Improved Token Management** - - - Batch token operations - - Gas optimization for token contracts - - Token metadata management system - - Automated token listing process - - Smart contract deployment templates - -4. **Security Enhancements** - - - Advanced API key management - - Multi-signature support - - Transaction monitoring system - - Risk assessment tools - - Rate limiting improvements - -5. **Analytics and Reporting** - - - Custom report generation - - Trading performance analytics - - Payment flow analytics - - Real-time monitoring dashboard - - Historical data analysis tools - -6. **Webhook Management** - - - Enhanced event filtering - - Retry mechanism improvements - - Webhook monitoring dashboard - - Custom webhook templates - - Event batching support - -7. **Developer Tools** - - - SDK expansion - - Testing environment improvements - - Documentation generator - - CLI tools for common operations - - Integration templates - -8. **Cross-Platform Integration** - - Mobile SDK support - - Browser extension support - - Desktop application integration - - IoT device support - - Cross-chain bridging capabilities - -We welcome community feedback and contributions to help prioritize these enhancements. - -## Contributing - -Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information. - -## Credits - -This plugin integrates with and builds upon several key technologies: - -- [Coinbase](https://www.coinbase.com/): Digital currency exchange platform -- [Coinbase Commerce](https://commerce.coinbase.com/): Cryptocurrency payment solution -- [Coinbase Cloud](https://www.coinbase.com/cloud): Blockchain infrastructure -- [Coinbase Advanced Trade API](https://docs.cloud.coinbase.com/advanced-trade-api/): Trading interface -- [Coinbase Prime](https://prime.coinbase.com/): Institutional trading platform - -Special thanks to: - -- The Coinbase development team -- The Coinbase Commerce team -- The Coinbase Cloud infrastructure team -- The Advanced Trade API maintainers -- The Eliza community for their contributions and feedback - -For more information about Coinbase capabilities: - -- [Coinbase API Documentation](https://docs.cloud.coinbase.com/) -- [Commerce API Reference](https://docs.cloud.coinbase.com/commerce/reference/) -- [Advanced Trade Documentation](https://docs.cloud.coinbase.com/advanced-trade-api/) -- [Coinbase Prime Documentation](https://docs.prime.coinbase.com/) - -## License - -This plugin is part of the Eliza project. See the main project repository for license information. diff --git a/packages/plugin-coinbase/__tests__/commerce.test.ts b/packages/plugin-coinbase/__tests__/commerce.test.ts deleted file mode 100644 index bdb0ddceb77a0..0000000000000 --- a/packages/plugin-coinbase/__tests__/commerce.test.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { coinbaseCommercePlugin, createCharge } from '../src/plugins/commerce'; -import { IAgentRuntime, type Memory, State } from '@elizaos/core'; - -// Mock fetch -global.fetch = vi.fn(); - -// Mock runtime -const mockRuntime = { - getSetting: vi.fn().mockReturnValue('test-api-key'), - getProvider: vi.fn().mockReturnValue({ apiKey: 'test-api-key' }), - character: { - name: 'test-character' - } -}; - -describe('Coinbase Commerce Plugin', () => { - beforeEach(() => { - vi.clearAllMocks(); - }); - - describe('createCharge', () => { - it('should create a charge successfully', async () => { - const mockResponse = { - data: { - id: 'test-charge-id', - name: 'Test Charge', - description: 'Test Description', - pricing_type: 'fixed_price', - local_price: { - amount: '100', - currency: 'USD' - } - } - }; - - (global.fetch as any).mockResolvedValueOnce({ - ok: true, - json: () => Promise.resolve(mockResponse) - }); - - const params = { - name: 'Test Charge', - description: 'Test Description', - pricing_type: 'fixed_price', - local_price: { - amount: '100', - currency: 'USD' - } - }; - - const result = await createCharge('test-api-key', params); - expect(result).toEqual(mockResponse.data); - expect(global.fetch).toHaveBeenCalledWith( - 'https://api.commerce.coinbase.com/charges', - { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'X-CC-Api-Key': 'test-api-key' - }, - body: JSON.stringify(params) - } - ); - }); - - it('should handle errors when creating charge', async () => { - (global.fetch as any).mockResolvedValueOnce({ - ok: false, - statusText: 'Bad Request' - }); - - const params = { - name: 'Test Charge', - description: 'Test Description', - pricing_type: 'fixed_price', - local_price: { - amount: '100', - currency: 'USD' - } - }; - - await expect(createCharge('test-api-key', params)) - .rejects - .toThrow('Failed to create charge: Bad Request'); - }); - }); - - describe('coinbaseCommercePlugin', () => { - it('should have correct plugin properties', () => { - expect(coinbaseCommercePlugin.name).toBe('coinbaseCommerce'); - expect(coinbaseCommercePlugin.actions).toBeDefined(); - expect(Array.isArray(coinbaseCommercePlugin.actions)).toBe(true); - }); - - it('should validate plugin actions', async () => { - const mockMessage: Memory = { - id: '1', - user: 'test-user', - content: { text: 'test message' }, - timestamp: new Date(), - type: 'text' - }; - - const createChargeAction = coinbaseCommercePlugin.actions.find( - action => action.name === 'CREATE_CHARGE' - ); - - expect(createChargeAction).toBeDefined(); - if (createChargeAction) { - const result = await createChargeAction.validate(mockRuntime as any, mockMessage); - expect(result).toBe(true); - } - }); - }); -}); diff --git a/packages/plugin-coinbase/__tests__/utils.test.ts b/packages/plugin-coinbase/__tests__/utils.test.ts deleted file mode 100644 index fb5b36f019ba0..0000000000000 --- a/packages/plugin-coinbase/__tests__/utils.test.ts +++ /dev/null @@ -1,64 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { getWalletDetails } from '../src/utils'; -import { Coinbase, Wallet } from '@coinbase/coinbase-sdk'; - -vi.mock('@coinbase/coinbase-sdk'); - -// Mock the runtime -const mockRuntime = { - getSetting: vi.fn() - .mockReturnValueOnce('test-seed') // COINBASE_GENERATED_WALLET_HEX_SEED - .mockReturnValueOnce('test-wallet-id'), // COINBASE_GENERATED_WALLET_ID - getProvider: vi.fn().mockReturnValue({ apiKey: 'test-api-key' }), - character: { - name: 'test-character' - } -}; - -// Mock Wallet class -const mockWallet = { - getDefaultAddress: vi.fn().mockResolvedValue('0x123'), - getNetworkId: vi.fn().mockReturnValue('eth-mainnet'), - listBalances: vi.fn().mockResolvedValue([ - ['ETH', { toString: () => '1.0' }] - ]), - getTransactions: vi.fn().mockResolvedValue([]), - export: vi.fn().mockReturnValue({ - seed: 'test-seed', - walletId: 'test-wallet-id' - }) -}; - -describe('Utils', () => { - describe('getWalletDetails', () => { - beforeEach(() => { - vi.clearAllMocks(); - (Coinbase as any).networks = { - EthereumMainnet: 'eth-mainnet' - }; - (Wallet as any).import = vi.fn().mockResolvedValue(mockWallet); - }); - - it('should fetch wallet details successfully', async () => { - const result = await getWalletDetails(mockRuntime as any); - - expect(result).toEqual({ - balances: [{ asset: 'ETH', amount: '1.0' }], - transactions: [] - }); - - expect(Wallet.import).toHaveBeenCalledWith({ - seed: 'test-seed', - walletId: 'test-wallet-id' - }); - }); - - it('should handle errors when fetching wallet details', async () => { - (Wallet as any).import = vi.fn().mockRejectedValue(new Error('Unable to retrieve wallet details.')); - - await expect(getWalletDetails(mockRuntime as any)) - .rejects - .toThrow('Unable to retrieve wallet details.'); - }); - }); -}); diff --git a/packages/plugin-coinbase/advanced-sdk-ts/.gitignore b/packages/plugin-coinbase/advanced-sdk-ts/.gitignore deleted file mode 100644 index 722ad2ade5afa..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -.env -src/rest/main.ts -dist/ -node_modules/ -.idea/ -package-lock.json \ No newline at end of file diff --git a/packages/plugin-coinbase/advanced-sdk-ts/CHANGELOG.md b/packages/plugin-coinbase/advanced-sdk-ts/CHANGELOG.md deleted file mode 100644 index eda768580a2c4..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/CHANGELOG.md +++ /dev/null @@ -1,9 +0,0 @@ -# Changelog - -## [0.1.0] - 2024-SEP-06 - -### Added - -- Support for all Coinbase Advanced API REST endpoints via central client -- Custom Request and Response objects for endpoints -- Custom error types diff --git a/packages/plugin-coinbase/advanced-sdk-ts/README.md b/packages/plugin-coinbase/advanced-sdk-ts/README.md deleted file mode 100644 index 73f0bf459dc14..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/README.md +++ /dev/null @@ -1,126 +0,0 @@ -# Coinbase Advanced API TypeScript SDK - -Welcome to the Coinbase Advanced API TypeScript SDK. This TypeScript project was created to allow developers to easily plug into the [Coinbase Advanced API](https://docs.cdp.coinbase.com/advanced-trade/docs/welcome). - -Coinbase Advanced Trade offers a comprehensive API for traders, providing access to real-time market data, order management, and execution. Elevate your trading strategies and develop sophisticated solutions using our powerful tools and features. - -For more information on all the available REST endpoints, see the [API Reference](https://docs.cdp.coinbase.com/advanced-trade/reference/). - ---- - -## Installation - -```bash -npm install -``` - ---- - -## Build and Use - -To build the project, run the following command: - -```bash -npm run build -``` - -_Note: To avoid potential issues, do not forget to build your project again after making any changes to it._ - -After building the project, each `.ts` file will have its `.js` counterpart generated. - -To run a file, use the following command: - -``` -node dist/{INSERT-FILENAME}.js -``` - -For example, a `main.ts` file would be run like: - -```bash -node dist/main.js -``` - ---- - -## Coinbase Developer Platform (CDP) API Keys - -This SDK uses Cloud Developer Platform (CDP) API keys. To use this SDK, you will need to create a CDP API key and secret by following the instructions [here](https://docs.cdp.coinbase.com/advanced-trade/docs/getting-started). -Make sure to save your API key and secret in a safe place. You will not be able to retrieve your secret again. - ---- - -## Importing the RESTClient - -All the REST endpoints are available directly from the client, therefore it's all you need to import. - -``` -import { RESTClient } from './rest'; -``` - ---- - -## Authentication - -Authentication of CDP API Keys is handled automatically by the SDK when making a REST request. - -After creating your CDP API keys, store them using your desired method and simply pass them into the client during initialization like: - -``` -const client = new RESTClient(API_KEY, API_SECRET); -``` - ---- - -## Making Requests - -Here are a few examples requests: - -**[List Accounts](https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getaccounts)** - -``` -client - .listAccounts({}) - .then((result) => { - console.log(result); - }) - .catch((error) => { - console.error(error.message); - }); -``` - -**[Get Product](https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getproduct)** - -``` -client - .getProduct({productId: "BTC-USD"}) - .then((result) => { - console.log(result); - }) - .catch((error) => { - console.error(error.message); - }); -``` - -**[Create Order](https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_postorder)** - -_$10 Market Buy on BTC-USD_ - -``` -client - .createOrder({ - clientOrderId: "00000001", - productId: "BTC-USD", - side: OrderSide.BUY, - orderConfiguration:{ - market_market_ioc: { - quote_size: "10" - } - } - }) - .then((result) => { - console.log(result); - }) - .catch((error) => { - console.error(error.message); - }); -``` diff --git a/packages/plugin-coinbase/advanced-sdk-ts/package.json b/packages/plugin-coinbase/advanced-sdk-ts/package.json deleted file mode 100644 index 3e48546836beb..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/package.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "name": "@coinbase-samples/advanced-sdk-ts", - "version": "0.1.0", - "main": "dist/main.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1", - "build": "tsc", - "format": "prettier --write \"**/*.{js,ts,tsx,json,css,md}\"" - }, - "files": [ - "dist/" - ], - "keywords": [], - "author": "", - "license": "ISC", - "description": "", - "dependencies": { - "jsonwebtoken": "^9.0.2", - "node-fetch": "^2.6.1" - }, - "devDependencies": { - "@types/jsonwebtoken": "^9.0.7", - "@types/node-fetch": "^2.6.11", - "@typescript-eslint/eslint-plugin": "^5.59.0", - "@typescript-eslint/parser": "^5.59.0", - "dotenv": "^16.4.5", - "eslint": "^8.35.0", - "eslint-config-prettier": "^8.5.0", - "eslint-plugin-prettier": "^4.2.1", - "prettier": "^2.8.8", - "typescript": "^5.5.4" - } -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/constants.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/constants.ts deleted file mode 100644 index 89623659076f8..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/constants.ts +++ /dev/null @@ -1,6 +0,0 @@ -export const BASE_URL = 'api.coinbase.com'; -export const API_PREFIX = '/api/v3/brokerage'; -export const ALGORITHM = 'ES256'; -export const VERSION = '0.1.0'; -export const USER_AGENT = `coinbase-advanced-ts/${VERSION}`; -export const JWT_ISSUER = 'cdp'; diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/jwt-generator.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/jwt-generator.ts deleted file mode 100644 index 1e2f2e74b06bb..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/jwt-generator.ts +++ /dev/null @@ -1,31 +0,0 @@ -import jwt from 'jsonwebtoken'; -import { BASE_URL, ALGORITHM, JWT_ISSUER } from './constants'; -import crypto from 'crypto'; - -export function generateToken( - requestMethod: string, - requestPath: string, - apiKey: string, - apiSecret: string -): string { - const uri = `${requestMethod} ${BASE_URL}${requestPath}`; - const payload = { - iss: JWT_ISSUER, - nbf: Math.floor(Date.now() / 1000), - exp: Math.floor(Date.now() / 1000) + 120, - sub: apiKey, - uri, - }; - - const header = { - alg: ALGORITHM, - kid: apiKey, - nonce: crypto.randomBytes(16).toString('hex'), - }; - const options: jwt.SignOptions = { - algorithm: ALGORITHM as jwt.Algorithm, - header: header, - }; - - return jwt.sign(payload, apiSecret as string, options); -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/accounts.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/accounts.ts deleted file mode 100644 index 6fbf9e7c2bbea..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/accounts.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { API_PREFIX } from '../constants'; -import type { RESTBase } from './rest-base'; -import type { - GetAccountRequest, - GetAccountResponse, - ListAccountsRequest, - ListAccountsResponse, -} from './types/accounts-types'; -import { method } from './types/request-types'; - -// [GET] Get Account -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getaccount -export function getAccount( - this: RESTBase, - { accountUuid }: GetAccountRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/accounts/${accountUuid}`, - isPublic: false, - }); -} - -// [GET] List Accounts -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getaccounts -export function listAccounts( - this: RESTBase, - requestParams: ListAccountsRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/accounts`, - queryParams: requestParams, - isPublic: false, - }); -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/converts.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/converts.ts deleted file mode 100644 index 590886ef7e380..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/converts.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { API_PREFIX } from '../constants'; -import type { RESTBase } from './rest-base'; -import type { - CommitConvertTradeRequest, - CommitConvertTradeResponse, - CreateConvertQuoteRequest, - CreateConvertQuoteResponse, - GetConvertTradeRequest, - GetConvertTradeResponse, -} from './types/converts-types'; -import { method } from './types/request-types'; - -// [POST] Create Convert Quote -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_createconvertquote -export function createConvertQuote( - this: RESTBase, - requestParams: CreateConvertQuoteRequest -): Promise { - return this.request({ - method: method.POST, - endpoint: `${API_PREFIX}/convert/quote`, - bodyParams: requestParams, - isPublic: false, - }); -} - -// [GET] Get Convert Trade -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getconverttrade -export function getConvertTrade( - this: RESTBase, - { tradeId, ...requestParams }: GetConvertTradeRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/convert/trade/${tradeId}`, - queryParams: requestParams, - isPublic: false, - }); -} - -// [POST] Commit Connvert Trade -// https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_commitconverttrade -export function commitConvertTrade( - this: RESTBase, - { tradeId, ...requestParams }: CommitConvertTradeRequest -): Promise { - return this.request({ - method: method.POST, - endpoint: `${API_PREFIX}/convert/trade/${tradeId}`, - bodyParams: requestParams, - isPublic: false, - }); -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/dataAPI.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/dataAPI.ts deleted file mode 100644 index 299e5430a5e0c..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/dataAPI.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { API_PREFIX } from '../constants'; -import type { RESTBase } from './rest-base'; - -import { method } from './types/request-types'; -import type { GetAPIKeyPermissionsResponse } from './types/dataAPI-types'; - -// [GET] Get API Key Permissions -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getapikeypermissions -export function getAPIKeyPermissions( - this: RESTBase -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/key_permissions`, - isPublic: false, - }); -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/errors.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/errors.ts deleted file mode 100644 index e03addb62254d..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/errors.ts +++ /dev/null @@ -1,36 +0,0 @@ -import type { Response } from 'node-fetch'; - -class CoinbaseError extends Error { - statusCode: number; - response: Response; - - constructor(message: string, statusCode: number, response: Response) { - super(message); - this.name = 'CoinbaseError'; - this.statusCode = statusCode; - this.response = response; - } -} - -export function handleException( - response: Response, - responseText: string, - reason: string -) { - let message: string | undefined; - - if ( - (400 <= response.status && response.status <= 499) || - (500 <= response.status && response.status <= 599) - ) { - if ( - response.status == 403 && - responseText.includes('"error_details":"Missing required scopes"') - ) { - message = `${response.status} Coinbase Error: Missing Required Scopes. Please verify your API keys include the necessary permissions.`; - } else - message = `${response.status} Coinbase Error: ${reason} ${responseText}`; - - throw new CoinbaseError(message, response.status, response); - } -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/fees.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/fees.ts deleted file mode 100644 index 365b48b9c170c..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/fees.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { API_PREFIX } from '../constants'; -import type { RESTBase } from './rest-base'; -import type { - GetTransactionsSummaryRequest, - GetTransactionsSummaryResponse, -} from './types/fees-types'; -import { method } from './types/request-types'; - -// [GET] Get Transaction Summary -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_commitconverttrade -export function getTransactionSummary( - this: RESTBase, - requestParams: GetTransactionsSummaryRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/transaction_summary`, - queryParams: requestParams, - isPublic: false, - }); -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/futures.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/futures.ts deleted file mode 100644 index bd4dd6e4dd330..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/futures.ts +++ /dev/null @@ -1,133 +0,0 @@ -import { API_PREFIX } from '../constants'; -import type { RESTBase } from './rest-base'; -import type { - CancelPendingFuturesSweep, - GetCurrentMarginWindowRequest, - GetCurrentMarginWindowResponse, - GetFuturesBalanceSummaryResponse, - GetFuturesPositionRequest, - GetFuturesPositionResponse, - GetIntradayMarginSettingResponse, - ListFuturesPositionsResponse, - ListFuturesSweepsResponse, - ScheduleFuturesSweepRequest, - ScheduleFuturesSweepResponse, - SetIntradayMarginSettingRequest, - SetIntradayMarginSettingResponse, -} from './types/futures-types'; -import { method } from './types/request-types'; - -// [GET] Get Futures Balance Summary -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getfcmbalancesummary -export function getFuturesBalanceSummary( - this: RESTBase -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/cfm/balance_summary`, - isPublic: false, - }); -} - -// [GET] Get Intraday Margin Setting -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getintradaymarginsetting -export function getIntradayMarginSetting( - this: RESTBase -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/cfm/intraday/margin_setting`, - isPublic: false, - }); -} - -// [POST] Set Intraday Margin Setting -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_setintradaymarginsetting -export function setIntradayMarginSetting( - this: RESTBase, - requestParams: SetIntradayMarginSettingRequest -): Promise { - return this.request({ - method: method.POST, - endpoint: `${API_PREFIX}/cfm/intraday/margin_setting`, - bodyParams: requestParams, - isPublic: false, - }); -} - -// [GET] Get Current Margin Window -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getcurrentmarginwindow -export function getCurrentMarginWindow( - this: RESTBase, - requestParams: GetCurrentMarginWindowRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/cfm/intraday/current_margin_window`, - queryParams: requestParams, - isPublic: false, - }); -} - -// [GET] List Futures Positions -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getfcmpositions -export function listFuturesPositions( - this: RESTBase -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/cfm/positions`, - isPublic: false, - }); -} - -// [GET] Get Futures Position -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getfcmposition -export function getFuturesPosition( - this: RESTBase, - { productId }: GetFuturesPositionRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/cfm/positions/${productId}`, - isPublic: false, - }); -} - -// [POST] Schedule Futures Sweep -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_schedulefcmsweep -export function scheduleFuturesSweep( - this: RESTBase, - requestParams: ScheduleFuturesSweepRequest -): Promise { - return this.request({ - method: method.POST, - endpoint: `${API_PREFIX}/cfm/sweeps/schedule`, - bodyParams: requestParams, - isPublic: false, - }); -} - -// [GET] List Futures Sweeps -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getfcmsweeps -export function listFuturesSweeps( - this: RESTBase -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/cfm/sweeps`, - isPublic: false, - }); -} - -// [DELETE] Cancel Pending Futures Sweep -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_cancelfcmsweep -export function cancelPendingFuturesSweep( - this: RESTBase -): Promise { - return this.request({ - method: method.DELETE, - endpoint: `${API_PREFIX}/cfm/sweeps`, - isPublic: false, - }); -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/index.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/index.ts deleted file mode 100644 index 4a17332c2b0cb..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/index.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { RESTBase } from './rest-base'; -import * as Accounts from './accounts'; -import * as Converts from './converts'; -import * as DataAPI from './dataAPI'; -import * as Fees from './fees'; -import * as Futures from './futures'; -import * as Orders from './orders'; -import * as Payments from './payments'; -import * as Perpetuals from './perpetuals'; -import * as Portfolios from './portfolios'; -import * as Products from './products'; -import * as Public from './public'; - -export class RESTClient extends RESTBase { - constructor(key?: string | undefined, secret?: string | undefined) { - super(key, secret); - } - - // =============== ACCOUNTS endpoints =============== - public getAccount = Accounts.getAccount.bind(this); - public listAccounts = Accounts.listAccounts.bind(this); - - // =============== CONVERTS endpoints =============== - public createConvertQuote = Converts.createConvertQuote.bind(this); - public commitConvertTrade = Converts.commitConvertTrade.bind(this); - public getConvertTrade = Converts.getConvertTrade.bind(this); - - // =============== DATA API endpoints =============== - public getAPIKeyPermissions = DataAPI.getAPIKeyPermissions.bind(this); - - // =============== FEES endpoints =============== - public getTransactionSummary = Fees.getTransactionSummary.bind(this); - - // =============== FUTURES endpoints =============== - public getFuturesBalanceSummary = - Futures.getFuturesBalanceSummary.bind(this); - public getIntradayMarginSetting = - Futures.getIntradayMarginSetting.bind(this); - public setIntradayMarginSetting = - Futures.setIntradayMarginSetting.bind(this); - public getCurrentMarginWindow = Futures.getCurrentMarginWindow.bind(this); - public listFuturesPositions = Futures.listFuturesPositions.bind(this); - public getFuturesPosition = Futures.getFuturesPosition.bind(this); - public scheduleFuturesSweep = Futures.scheduleFuturesSweep.bind(this); - public listFuturesSweeps = Futures.listFuturesSweeps.bind(this); - public cancelPendingFuturesSweep = - Futures.cancelPendingFuturesSweep.bind(this); - - // =============== ORDERS endpoints =============== - public createOrder = Orders.createOrder.bind(this); - public cancelOrders = Orders.cancelOrders.bind(this); - public editOrder = Orders.editOrder.bind(this); - public editOrderPreview = Orders.editOrderPreview.bind(this); - public listOrders = Orders.listOrders.bind(this); - public listFills = Orders.listFills.bind(this); - public getOrder = Orders.getOrder.bind(this); - public previewOrder = Orders.previewOrder.bind(this); - public closePosition = Orders.closePosition.bind(this); - - // =============== PAYMENTS endpoints =============== - public listPaymentMethods = Payments.listPaymentMethods.bind(this); - public getPaymentMethod = Payments.getPaymentMethod.bind(this); - - // =============== PERPETUALS endpoints =============== - public allocatePortfolio = Perpetuals.allocatePortfolio.bind(this); - public getPerpetualsPortfolioSummary = - Perpetuals.getPerpetualsPortfolioSummary.bind(this); - public listPerpetualsPositions = - Perpetuals.listPerpetualsPositions.bind(this); - public getPerpetualsPosition = Perpetuals.getPerpertualsPosition.bind(this); - public getPortfolioBalances = Perpetuals.getPortfolioBalances.bind(this); - public optInOutMultiAssetCollateral = - Perpetuals.optInOutMultiAssetCollateral.bind(this); - - // =============== PORTFOLIOS endpoints =============== - public listPortfolios = Portfolios.listPortfolios.bind(this); - public createPortfolio = Portfolios.createPortfolio.bind(this); - public deletePortfolio = Portfolios.deletePortfolio.bind(this); - public editPortfolio = Portfolios.editPortfolio.bind(this); - public movePortfolioFunds = Portfolios.movePortfolioFunds.bind(this); - public getPortfolioBreakdown = Portfolios.getPortfolioBreakdown.bind(this); - - // =============== PRODUCTS endpoints =============== - public getBestBidAsk = Products.getBestBidAsk.bind(this); - public getProductBook = Products.getProductBook.bind(this); - public listProducts = Products.listProducts.bind(this); - public getProduct = Products.getProduct.bind(this); - public getProductCandles = Products.getProductCandles.bind(this); - public getMarketTrades = Products.getMarketTrades.bind(this); - - // =============== PUBLIC endpoints =============== - public getServerTime = Public.getServerTime.bind(this); - public getPublicProductBook = Public.getPublicProductBook.bind(this); - public listPublicProducts = Public.listPublicProducts.bind(this); - public getPublicProduct = Public.getPublicProduct.bind(this); - public getPublicProductCandles = Public.getPublicProductCandles.bind(this); - public getPublicMarketTrades = Public.getPublicMarketTrades.bind(this); -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/orders.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/orders.ts deleted file mode 100644 index bf1d6b55d6725..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/orders.ts +++ /dev/null @@ -1,149 +0,0 @@ -import { API_PREFIX } from '../constants'; -import type { RESTBase } from './rest-base'; -import type { - CancelOrdersRequest, - CancelOrdersResponse, - ClosePositionRequest, - ClosePositionResponse, - CreateOrderRequest, - CreateOrderResponse, - EditOrderPreviewRequest, - EditOrderPreviewResponse, - EditOrderRequest, - EditOrderResponse, - GetOrderRequest, - GetOrderResponse, - ListFillsRequest, - ListFillsResponse, - ListOrdersRequest, - ListOrdersResponse, - PreviewOrderRequest, - PreviewOrderResponse, -} from './types/orders-types'; -import { method } from './types/request-types'; - -// [POST] Create Order -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_postorder -export function createOrder( - this: RESTBase, - requestParams: CreateOrderRequest -): Promise { - return this.request({ - method: method.POST, - endpoint: `${API_PREFIX}/orders`, - bodyParams: requestParams, - isPublic: false, - }); -} - -// [POST] Cancel Orders -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_cancelorders -export function cancelOrders( - this: RESTBase, - requestParams: CancelOrdersRequest -): Promise { - return this.request({ - method: method.POST, - endpoint: `${API_PREFIX}/orders/batch_cancel`, - bodyParams: requestParams, - isPublic: false, - }); -} - -// [POST] Edit Order -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_editorder -export function editOrder( - this: RESTBase, - requestParams: EditOrderRequest -): Promise { - return this.request({ - method: method.POST, - endpoint: `${API_PREFIX}/orders/edit`, - bodyParams: requestParams, - isPublic: false, - }); -} - -// [POST] Edit Order Preview -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_previeweditorder -export function editOrderPreview( - this: RESTBase, - requestParams: EditOrderPreviewRequest -): Promise { - return this.request({ - method: method.POST, - endpoint: `${API_PREFIX}/orders/edit_preview`, - bodyParams: requestParams, - isPublic: false, - }); -} - -// [GET] List Orders -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_gethistoricalorders -export function listOrders( - this: RESTBase, - requestParams: ListOrdersRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/orders/historical/batch`, - queryParams: requestParams, - isPublic: false, - }); -} - -// [GET] List Fills -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getfills -export function listFills( - this: RESTBase, - requestParams: ListFillsRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/orders/historical/fills`, - queryParams: requestParams, - isPublic: false, - }); -} - -// [GET] Get Order -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_gethistoricalorder -export function getOrder( - this: RESTBase, - { orderId }: GetOrderRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/orders/historical/${orderId}`, - isPublic: false, - }); -} - -// [POST] Preview Order -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_previeworder -export function previewOrder( - this: RESTBase, - requestParams: PreviewOrderRequest -): Promise { - return this.request({ - method: method.POST, - endpoint: `${API_PREFIX}/orders/preview`, - bodyParams: requestParams, - isPublic: false, - }); -} - -// [POST] Close Position -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_closeposition -export function closePosition( - this: RESTBase, - requestParams: ClosePositionRequest -): Promise { - return this.request({ - method: method.POST, - endpoint: `${API_PREFIX}/orders/close_position`, - queryParams: undefined, - bodyParams: requestParams, - isPublic: false, - }); -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/payments.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/payments.ts deleted file mode 100644 index acb4bcbe0a6b5..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/payments.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { API_PREFIX } from '../constants'; -import type { RESTBase } from './rest-base'; -import type { - GetPaymentMethodRequest, - GetPaymentMethodResponse, - ListPaymentMethodsResponse, -} from './types/payments-types'; -import { method } from './types/request-types'; - -// [GET] List Payment Methods -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getpaymentmethods -export function listPaymentMethods( - this: RESTBase -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/payment_methods`, - isPublic: false, - }); -} - -// [GET] Get Payment Method -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getpaymentmethod -export function getPaymentMethod( - this: RESTBase, - { paymentMethodId }: GetPaymentMethodRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/payment_methods/${paymentMethodId}`, - isPublic: false, - }); -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/perpetuals.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/perpetuals.ts deleted file mode 100644 index 50c9fb553f84e..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/perpetuals.ts +++ /dev/null @@ -1,97 +0,0 @@ -import { API_PREFIX } from '../constants'; -import type { RESTBase } from './rest-base'; -import type { - AllocatePortfolioRequest, - AllocatePortfolioResponse, - GetPerpetualsPortfolioSummaryRequest, - GetPerpetualsPortfolioSummaryResponse, - GetPerpetualsPositionRequest, - GetPerpetualsPositionResponse, - GetPortfolioBalancesRequest, - GetPortfolioBalancesResponse, - ListPerpetualsPositionsRequest, - ListPerpetualsPositionsResponse, - OptInOutMultiAssetCollateralRequest, - OptInOutMultiAssetCollateralResponse, -} from './types/perpetuals-types'; -import { method } from './types/request-types'; - -// [POST] Allocate Portfolio -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_allocateportfolio -export function allocatePortfolio( - this: RESTBase, - requestParams: AllocatePortfolioRequest -): Promise { - return this.request({ - method: method.POST, - endpoint: `${API_PREFIX}/intx/allocate`, - bodyParams: requestParams, - isPublic: false, - }); -} - -// [GET] Get Perpetuals Portfolio Summary -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getintxportfoliosummary -export function getPerpetualsPortfolioSummary( - this: RESTBase, - { portfolioUuid }: GetPerpetualsPortfolioSummaryRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/intx/portfolio/${portfolioUuid}`, - isPublic: false, - }); -} - -// [GET] List Perpetuals Positions -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getintxpositions -export function listPerpetualsPositions( - this: RESTBase, - { portfolioUuid }: ListPerpetualsPositionsRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/intx/positions/${portfolioUuid}`, - isPublic: false, - }); -} - -// [GET] Get Perpetuals Position -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getintxposition -export function getPerpertualsPosition( - this: RESTBase, - { portfolioUuid, symbol }: GetPerpetualsPositionRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/intx/positions/${portfolioUuid}/${symbol}`, - isPublic: false, - }); -} - -// [GET] Get Portfolio Balances -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getintxbalances -export function getPortfolioBalances( - this: RESTBase, - { portfolioUuid }: GetPortfolioBalancesRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/intx/balances/${portfolioUuid}`, - isPublic: false, - }); -} - -// [POST] Opt In or Out of Multi Asset Collateral -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_intxmultiassetcollateral -export function optInOutMultiAssetCollateral( - this: RESTBase, - requestParams: OptInOutMultiAssetCollateralRequest -): Promise { - return this.request({ - method: method.POST, - endpoint: `${API_PREFIX}/intx/multi_asset_collateral`, - bodyParams: requestParams, - isPublic: false, - }); -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/portfolios.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/portfolios.ts deleted file mode 100644 index df71f69aaad7f..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/portfolios.ts +++ /dev/null @@ -1,100 +0,0 @@ -import { API_PREFIX } from '../constants'; -import type { RESTBase } from './rest-base'; -import type { - CreatePortfolioRequest, - CreatePortfolioResponse, - DeletePortfolioRequest, - DeletePortfolioResponse, - EditPortfolioRequest, - EditPortfolioResponse, - GetPortfolioBreakdownRequest, - GetPortfolioBreakdownResponse, - ListPortfoliosRequest, - ListPortfoliosResponse, - MovePortfolioFundsRequest, - MovePortfolioFundsResponse, -} from './types/portfolios-types'; -import { method } from './types/request-types'; - -// [GET] List Portfolios -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getportfolios -export function listPortfolios( - this: RESTBase, - requestParams: ListPortfoliosRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/portfolios`, - queryParams: requestParams, - isPublic: false, - }); -} - -// [POST] Create Portfolio -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_createportfolio -export function createPortfolio( - this: RESTBase, - requestParams: CreatePortfolioRequest -): Promise { - return this.request({ - method: method.POST, - endpoint: `${API_PREFIX}/portfolios`, - bodyParams: requestParams, - isPublic: false, - }); -} - -// [POST] Move Portfolio Funds -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_moveportfoliofunds -export function movePortfolioFunds( - this: RESTBase, - requestParams: MovePortfolioFundsRequest -): Promise { - return this.request({ - method: method.POST, - endpoint: `${API_PREFIX}/portfolios/move_funds`, - bodyParams: requestParams, - isPublic: false, - }); -} - -// [GET] Get Portfolio Breakdown -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getportfoliobreakdown -export function getPortfolioBreakdown( - this: RESTBase, - { portfolioUuid, ...requestParams }: GetPortfolioBreakdownRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/portfolios/${portfolioUuid}`, - queryParams: requestParams, - isPublic: false, - }); -} - -// [DELETE] Delete Portfolio -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_deleteportfolio -export function deletePortfolio( - this: RESTBase, - { portfolioUuid }: DeletePortfolioRequest -): Promise { - return this.request({ - method: method.DELETE, - endpoint: `${API_PREFIX}/portfolios/${portfolioUuid}`, - isPublic: false, - }); -} - -// [PUT] Edit Portfolio -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_editportfolio -export function editPortfolio( - this: RESTBase, - { portfolioUuid, ...requestParams }: EditPortfolioRequest -): Promise { - return this.request({ - method: method.PUT, - endpoint: `${API_PREFIX}/portfolios/${portfolioUuid}`, - bodyParams: requestParams, - isPublic: false, - }); -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/products.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/products.ts deleted file mode 100644 index 1e65a79215374..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/products.ts +++ /dev/null @@ -1,101 +0,0 @@ -import { API_PREFIX } from '../constants'; -import type { RESTBase } from './rest-base'; -import type { - GetBestBidAskRequest, - GetBestBidAskResponse, - GetMarketTradesRequest, - GetMarketTradesResponse, - GetProductBookRequest, - GetProductBookResponse, - GetProductCandlesRequest, - GetProductCandlesResponse, - GetProductRequest, - GetProductResponse, - ListProductsRequest, - ListProductsResponse, -} from './types/products-types'; -import { method } from './types/request-types'; - -// [GET] Get Best Bid Ask -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getbestbidask -export function getBestBidAsk( - this: RESTBase, - requestParams: GetBestBidAskRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/best_bid_ask`, - queryParams: requestParams, - isPublic: false, - }); -} - -// [GET] Get Product Book -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getproductbook -export function getProductBook( - this: RESTBase, - requestParams: GetProductBookRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/product_book`, - queryParams: requestParams, - isPublic: false, - }); -} - -// [GET] List Products -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getproducts -export function listProducts( - this: RESTBase, - requestParams: ListProductsRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/products`, - queryParams: requestParams, - isPublic: false, - }); -} - -// [GET] Get Product -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getproduct -export function getProduct( - this: RESTBase, - { productId, ...requestParams }: GetProductRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/products/${productId}`, - queryParams: requestParams, - isPublic: false, - }); -} - -// [GET] Get Product Candles -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getcandles -export function getProductCandles( - this: RESTBase, - { productId, ...requestParams }: GetProductCandlesRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/products/${productId}/candles`, - queryParams: requestParams, - isPublic: false, - }); -} - -// [GET] Get Market Trades -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getmarkettrades -export function getMarketTrades( - this: RESTBase, - { productId, ...requestParams }: GetMarketTradesRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/products/${productId}/ticker`, - queryParams: requestParams, - isPublic: false, - }); -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/public.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/public.ts deleted file mode 100644 index a96b0ac584842..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/public.ts +++ /dev/null @@ -1,95 +0,0 @@ -import { API_PREFIX } from '../constants'; -import type { RESTBase } from './rest-base'; -import type { - GetPublicMarketTradesRequest, - GetPublicMarketTradesResponse, - GetPublicProductBookRequest, - GetPublicProductBookResponse, - GetPublicProductCandlesRequest, - GetPublicProductCandlesResponse, - GetPublicProductRequest, - GetPublicProductResponse, - GetServerTimeResponse, - ListPublicProductsRequest, - ListPublicProductsResponse, -} from './types/public-types'; -import { method } from './types/request-types'; - -// [GET] Get Server Time -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getservertime -export function getServerTime(this: RESTBase): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/time`, - isPublic: true, - }); -} - -// [GET] Get Public Product Book -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getpublicproductbook -export function getPublicProductBook( - this: RESTBase, - requestParams: GetPublicProductBookRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/market/product_book`, - queryParams: requestParams, - isPublic: true, - }); -} - -// [GET] List Public Products -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getpublicproducts -export function listPublicProducts( - this: RESTBase, - requestParams: ListPublicProductsRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/market/products`, - queryParams: requestParams, - isPublic: true, - }); -} - -// [GET] Get Public Product -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getpublicproduct -export function getPublicProduct( - this: RESTBase, - { productId }: GetPublicProductRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/market/products/${productId}`, - isPublic: true, - }); -} - -// [GET] Get Public Product Candles -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getpubliccandles -export function getPublicProductCandles( - this: RESTBase, - { productId, ...requestParams }: GetPublicProductCandlesRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/market/products/${productId}/candles`, - queryParams: requestParams, - isPublic: true, - }); -} - -// [GET] Get Public Market Trades -// Official Documentation: https://docs.cdp.coinbase.com/advanced-trade/reference/retailbrokerageapi_getpublicmarkettrades -export function getPublicMarketTrades( - this: RESTBase, - { productId, ...requestParams }: GetPublicMarketTradesRequest -): Promise { - return this.request({ - method: method.GET, - endpoint: `${API_PREFIX}/products/${productId}/ticker`, - queryParams: requestParams, - isPublic: true, - }); -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/rest-base.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/rest-base.ts deleted file mode 100644 index a431c5394aa90..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/rest-base.ts +++ /dev/null @@ -1,126 +0,0 @@ -import { generateToken } from '../jwt-generator'; -import fetch, { Headers, type RequestInit, type Response } from 'node-fetch'; -import { BASE_URL, USER_AGENT } from '../constants'; -import type { RequestOptions } from './types/request-types'; -import { handleException } from './errors'; - -export class RESTBase { - private apiKey: string | undefined; - private apiSecret: string | undefined; - - constructor(key?: string, secret?: string) { - if (!key || !secret) { - console.log( - 'Could not authenticate. Only public endpoints accessible.' - ); - } - this.apiKey = key; - this.apiSecret = secret; - } - - request(options: RequestOptions): Promise { - const { method, endpoint, isPublic } = options; - let { queryParams, bodyParams } = options; - - queryParams = queryParams ? this.filterParams(queryParams) : {}; - - if (bodyParams !== undefined) - bodyParams = bodyParams ? this.filterParams(bodyParams) : {}; - - return this.prepareRequest( - method, - endpoint, - queryParams, - bodyParams, - isPublic - ); - } - - prepareRequest( - httpMethod: string, - urlPath: string, - queryParams?: Record, - bodyParams?: Record, - isPublic?: boolean - ) { - const headers: Headers = this.setHeaders(httpMethod, urlPath, isPublic); - - const requestOptions: RequestInit = { - method: httpMethod, - headers: headers, - body: JSON.stringify(bodyParams), - }; - - const queryString = this.buildQueryString(queryParams); - const url = `https://${BASE_URL}${urlPath}${queryString}`; - - return this.sendRequest(headers, requestOptions, url); - } - - async sendRequest( - headers: Headers, - requestOptions: RequestInit, - url: string - ) { - const response: Response = await fetch(url, requestOptions); - const responseText = await response.text(); - handleException(response, responseText, response.statusText); - - return responseText; - } - - setHeaders(httpMethod: string, urlPath: string, isPublic?: boolean) { - const headers: Headers = new Headers(); - headers.append('Content-Type', 'application/json'); - headers.append('User-Agent', USER_AGENT); - if (this.apiKey !== undefined && this.apiSecret !== undefined) - headers.append( - 'Authorization', - `Bearer ${generateToken( - httpMethod, - urlPath, - this.apiKey, - this.apiSecret - )}` - ); - else if (isPublic == undefined || isPublic == false) - throw new Error( - 'Attempting to access authenticated endpoint with invalid API_KEY or API_SECRET.' - ); - - return headers; - } - - filterParams(data: Record) { - const filteredParams: Record = {}; - - for (const key in data) { - if (data[key] !== undefined) { - filteredParams[key] = data[key]; - } - } - - return filteredParams; - } - - buildQueryString(queryParams?: Record): string { - if (!queryParams || Object.keys(queryParams).length === 0) { - return ''; - } - - const queryString = Object.entries(queryParams) - .flatMap(([key, value]) => { - if (Array.isArray(value)) { - return value.map( - (item) => - `${encodeURIComponent(key)}=${encodeURIComponent(item)}` - ); - } else { - return `${encodeURIComponent(key)}=${encodeURIComponent(value)}`; - } - }) - .join('&'); - - return `?${queryString}`; - } -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/accounts-types.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/accounts-types.ts deleted file mode 100644 index e51901e259848..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/accounts-types.ts +++ /dev/null @@ -1,26 +0,0 @@ -import type { Account } from './common-types'; - -// Get Account -export type GetAccountRequest = { - // Path Params - accountUuid: string; -}; - -export type GetAccountResponse = { - account?: Account; -}; - -// List Accounts -export type ListAccountsRequest = { - // Query Params - limit?: number; - cursor?: string; - retailPortfolioId?: string; -}; - -export type ListAccountsResponse = { - accounts?: Account[]; - has_next: boolean; - cursor?: string; - size?: number; -}; diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/common-types.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/common-types.ts deleted file mode 100644 index f99da0858e703..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/common-types.ts +++ /dev/null @@ -1,447 +0,0 @@ -// ----- ENUMS ----- -export enum ProductType { - UNKNOWN = 'UNKNOWN_PRODUCT_TYPE', - SPOT = 'SPOT', - FUTURE = 'FUTURE', -} - -export enum ContractExpiryType { - UNKNOWN = 'UNKNOWN_CONTRACT_EXPIRY_TYPE', - EXPIRING = 'EXPIRING', - PERPETUAL = 'PERPETUAL', -} - -export enum ExpiringContractStatus { - UNKNOWN = 'UNKNOWN_EXPIRING_CONTRACT_STATUS', - UNEXPIRED = 'STATUS_UNEXPIRED', - EXPIRED = 'STATUS_EXPIRED', - ALL = 'STATUS_ALL', -} - -export enum PortfolioType { - UNDEFINED = 'UNDEFINED', - DEFAULT = 'DEFAULT', - CONSUMER = 'CONSUMER', - INTX = 'INTX', -} - -export enum MarginType { - CROSS = 'CROSS', - ISOLATED = 'ISOLATED', -} - -export enum OrderPlacementSource { - UNKNOWN = 'UNKNOWN_PLACEMENT_SOURCE', - RETAIL_SIMPLE = 'RETAIL_SIMPLE', - RETAIL_ADVANCED = 'RETAIL_ADVANCED', -} - -export enum SortBy { - UNKNOWN = 'UNKNOWN_SORT_BY', - LIMIT_PRICE = 'LIMIT_PRICE', - LAST_FILL_TIME = 'LAST_FILL_TIME', -} - -export enum OrderSide { - BUY = 'BUY', - SELL = 'SELL', -} - -export enum StopDirection { - UP = 'STOP_DIRECTION_STOP_UP', - DOWN = 'STOP_DIRECTION_STOP_DOWN', -} - -export enum Granularity { - UNKNOWN = 'UNKNOWN_GRANULARITY', - ONE_MINUTE = 'ONE_MINUTE', - FIVE_MINUTE = 'FIVE_MINUTE', - FIFTEEN_MINUTE = 'FIFTEEN_MINUTE', - THIRTY_MINUTE = 'THIRTY_MINUTE', - ONE_HOUR = 'ONE_HOUR', - TWO_HOUR = 'TWO_HOUR', - SIX_HOUR = 'SIX_HOUR', - ONE_DAY = 'ONE_DAY', -} - -export enum ProductVenue { - UNKNOWN = 'UNKNOWN_VENUE_TYPE', - CBE = 'CBE', - FCM = 'FCM', - INTX = 'INTX', -} - -export enum IntradayMarginSetting { - UNSPECIFIED = 'INTRADAY_MARGIN_SETTING_UNSPECIFIED', - STANDARD = 'INTRADAY_MARGIN_SETTING_STANDARD', - INTRADAY = 'INTRADAY_MARGIN_SETTING_INTRADAY', -} - -// ----- TYPES ----- -export type Account = { - uuid?: string; - name?: string; - currency?: string; - available_balance?: Record; - default?: boolean; - active?: boolean; - created_at?: string; - updated_at?: string; - deleted_at?: string; - type?: Record; - ready?: boolean; - hold?: Record; - retail_portfolio_id?: string; -}; - -export type TradeIncentiveMetadata = { - userIncentiveId?: string; - codeVal?: string; -}; - -export type OrderConfiguration = - | { market_market_ioc: MarketMarketIoc } - | { sor_limit_ioc: SorLimitIoc } - | { limit_limit_gtc: LimitLimitGtc } - | { limit_limit_gtd: LimitLimitGtd } - | { limit_limit_fok: LimitLimitFok } - | { stop_limit_stop_limit_gtc: StopLimitStopLimitGtc } - | { stop_limit_stop_limit_gtd: StopLimitStopLimitGtd } - | { trigger_bracket_gtc: TriggerBracketGtc } - | { trigger_bracket_gtd: TriggerBracketGtd }; - -export type MarketMarketIoc = { quote_size: string } | { base_size: string }; - -export type SorLimitIoc = { - baseSize: string; - limitPrice: string; -}; - -export type LimitLimitGtc = { - baseSize: string; - limitPrice: string; - postOnly: boolean; -}; - -export type LimitLimitGtd = { - baseSize: string; - limitPrice: string; - endTime: string; - postOnly: boolean; -}; - -export type LimitLimitFok = { - baseSize: string; - limitPrice: string; -}; - -export type StopLimitStopLimitGtc = { - baseSize: string; - limitPrice: string; - stopPrice: string; - stopDirection: StopDirection; -}; - -export type StopLimitStopLimitGtd = { - baseSize: string; - limitPrice: string; - stopPrice: string; - endTime: string; - stopDirection: StopDirection; -}; - -export type TriggerBracketGtc = { - baseSize: string; - limitPrice: string; - stopTriggerPrice: string; -}; - -export type TriggerBracketGtd = { - baseSize: string; - limitPrice: string; - stopTriggerPrice: string; - endTime: string; -}; - -export type RatConvertTrade = { - id?: string; - status?: Record; - user_entered_amount?: Record; - amount?: Record; - subtotal?: Record; - total?: Record; - fees?: Record; - total_fee?: Record; - source?: Record; - target?: Record; - unit_price?: Record; - user_warnings?: Record; - user_reference?: string; - source_curency?: string; - cancellation_reason?: Record; - source_id?: string; - target_id?: string; - subscription_info?: Record; - exchange_rate?: Record; - tax_details?: Record; - trade_incentive_info?: Record; - total_fee_without_tax?: Record; - fiat_denoted_total?: Record; -}; - -export type FCMBalanceSummary = { - futures_buying_power?: Record; - total_usd_balance?: Record; - cbi_usd_balance?: Record; - cfm_usd_balance?: Record; - total_open_orders_hold_amount?: Record; - unrealized_pnl?: Record; - daily_realized_pnl?: Record; - initial_margin?: Record; - available_margin?: Record; - liquidation_threshold?: Record; - liquidation_buffer_amount?: Record; - liquidation_buffer_percentage?: string; - intraday_margin_window_measure?: Record; - overnight_margin_window_measure?: Record; -}; - -export type FCMPosition = { - product_id?: string; - expiration_time?: Record; - side?: Record; - number_of_contracts?: string; - current_price?: string; - avg_entry_price?: string; - unrealized_pnl?: string; - daily_realized_pnl?: string; -}; - -export type FCMSweep = { - id: string; - requested_amount: Record; - should_sweep_all: boolean; - status: Record; - schedule_time: Record; -}; - -export type CancelOrderObject = { - success: boolean; - failure_reason: Record; - order_id: string; -}; - -export type Order = { - order_id: string; - product_id: string; - user_id: string; - order_configuration: OrderConfiguration; - side: OrderSide; - client_order_id: string; - status: Record; - time_in_force?: Record; - created_time: Record; - completion_percentage: string; - filled_size?: string; - average_filled_price: string; - fee?: string; - number_of_fills: string; - filled_value?: string; - pending_cancel: boolean; - size_in_quote: boolean; - total_fees: string; - size_inclusive_of_fees: boolean; - total_value_after_fees: string; - trigger_status?: Record; - order_type?: Record; - reject_reason?: Record; - settled?: boolean; - product_type?: ProductType; - reject_message?: string; - cancel_message?: string; - order_placement_source?: OrderPlacementSource; - outstanding_hold_amount?: string; - is_liquidation?: boolean; - last_fill_time?: Record; - edit_history?: Record[]; - leverage?: string; - margin_type?: MarginType; - retail_portfolio_id?: string; - originating_order_id?: string; - attached_order_id?: string; -}; - -export type PaymentMethod = { - id?: string; - type?: string; - name?: string; - currency?: string; - verified?: boolean; - allow_buy?: boolean; - allow_sell?: boolean; - allow_deposit?: boolean; - allow_withdraw?: boolean; - created_at?: string; - updated_at?: string; -}; - -export type PerpetualPortfolio = { - portfolio_uuid?: string; - collateral?: string; - position_notional?: string; - open_position_notional?: string; - pending_fees?: string; - borrow?: string; - accrued_interest?: string; - rolling_debt?: string; - portfolio_initial_margin?: string; - portfolio_im_notional?: Record; - liquidation_percentage?: string; - liquidation_buffer?: string; - margin_type?: Record; - margin_flags?: Record; - liquidation_status?: Record; - unrealized_pnl?: Record; - total_balance?: Record; -}; - -export type PortfolioSummary = { - unrealized_pnl?: Record; - buying_power?: Record; - total_balance?: Record; - max_withdrawal_amount?: Record; -}; - -export type PositionSummary = { - aggregated_pnl?: Record; -}; - -export type Position = { - product_id?: string; - product_uuid?: string; - portfolio_uuid?: string; - symbol?: string; - vwap?: Record; - entry_vwap?: Record; - position_side?: Record; - margin_type?: Record; - net_size?: string; - buy_order_size?: string; - sell_order_size?: string; - im_contribution?: string; - unrealized_pnl?: Record; - mark_price?: Record; - liquidation_price?: Record; - leverage?: string; - im_notional?: Record; - mm_notional?: Record; - position_notional?: Record; - aggregated_pnl?: Record; -}; - -export type Balance = { - asset: Record; - quantity: string; - hold: string; - transfer_hold: string; - collateral_value: string; - collateral_weight: string; - max_withdraw_amount: string; - loan: string; - loan_collateral_requirement_usd: string; - pledged_quantity: string; -}; - -export type Portfolio = { - name?: string; - uuid?: string; - type?: string; -}; - -export type PortfolioBreakdown = { - portfolio?: Portfolio; - portfolio_balances?: Record; - spot_positions?: Record[]; - perp_positions?: Record[]; - futures_positions?: Record[]; -}; - -export type PriceBook = { - product_id: string; - bids: Record[]; - asks: Record[]; - time?: Record; -}; - -export type Products = { - products?: Product[]; - num_products?: number; -}; - -export type Product = { - product_id: string; - price: string; - price_percentage_change_24h: string; - volume_24h: string; - volume_percentage_change_24h: string; - base_increment: string; - quote_increment: string; - quote_min_size: string; - quote_max_size: string; - base_min_size: string; - base_max_size: string; - base_name: string; - quote_name: string; - watched: boolean; - is_disabled: boolean; - new: boolean; - status: string; - cancel_only: boolean; - limit_only: boolean; - post_only: boolean; - trading_disabled: boolean; - auction_mode: boolean; - product_type?: ProductType; - quote_currency_id?: string; - base_currency_id?: string; - fcm_trading_session_details?: Record; - mid_market_price?: string; - alias?: string; - alias_to?: string[]; - base_display_symbol: string; - quote_display_symbol?: string; - view_only?: boolean; - price_increment?: string; - display_name?: string; - product_venue?: ProductVenue; - approximate_quote_24h_volume?: string; - future_product_details?: Record; -}; - -export type Candles = { - candles?: Candle[]; -}; - -export type Candle = { - start?: string; - low?: string; - high?: string; - open?: string; - close?: string; - volume?: string; -}; - -export type HistoricalMarketTrade = { - trade_id?: string; - product_id?: string; - price?: string; - size?: string; - time?: string; - side?: OrderSide; -}; - -export type PortfolioBalance = { - portfolio_uuid?: string; - balances?: Balance[]; - is_margin_limit_reached?: boolean; -}; diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/converts-types.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/converts-types.ts deleted file mode 100644 index edda3d9f7a00a..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/converts-types.ts +++ /dev/null @@ -1,42 +0,0 @@ -// Create Convert Quote -import type { RatConvertTrade, TradeIncentiveMetadata } from './common-types'; - -export type CreateConvertQuoteRequest = { - // Body Params - fromAccount: string; - toAccount: string; - amount: string; - tradeIncentiveMetadata?: TradeIncentiveMetadata; -}; - -export type CreateConvertQuoteResponse = { - trade?: RatConvertTrade; -}; - -// Get Convert Trade -export type GetConvertTradeRequest = { - // Path Params - tradeId: string; - - //Query Params - fromAccount: string; - toAccount: string; -}; - -export type GetConvertTradeResponse = { - trade?: RatConvertTrade; -}; - -// Commit Convert Trade -export type CommitConvertTradeRequest = { - // Path Params - tradeId: string; - - // Body Params - fromAccount: string; - toAccount: string; -}; - -export type CommitConvertTradeResponse = { - trade?: RatConvertTrade; -}; diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/dataAPI-types.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/dataAPI-types.ts deleted file mode 100644 index 6e1eaecfb4fb0..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/dataAPI-types.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { PortfolioType } from './common-types'; - -// Get API Key Permissions -export type GetAPIKeyPermissionsResponse = { - can_view?: boolean; - can_trade?: boolean; - can_transfer?: boolean; - portfolio_uuid?: string; - portfolio_type?: PortfolioType; -}; diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/fees-types.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/fees-types.ts deleted file mode 100644 index a816954666747..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/fees-types.ts +++ /dev/null @@ -1,23 +0,0 @@ -import type { ContractExpiryType, ProductType, ProductVenue } from './common-types'; - -// Get Transactions Summary -export type GetTransactionsSummaryRequest = { - // Query Params - productType?: ProductType; - contractExpiryType?: ContractExpiryType; - productVenue?: ProductVenue; -}; - -export type GetTransactionsSummaryResponse = { - total_volume: number; - total_fees: number; - fee_tier: Record; - margin_rate?: Record; - goods_and_services_tax?: Record; - advanced_trade_only_volumes?: number; - advanced_trade_only_fees?: number; - coinbase_pro_volume?: number; // deprecated - coinbase_pro_fees?: number; // deprecated - total_balance?: string; - has_promo_fee?: boolean; -}; diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/futures-types.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/futures-types.ts deleted file mode 100644 index 65412b8101948..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/futures-types.ts +++ /dev/null @@ -1,71 +0,0 @@ -import type { - FCMBalanceSummary, - FCMPosition, - FCMSweep, - IntradayMarginSetting, -} from './common-types'; - -// Get Futures Balance Summary -export type GetFuturesBalanceSummaryResponse = { - balance_summary?: FCMBalanceSummary; -}; - -// Get Intraday Margin Setting -export type GetIntradayMarginSettingResponse = { - setting?: IntradayMarginSetting; -}; - -// Set Intraday Margin Setting -export type SetIntradayMarginSettingRequest = { - // Body Params - setting?: IntradayMarginSetting; -}; - -export type SetIntradayMarginSettingResponse = Record; - -// Get Current Margin Window -export type GetCurrentMarginWindowRequest = { - // Query Params - marginProfileType?: string; -}; - -export type GetCurrentMarginWindowResponse = { - margin_window?: Record; - is_intraday_margin_killswitch_enabled?: boolean; - is_intraday_margin_enrollment_killswitch_enabled?: boolean; -}; - -// List Futures Positions -export type ListFuturesPositionsResponse = { - positions?: FCMPosition[]; -}; - -// Get Futures Position -export type GetFuturesPositionRequest = { - // Path Params - productId: string; -}; - -export type GetFuturesPositionResponse = { - position?: FCMPosition; -}; - -// Schedule Futures Sweep -export type ScheduleFuturesSweepRequest = { - // Body Params - usdAmount?: string; -}; - -export type ScheduleFuturesSweepResponse = { - success?: boolean; -}; - -// List Futures Sweeps -export type ListFuturesSweepsResponse = { - sweeps: FCMSweep[]; -}; - -// Cancel Pending Futures Sweep = { -export type CancelPendingFuturesSweep = { - success?: boolean; -}; diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/orders-types.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/orders-types.ts deleted file mode 100644 index 501b81aedc291..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/orders-types.ts +++ /dev/null @@ -1,185 +0,0 @@ -import type { - CancelOrderObject, - ContractExpiryType, - MarginType, - Order, - OrderConfiguration, - OrderPlacementSource, - OrderSide, - ProductType, - SortBy, -} from './common-types'; - -// Create Order -export type CreateOrderRequest = { - // Body Params - clientOrderId: string; - productId: string; - side: OrderSide; - orderConfiguration: OrderConfiguration; - selfTradePreventionId?: string; - leverage?: string; - marginType?: MarginType; - retailPortfolioId?: string; -}; - -export type CreateOrderResponse = { - success: boolean; - failure_reason?: Record; // deprecated - order_id?: string; // deprecated - response?: - | { success_response: Record } - | { error_response: Record }; - order_configuration?: OrderConfiguration; -}; - -// Cancel Orders -export type CancelOrdersRequest = { - // Body Params - orderIds: string[]; -}; - -export type CancelOrdersResponse = { - results?: CancelOrderObject[]; -}; - -// Edit Order -export type EditOrderRequest = { - // Body Params - orderId: string; - price?: string; - size?: string; -}; - -export type EditOrderResponse = { - success: boolean; - response?: - | { success_response: Record } // deprecated - | { error_response: Record }; // deprecated - errors?: Record[]; -}; - -// Edit Order Preview -export type EditOrderPreviewRequest = { - // Body Params - orderId: string; - price?: string; - size?: string; -}; - -export type EditOrderPreviewResponse = { - errors: Record[]; - slippage?: string; - order_total?: string; - commission_total?: string; - quote_size?: string; - base_size?: string; - best_bid?: string; - average_filled_price?: string; -}; - -// List Orders -export type ListOrdersRequest = { - // Query Params - orderIds?: string[]; - productIds?: string[]; - orderStatus?: string[]; - limit?: number; - startDate?: string; - endDate?: string; - orderType?: string; - orderSide?: OrderSide; - cursor?: string; - productType?: ProductType; - orderPlacementSource?: OrderPlacementSource; - contractExpiryType?: ContractExpiryType; - assetFilters?: string[]; - retailPortfolioId?: string; - timeInForces?: string; - sortBy?: SortBy; -}; - -export type ListOrdersResponse = { - orders: Order[]; - sequence?: number; // deprecated - has_next: boolean; - cursor?: string; -}; - -// List Fills -export type ListFillsRequest = { - // Query Params - orderIds?: string[]; - tradeIds?: string[]; - productIds?: string[]; - startSequenceTimestamp?: string; - endSequenceTimestamp?: string; - retailPortfolioId?: string; - limit?: number; - cursor?: string; - sortBy?: SortBy; -}; - -export type ListFillsResponse = { - fills?: Record[]; - cursor?: string; -}; - -// Get Order -export type GetOrderRequest = { - // Path Params - orderId: string; -}; - -export type GetOrderResponse = { - order?: Order; -}; - -// Preview Order -export type PreviewOrderRequest = { - // Body Params - productId: string; - side: OrderSide; - orderConfiguration: OrderConfiguration; - leverage?: string; - marginType?: MarginType; - retailPortfolioId?: string; -}; - -export type PreviewOrderResponse = { - order_total: string; - commission_total: string; - errs: Record[]; - warning: Record[]; - quote_size: string; - base_size: string; - best_bid: string; - best_ask: string; - is_max: boolean; - order_margin_total?: string; - leverage?: string; - long_leverage?: string; - short_leverage?: string; - slippage?: string; - preview_id?: string; - current_liquidation_buffer?: string; - projected_liquidation_buffer?: string; - max_leverage?: string; - pnl_configuration?: Record; -}; - -// Close Position -export type ClosePositionRequest = { - // Body Params - clientOrderId: string; - productId: string; - size?: string; -}; - -export type ClosePositionResponse = { - success: boolean; - response?: - | { success_response: Record } - | { error_response: Record }; - order_configuration?: OrderConfiguration; -}; diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/payments-types.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/payments-types.ts deleted file mode 100644 index e85cd9f63a45d..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/payments-types.ts +++ /dev/null @@ -1,16 +0,0 @@ -import type { PaymentMethod } from './common-types'; - -// List Payment Methods -export type ListPaymentMethodsResponse = { - paymentMethods?: PaymentMethod; -}; - -// Get Payment Method -export type GetPaymentMethodRequest = { - // Path Params - paymentMethodId: string; -}; - -export type GetPaymentMethodResponse = { - paymentMethod?: PaymentMethod; -}; diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/perpetuals-types.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/perpetuals-types.ts deleted file mode 100644 index 045b494ce9950..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/perpetuals-types.ts +++ /dev/null @@ -1,72 +0,0 @@ -import type { - PerpetualPortfolio, - PortfolioBalance, - PortfolioSummary, - Position, - PositionSummary, -} from './common-types'; - -// Allocate Portfolio -export type AllocatePortfolioRequest = { - // Body Params - portfolioUuid: string; - symbol: string; - amount: string; - currency: string; -}; - -export type AllocatePortfolioResponse = Record; - -// Get Perpetuals Portfolio Summary -export type GetPerpetualsPortfolioSummaryRequest = { - // Path Params - portfolioUuid: string; -}; - -export type GetPerpetualsPortfolioSummaryResponse = { - portfolios?: PerpetualPortfolio[]; - summary?: PortfolioSummary; -}; - -// List Perpetuals Positions -export type ListPerpetualsPositionsRequest = { - // Path Params - portfolioUuid: string; -}; - -export type ListPerpetualsPositionsResponse = { - positions?: Position[]; - summary?: PositionSummary; -}; - -// Get Perpetuals Position -export type GetPerpetualsPositionRequest = { - // Path Params - portfolioUuid: string; - symbol: string; -}; - -export type GetPerpetualsPositionResponse = { - position?: Position; -}; - -// Get Portfolio Balances -export type GetPortfolioBalancesRequest = { - // Path Params - portfolioUuid: string; -}; - -export type GetPortfolioBalancesResponse = { - portfolio_balancces?: PortfolioBalance[]; -}; - -// Opt In or Out of Multi Asset Collateral -export type OptInOutMultiAssetCollateralRequest = { - // Body Params - portfolioUuid?: string; - multiAssetCollateralEnabled?: boolean; -}; - -export type OptInOutMultiAssetCollateralResponse = { - cross_collateral_enabled?: boolean; -}; diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/portfolios-types.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/portfolios-types.ts deleted file mode 100644 index 38a997f998c19..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/portfolios-types.ts +++ /dev/null @@ -1,68 +0,0 @@ -import type { Portfolio, PortfolioBreakdown, PortfolioType } from './common-types'; - -// List Portfolios -export type ListPortfoliosRequest = { - // Query Params - portfolioType?: PortfolioType; -}; - -export type ListPortfoliosResponse = { - portfolios?: Portfolio[]; -}; - -// Create Portfolio -export type CreatePortfolioRequest = { - // Body Params - name: string; -}; - -export type CreatePortfolioResponse = { - portfolio?: Portfolio; -}; - -// Move Portfolio Funds -export type MovePortfolioFundsRequest = { - // Body Params - funds: Record; - sourcePortfolioUuid: string; - targetPortfolioUuid: string; -}; - -export type MovePortfolioFundsResponse = { - source_portfolio_uuid?: string; - target_portfolio_uuid?: string; -}; - -// Get Portfolio Breakdown -export type GetPortfolioBreakdownRequest = { - // Path Params - portfolioUuid: string; - - // Query Params - currency?: string; -}; - -export type GetPortfolioBreakdownResponse = { - breakdown?: PortfolioBreakdown; -}; - -// Delete Portfolio -export type DeletePortfolioRequest = { - // Path Params - portfolioUuid: string; -}; - -export type DeletePortfolioResponse = Record; - -// Edit Portfolio -export type EditPortfolioRequest = { - // Path Params - portfolioUuid: string; - - // Body Params - name: string; -}; - -export type EditPortfolioResponse = { - portfolio?: Portfolio; -}; diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/products-types.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/products-types.ts deleted file mode 100644 index 5123bf2e263d0..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/products-types.ts +++ /dev/null @@ -1,96 +0,0 @@ -import type { - Candles, - ContractExpiryType, - ExpiringContractStatus, - Granularity, - HistoricalMarketTrade, - PriceBook, - Product, - Products, - ProductType, -} from './common-types'; - -// Get Best Bid Ask -export type GetBestBidAskRequest = { - // Query Params - productIds?: string[]; -}; - -export type GetBestBidAskResponse = { - pricebooks: PriceBook[]; -}; - -// Get Product Book -export type GetProductBookRequest = { - // Query Params - productId: string; - limit?: number; - aggregationPriceIncrement?: number; -}; - -export type GetProductBookResponse = { - pricebook: PriceBook; -}; - -// List Products -export type ListProductsRequest = { - // Query Params - limit?: number; - offset?: number; - productType?: ProductType; - productIds?: string[]; - contractExpiryType?: ContractExpiryType; - expiringContractStatus?: ExpiringContractStatus; - getTradabilityStatus?: boolean; - getAllProducts?: boolean; -}; - -export type ListProductsResponse = { - body?: Products; -}; - -// Get Product -export type GetProductRequest = { - // Path Params - productId: string; - - // Query Params - getTradabilityStatus?: boolean; -}; - -export type GetProductResponse = { - body?: Product; -}; - -// Get Product Candles -export type GetProductCandlesRequest = { - // Path Params - productId: string; - - // Query Params - start: string; - end: string; - granularity: Granularity; - limit?: number; -}; - -export type GetProductCandlesResponse = { - body?: Candles; -}; - -// Get Market Trades -export type GetMarketTradesRequest = { - // Path Params - productId: string; - - // Query Params - limit: number; - start?: string; - end?: string; -}; - -export type GetMarketTradesResponse = { - trades?: HistoricalMarketTrade[]; - best_bid?: string; - best_ask?: string; -}; diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/public-types.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/public-types.ts deleted file mode 100644 index 0593a8a5e2ecb..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/public-types.ts +++ /dev/null @@ -1,88 +0,0 @@ -import type { - Candles, - ContractExpiryType, - ExpiringContractStatus, - HistoricalMarketTrade, - PriceBook, - Product, - Products, - ProductType, -} from './common-types'; - -// Get Server Time -export type GetServerTimeResponse = { - iso?: string; - epochSeconds?: number; - epochMillis?: number; -}; - -// Get Public Product Book -export type GetPublicProductBookRequest = { - // Query Params - productId: string; - limit?: number; - aggregationPriceIncrement?: number; -}; - -export type GetPublicProductBookResponse = { - pricebook: PriceBook; -}; - -// List Public Products -export type ListPublicProductsRequest = { - // Query Params - limit?: number; - offset?: number; - productType?: ProductType; - productIds?: string[]; - contractExpiryType?: ContractExpiryType; - expiringContractStatus?: ExpiringContractStatus; - getAllProducts?: boolean; -}; - -export type ListPublicProductsResponse = { - body?: Products; -}; - -// Get Public Product -export type GetPublicProductRequest = { - // Path Params - productId: string; -}; - -export type GetPublicProductResponse = { - body?: Product; -}; - -//Get Public Product Candles -export type GetPublicProductCandlesRequest = { - // Path Params - productId: string; - - // Query Params - start: string; - end: string; - granularity: string; - limit?: number; -}; - -export type GetPublicProductCandlesResponse = { - body?: Candles; -}; - -// Get Public Market Trades -export type GetPublicMarketTradesRequest = { - // Path Params - productId: string; - - // Query Params - limit: number; - start?: string; - end?: string; -}; - -export type GetPublicMarketTradesResponse = { - trades?: HistoricalMarketTrade[]; - best_bid?: string; - best_ask?: string; -}; diff --git a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/request-types.ts b/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/request-types.ts deleted file mode 100644 index 9b0dddbe08fbb..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/src/rest/types/request-types.ts +++ /dev/null @@ -1,14 +0,0 @@ -export enum method { - GET = 'GET', - POST = 'POST', - PUT = 'PUT', - DELETE = 'DELETE', -} - -export interface RequestOptions { - method: method; - endpoint: string; - queryParams?: Record; - bodyParams?: Record; - isPublic: boolean; -} diff --git a/packages/plugin-coinbase/advanced-sdk-ts/tsconfig.json b/packages/plugin-coinbase/advanced-sdk-ts/tsconfig.json deleted file mode 100644 index c769329338e8c..0000000000000 --- a/packages/plugin-coinbase/advanced-sdk-ts/tsconfig.json +++ /dev/null @@ -1,102 +0,0 @@ -{ - "compilerOptions": { - /* Visit https://aka.ms/tsconfig to read more about this file */ - /* Projects */ - // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ - // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ - // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ - // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ - // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ - // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ - /* Language and Environment */ - "target": "es6" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, - // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ - // "jsx": "preserve", /* Specify what JSX code is generated. */ - // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ - // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ - // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ - // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ - // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ - // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ - // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ - // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ - // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ - /* Modules */ - "module": "commonjs" /* Specify what module code is generated. */, - "rootDir": "./src" /* Specify the root folder within your source files. */, - // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ - // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ - // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ - // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ - // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ - // "types": [], /* Specify type package names to be included without being referenced in a source file. */ - // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ - // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ - // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ - // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ - // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ - // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ - // "resolveJsonModule": true, /* Enable importing .json files. */ - // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ - // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ - /* JavaScript Support */ - // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ - // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ - // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ - /* Emit */ - // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ - // "declarationMap": true, /* Create sourcemaps for d.ts files. */ - // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ - // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ - // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ - // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ - "outDir": "./dist" /* Specify an output folder for all emitted files. */, - // "removeComments": true, /* Disable emitting comments. */ - // "noEmit": true, /* Disable emitting files from a compilation. */ - // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ - // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ - // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ - // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ - // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ - // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ - // "newLine": "crlf", /* Set the newline character for emitting files. */ - // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ - // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ - // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ - // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ - // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ - /* Interop Constraints */ - // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ - // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ - // "isolatedDeclarations": true, /* Require sufficient annotation on exports so other tools can trivially generate declaration files. */ - // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ - "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */, - // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ - "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, - /* Type Checking */ - "strict": true /* Enable all strict type-checking options. */, - // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ - // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ - // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ - // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ - // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ - // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ - // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ - // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ - // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ - // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ - // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ - // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ - // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ - // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ - // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ - // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ - // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ - // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ - /* Completeness */ - // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ - "skipLibCheck": true /* Skip type checking all .d.ts files. */ - }, - "include": ["src/**/*.ts"], // Include all .ts files in the src directory and subdirectories - "exclude": ["node_modules"] -} diff --git a/packages/plugin-coinbase/package.json b/packages/plugin-coinbase/package.json deleted file mode 100644 index b89ee23c61c49..0000000000000 --- a/packages/plugin-coinbase/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "@elizaos/plugin-coinbase", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "@types/jsonwebtoken": "^9.0.7", - "coinbase-advanced-sdk": "file:../../packages/plugin-coinbase/advanced-sdk-ts", - "coinbase-api": "1.0.5", - "jsonwebtoken": "^9.0.2", - "node-fetch": "^2.6.1" - }, - "devDependencies": { - "@types/node": "^20.0.0", - "tsup": "8.3.5", - "vitest": "^3.0.0" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "test:watch": "vitest" - } -} diff --git a/packages/plugin-coinbase/src/constants.ts b/packages/plugin-coinbase/src/constants.ts deleted file mode 100644 index d9c09e529f3f7..0000000000000 --- a/packages/plugin-coinbase/src/constants.ts +++ /dev/null @@ -1,224 +0,0 @@ -export const ABI = [ - { - inputs: [], - name: "name", - outputs: [ - { - name: "", - type: "string", - internalType: "string", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - name: "spender", - type: "address", - internalType: "address", - }, - { - name: "amount", - type: "uint256", - internalType: "uint256", - }, - ], - name: "approve", - outputs: [ - { - name: "", - type: "bool", - internalType: "bool", - }, - ], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [], - name: "totalSupply", - outputs: [ - { - name: "", - type: "uint256", - internalType: "uint256", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - name: "from", - type: "address", - internalType: "address", - }, - { - name: "to", - type: "address", - internalType: "address", - }, - { - name: "amount", - type: "uint256", - internalType: "uint256", - }, - ], - name: "transferFrom", - outputs: [ - { - name: "", - type: "bool", - internalType: "bool", - }, - ], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [], - name: "decimals", - outputs: [ - { - name: "", - type: "uint8", - internalType: "uint8", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - name: "account", - type: "address", - internalType: "address", - }, - ], - name: "balanceOf", - outputs: [ - { - name: "", - type: "uint256", - internalType: "uint256", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "symbol", - outputs: [ - { - name: "", - type: "string", - internalType: "string", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - name: "to", - type: "address", - internalType: "address", - }, - { - name: "amount", - type: "uint256", - internalType: "uint256", - }, - ], - name: "transfer", - outputs: [ - { - name: "", - type: "bool", - internalType: "bool", - }, - ], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { - name: "owner", - type: "address", - internalType: "address", - }, - { - name: "spender", - type: "address", - internalType: "address", - }, - ], - name: "allowance", - outputs: [ - { - name: "", - type: "uint256", - internalType: "uint256", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - indexed: true, - name: "owner", - type: "address", - internalType: "address", - }, - { - indexed: true, - name: "spender", - type: "address", - internalType: "address", - }, - { - indexed: false, - name: "value", - type: "uint256", - internalType: "uint256", - }, - ], - name: "Approval", - type: "event", - anonymous: false, - }, - { - inputs: [ - { - indexed: true, - name: "from", - type: "address", - internalType: "address", - }, - { - indexed: true, - name: "to", - type: "address", - internalType: "address", - }, - { - indexed: false, - name: "value", - type: "uint256", - internalType: "uint256", - }, - ], - name: "Transfer", - type: "event", - anonymous: false, - }, -]; diff --git a/packages/plugin-coinbase/src/index.ts b/packages/plugin-coinbase/src/index.ts deleted file mode 100644 index 69d4c8382640f..0000000000000 --- a/packages/plugin-coinbase/src/index.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { coinbaseMassPaymentsPlugin } from "./plugins/massPayments"; -import { coinbaseCommercePlugin } from "./plugins/commerce"; -import { tradePlugin } from "./plugins/trade"; -import { tokenContractPlugin } from "./plugins/tokenContract"; -import { webhookPlugin } from "./plugins/webhooks"; -import { advancedTradePlugin } from "./plugins/advancedTrade"; - -export const plugins = { - coinbaseMassPaymentsPlugin, - coinbaseCommercePlugin, - tradePlugin, - tokenContractPlugin, - webhookPlugin, - advancedTradePlugin, -}; - -export * from "./plugins/massPayments"; -export * from "./plugins/commerce"; -export * from "./plugins/trade"; -export * from "./plugins/tokenContract"; -export * from "./plugins/webhooks"; -export * from "./plugins/advancedTrade"; diff --git a/packages/plugin-coinbase/src/plugins/advancedTrade.ts b/packages/plugin-coinbase/src/plugins/advancedTrade.ts deleted file mode 100644 index b2f0bf8e8e5de..0000000000000 --- a/packages/plugin-coinbase/src/plugins/advancedTrade.ts +++ /dev/null @@ -1,445 +0,0 @@ -import { RESTClient } from "../../advanced-sdk-ts/src/rest"; -import { - type Action, - type Plugin, - elizaLogger, - type IAgentRuntime, - type Memory, - type HandlerCallback, - type State, - composeContext, - generateObject, - ModelClass, - type Provider, -} from "@elizaos/core"; -import { advancedTradeTemplate } from "../templates"; -import { isAdvancedTradeContent, AdvancedTradeSchema } from "../types"; -import { readFile } from "fs/promises"; -import { parse } from "csv-parse/sync"; -import path from "path"; -import { fileURLToPath } from "url"; -import fs from "fs"; -import { createArrayCsvWriter } from "csv-writer"; -import { - OrderSide, - type OrderConfiguration, -} from "../../advanced-sdk-ts/src/rest/types/common-types"; -import type { CreateOrderResponse } from "../../advanced-sdk-ts/src/rest/types/orders-types"; - -// File path setup remains the same -const __filename = fileURLToPath(import.meta.url); -const __dirname = path.dirname(__filename); -const baseDir = path.resolve(__dirname, "../../plugin-coinbase/src/plugins"); -const tradeCsvFilePath = path.join(baseDir, "advanced_trades.csv"); - -const tradeProvider: Provider = { - get: async (runtime: IAgentRuntime, _message: Memory) => { - elizaLogger.debug("Starting tradeProvider function"); - try { - const client = new RESTClient( - runtime.getSetting("COINBASE_API_KEY") ?? - process.env.COINBASE_API_KEY, - runtime.getSetting("COINBASE_PRIVATE_KEY") ?? - process.env.COINBASE_PRIVATE_KEY - ); - - // Get accounts and products information - let accounts, products; - try { - accounts = await client.listAccounts({}); - } catch (error) { - elizaLogger.error("Error fetching accounts:", error); - return []; - } - - try { - products = await client.listProducts({}); - } catch (error) { - elizaLogger.error("Error fetching products:", error); - return []; - } - - // Read CSV file logic remains the same - if (!fs.existsSync(tradeCsvFilePath)) { - const csvWriter = createArrayCsvWriter({ - path: tradeCsvFilePath, - header: [ - "Order ID", - "Success", - "Order Configuration", - "Response", - ], - }); - await csvWriter.writeRecords([]); - } - - let csvData, records; - try { - csvData = await readFile(tradeCsvFilePath, "utf-8"); - } catch (error) { - elizaLogger.error("Error reading CSV file:", error); - return []; - } - - try { - records = parse(csvData, { - columns: true, - skip_empty_lines: true, - }); - } catch (error) { - elizaLogger.error("Error parsing CSV data:", error); - return []; - } - - return { - accounts: accounts.accounts, - products: products.products, - trades: records, - }; - } catch (error) { - elizaLogger.error("Error in tradeProvider:", error); - return []; - } - }, -}; - -export async function appendTradeToCsv(tradeResult: any) { - elizaLogger.debug("Starting appendTradeToCsv function"); - try { - const csvWriter = createArrayCsvWriter({ - path: tradeCsvFilePath, - header: ["Order ID", "Success", "Order Configuration", "Response"], - append: true, - }); - elizaLogger.info("Trade result:", tradeResult); - - // Format trade data based on success/failure - const formattedTrade = [ - tradeResult.success_response?.order_id || - tradeResult.failure_response?.order_id || - "", - tradeResult.success, - // JSON.stringify(tradeResult.order_configuration || {}), - // JSON.stringify(tradeResult.success_response || tradeResult.failure_response || {}) - ]; - - elizaLogger.info("Formatted trade for CSV:", formattedTrade); - await csvWriter.writeRecords([formattedTrade]); - elizaLogger.info("Trade written to CSV successfully"); - } catch (error) { - elizaLogger.error("Error writing trade to CSV:", error); - // Log the actual error for debugging - if (error instanceof Error) { - elizaLogger.error("Error details:", error.message); - } - } -} - -async function hasEnoughBalance( - client: RESTClient, - currency: string, - amount: number, - side: string -): Promise { - elizaLogger.debug("Starting hasEnoughBalance function"); - try { - const response = await client.listAccounts({}); - const accounts = JSON.parse(response); - elizaLogger.info("Accounts:", accounts); - const checkCurrency = side === "BUY" ? "USD" : currency; - elizaLogger.info( - `Checking balance for ${side} order of ${amount} ${checkCurrency}` - ); - - // Find account with exact currency match - const account = accounts?.accounts.find( - (acc) => - acc.currency === checkCurrency && - (checkCurrency === "USD" - ? acc.type === "ACCOUNT_TYPE_FIAT" - : acc.type === "ACCOUNT_TYPE_CRYPTO") - ); - - if (!account) { - elizaLogger.error(`No ${checkCurrency} account found`); - return false; - } - - const available = Number.parseFloat(account.available_balance.value); - // Add buffer for fees only on USD purchases - const requiredAmount = side === "BUY" ? amount * 1.01 : amount; - elizaLogger.info( - `Required amount (including buffer): ${requiredAmount} ${checkCurrency}` - ); - - const hasBalance = available >= requiredAmount; - elizaLogger.info(`Has sufficient balance: ${hasBalance}`); - - return hasBalance; - } catch (error) { - elizaLogger.error("Balance check failed with error:", { - error: error instanceof Error ? error.message : "Unknown error", - currency, - amount, - side, - }); - return false; - } -} - -export const executeAdvancedTradeAction: Action = { - name: "EXECUTE_ADVANCED_TRADE", - description: "Execute a trade using Coinbase Advanced Trading API", - validate: async (runtime: IAgentRuntime) => { - return ( - !!( - runtime.getSetting("COINBASE_API_KEY") || - process.env.COINBASE_API_KEY - ) && - !!( - runtime.getSetting("COINBASE_PRIVATE_KEY") || - process.env.COINBASE_PRIVATE_KEY - ) - ); - }, - similes: [ - "EXECUTE_ADVANCED_TRADE", - "ADVANCED_MARKET_ORDER", - "ADVANCED_LIMIT_ORDER", - "COINBASE_PRO_TRADE", - "PROFESSIONAL_TRADE", - ], - handler: async ( - runtime: IAgentRuntime, - _message: Memory, - state: State, - _options: any, - callback: HandlerCallback - ) => { - let client: RESTClient; - - // Initialize client - elizaLogger.debug("Starting advanced trade client initialization"); - try { - client = new RESTClient( - runtime.getSetting("COINBASE_API_KEY") ?? - process.env.COINBASE_API_KEY, - runtime.getSetting("COINBASE_PRIVATE_KEY") ?? - process.env.COINBASE_PRIVATE_KEY - ); - elizaLogger.info("Advanced trade client initialized"); - } catch (error) { - elizaLogger.error("Client initialization failed:", error); - callback( - { - text: "Failed to initialize trading client. Please check your API credentials.", - }, - [] - ); - return; - } - - // Generate trade details - let tradeDetails; - elizaLogger.debug("Starting trade details generation"); - try { - tradeDetails = await generateObject({ - runtime, - context: composeContext({ - state, - template: advancedTradeTemplate, - }), - modelClass: ModelClass.LARGE, - schema: AdvancedTradeSchema, - }); - elizaLogger.info("Trade details generated:", tradeDetails.object); - } catch (error) { - elizaLogger.error("Trade details generation failed:", error); - callback( - { - text: "Failed to generate trade details. Please provide valid trading parameters.", - }, - [] - ); - return; - } - - // Validate trade content - if (!isAdvancedTradeContent(tradeDetails.object)) { - elizaLogger.error("Invalid trade content:", tradeDetails.object); - callback( - { - text: "Invalid trade details. Please check your input parameters.", - }, - [] - ); - return; - } - - const { productId, amount, side, orderType, limitPrice } = - tradeDetails.object; - - // Configure order - let orderConfiguration: OrderConfiguration; - elizaLogger.debug("Starting order configuration"); - try { - if (orderType === "MARKET") { - orderConfiguration = - side === "BUY" - ? { - market_market_ioc: { - quote_size: amount.toString(), - }, - } - : { - market_market_ioc: { - base_size: amount.toString(), - }, - }; - } else { - if (!limitPrice) { - throw new Error("Limit price is required for limit orders"); - } - orderConfiguration = { - limit_limit_gtc: { - baseSize: amount.toString(), - limitPrice: limitPrice.toString(), - postOnly: false, - }, - }; - } - elizaLogger.info( - "Order configuration created:", - orderConfiguration - ); - } catch (error) { - elizaLogger.error("Order configuration failed:", error); - callback( - { - text: - error instanceof Error - ? error.message - : "Failed to configure order parameters.", - }, - [] - ); - return; - } - - // Execute trade - let order: CreateOrderResponse; - try { - elizaLogger.debug("Executing the trade"); - if ( - !(await hasEnoughBalance( - client, - productId.split("-")[0], - amount, - side - )) - ) { - callback( - { - text: `Insufficient ${side === "BUY" ? "USD" : productId.split("-")[0]} balance to execute this trade`, - }, - [] - ); - return; - } - - order = await client.createOrder({ - clientOrderId: crypto.randomUUID(), - productId, - side: side === "BUY" ? OrderSide.BUY : OrderSide.SELL, - orderConfiguration, - }); - - elizaLogger.info("Trade executed successfully:", order); - } catch (error) { - elizaLogger.error("Trade execution failed:", error?.message); - callback( - { - text: `Failed to execute trade: ${error instanceof Error ? error.message : "Unknown error occurred"}`, - }, - [] - ); - return; - } - // Log trade to CSV - try { - // await appendTradeToCsv(order); - elizaLogger.info("Trade logged to CSV"); - } catch (csvError) { - elizaLogger.warn("Failed to log trade to CSV:", csvError); - // Continue execution as this is non-critical - } - - callback( - { - text: `Advanced Trade executed successfully: -- Product: ${productId} -- Type: ${orderType} Order -- Side: ${side} -- Amount: ${amount} -- ${orderType === "LIMIT" ? `- Limit Price: ${limitPrice}\n` : ""}- Order ID: ${order.order_id} -- Status: ${order.success} -- Order Id: ${order.order_id} -- Response: ${JSON.stringify(order.response)} -- Order Configuration: ${JSON.stringify(order.order_configuration)}`, - }, - [] - ); - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Place an advanced market order to buy $1 worth of BTC", - }, - }, - { - user: "{{agentName}}", - content: { - text: `Advanced Trade executed successfully: -- Product: BTC-USD -- Type: Market Order -- Side: BUY -- Amount: 1000 -- Order ID: CB-ADV-12345 -- Success: true -- Response: {"success_response":{}} -- Order Configuration: {"market_market_ioc":{"quote_size":"1000"}}`, - }, - }, - ], - [ - { - user: "{{user1}}", - content: { text: "Set a limit order to sell 0.5 ETH at $2000" }, - }, - { - user: "{{agentName}}", - content: { - text: `Advanced Trade executed successfully: -- Product: ETH-USD -- Type: Limit Order -- Side: SELL -- Amount: 0.5 -- Limit Price: 2000 -- Order ID: CB-ADV-67890 -- Success: true -- Response: {"success_response":{}} -- Order Configuration: {"limit_limit_gtc":{"baseSize":"0.5","limitPrice":"2000","postOnly":false}}`, - }, - }, - ], - ], -}; - -export const advancedTradePlugin: Plugin = { - name: "advancedTradePlugin", - description: "Enables advanced trading using Coinbase Advanced Trading API", - actions: [executeAdvancedTradeAction], - providers: [tradeProvider], -}; diff --git a/packages/plugin-coinbase/src/plugins/commerce.ts b/packages/plugin-coinbase/src/plugins/commerce.ts deleted file mode 100644 index 38ab0875bb4a9..0000000000000 --- a/packages/plugin-coinbase/src/plugins/commerce.ts +++ /dev/null @@ -1,540 +0,0 @@ -import { - composeContext, - elizaLogger, - generateObject, - ModelClass, - type Provider, -} from "@elizaos/core"; -import type { - Action, - HandlerCallback, - IAgentRuntime, - Memory, - Plugin, - State, -} from "@elizaos/core"; -import { type ChargeContent, ChargeSchema, isChargeContent } from "../types"; -import { chargeTemplate, getChargeTemplate } from "../templates"; -import { getWalletDetails } from "../utils"; -import { Coinbase } from "@coinbase/coinbase-sdk"; - -const url = "https://api.commerce.coinbase.com/charges"; -interface ChargeRequest { - name: string; - description: string; - pricing_type: string; - local_price: { - amount: string; - currency: string; - }; -} - -export async function createCharge(apiKey: string, params: ChargeRequest) { - elizaLogger.debug("Starting createCharge function"); - try { - const response = await fetch(url, { - method: "POST", - headers: { - "Content-Type": "application/json", - "X-CC-Api-Key": apiKey, - }, - body: JSON.stringify(params), - }); - - if (!response.ok) { - throw new Error(`Failed to create charge: ${response.statusText}`); - } - - const data = await response.json(); - return data.data; - } catch (error) { - elizaLogger.error("Error creating charge:", error); - throw error; - } -} - -// Function to fetch all charges -export async function getAllCharges(apiKey: string) { - elizaLogger.debug("Starting getAllCharges function"); - try { - const response = await fetch(url, { - method: "GET", - headers: { - "Content-Type": "application/json", - "X-CC-Api-Key": apiKey, - }, - }); - - if (!response.ok) { - throw new Error( - `Failed to fetch all charges: ${response.statusText}` - ); - } - - const data = await response.json(); - return data.data; - } catch (error) { - elizaLogger.error("Error fetching charges:", error); - throw error; - } -} - -// Function to fetch details of a specific charge -export async function getChargeDetails(apiKey: string, chargeId: string) { - elizaLogger.debug("Starting getChargeDetails function"); - const getUrl = `${url}/${chargeId}`; - - try { - const response = await fetch(getUrl, { - method: "GET", - headers: { - "Content-Type": "application/json", - "X-CC-Api-Key": apiKey, - }, - }); - - if (!response.ok) { - throw new Error( - `Failed to fetch charge details: ${response.statusText}` - ); - } - - const data = await response.json(); - return data; - } catch (error) { - elizaLogger.error( - `Error fetching charge details for ID ${chargeId}:`, - error - ); - throw error; - } -} - -export const createCoinbaseChargeAction: Action = { - name: "CREATE_CHARGE", - similes: [ - "MAKE_CHARGE", - "INITIATE_CHARGE", - "GENERATE_CHARGE", - "CREATE_TRANSACTION", - "COINBASE_CHARGE", - "GENERATE_INVOICE", - "CREATE_PAYMENT", - "SETUP_BILLING", - "REQUEST_PAYMENT", - "CREATE_CHECKOUT", - "GET_CHARGE_STATUS", - "LIST_CHARGES", - ], - description: - "Create and manage payment charges using Coinbase Commerce. Supports fixed and dynamic pricing, multiple currencies (USD, EUR, USDC), and provides charge status tracking and management features.", - validate: async (runtime: IAgentRuntime, _message: Memory) => { - const coinbaseCommerceKeyOk = !!runtime.getSetting( - "COINBASE_COMMERCE_KEY" - ); - - // Ensure Coinbase Commerce API key is available - return coinbaseCommerceKeyOk; - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: any, - callback: HandlerCallback - ) => { - elizaLogger.info("Composing state for message:", message); - if (!state) { - state = (await runtime.composeState(message)) as State; - } else { - state = await runtime.updateRecentMessageState(state); - } - - const context = composeContext({ - state, - template: chargeTemplate, - }); - - const chargeDetails = await generateObject({ - runtime, - context, - modelClass: ModelClass.LARGE, - schema: ChargeSchema, - }); - if (!isChargeContent(chargeDetails.object)) { - throw new Error("Invalid content"); - } - const charge = chargeDetails.object as ChargeContent; - if (!charge || !charge.price || !charge.type) { - callback( - { - text: "Invalid charge details provided.", - }, - [] - ); - return; - } - - elizaLogger.info("Charge details received:", chargeDetails); - - // Initialize Coinbase Commerce client - elizaLogger.debug("Starting Coinbase Commerce client initialization"); - try { - // Create a charge - const chargeResponse = await createCharge( - runtime.getSetting("COINBASE_COMMERCE_KEY"), - { - local_price: { - amount: charge.price.toString(), - currency: charge.currency, - }, - pricing_type: charge.type, - name: charge.name, - description: charge.description, - } - ); - - elizaLogger.info( - "Coinbase Commerce charge created:", - chargeResponse - ); - - callback( - { - text: `Charge created successfully: ${chargeResponse.hosted_url}`, - attachments: [ - { - id: chargeResponse.id, - url: chargeResponse.hosted_url, - title: "Coinbase Commerce Charge", - description: `Charge ID: ${chargeResponse.id}`, - text: `Pay here: ${chargeResponse.hosted_url}`, - source: "coinbase", - }, - ], - }, - [] - ); - } catch (error) { - elizaLogger.error( - "Error creating Coinbase Commerce charge:", - error - ); - callback( - { - text: "Failed to create a charge. Please try again.", - }, - [] - ); - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Create a charge for $100 USD for Digital Art NFT with description 'Exclusive digital artwork collection'", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Charge created successfully:\n- Amount: $100 USD\n- Name: Digital Art NFT\n- Description: Exclusive digital artwork collection\n- Type: fixed_price\n- Charge URL: https://commerce.coinbase.com/charges/...", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Set up a dynamic price charge for Premium Membership named 'VIP Access Pass'", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Charge created successfully:\n- Type: dynamic_price\n- Name: VIP Access Pass\n- Description: Premium Membership\n- Charge URL: https://commerce.coinbase.com/charges/...", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Generate a payment request for 50 EUR for Workshop Registration", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Charge created successfully:\n- Amount: 50 EUR\n- Name: Workshop Registration\n- Type: fixed_price\n- Charge URL: https://commerce.coinbase.com/charges/...", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Create an invoice for 1000 USDC for Consulting Services", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Charge created successfully:\n- Amount: 1000 USDC\n- Name: Consulting Services\n- Type: fixed_price\n- Charge URL: https://commerce.coinbase.com/charges/...", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Check the status of charge abc-123-def", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Charge details retrieved:\n- ID: abc-123-def\n- Status: COMPLETED\n- Amount: 100 USD\n- Created: 2024-01-20T10:00:00Z\n- Expires: 2024-01-21T10:00:00Z", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "List all active charges", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Active charges retrieved:\n1. ID: abc-123 - $100 USD - Digital Art NFT\n2. ID: def-456 - 50 EUR - Workshop\n3. ID: ghi-789 - 1000 USDC - Consulting\n\nTotal active charges: 3", - }, - }, - ], - ], -} as Action; - -export const getAllChargesAction: Action = { - name: "GET_ALL_CHARGES", - similes: ["FETCH_ALL_CHARGES", "RETRIEVE_ALL_CHARGES", "LIST_ALL_CHARGES"], - description: "Fetch all charges using Coinbase Commerce.", - validate: async (runtime: IAgentRuntime) => { - const coinbaseCommerceKeyOk = !!runtime.getSetting( - "COINBASE_COMMERCE_KEY" - ); - - // Ensure Coinbase Commerce API key is available - return coinbaseCommerceKeyOk; - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: any, - callback: HandlerCallback - ) => { - try { - elizaLogger.info("Composing state for message:", message); - if (!state) { - state = (await runtime.composeState(message)) as State; - } else { - state = await runtime.updateRecentMessageState(state); - } - const charges = await getAllCharges( - runtime.getSetting("COINBASE_COMMERCE_KEY") - ); - - elizaLogger.info("Fetched all charges:", charges); - - callback( - { - text: `Successfully fetched all charges. Total charges: ${charges.length}`, - attachments: charges, - }, - [] - ); - } catch (error) { - elizaLogger.error("Error fetching all charges:", error); - callback( - { - text: "Failed to fetch all charges. Please try again.", - }, - [] - ); - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { text: "Fetch all charges" }, - }, - { - user: "{{agentName}}", - content: { - text: "Successfully fetched all charges.", - action: "GET_ALL_CHARGES", - }, - }, - ], - ], -} as Action; - -export const getChargeDetailsAction: Action = { - name: "GET_CHARGE_DETAILS", - similes: ["FETCH_CHARGE_DETAILS", "RETRIEVE_CHARGE_DETAILS", "GET_CHARGE"], - description: "Fetch details of a specific charge using Coinbase Commerce.", - validate: async (runtime: IAgentRuntime) => { - const coinbaseCommerceKeyOk = !!runtime.getSetting( - "COINBASE_COMMERCE_KEY" - ); - - // Ensure Coinbase Commerce API key is available - return coinbaseCommerceKeyOk; - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: any, - callback: HandlerCallback - ) => { - elizaLogger.info("Composing state for message:", message); - if (!state) { - state = (await runtime.composeState(message)) as State; - } else { - state = await runtime.updateRecentMessageState(state); - } - - const context = composeContext({ - state, - template: getChargeTemplate, - }); - const chargeDetails = await generateObject({ - runtime, - context, - modelClass: ModelClass.LARGE, - schema: ChargeSchema, - }); - if (!isChargeContent(chargeDetails.object)) { - throw new Error("Invalid content"); - } - const charge = chargeDetails.object as ChargeContent; - if (!charge.id) { - callback( - { - text: "Missing charge ID. Please provide a valid charge ID.", - }, - [] - ); - return; - } - - try { - const chargeDetails = await getChargeDetails( - runtime.getSetting("COINBASE_COMMERCE_KEY"), - charge.id - ); - - elizaLogger.info("Fetched charge details:", chargeDetails); - - const chargeData = chargeDetails.data; - - callback( - { - text: `Successfully fetched charge details for ID: ${charge.id}`, - attachments: [ - { - id: chargeData.id, - url: chargeData.hosted_url, - title: `Charge Details for ${charge.id}`, - source: "coinbase", - description: JSON.stringify(chargeDetails, null, 2), - text: `Pay here: ${chargeData.hosted_url}`, - contentType: "application/json", - }, - ], - }, - [] - ); - } catch (error) { - elizaLogger.error( - `Error fetching details for charge ID ${charge.id}:`, - error - ); - callback( - { - text: `Failed to fetch details for charge ID: ${charge.id}. Please try again.`, - }, - [] - ); - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Fetch details of charge ID: 123456", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Successfully fetched charge details. {{charge.id}} for {{charge.amount}} {{charge.currency}} to {{charge.name}} for {{charge.description}}", - action: "GET_CHARGE_DETAILS", - }, - }, - ], - ], -}; - -export const chargeProvider: Provider = { - get: async (runtime: IAgentRuntime, _message: Memory) => { - elizaLogger.debug("Starting chargeProvider.get function"); - const charges = await getAllCharges( - runtime.getSetting("COINBASE_COMMERCE_KEY") - ); - // Ensure API key is available - const coinbaseAPIKey = - runtime.getSetting("COINBASE_API_KEY") ?? - process.env.COINBASE_API_KEY; - const coinbasePrivateKey = - runtime.getSetting("COINBASE_PRIVATE_KEY") ?? - process.env.COINBASE_PRIVATE_KEY; - const balances = []; - const transactions = []; - if (coinbaseAPIKey && coinbasePrivateKey) { - Coinbase.configure({ - apiKeyName: coinbaseAPIKey, - privateKey: coinbasePrivateKey, - }); - const { balances, transactions } = await getWalletDetails(runtime); - elizaLogger.info("Current Balances:", balances); - elizaLogger.info("Last Transactions:", transactions); - } - const formattedCharges = charges.map((charge) => ({ - id: charge.id, - name: charge.name, - description: charge.description, - pricing: charge.pricing, - })); - elizaLogger.info("Charges:", formattedCharges); - return { charges: formattedCharges, balances, transactions }; - }, -}; - -export const coinbaseCommercePlugin: Plugin = { - name: "coinbaseCommerce", - description: - "Integration with Coinbase Commerce for creating and managing charges.", - actions: [ - createCoinbaseChargeAction, - getAllChargesAction, - getChargeDetailsAction, - ], - evaluators: [], - providers: [chargeProvider], -}; diff --git a/packages/plugin-coinbase/src/plugins/massPayments.ts b/packages/plugin-coinbase/src/plugins/massPayments.ts deleted file mode 100644 index 68d8319fe9cfe..0000000000000 --- a/packages/plugin-coinbase/src/plugins/massPayments.ts +++ /dev/null @@ -1,470 +0,0 @@ -import { Coinbase, type Wallet } from "@coinbase/coinbase-sdk"; -import { - composeContext, - elizaLogger, - generateObject, - ModelClass, - type Action, - type IAgentRuntime, - type Memory, - type Provider, - type State, - type HandlerCallback, - type Plugin, -} from "@elizaos/core"; -import { - TransferSchema, - isTransferContent, - type TransferContent, - type Transaction, -} from "../types"; -import { transferTemplate } from "../templates"; -import { readFile } from "fs/promises"; -import { parse } from "csv-parse/sync"; -import path from "path"; -import { fileURLToPath } from "url"; -import fs from "fs"; -import { createArrayCsvWriter } from "csv-writer"; -import { - appendTransactionsToCsv, - executeTransfer, - getCharityAddress, - getWalletDetails, - initializeWallet, -} from "../utils"; - -// Dynamically resolve the file path to the src/plugins directory -const __filename = fileURLToPath(import.meta.url); -const __dirname = path.dirname(__filename); -const baseDir = path.resolve(__dirname, "../../plugin-coinbase/src/plugins"); -const csvFilePath = path.join(baseDir, "transactions.csv"); - -export const massPayoutProvider: Provider = { - get: async (runtime: IAgentRuntime, _message: Memory) => { - elizaLogger.debug("Starting massPayoutProvider.get function"); - try { - Coinbase.configure({ - apiKeyName: - runtime.getSetting("COINBASE_API_KEY") ?? - process.env.COINBASE_API_KEY, - privateKey: - runtime.getSetting("COINBASE_PRIVATE_KEY") ?? - process.env.COINBASE_PRIVATE_KEY, - }); - elizaLogger.info("Reading CSV file from:", csvFilePath); - - // Ensure the CSV file exists - if (!fs.existsSync(csvFilePath)) { - elizaLogger.warn("CSV file not found. Creating a new one."); - const csvWriter = createArrayCsvWriter({ - path: csvFilePath, - header: [ - "Address", - "Amount", - "Status", - "Error Code", - "Transaction URL", - ], - }); - await csvWriter.writeRecords([]); // Create an empty file with headers - elizaLogger.info("New CSV file created with headers."); - } - - // Read and parse the CSV file - const csvData = await readFile(csvFilePath, "utf-8"); - const records = parse(csvData, { - columns: true, - skip_empty_lines: true, - }); - - const { balances, transactions } = await getWalletDetails(runtime); - - elizaLogger.info("Parsed CSV records:", records); - elizaLogger.info("Current Balances:", balances); - elizaLogger.info("Last Transactions:", transactions); - - return { - currentTransactions: records.map((record: any) => ({ - address: record["Address"] || undefined, - amount: Number.parseFloat(record["Amount"]) || undefined, - status: record["Status"] || undefined, - errorCode: record["Error Code"] || "", - transactionUrl: record["Transaction URL"] || "", - })), - balances, - transactionHistory: transactions, - }; - } catch (error) { - elizaLogger.error("Error in massPayoutProvider:", error); - return { csvRecords: [], balances: [], transactions: [] }; - } - }, -}; - -async function executeMassPayout( - runtime: IAgentRuntime, - networkId: string, - receivingAddresses: string[], - transferAmount: number, - assetId: string -): Promise { - elizaLogger.debug("Starting executeMassPayout function"); - const transactions: Transaction[] = []; - const assetIdLowercase = assetId.toLowerCase(); - let sendingWallet: Wallet; - try { - elizaLogger.debug("Initializing sending wallet"); - sendingWallet = await initializeWallet(runtime, networkId); - } catch (error) { - elizaLogger.error("Error initializing sending wallet:", error); - throw error; - } - for (const address of receivingAddresses) { - elizaLogger.info("Processing payout for address:", address); - if (address) { - try { - // Check balance before initiating transfer - - const walletBalance = - await sendingWallet.getBalance(assetIdLowercase); - - elizaLogger.info("Wallet balance for asset:", { - assetId, - walletBalance, - }); - - if (walletBalance.lessThan(transferAmount)) { - const insufficientFunds = `Insufficient funds for address ${sendingWallet.getDefaultAddress()} to send to ${address}. Required: ${transferAmount}, Available: ${walletBalance}`; - elizaLogger.error(insufficientFunds); - - transactions.push({ - address, - amount: transferAmount, - status: "Failed", - errorCode: insufficientFunds, - transactionUrl: null, - }); - continue; - } - - // Execute the transfer - const transfer = await executeTransfer( - sendingWallet, - transferAmount, - assetIdLowercase, - address - ); - - transactions.push({ - address, - amount: transfer.getAmount().toNumber(), - status: "Success", - errorCode: null, - transactionUrl: transfer.getTransactionLink(), - }); - } catch (error) { - elizaLogger.error( - "Error during transfer for address:", - address, - error - ); - transactions.push({ - address, - amount: transferAmount, - status: "Failed", - errorCode: error?.code || "Unknown Error", - transactionUrl: null, - }); - } - } else { - elizaLogger.info("Skipping invalid or empty address."); - transactions.push({ - address: "Invalid or Empty", - amount: transferAmount, - status: "Failed", - errorCode: "Invalid Address", - transactionUrl: null, - }); - } - } - // Send 1% to charity - const charityAddress = getCharityAddress(networkId); - - try { - elizaLogger.debug("Sending 1% to charity:", charityAddress); - const charityTransfer = await executeTransfer( - sendingWallet, - transferAmount * 0.01, - assetId, - charityAddress - ); - - transactions.push({ - address: charityAddress, - amount: charityTransfer.getAmount().toNumber(), - status: "Success", - errorCode: null, - transactionUrl: charityTransfer.getTransactionLink(), - }); - } catch (error) { - elizaLogger.error("Error during charity transfer:", error); - transactions.push({ - address: charityAddress, - amount: transferAmount * 0.01, - status: "Failed", - errorCode: error?.message || "Unknown Error", - transactionUrl: null, - }); - } - await appendTransactionsToCsv(transactions); - elizaLogger.info("Finished processing mass payouts."); - return transactions; -} - -// Action for sending mass payouts -export const sendMassPayoutAction: Action = { - name: "SEND_MASS_PAYOUT", - similes: ["BULK_TRANSFER", "DISTRIBUTE_FUNDS", "SEND_PAYMENTS"], - description: - "Sends mass payouts to a list of receiving addresses using a predefined sending wallet and logs all transactions to a CSV file.", - validate: async (runtime: IAgentRuntime, _message: Memory) => { - elizaLogger.info("Validating runtime and message..."); - return ( - !!( - runtime.character.settings.secrets?.COINBASE_API_KEY || - process.env.COINBASE_API_KEY - ) && - !!( - runtime.character.settings.secrets?.COINBASE_PRIVATE_KEY || - process.env.COINBASE_PRIVATE_KEY - ) - ); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: any, - callback: HandlerCallback - ) => { - elizaLogger.debug("Starting SEND_MASS_PAYOUT handler..."); - try { - Coinbase.configure({ - apiKeyName: - runtime.getSetting("COINBASE_API_KEY") ?? - process.env.COINBASE_API_KEY, - privateKey: - runtime.getSetting("COINBASE_PRIVATE_KEY") ?? - process.env.COINBASE_PRIVATE_KEY, - }); - if (!state) { - state = (await runtime.composeState(message, { - providers: [massPayoutProvider], - })) as State; - } else { - state = await runtime.updateRecentMessageState(state); - } - - const context = composeContext({ - state, - template: transferTemplate, - }); - - const transferDetails = await generateObject({ - runtime, - context, - modelClass: ModelClass.LARGE, - schema: TransferSchema, - }); - - elizaLogger.info( - "Transfer details generated:", - transferDetails.object - ); - - if (!isTransferContent(transferDetails.object)) { - callback( - { - text: "Invalid transfer details. Please check the inputs.", - }, - [] - ); - return; - } - - const { receivingAddresses, transferAmount, assetId, network } = - transferDetails.object as TransferContent; - - const allowedNetworks = Object.values(Coinbase.networks); - - if ( - !network || - !allowedNetworks.includes(network.toLowerCase() as any) || - !receivingAddresses?.length || - transferAmount <= 0 || - !assetId - ) { - elizaLogger.error("Missing or invalid input parameters:", { - network, - receivingAddresses, - transferAmount, - assetId, - }); - callback( - { - text: `Invalid input parameters. Please ensure: -- Network is one of: ${allowedNetworks.join(", ")}. -- Receiving addresses are provided. -- Transfer amount is greater than zero. -- Asset ID is valid.`, - }, - [] - ); - return; - } - - elizaLogger.info("◎ Starting mass payout..."); - const transactions = await executeMassPayout( - runtime, - network, - receivingAddresses, - transferAmount, - assetId - ); - - const successTransactions = transactions.filter( - (tx) => tx.status === "Success" - ); - const failedTransactions = transactions.filter( - (tx) => tx.status === "Failed" - ); - const successDetails = successTransactions - .map( - (tx) => - `Address: ${tx.address}, Amount: ${tx.amount}, Transaction URL: ${ - tx.transactionUrl || "N/A" - }` - ) - .join("\n"); - const failedDetails = failedTransactions - .map( - (tx) => - `Address: ${tx.address}, Amount: ${tx.amount}, Error Code: ${ - tx.errorCode || "Unknown Error" - }` - ) - .join("\n"); - const charityTransactions = transactions.filter( - (tx) => tx.address === getCharityAddress(network) - ); - const charityDetails = charityTransactions - .map( - (tx) => - `Address: ${tx.address}, Amount: ${tx.amount}, Transaction URL: ${ - tx.transactionUrl || "N/A" - }` - ) - .join("\n"); - callback( - { - text: `Mass payouts completed successfully. -- Successful Transactions: ${successTransactions.length} -- Failed Transactions: ${failedTransactions.length} - -Details: -${successTransactions.length > 0 ? `✅ Successful Transactions:\n${successDetails}` : "No successful transactions."} -${failedTransactions.length > 0 ? `❌ Failed Transactions:\n${failedDetails}` : "No failed transactions."} -${charityTransactions.length > 0 ? `✅ Charity Transactions:\n${charityDetails}` : "No charity transactions."} - -Check the CSV file for full details.`, - }, - [] - ); - } catch (error) { - elizaLogger.error("Error during mass payouts:", error); - callback( - { text: "Failed to complete payouts. Please try again." }, - [] - ); - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Distribute 0.0001 ETH on base to 0xA0ba2ACB5846A54834173fB0DD9444F756810f06 and 0xF14F2c49aa90BaFA223EE074C1C33b59891826bF", - }, - }, - { - user: "{{agentName}}", - content: { - text: `Mass payouts completed successfully. -- Successful Transactions: {{2}} -- Failed Transactions: {{1}} - -Details: -✅ Successful Transactions: -Address: 0xABC123..., Amount: 0.005, Transaction URL: https://etherscan.io/tx/... -Address: 0xDEF456..., Amount: 0.005, Transaction URL: https://etherscan.io/tx/... - -❌ Failed Transactions: -Address: 0xGHI789..., Amount: 0.005, Error Code: Insufficient Funds - -Check the CSV file for full details.`, - action: "SEND_MASS_PAYOUT", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Airdrop 10 USDC to these community members: 0x789..., 0x101... on base network", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Mass payout completed successfully:\n- Airdropped 10 USDC to 2 addresses on base network\n- Successful Transactions: 2\n- Failed Transactions: 0\nCheck the CSV file for transaction details.", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Multi-send 0.25 ETH to team wallets: 0x222..., 0x333... on Ethereum", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Mass payout completed successfully:\n- Multi-sent 0.25 ETH to 2 addresses on Ethereum network\n- Successful Transactions: 2\n- Failed Transactions: 0\nCheck the CSV file for transaction details.", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Distribute rewards of 5 SOL each to contest winners: winner1.sol, winner2.sol on Solana", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Mass payout completed successfully:\n- Distributed 5 SOL to 2 addresses on Solana network\n- Successful Transactions: 2\n- Failed Transactions: 0\nCheck the CSV file for transaction details.", - }, - }, - ], - ], -}; - -export const coinbaseMassPaymentsPlugin: Plugin = { - name: "automatedPayments", - description: - "Processes mass payouts using Coinbase SDK and logs all transactions (success and failure) to a CSV file. Provides dynamic transaction data through a provider.", - actions: [sendMassPayoutAction], - providers: [massPayoutProvider], -}; diff --git a/packages/plugin-coinbase/src/plugins/tokenContract.ts b/packages/plugin-coinbase/src/plugins/tokenContract.ts deleted file mode 100644 index 0a93a6be2c5b8..0000000000000 --- a/packages/plugin-coinbase/src/plugins/tokenContract.ts +++ /dev/null @@ -1,585 +0,0 @@ -import { Coinbase, readContract, type SmartContract } from "@coinbase/coinbase-sdk"; -import { - type Action, - type Plugin, - elizaLogger, - type IAgentRuntime, - type Memory, - type HandlerCallback, - type State, - composeContext, - generateObject, - ModelClass, -} from "@elizaos/core"; -import { initializeWallet } from "../utils"; -import { - contractInvocationTemplate, - tokenContractTemplate, - readContractTemplate, -} from "../templates"; -import { - ContractInvocationSchema, - TokenContractSchema, - isContractInvocationContent, - isTokenContractContent, - ReadContractSchema, - isReadContractContent, -} from "../types"; -import path from "path"; -import { fileURLToPath } from "url"; -import { createArrayCsvWriter } from "csv-writer"; -import fs from "fs"; -import { ABI } from "../constants"; - -// Dynamically resolve the file path to the src/plugins directory -const __filename = fileURLToPath(import.meta.url); -const __dirname = path.dirname(__filename); -const baseDir = path.resolve(__dirname, "../../plugin-coinbase/src/plugins"); -const contractsCsvFilePath = path.join(baseDir, "contracts.csv"); - -// Add this helper at the top level -const serializeBigInt = (value: any): any => { - if (typeof value === "bigint") { - return value.toString(); - } - if (Array.isArray(value)) { - return value.map(serializeBigInt); - } - if (typeof value === "object" && value !== null) { - return Object.fromEntries( - Object.entries(value).map(([k, v]) => [k, serializeBigInt(v)]) - ); - } - return value; -}; - -export const deployTokenContractAction: Action = { - name: "DEPLOY_TOKEN_CONTRACT", - description: - "Deploy an ERC20, ERC721, or ERC1155 token contract using the Coinbase SDK", - validate: async (runtime: IAgentRuntime, _message: Memory) => { - elizaLogger.info("Validating runtime for DEPLOY_TOKEN_CONTRACT..."); - return ( - !!( - runtime.character.settings.secrets?.COINBASE_API_KEY || - process.env.COINBASE_API_KEY - ) && - !!( - runtime.character.settings.secrets?.COINBASE_PRIVATE_KEY || - process.env.COINBASE_PRIVATE_KEY - ) - ); - }, - handler: async ( - runtime: IAgentRuntime, - _message: Memory, - state: State, - _options: any, - callback: HandlerCallback - ) => { - elizaLogger.debug("Starting DEPLOY_TOKEN_CONTRACT handler..."); - - try { - Coinbase.configure({ - apiKeyName: - runtime.getSetting("COINBASE_API_KEY") ?? - process.env.COINBASE_API_KEY, - privateKey: - runtime.getSetting("COINBASE_PRIVATE_KEY") ?? - process.env.COINBASE_PRIVATE_KEY, - }); - - // Ensure CSV file exists - if (!fs.existsSync(contractsCsvFilePath)) { - const csvWriter = createArrayCsvWriter({ - path: contractsCsvFilePath, - header: [ - "Contract Type", - "Name", - "Symbol", - "Network", - "Contract Address", - "Transaction URL", - "Base URI", - "Total Supply", - ], - }); - await csvWriter.writeRecords([]); - } - - const context = composeContext({ - state, - template: tokenContractTemplate, - }); - - const contractDetails = await generateObject({ - runtime, - context, - modelClass: ModelClass.SMALL, - schema: TokenContractSchema, - }); - elizaLogger.info("Contract details:", contractDetails.object); - - if (!isTokenContractContent(contractDetails.object)) { - callback( - { - text: "Invalid contract details. Please check the inputs.", - }, - [] - ); - return; - } - - const { - contractType, - name, - symbol, - network, - baseURI, - totalSupply, - } = contractDetails.object; - elizaLogger.info("Contract details:", contractDetails.object); - const wallet = await initializeWallet(runtime, network); - let contract: SmartContract; - let deploymentDetails; - - switch (contractType.toLowerCase()) { - case "erc20": - contract = await wallet.deployToken({ - name, - symbol, - totalSupply: totalSupply || 1000000, - }); - deploymentDetails = { - contractType: "ERC20", - totalSupply, - baseURI: "N/A", - }; - break; - - case "erc721": - contract = await wallet.deployNFT({ - name, - symbol, - baseURI: baseURI || "", - }); - deploymentDetails = { - contractType: "ERC721", - totalSupply: "N/A", - baseURI, - }; - break; - default: - throw new Error( - `Unsupported contract type: ${contractType}` - ); - } - - // Wait for deployment to complete - await contract.wait(); - elizaLogger.info("Deployment details:", deploymentDetails); - elizaLogger.info("Contract deployed successfully:", contract); - // Log deployment to CSV - const csvWriter = createArrayCsvWriter({ - path: contractsCsvFilePath, - header: [ - "Contract Type", - "Name", - "Symbol", - "Network", - "Contract Address", - "Transaction URL", - "Base URI", - "Total Supply", - ], - append: true, - }); - const transaction = - contract.getTransaction()?.getTransactionLink() || ""; - const contractAddress = contract.getContractAddress(); - await csvWriter.writeRecords([ - [ - deploymentDetails.contractType, - name, - symbol, - network, - contractAddress, - transaction, - deploymentDetails.baseURI, - deploymentDetails.totalSupply || "", - ], - ]); - - callback( - { - text: `Token contract deployed successfully: -- Type: ${deploymentDetails.contractType} -- Name: ${name} -- Symbol: ${symbol} -- Network: ${network} -- Contract Address: ${contractAddress} -- Transaction URL: ${transaction} -${deploymentDetails.baseURI !== "N/A" ? `- Base URI: ${deploymentDetails.baseURI}` : ""} -${deploymentDetails.totalSupply !== "N/A" ? `- Total Supply: ${deploymentDetails.totalSupply}` : ""} - -Contract deployment has been logged to the CSV file.`, - }, - [] - ); - } catch (error) { - elizaLogger.error("Error deploying token contract:", error); - callback( - { - text: "Failed to deploy token contract. Please check the logs for more details.", - }, - [] - ); - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Deploy an ERC721 token named 'MyNFT' with symbol 'MNFT' on base network with URI 'https://pbs.twimg.com/profile_images/1848823420336934913/oI0-xNGe_400x400.jpg'", - }, - }, - { - user: "{{agentName}}", - content: { - text: `Token contract deployed successfully: -- Type: ERC20 -- Name: MyToken -- Symbol: MTK -- Network: base -- Contract Address: 0x... -- Transaction URL: https://basescan.org/tx/... -- Total Supply: 1000000`, - }, - }, - { - user: "{{user1}}", - content: { - text: "Deploy an ERC721 token named 'MyNFT' with symbol 'MNFT' on the base network", - }, - }, - { - user: "{{agentName}}", - content: { - text: `Token contract deployed successfully: -- Type: ERC721 -- Name: MyNFT -- Symbol: MNFT -- Network: base -- Contract Address: 0x... -- Transaction URL: https://basescan.org/tx/... -- URI: https://pbs.twimg.com/profile_images/1848823420336934913/oI0-xNGe_400x400.jpg`, - }, - }, - ], - ], - similes: ["DEPLOY_CONTRACT", "CREATE_TOKEN", "MINT_TOKEN", "CREATE_NFT"], -}; - -// Add to tokenContract.ts -export const invokeContractAction: Action = { - name: "INVOKE_CONTRACT", - description: - "Invoke a method on a deployed smart contract using the Coinbase SDK", - validate: async (runtime: IAgentRuntime, _message: Memory) => { - elizaLogger.info("Validating runtime for INVOKE_CONTRACT..."); - return ( - !!( - runtime.character.settings.secrets?.COINBASE_API_KEY || - process.env.COINBASE_API_KEY - ) && - !!( - runtime.character.settings.secrets?.COINBASE_PRIVATE_KEY || - process.env.COINBASE_PRIVATE_KEY - ) - ); - }, - handler: async ( - runtime: IAgentRuntime, - _message: Memory, - state: State, - _options: any, - callback: HandlerCallback - ) => { - elizaLogger.debug("Starting INVOKE_CONTRACT handler..."); - - try { - Coinbase.configure({ - apiKeyName: - runtime.getSetting("COINBASE_API_KEY") ?? - process.env.COINBASE_API_KEY, - privateKey: - runtime.getSetting("COINBASE_PRIVATE_KEY") ?? - process.env.COINBASE_PRIVATE_KEY, - }); - - const context = composeContext({ - state, - template: contractInvocationTemplate, - }); - - const invocationDetails = await generateObject({ - runtime, - context, - modelClass: ModelClass.LARGE, - schema: ContractInvocationSchema, - }); - elizaLogger.info("Invocation details:", invocationDetails.object); - if (!isContractInvocationContent(invocationDetails.object)) { - callback( - { - text: "Invalid contract invocation details. Please check the inputs.", - }, - [] - ); - return; - } - - const { - contractAddress, - method, - args, - amount, - assetId, - networkId, - } = invocationDetails.object; - const wallet = await initializeWallet(runtime, networkId); - - // Prepare invocation options - const invocationOptions = { - contractAddress, - method, - abi: ABI, - args: { - ...args, - amount: args.amount || amount, // Ensure amount is passed in args - }, - networkId, - assetId, - }; - elizaLogger.info("Invocation options:", invocationOptions); - // Invoke the contract - const invocation = await wallet.invokeContract(invocationOptions); - - // Wait for the transaction to be mined - await invocation.wait(); - - // Log the invocation to CSV - const csvWriter = createArrayCsvWriter({ - path: contractsCsvFilePath, - header: [ - "Contract Address", - "Method", - "Network", - "Status", - "Transaction URL", - "Amount", - "Asset ID", - ], - append: true, - }); - - await csvWriter.writeRecords([ - [ - contractAddress, - method, - networkId, - invocation.getStatus(), - invocation.getTransactionLink() || "", - amount || "", - assetId || "", - ], - ]); - - callback( - { - text: `Contract method invoked successfully: -- Contract Address: ${contractAddress} -- Method: ${method} -- Network: ${networkId} -- Status: ${invocation.getStatus()} -- Transaction URL: ${invocation.getTransactionLink() || "N/A"} -${amount ? `- Amount: ${amount}` : ""} -${assetId ? `- Asset ID: ${assetId}` : ""} - -Contract invocation has been logged to the CSV file.`, - }, - [] - ); - } catch (error) { - elizaLogger.error("Error invoking contract method:", error); - callback( - { - text: "Failed to invoke contract method. Please check the logs for more details.", - }, - [] - ); - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Call the 'transfer' method on my ERC20 token contract at 0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48 with amount 100 to recipient 0xbcF7C64B880FA89a015970dC104E848d485f99A3", - }, - }, - { - user: "{{agentName}}", - content: { - text: `Contract method invoked successfully: -- Contract Address: 0x123... -- Method: transfer -- Network: base -- Status: SUCCESS -- Transaction URL: https://basescan.org/tx/... -- Amount: 100 -- Asset ID: wei - -Contract invocation has been logged to the CSV file.`, - }, - }, - ], - ], - similes: ["CALL_CONTRACT", "EXECUTE_CONTRACT", "INTERACT_WITH_CONTRACT"], -}; - -export const readContractAction: Action = { - name: "READ_CONTRACT", - description: - "Read data from a deployed smart contract using the Coinbase SDK", - validate: async (runtime: IAgentRuntime, _message: Memory) => { - elizaLogger.info("Validating runtime for READ_CONTRACT..."); - return ( - !!( - runtime.character.settings.secrets?.COINBASE_API_KEY || - process.env.COINBASE_API_KEY - ) && - !!( - runtime.character.settings.secrets?.COINBASE_PRIVATE_KEY || - process.env.COINBASE_PRIVATE_KEY - ) - ); - }, - handler: async ( - runtime: IAgentRuntime, - _message: Memory, - state: State, - _options: any, - callback: HandlerCallback - ) => { - elizaLogger.debug("Starting READ_CONTRACT handler..."); - - try { - Coinbase.configure({ - apiKeyName: - runtime.getSetting("COINBASE_API_KEY") ?? - process.env.COINBASE_API_KEY, - privateKey: - runtime.getSetting("COINBASE_PRIVATE_KEY") ?? - process.env.COINBASE_PRIVATE_KEY, - }); - - const context = composeContext({ - state, - template: readContractTemplate, - }); - - const readDetails = await generateObject({ - runtime, - context, - modelClass: ModelClass.SMALL, - schema: ReadContractSchema, - }); - - if (!isReadContractContent(readDetails.object)) { - callback( - { - text: "Invalid contract read details. Please check the inputs.", - }, - [] - ); - return; - } - - const { contractAddress, method, args, networkId, abi } = - readDetails.object; - elizaLogger.info("Reading contract:", { - contractAddress, - method, - args, - networkId, - abi, - }); - - const result = await readContract({ - networkId, - contractAddress, - method, - args, - abi: ABI as any, - }); - - // Serialize the result before using it - const serializedResult = serializeBigInt(result); - - elizaLogger.info("Contract read result:", serializedResult); - - callback( - { - text: `Contract read successful: -- Contract Address: ${contractAddress} -- Method: ${method} -- Network: ${networkId} -- Result: ${JSON.stringify(serializedResult, null, 2)}`, - }, - [] - ); - } catch (error) { - elizaLogger.error("Error reading contract:", error); - callback( - { - text: `Failed to read contract: ${error instanceof Error ? error.message : "Unknown error"}`, - }, - [] - ); - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Read the balance of address 0xbcF7C64B880FA89a015970dC104E848d485f99A3 from the ERC20 contract at 0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48 on eth", - }, - }, - { - user: "{{agentName}}", - content: { - text: `Contract read successful: -- Contract Address: 0x37f2131ebbc8f97717edc3456879ef56b9f4b97b -- Method: balanceOf -- Network: eth -- Result: "1000000"`, - }, - }, - ], - ], - similes: ["READ_CONTRACT", "GET_CONTRACT_DATA", "QUERY_CONTRACT"], -}; - -export const tokenContractPlugin: Plugin = { - name: "tokenContract", - description: - "Enables deployment, invocation, and reading of ERC20, ERC721, and ERC1155 token contracts using the Coinbase SDK", - actions: [ - deployTokenContractAction, - invokeContractAction, - readContractAction, - ], -}; diff --git a/packages/plugin-coinbase/src/plugins/trade.ts b/packages/plugin-coinbase/src/plugins/trade.ts deleted file mode 100644 index bb1ca4599cb84..0000000000000 --- a/packages/plugin-coinbase/src/plugins/trade.ts +++ /dev/null @@ -1,304 +0,0 @@ -import { Coinbase } from "@coinbase/coinbase-sdk"; -import { - type Action, - type Plugin, - elizaLogger, - type IAgentRuntime, - type Memory, - type HandlerCallback, - type State, - composeContext, - generateObject, - ModelClass, - type Provider, -} from "@elizaos/core"; -import { executeTradeAndCharityTransfer, getWalletDetails } from "../utils"; -import { tradeTemplate } from "../templates"; -import { isTradeContent, type TradeContent, TradeSchema } from "../types"; -import { readFile } from "fs/promises"; -import { parse } from "csv-parse/sync"; -import path from "path"; -import { fileURLToPath } from "url"; -import fs from "fs"; -import { createArrayCsvWriter } from "csv-writer"; - -// Dynamically resolve the file path to the src/plugins directory -const __filename = fileURLToPath(import.meta.url); -const __dirname = path.dirname(__filename); -const baseDir = path.resolve(__dirname, "../../plugin-coinbase/src/plugins"); -const tradeCsvFilePath = path.join(baseDir, "trades.csv"); - -export const tradeProvider: Provider = { - get: async (runtime: IAgentRuntime, _message: Memory) => { - elizaLogger.debug("Starting tradeProvider.get function"); - try { - Coinbase.configure({ - apiKeyName: - runtime.getSetting("COINBASE_API_KEY") ?? - process.env.COINBASE_API_KEY, - privateKey: - runtime.getSetting("COINBASE_PRIVATE_KEY") ?? - process.env.COINBASE_PRIVATE_KEY, - }); - elizaLogger.info("Reading CSV file from:", tradeCsvFilePath); - - // Check if the file exists; if not, create it with headers - if (!fs.existsSync(tradeCsvFilePath)) { - elizaLogger.warn("CSV file not found. Creating a new one."); - const csvWriter = createArrayCsvWriter({ - path: tradeCsvFilePath, - header: [ - "Network", - "From Amount", - "Source Asset", - "To Amount", - "Target Asset", - "Status", - "Transaction URL", - ], - }); - await csvWriter.writeRecords([]); // Create an empty file with headers - elizaLogger.info("New CSV file created with headers."); - } - - // Read and parse the CSV file - const csvData = await readFile(tradeCsvFilePath, "utf-8"); - const records = parse(csvData, { - columns: true, - skip_empty_lines: true, - }); - - elizaLogger.info("Parsed CSV records:", records); - const { balances, transactions } = await getWalletDetails(runtime); - elizaLogger.info("Current Balances:", balances); - elizaLogger.info("Last Transactions:", transactions); - return { - currentTrades: records.map((record: any) => ({ - network: record["Network"] || undefined, - amount: Number.parseFloat(record["From Amount"]) || undefined, - sourceAsset: record["Source Asset"] || undefined, - toAmount: Number.parseFloat(record["To Amount"]) || undefined, - targetAsset: record["Target Asset"] || undefined, - status: record["Status"] || undefined, - transactionUrl: record["Transaction URL"] || "", - })), - balances, - transactions, - }; - } catch (error) { - elizaLogger.error("Error in tradeProvider:", error); - return []; - } - }, -}; - -export const executeTradeAction: Action = { - name: "EXECUTE_TRADE", - description: - "Execute a trade between two assets using the Coinbase SDK and log the result.", - validate: async (runtime: IAgentRuntime, _message: Memory) => { - elizaLogger.info("Validating runtime for EXECUTE_TRADE..."); - return ( - !!( - runtime.character.settings.secrets?.COINBASE_API_KEY || - process.env.COINBASE_API_KEY - ) && - !!( - runtime.character.settings.secrets?.COINBASE_PRIVATE_KEY || - process.env.COINBASE_PRIVATE_KEY - ) - ); - }, - handler: async ( - runtime: IAgentRuntime, - _message: Memory, - state: State, - _options: any, - callback: HandlerCallback - ) => { - elizaLogger.debug("Starting EXECUTE_TRADE handler..."); - - try { - Coinbase.configure({ - apiKeyName: - runtime.getSetting("COINBASE_API_KEY") ?? - process.env.COINBASE_API_KEY, - privateKey: - runtime.getSetting("COINBASE_PRIVATE_KEY") ?? - process.env.COINBASE_PRIVATE_KEY, - }); - - const context = composeContext({ - state, - template: tradeTemplate, - }); - - const tradeDetails = await generateObject({ - runtime, - context, - modelClass: ModelClass.LARGE, - schema: TradeSchema, - }); - - if (!isTradeContent(tradeDetails.object)) { - callback( - { - text: "Invalid trade details. Ensure network, amount, source asset, and target asset are correctly specified.", - }, - [] - ); - return; - } - - const { network, amount, sourceAsset, targetAsset } = - tradeDetails.object as TradeContent; - - const allowedNetworks = ["base", "sol", "eth", "arb", "pol"]; - if (!allowedNetworks.includes(network)) { - callback( - { - text: `Invalid network. Supported networks are: ${allowedNetworks.join( - ", " - )}.`, - }, - [] - ); - return; - } - - const { trade, transfer } = await executeTradeAndCharityTransfer( - runtime, - network, - amount, - sourceAsset, - targetAsset - ); - - let responseText = `Trade executed successfully: -- Network: ${network} -- Amount: ${trade.getFromAmount()} -- From: ${sourceAsset} -- To: ${targetAsset} -- Transaction URL: ${trade.getTransaction().getTransactionLink() || ""} -- Charity Transaction URL: ${transfer.getTransactionLink() || ""}`; - - if (transfer) { - responseText += `\n- Charity Amount: ${transfer.getAmount()}`; - } else { - responseText += "\n(Note: Charity transfer was not completed)"; - } - - callback({ text: responseText }, []); - } catch (error) { - elizaLogger.error("Error during trade execution:", error); - callback( - { - text: "Failed to execute the trade. Please check the logs for more details.", - }, - [] - ); - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Swap 1 ETH for USDC on base network", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Trade executed successfully:\n- Swapped 1 ETH for USDC on base network\n- Transaction URL: https://basescan.io/tx/...\n- Status: Completed", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Convert 1000 USDC to SOL on Solana", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Trade executed successfully:\n- Converted 1000 USDC to SOL on Solana network\n- Transaction URL: https://solscan.io/tx/...\n- Status: Completed", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Exchange 5 WETH for ETH on Arbitrum", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Trade executed successfully:\n- Exchanged 5 WETH for ETH on Arbitrum network\n- Transaction URL: https://arbiscan.io/tx/...\n- Status: Completed", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Trade 100 GWEI for USDC on Polygon", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Trade executed successfully:\n- Traded 100 GWEI for USDC on Polygon network\n- Transaction URL: https://polygonscan.com/tx/...\n- Status: Completed", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Market buy ETH with 500 USDC on base", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Trade executed successfully:\n- Bought ETH with 500 USDC on base network\n- Transaction URL: https://basescan.io/tx/...\n- Status: Completed", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Sell 2.5 SOL for USDC on Solana mainnet", - }, - }, - { - user: "{{agentName}}", - content: { - text: "Trade executed successfully:\n- Sold 2.5 SOL for USDC on Solana network\n- Transaction URL: https://solscan.io/tx/...\n- Status: Completed", - }, - }, - ], - ], - similes: [ - "EXECUTE_TRADE", // Primary action name - "SWAP_TOKENS", // For token swaps - "CONVERT_CURRENCY", // For currency conversion - "EXCHANGE_ASSETS", // For asset exchange - "MARKET_BUY", // For buying assets - "MARKET_SELL", // For selling assets - "TRADE_CRYPTO", // Generic crypto trading - ], -}; - -export const tradePlugin: Plugin = { - name: "tradePlugin", - description: "Enables asset trading using the Coinbase SDK.", - actions: [executeTradeAction], - providers: [tradeProvider], -}; diff --git a/packages/plugin-coinbase/src/plugins/webhooks.ts b/packages/plugin-coinbase/src/plugins/webhooks.ts deleted file mode 100644 index 3bfffc35e0d3e..0000000000000 --- a/packages/plugin-coinbase/src/plugins/webhooks.ts +++ /dev/null @@ -1,189 +0,0 @@ -import { Coinbase, Webhook } from "@coinbase/coinbase-sdk"; -import { - type Action, - type Plugin, - elizaLogger, - type IAgentRuntime, - type Memory, - type HandlerCallback, - type State, - composeContext, - generateObject, - ModelClass, - type Provider, -} from "@elizaos/core"; -import { WebhookSchema, isWebhookContent, type WebhookContent } from "../types"; -import { webhookTemplate } from "../templates"; -import { appendWebhooksToCsv } from "../utils"; - -export const webhookProvider: Provider = { - get: async (runtime: IAgentRuntime, _message: Memory) => { - elizaLogger.debug("Starting webhookProvider.get function"); - try { - Coinbase.configure({ - apiKeyName: - runtime.getSetting("COINBASE_API_KEY") ?? - process.env.COINBASE_API_KEY, - privateKey: - runtime.getSetting("COINBASE_PRIVATE_KEY") ?? - process.env.COINBASE_PRIVATE_KEY, - }); - - // List all webhooks - const resp = await Webhook.list(); - elizaLogger.info("Listing all webhooks:", resp.data); - - return { - webhooks: resp.data.map((webhook: Webhook) => ({ - id: webhook.getId(), - networkId: webhook.getNetworkId(), - eventType: webhook.getEventType(), - eventFilters: webhook.getEventFilters(), - eventTypeFilter: webhook.getEventTypeFilter(), - notificationURI: webhook.getNotificationURI(), - })), - }; - } catch (error) { - elizaLogger.error("Error in webhookProvider:", error); - return []; - } - }, -}; - -export const createWebhookAction: Action = { - name: "CREATE_WEBHOOK", - description: "Create a new webhook using the Coinbase SDK.", - validate: async (runtime: IAgentRuntime, _message: Memory) => { - elizaLogger.info("Validating runtime for CREATE_WEBHOOK..."); - return ( - !!( - runtime.character.settings.secrets?.COINBASE_API_KEY || - process.env.COINBASE_API_KEY - ) && - !!( - runtime.character.settings.secrets?.COINBASE_PRIVATE_KEY || - process.env.COINBASE_PRIVATE_KEY - ) && - !!( - runtime.character.settings.secrets?.COINBASE_NOTIFICATION_URI || - process.env.COINBASE_NOTIFICATION_URI - ) - ); - }, - handler: async ( - runtime: IAgentRuntime, - _message: Memory, - state: State, - _options: any, - callback: HandlerCallback - ) => { - elizaLogger.debug("Starting CREATE_WEBHOOK handler..."); - - try { - Coinbase.configure({ - apiKeyName: - runtime.getSetting("COINBASE_API_KEY") ?? - process.env.COINBASE_API_KEY, - privateKey: - runtime.getSetting("COINBASE_PRIVATE_KEY") ?? - process.env.COINBASE_PRIVATE_KEY, - }); - - const context = composeContext({ - state, - template: webhookTemplate, - }); - - const webhookDetails = await generateObject({ - runtime, - context, - modelClass: ModelClass.LARGE, - schema: WebhookSchema, - }); - - if (!isWebhookContent(webhookDetails.object)) { - callback( - { - text: "Invalid webhook details. Ensure network, URL, event type, and contract address are correctly specified.", - }, - [] - ); - return; - } - - const { networkId, eventType, eventFilters, eventTypeFilter } = - webhookDetails.object as WebhookContent; - const notificationUri = - runtime.getSetting("COINBASE_NOTIFICATION_URI") ?? - process.env.COINBASE_NOTIFICATION_URI; - - if (!notificationUri) { - callback( - { - text: "Notification URI is not set in the environment variables.", - }, - [] - ); - return; - } - elizaLogger.info("Creating webhook with details:", { - networkId, - notificationUri, - eventType, - eventTypeFilter, - eventFilters, - }); - const webhook = await Webhook.create({ - networkId, - notificationUri, - eventType, - eventFilters, - }); - elizaLogger.info( - "Webhook created successfully:", - webhook.toString() - ); - callback( - { - text: `Webhook created successfully: ${webhook.toString()}`, - }, - [] - ); - await appendWebhooksToCsv([webhook]); - elizaLogger.info("Webhook appended to CSV successfully"); - } catch (error) { - elizaLogger.error("Error during webhook creation:", error); - callback( - { - text: "Failed to create the webhook. Please check the logs for more details.", - }, - [] - ); - } - }, - similes: ["WEBHOOK", "NOTIFICATION", "EVENT", "TRIGGER", "LISTENER"], - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Create a webhook on base for address 0xbcF7C64B880FA89a015970dC104E848d485f99A3 on the event type: transfers", - }, - }, - { - user: "{{agentName}}", - content: { - text: `Webhook created successfully: Webhook ID: {{webhookId}}, Network ID: {{networkId}}, Notification URI: {{notificationUri}}, Event Type: {{eventType}}`, - action: "CREATE_WEBHOOK", - }, - }, - ], - ], -}; - -export const webhookPlugin: Plugin = { - name: "webhookPlugin", - description: "Manages webhooks using the Coinbase SDK.", - actions: [createWebhookAction], - providers: [webhookProvider], -}; diff --git a/packages/plugin-coinbase/src/templates.ts b/packages/plugin-coinbase/src/templates.ts deleted file mode 100644 index ff03d28e28e0c..0000000000000 --- a/packages/plugin-coinbase/src/templates.ts +++ /dev/null @@ -1,387 +0,0 @@ -export const chargeTemplate = ` -Extract the following details to create a Coinbase charge: -- **price** (number): The amount for the charge (e.g., 100.00). -- **currency** (string): The 3-letter ISO 4217 currency code (e.g., USD, EUR). -- **type** (string): The pricing type for the charge (e.g., fixed_price, dynamic_price). Assume price type is fixed unless otherwise stated -- **name** (string): A non-empty name for the charge (e.g., "The Human Fund"). -- **description** (string): A non-empty description of the charge (e.g., "Money For People"). - -Provide the values in the following JSON format: - -\`\`\`json -{ - "price": , - "currency": "", - "type": "", - "name": "", - "description": "" -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} -`; - -export const getChargeTemplate = ` -Extract the details for a Coinbase charge using the provided charge ID: -- **charge_id** (string): The unique identifier of the charge (e.g., "2b364ef7-ad60-4fcd-958b-e550a3c47dc6"). - -Provide the charge details in the following JSON format after retrieving the charge details: - -\`\`\`json -{ - "charge_id": "", - "price": , - "currency": "", - "type": "", - "name": "", - "description": "", - "status": "", - "created_at": "", - "expires_at": "" -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} -`; - -export const transferTemplate = ` -Extract the following details for processing a mass payout using the Coinbase SDK: -- **receivingAddresses** (array): A list of wallet addresses receiving the funds. -- **transferAmount** (number): The amount to transfer to each address. -- **assetId** (string): The asset ID to transfer (e.g., ETH, BTC). -- **network** (string): The blockchain network to use. Allowed values are: - static networks: { - readonly BaseSepolia: "base-sepolia"; - readonly BaseMainnet: "base-mainnet"; - readonly EthereumHolesky: "ethereum-holesky"; - readonly EthereumMainnet: "ethereum-mainnet"; - readonly PolygonMainnet: "polygon-mainnet"; - readonly SolanaDevnet: "solana-devnet"; - readonly SolanaMainnet: "solana-mainnet"; - readonly ArbitrumMainnet: "arbitrum-mainnet"; - }; - -Provide the details in the following JSON format: - -\`\`\`json -{ - "receivingAddresses": ["", ""], - "transferAmount": , - "assetId": "", - "network": "" -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} -`; - -export const tradeTemplate = ` -Extract the following details for processing a trade using the Coinbase SDK: -- **network** (string): The blockchain network to use (e.g., base, sol, eth, arb, pol). -- **amount** (number): The amount to trade. -- **sourceAsset** (string): The asset ID to trade from (must be one of: ETH, SOL, USDC, WETH, GWEI, LAMPORT). -- **targetAsset** (string): The asset ID to trade to (must be one of: ETH, SOL, USDC, WETH, GWEI, LAMPORT). -- **side** (string): The side of the trade (must be either "BUY" or "SELL"). - -Ensure that: -1. **network** is one of the supported networks: "base", "sol", "eth", "arb", or "pol". -2. **sourceAsset** and **targetAsset** are valid assets from the provided list. -3. **amount** is a positive number. -4. **side** is either "BUY" or "SELL". - -Provide the details in the following JSON format: - -\`\`\`json -{ - "network": "", - "amount": , - "sourceAsset": "", - "targetAsset": "", - "side": "" -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} -`; - -export const advancedTradeTemplate = ` -Extract the following details for processing an advanced trade using the Coinbase Advanced Trading API: -- **productId** (string): The trading pair ID (e.g., "BTC-USD", "ETH-USD", "SOL-USD") -- **side** (string): The side of the trade (must be either "BUY" or "SELL") -- **amount** (number): The amount to trade -- **orderType** (string): The type of order (must be either "MARKET" or "LIMIT") -- **limitPrice** (number, optional): The limit price for limit orders - -Ensure that: -1. **productId** follows the format "ASSET-USD" (e.g., "BTC-USD") -2. **side** is either "BUY" or "SELL" -3. **amount** is a positive number -4. **orderType** is either "MARKET" or "LIMIT" -5. **limitPrice** is provided when orderType is "LIMIT" - -Provide the details in the following JSON format: - -\`\`\`json -{ - "productId": "", - "side": "", - "amount": , - "orderType": "", - "limitPrice": -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} -`; - -export const tokenContractTemplate = ` -Extract the following details for deploying a token contract using the Coinbase SDK: -- **contractType** (string): The type of token contract to deploy (ERC20, ERC721, or ERC1155) -- **name** (string): The name of the token -- **symbol** (string): The symbol of the token -- **network** (string): The blockchain network to deploy on (e.g., base, eth, arb, pol) -- **baseURI** (string, optional): The base URI for token metadata (required for ERC721 and ERC1155) -- **totalSupply** (number, optional): The total supply of tokens (only for ERC20) - -Provide the details in the following JSON format: - -\`\`\`json -{ - "contractType": "", - "name": "", - "symbol": "", - "network": "", - "baseURI": "", - "totalSupply": -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} -`; - -// Add to templates.ts -export const contractInvocationTemplate = ` -Extract the following details for invoking a smart contract using the Coinbase SDK: -- **contractAddress** (string): The address of the contract to invoke -- **method** (string): The method to invoke on the contract -- **abi** (array): The ABI of the contract -- **args** (object, optional): The arguments to pass to the contract method -- **amount** (string, optional): The amount of the asset to send (as string to handle large numbers) -- **assetId** (string, required): The ID of the asset to send (e.g., 'USDC') -- **networkId** (string, required): The network ID to use in format "chain-network". - static networks: { - readonly BaseSepolia: "base-sepolia"; - readonly BaseMainnet: "base-mainnet"; - readonly EthereumHolesky: "ethereum-holesky"; - readonly EthereumMainnet: "ethereum-mainnet"; - readonly PolygonMainnet: "polygon-mainnet"; - readonly SolanaDevnet: "solana-devnet"; - readonly SolanaMainnet: "solana-mainnet"; - readonly ArbitrumMainnet: "arbitrum-mainnet"; - }; - -Provide the details in the following JSON format: - -\`\`\`json -{ - "contractAddress": "", - "method": "", - "abi": [], - "args": { - "": "" - }, - "amount": "", - "assetId": "", - "networkId": "" -} -\`\`\` - -Example for invoking a transfer method on the USDC contract: - -\`\`\`json -{ - "contractAddress": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", - "method": "transfer", - "abi": [ - { - "constant": false, - "inputs": [ - { - "name": "to", - "type": "address" - }, - { - "name": "amount", - "type": "uint256" - } - ], - "name": "transfer", - "outputs": [ - { - "name": "", - "type": "bool" - } - ], - "payable": false, - "stateMutability": "nonpayable", - "type": "function" - } - ], - "args": { - "to": "0xbcF7C64B880FA89a015970dC104E848d485f99A3", - "amount": "1000000" // 1 USDC (6 decimals) - }, - "networkId": "ethereum-mainnet", - "assetId": "USDC" -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} -`; - -export const webhookTemplate = ` -Extract the following details for creating a webhook: -- **networkId** (string): The network ID for which the webhook is created. -Allowed values are: - static networks: { - readonly BaseSepolia: "base-sepolia"; - readonly BaseMainnet: "base-mainnet"; - readonly EthereumHolesky: "ethereum-holesky"; - readonly EthereumMainnet: "ethereum-mainnet"; - readonly PolygonMainnet: "polygon-mainnet"; - readonly SolanaDevnet: "solana-devnet"; - readonly SolanaMainnet: "solana-mainnet"; - readonly ArbitrumMainnet: "arbitrum-mainnet"; - }; -- **eventType** (string): The type of event for the webhook. -export declare const WebhookEventType: { - readonly Unspecified: "unspecified"; - readonly Erc20Transfer: "erc20_transfer"; - readonly Erc721Transfer: "erc721_transfer"; - readonly WalletActivity: "wallet_activity"; -}; -- **eventTypeFilter** (string, optional): Filter for wallet activity event type. -export interface WebhookEventTypeFilter { - /** - * A list of wallet addresses to filter on. - * @type {Array} - * @memberof WebhookWalletActivityFilter - */ - 'addresses'?: Array; - /** - * The ID of the wallet that owns the webhook. - * @type {string} - * @memberof WebhookWalletActivityFilter - */ - 'wallet_id'?: string; -} -- **eventFilters** (array, optional): Filters applied to the events that determine which specific events trigger the webhook. -export interface Array { - /** - * The onchain contract address of the token for which the events should be tracked. - * @type {string} - * @memberof WebhookEventFilter - */ - 'contract_address'?: string; - /** - * The onchain address of the sender. Set this filter to track all transfer events originating from your address. - * @type {string} - * @memberof WebhookEventFilter - */ - 'from_address'?: string; - /** - * The onchain address of the receiver. Set this filter to track all transfer events sent to your address. - * @type {string} - * @memberof WebhookEventFilter - */ - 'to_address'?: string; -} -Provide the details in the following JSON format: -\`\`\`json -{ - "networkId": "", - "eventType": "", - "eventTypeFilter": "", - "eventFilters": [, ] -} -\`\`\` - - - -Example for creating a webhook on the Sepolia testnet for ERC20 transfers originating from a specific wallet 0x1234567890123456789012345678901234567890 on transfers from 0xbcF7C64B880FA89a015970dC104E848d485f99A3 - -\`\`\`javascript - - networkId: 'base-sepolia', // Listening on sepolia testnet transactions - eventType: 'erc20_transfer', - eventTypeFilter: { - addresses: ['0x1234567890123456789012345678901234567890'] - }, - eventFilters: [{ - from_address: '0xbcF7C64B880FA89a015970dC104E848d485f99A3', - }], -}); -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} -`; - -export const readContractTemplate = ` -Extract the following details for reading from a smart contract using the Coinbase SDK: -- **contractAddress** (string): The address of the contract to read from (must start with 0x) -- **method** (string): The view/pure method to call on the contract -- **networkId** (string): The network ID based on networks configured in Coinbase SDK -Allowed values are: - static networks: { - readonly BaseSepolia: "base-sepolia"; - readonly BaseMainnet: "base-mainnet"; - readonly EthereumHolesky: "ethereum-holesky"; - readonly EthereumMainnet: "ethereum-mainnet"; - readonly PolygonMainnet: "polygon-mainnet"; - readonly SolanaDevnet: "solana-devnet"; - readonly SolanaMainnet: "solana-mainnet"; - readonly ArbitrumMainnet: "arbitrum-mainnet"; - }; -- **args** (object): The arguments to pass to the contract method -- **abi** (array, optional): The contract ABI if needed for complex interactions - -Provide the details in the following JSON format: - -\`\`\`json -{ - "contractAddress": "<0x-prefixed-address>", - "method": "", - "networkId": "", - "args": { - "": "" - }, - "abi": [ - // Optional ABI array - ] -} -\`\`\` - -Example for reading the balance of an ERC20 token: - -\`\`\`json -{ - "contractAddress": "0x37f2131ebbc8f97717edc3456879ef56b9f4b97b", - "method": "balanceOf", - "networkId": "eth-mainnet", - "args": { - "account": "0xbcF7C64B880FA89a015970dC104E848d485f99A3" - } -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} -`; diff --git a/packages/plugin-coinbase/src/types.ts b/packages/plugin-coinbase/src/types.ts deleted file mode 100644 index 4733402671cf4..0000000000000 --- a/packages/plugin-coinbase/src/types.ts +++ /dev/null @@ -1,243 +0,0 @@ -import { Coinbase } from "@coinbase/coinbase-sdk"; -import { z } from "zod"; -import { - WebhookEventType, - type WebhookEventFilter, - type WebhookEventTypeFilter, -} from "@coinbase/coinbase-sdk/dist/client"; - -export const ChargeSchema = z.object({ - id: z.string().nullable(), - price: z.number(), - type: z.string(), - currency: z.string().min(3).max(3), - name: z.string().min(1), - description: z.string().min(1), -}); - -export interface ChargeContent { - id: string | null; - price: number; - type: string; - currency: string; // Currency code (e.g., USD) - name: string; // Name of the charge - description: string; // Description of the charge -} - -export const isChargeContent = (object: any): object is ChargeContent => { - if (ChargeSchema.safeParse(object).success) { - return true; - } - console.error("Invalid content: ", object); - return false; -}; - -export const TransferSchema = z.object({ - network: z.string().toLowerCase(), - receivingAddresses: z.array(z.string()), - transferAmount: z.number(), - assetId: z.string().toLowerCase(), -}); - -export interface TransferContent { - network: string; - receivingAddresses: string[]; - transferAmount: number; - assetId: string; -} - -export const isTransferContent = (object: any): object is TransferContent => { - return TransferSchema.safeParse(object).success; -}; - -export type Transaction = { - address: string; - amount: number; - status: string; - errorCode: string | null; - transactionUrl: string | null; -}; -const assetValues = Object.values(Coinbase.assets) as [string, ...string[]]; -export const TradeSchema = z.object({ - network: z.string().toLowerCase(), - amount: z.number(), - sourceAsset: z.enum(assetValues), - targetAsset: z.enum(assetValues), - side: z.enum(["BUY", "SELL"]), -}); - -export interface TradeContent { - network: string; - amount: number; - sourceAsset: string; - targetAsset: string; - side: "BUY" | "SELL"; -} - -export const isTradeContent = (object: any): object is TradeContent => { - return TradeSchema.safeParse(object).success; -}; - -export type TradeTransaction = { - network: string; - amount: number; - sourceAsset: string; - targetAsset: string; - status: string; - errorCode: string | null; - transactionUrl: string | null; -}; - -export interface TokenContractContent { - contractType: "ERC20" | "ERC721" | "ERC1155"; - name: string; - symbol: string; - network: string; - baseURI?: string; - totalSupply?: number; -} - -export const TokenContractSchema = z - .object({ - contractType: z - .enum(["ERC20", "ERC721", "ERC1155"]) - .describe("The type of token contract to deploy"), - name: z.string().describe("The name of the token"), - symbol: z.string().describe("The symbol of the token"), - network: z.string().describe("The blockchain network to deploy on"), - baseURI: z - .string() - .optional() - .describe( - "The base URI for token metadata (required for ERC721 and ERC1155)" - ), - totalSupply: z - .number() - .optional() - .describe("The total supply of tokens (only for ERC20)"), - }) - .refine( - (data) => { - if (data.contractType === "ERC20") { - return ( - typeof data.totalSupply === "number" || - data.totalSupply === undefined - ); - } - if (["ERC721", "ERC1155"].includes(data.contractType)) { - return ( - typeof data.baseURI === "string" || - data.baseURI === undefined - ); - } - return true; - }, - { - message: "Invalid token contract content", - path: ["contractType"], - } - ); - -export const isTokenContractContent = ( - obj: any -): obj is TokenContractContent => { - return TokenContractSchema.safeParse(obj).success; -}; - -// Add to types.ts -export interface ContractInvocationContent { - contractAddress: string; - method: string; - abi: any[]; - args?: Record; - amount?: string; - assetId: string; - networkId: string; -} - -export const ContractInvocationSchema = z.object({ - contractAddress: z - .string() - .describe("The address of the contract to invoke"), - method: z.string().describe("The method to invoke on the contract"), - abi: z.array(z.any()).describe("The ABI of the contract"), - args: z - .record(z.string(), z.any()) - .optional() - .describe("The arguments to pass to the contract method"), - amount: z - .string() - .optional() - .describe( - "The amount of the asset to send (as string to handle large numbers)" - ), - assetId: z.string().describe("The ID of the asset to send (e.g., 'USDC')"), - networkId: z - .string() - .describe("The network ID to use (e.g., 'ethereum-mainnet')"), -}); - -export const isContractInvocationContent = ( - obj: any -): obj is ContractInvocationContent => { - return ContractInvocationSchema.safeParse(obj).success; -}; - -export const WebhookSchema = z.object({ - networkId: z.string(), - eventType: z.nativeEnum(WebhookEventType), - eventTypeFilter: z.custom().optional(), - eventFilters: z.array(z.custom()).optional(), -}); - -export type WebhookContent = z.infer; - -export const isWebhookContent = (object: any): object is WebhookContent => { - return WebhookSchema.safeParse(object).success; -}; - -export const AdvancedTradeSchema = z.object({ - productId: z.string(), - side: z.enum(["BUY", "SELL"]), - amount: z.number(), - orderType: z.enum(["MARKET", "LIMIT"]), - limitPrice: z.number().optional(), -}); - -export interface AdvancedTradeContent { - productId: string; - side: "BUY" | "SELL"; - amount: number; - orderType: "MARKET" | "LIMIT"; - limitPrice?: number; -} - -export const isAdvancedTradeContent = ( - object: any -): object is AdvancedTradeContent => { - return AdvancedTradeSchema.safeParse(object).success; -}; - -export interface ReadContractContent { - contractAddress: `0x${string}`; - method: string; - networkId: string; - args: Record; - abi?: any[]; -} - -export const ReadContractSchema = z.object({ - contractAddress: z - .string() - .describe("The address of the contract to read from"), - method: z.string().describe("The view/pure method to call on the contract"), - networkId: z.string().describe("The network ID to use"), - args: z - .record(z.string(), z.any()) - .describe("The arguments to pass to the contract method"), - abi: z.array(z.any()).optional().describe("The contract ABI (optional)"), -}); - -export const isReadContractContent = (obj: any): obj is ReadContractContent => { - return ReadContractSchema.safeParse(obj).success; -}; diff --git a/packages/plugin-coinbase/src/utils.ts b/packages/plugin-coinbase/src/utils.ts deleted file mode 100644 index 7a1873066bf81..0000000000000 --- a/packages/plugin-coinbase/src/utils.ts +++ /dev/null @@ -1,535 +0,0 @@ -import { - Coinbase, - type Trade, - type Transfer, - Wallet, - type WalletData, - type Webhook, -} from "@coinbase/coinbase-sdk"; -import { elizaLogger, type IAgentRuntime, settings } from "@elizaos/core"; -import fs from "fs"; -import path from "path"; -import type { EthereumTransaction } from "@coinbase/coinbase-sdk/dist/client"; -import { fileURLToPath } from "url"; -import { createArrayCsvWriter } from "csv-writer"; -import type { Transaction } from "./types"; - -// Dynamically resolve the file path to the src/plugins directory -const __filename = fileURLToPath(import.meta.url); -const __dirname = path.dirname(__filename); -const baseDir = path.resolve(__dirname, "../../plugin-coinbase/src/plugins"); -const tradeCsvFilePath = path.join(baseDir, "trades.csv"); -const transactionCsvFilePath = path.join(baseDir, "transactions.csv"); -const webhookCsvFilePath = path.join(baseDir, "webhooks.csv"); - -export async function initializeWallet( - runtime: IAgentRuntime, - networkId: string = Coinbase.networks.EthereumMainnet -) { - let wallet: Wallet; - const storedSeed = - runtime.getSetting("COINBASE_GENERATED_WALLET_HEX_SEED") ?? - process.env.COINBASE_GENERATED_WALLET_HEX_SEED; - - const storedWalletId = - runtime.getSetting("COINBASE_GENERATED_WALLET_ID") ?? - process.env.COINBASE_GENERATED_WALLET_ID; - if (!storedSeed || !storedWalletId) { - // No stored seed or wallet ID, creating a new wallet - wallet = await Wallet.create({ networkId }); - - // Export wallet data directly - const walletData: WalletData = wallet.export(); - const walletAddress = await wallet.getDefaultAddress(); - try { - const characterFilePath = `characters/${runtime.character.name.toLowerCase()}.character.json`; - const walletIDSave = await updateCharacterSecrets( - characterFilePath, - "COINBASE_GENERATED_WALLET_ID", - walletData.walletId - ); - const seedSave = await updateCharacterSecrets( - characterFilePath, - "COINBASE_GENERATED_WALLET_HEX_SEED", - walletData.seed - ); - if (walletIDSave && seedSave) { - elizaLogger.log("Successfully updated character secrets."); - } else { - const seedFilePath = `characters/${runtime.character.name.toLowerCase()}-seed.txt`; - elizaLogger.error( - `Failed to update character secrets so adding gitignored ${seedFilePath} file please add it your env or character file and delete:` - ); - // save it to gitignored file - wallet.saveSeed(seedFilePath); - } - elizaLogger.log( - "Wallet created and stored new wallet:", - walletAddress - ); - } catch (error) { - elizaLogger.error("Error updating character secrets:", error); - throw error; - } - - // Logging wallet creation - elizaLogger.log("Created and stored new wallet:", walletAddress); - } else { - // Importing existing wallet using stored seed and wallet ID - // Always defaults to base-mainnet we can't select the network here - wallet = await Wallet.import({ - seed: storedSeed, - walletId: storedWalletId, - }); - const networkId = wallet.getNetworkId(); - elizaLogger.log("Imported existing wallet for network:", networkId); - - // Logging wallet import - elizaLogger.log( - "Imported existing wallet:", - await wallet.getDefaultAddress() - ); - } - - return wallet; -} - -/** - * Executes a trade and a charity transfer. - * @param {IAgentRuntime} runtime - The runtime for wallet initialization. - * @param {string} network - The network to use. - * @param {number} amount - The amount to trade and transfer. - * @param {string} sourceAsset - The source asset to trade. - * @param {string} targetAsset - The target asset to trade. - */ -export async function executeTradeAndCharityTransfer( - runtime: IAgentRuntime, - network: string, - amount: number, - sourceAsset: string, - targetAsset: string -) { - const wallet = await initializeWallet(runtime, network); - - elizaLogger.log("Wallet initialized:", { - network, - address: await wallet.getDefaultAddress(), - }); - - const charityAddress = getCharityAddress(network); - const charityAmount = charityAddress ? amount * 0.01 : 0; - const tradeAmount = charityAddress ? amount - charityAmount : amount; - const assetIdLowercase = sourceAsset.toLowerCase(); - const tradeParams = { - amount: tradeAmount, - fromAssetId: assetIdLowercase, - toAssetId: targetAsset.toLowerCase(), - }; - - let transfer: Transfer; - if (charityAddress && charityAmount > 0) { - transfer = await executeTransfer( - wallet, - charityAmount, - assetIdLowercase, - charityAddress - ); - elizaLogger.log("Charity Transfer successful:", { - address: charityAddress, - transactionUrl: transfer.getTransactionLink(), - }); - await appendTransactionsToCsv([ - { - address: charityAddress, - amount: charityAmount, - status: "Success", - errorCode: null, - transactionUrl: transfer.getTransactionLink(), - }, - ]); - } - - const trade: Trade = await wallet.createTrade(tradeParams); - elizaLogger.log("Trade initiated:", trade.toString()); - await trade.wait(); - elizaLogger.log("Trade completed successfully:", trade.toString()); - await appendTradeToCsv(trade); - return { - trade, - transfer, - }; -} - -export async function appendTradeToCsv(trade: Trade) { - try { - const csvWriter = createArrayCsvWriter({ - path: tradeCsvFilePath, - header: [ - "Network", - "From Amount", - "Source Asset", - "To Amount", - "Target Asset", - "Status", - "Transaction URL", - ], - append: true, - }); - - const formattedTrade = [ - trade.getNetworkId(), - trade.getFromAmount(), - trade.getFromAssetId(), - trade.getToAmount(), - trade.getToAssetId(), - trade.getStatus(), - trade.getTransaction().getTransactionLink() || "", - ]; - - elizaLogger.log("Writing trade to CSV:", formattedTrade); - await csvWriter.writeRecords([formattedTrade]); - elizaLogger.log("Trade written to CSV successfully."); - } catch (error) { - elizaLogger.error("Error writing trade to CSV:", error); - } -} - -export async function appendTransactionsToCsv(transactions: Transaction[]) { - try { - const csvWriter = createArrayCsvWriter({ - path: transactionCsvFilePath, - header: [ - "Address", - "Amount", - "Status", - "Error Code", - "Transaction URL", - ], - append: true, - }); - - const formattedTransactions = transactions.map((transaction) => [ - transaction.address, - transaction.amount.toString(), - transaction.status, - transaction.errorCode || "", - transaction.transactionUrl || "", - ]); - - elizaLogger.log("Writing transactions to CSV:", formattedTransactions); - await csvWriter.writeRecords(formattedTransactions); - elizaLogger.log("All transactions written to CSV successfully."); - } catch (error) { - elizaLogger.error("Error writing transactions to CSV:", error); - } -} -// create a function to append webhooks to a csv -export async function appendWebhooksToCsv(webhooks: Webhook[]) { - try { - // Ensure the CSV file exists - if (!fs.existsSync(webhookCsvFilePath)) { - elizaLogger.warn("CSV file not found. Creating a new one."); - const csvWriter = createArrayCsvWriter({ - path: webhookCsvFilePath, - header: [ - "Webhook ID", - "Network ID", - "Event Type", - "Event Filters", - "Event Type Filter", - "Notification URI", - ], - }); - await csvWriter.writeRecords([]); // Create an empty file with headers - elizaLogger.log("New CSV file created with headers."); - } - const csvWriter = createArrayCsvWriter({ - path: webhookCsvFilePath, - header: [ - "Webhook ID", - "Network ID", - "Event Type", - "Event Filters", - "Event Type Filter", - "Notification URI", - ], - append: true, - }); - - const formattedWebhooks = webhooks.map((webhook) => [ - webhook.getId(), - webhook.getNetworkId(), - webhook.getEventType(), - JSON.stringify(webhook.getEventFilters()), - JSON.stringify(webhook.getEventTypeFilter()), - webhook.getNotificationURI(), - ]); - - elizaLogger.log("Writing webhooks to CSV:", formattedWebhooks); - await csvWriter.writeRecords(formattedWebhooks); - elizaLogger.log("All webhooks written to CSV successfully."); - } catch (error) { - elizaLogger.error("Error writing webhooks to CSV:", error); - } -} - -/** - * Updates a key-value pair in character.settings.secrets. - * @param {string} characterfilePath - The file path to the character. - * @param {string} key - The secret key to update or add. - * @param {string} value - The new value for the secret key. - */ -export async function updateCharacterSecrets( - characterfilePath: string, - key: string, - value: string -): Promise { - try { - const characterFilePath = path.resolve( - process.cwd(), - characterfilePath - ); - - // Check if the character file exists - if (!fs.existsSync(characterFilePath)) { - elizaLogger.error("Character file not found:", characterFilePath); - return false; - } - - // Read the existing character file - const characterData = JSON.parse( - fs.readFileSync(characterFilePath, "utf-8") - ); - - // Ensure settings and secrets exist in the character file - if (!characterData.settings) { - characterData.settings = {}; - } - if (!characterData.settings.secrets) { - characterData.settings.secrets = {}; - } - - // Update or add the key-value pair - characterData.settings.secrets[key] = value; - - // Write the updated data back to the file - fs.writeFileSync( - characterFilePath, - JSON.stringify(characterData, null, 2), - "utf-8" - ); - - console.log( - `Updated ${key} in character.settings.secrets for ${characterFilePath}.` - ); - } catch (error) { - elizaLogger.error("Error updating character secrets:", error); - return false; - } - return true; -} - -export const getAssetType = (transaction: EthereumTransaction) => { - // Check for ETH - if (transaction.value && transaction.value !== "0") { - return "ETH"; - } - - // Check for ERC-20 tokens - if (transaction.token_transfers && transaction.token_transfers.length > 0) { - return transaction.token_transfers - .map((transfer) => { - return transfer.token_id; - }) - .join(", "); - } - - return "N/A"; -}; - -/** - * Fetches and formats wallet balances and recent transactions. - * - * @param {IAgentRuntime} runtime - The runtime for wallet initialization. - * @param {string} networkId - The network ID (optional, defaults to ETH mainnet). - * @returns {Promise<{balances: Array<{asset: string, amount: string}>, transactions: Array}>} - An object with formatted balances and transactions. - */ -export async function getWalletDetails( - runtime: IAgentRuntime, - networkId: string = Coinbase.networks.EthereumMainnet -): Promise<{ - balances: Array<{ asset: string; amount: string }>; - transactions: Array<{ - timestamp: string; - amount: string; - asset: string; // Ensure getAssetType is implemented - status: string; - transactionUrl: string; - }>; -}> { - try { - // Initialize the wallet, defaulting to the specified network or ETH mainnet - const wallet = await initializeWallet(runtime, networkId); - - // Fetch balances - const balances = await wallet.listBalances(); - const formattedBalances = Array.from(balances, (balance) => ({ - asset: balance[0], - amount: balance[1].toString(), - })); - - // Fetch the wallet's recent transactions - - const transactionsData = []; - const formattedTransactions = transactionsData.map((transaction) => { - const content = transaction.content(); - return { - timestamp: content.block_timestamp || "N/A", - amount: content.value || "N/A", - asset: getAssetType(content) || "N/A", // Ensure getAssetType is implemented - status: transaction.getStatus(), - transactionUrl: transaction.getTransactionLink() || "N/A", - }; - }); - - // Return formatted data - return { - balances: formattedBalances, - transactions: formattedTransactions, - }; - } catch (error) { - console.error("Error fetching wallet details:", error); - throw new Error("Unable to retrieve wallet details."); - } -} - -/** - * Executes a transfer. - * @param {Wallet} wallet - The wallet to use. - * @param {number} amount - The amount to transfer. - * @param {string} sourceAsset - The source asset to transfer. - * @param {string} targetAddress - The target address to transfer to. - */ -export async function executeTransferAndCharityTransfer( - wallet: Wallet, - amount: number, - sourceAsset: string, - targetAddress: string, - network: string -) { - const charityAddress = getCharityAddress(network); - const charityAmount = charityAddress ? amount * 0.01 : 0; - const transferAmount = charityAddress ? amount - charityAmount : amount; - const assetIdLowercase = sourceAsset.toLowerCase(); - - let charityTransfer: Transfer; - if (charityAddress && charityAmount > 0) { - charityTransfer = await executeTransfer( - wallet, - charityAmount, - assetIdLowercase, - charityAddress - ); - elizaLogger.log( - "Charity Transfer successful:", - charityTransfer.toString() - ); - } - - const transferDetails = { - amount: transferAmount, - assetId: assetIdLowercase, - destination: targetAddress, - gasless: assetIdLowercase === "usdc" ? true : false, - }; - elizaLogger.log("Initiating transfer:", transferDetails); - const transfer = await wallet.createTransfer(transferDetails); - elizaLogger.log("Transfer initiated:", transfer.toString()); - await transfer.wait(); - - let responseText = `Transfer executed successfully: -- Amount: ${transfer.getAmount()} -- Asset: ${assetIdLowercase} -- Destination: ${targetAddress} -- Transaction URL: ${transfer.getTransactionLink() || ""}`; - - if (charityTransfer) { - responseText += ` -- Charity Amount: ${charityTransfer.getAmount()} -- Charity Transaction URL: ${charityTransfer.getTransactionLink() || ""}`; - } else { - responseText += "\n(Note: Charity transfer was not completed)"; - } - - elizaLogger.log(responseText); - - return { - transfer, - charityTransfer, - responseText, - }; -} - -/** - * Executes a transfer. - * @param {Wallet} wallet - The wallet to use. - * @param {number} amount - The amount to transfer. - * @param {string} sourceAsset - The source asset to transfer. - * @param {string} targetAddress - The target address to transfer to. - */ -export async function executeTransfer( - wallet: Wallet, - amount: number, - sourceAsset: string, - targetAddress: string -) { - const assetIdLowercase = sourceAsset.toLowerCase(); - const transferDetails = { - amount, - assetId: assetIdLowercase, - destination: targetAddress, - gasless: assetIdLowercase === "usdc" ? true : false, - }; - elizaLogger.log("Initiating transfer:", transferDetails); - let transfer: Transfer | undefined; - try { - transfer = await wallet.createTransfer(transferDetails); - elizaLogger.log("Transfer initiated:", transfer.toString()); - await transfer.wait({ - intervalSeconds: 1, - timeoutSeconds: 20, - }); - } catch (error) { - elizaLogger.error("Error executing transfer:", error); - } - return transfer; -} - -/** - * Gets the charity address based on the network. - * @param {string} network - The network to use. - * @param {boolean} isCharitable - Whether charity donations are enabled - * @throws {Error} If charity address for the network is not configured when charity is enabled - */ -export function getCharityAddress( - network: string, - isCharitable = false -): string | null { - // Check both environment variable and passed parameter - const isCharityEnabled = - process.env.IS_CHARITABLE === "true" && isCharitable; - - if (!isCharityEnabled) { - return null; - } - const networkKey = `CHARITY_ADDRESS_${network.toUpperCase()}`; - const charityAddress = settings[networkKey]; - - if (!charityAddress) { - throw new Error( - `Charity address not configured for network ${network}. Please set ${networkKey} in your environment variables.` - ); - } - - return charityAddress; -} diff --git a/packages/plugin-coinbase/tsconfig.json b/packages/plugin-coinbase/tsconfig.json deleted file mode 100644 index 640065a56301f..0000000000000 --- a/packages/plugin-coinbase/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": ".", - "rootDirs": ["src", "advanced-sdk-ts"], - "types": ["node"] - }, - "include": ["src/**/*.ts", "advanced-sdk-ts/src/**/*.ts"] -} diff --git a/packages/plugin-coinbase/tsup.config.ts b/packages/plugin-coinbase/tsup.config.ts deleted file mode 100644 index 203042106466f..0000000000000 --- a/packages/plugin-coinbase/tsup.config.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["cjs", "esm"], - dts: true, - splitting: false, - bundle: true, - minify: false, - external: [ - "@coinbase/coinbase-sdk", - "form-data", - "combined-stream", - "axios", - "util", - "stream", - "http", - "https", - "events", - "crypto", - "buffer", - "url", - "zlib", - "querystring", - "os", - "@reflink/reflink", - "@node-llama-cpp", - "agentkeepalive", - "fs/promises", - "csv-writer", - "csv-parse/sync", - "dotenv", - "coinbase-advanced-sdk", - "advanced-sdk-ts", - "jsonwebtoken", - "whatwg-url", - ], - platform: "node", - target: "node18", - esbuildOptions(options) { - options.bundle = true; - options.platform = "node"; - options.target = "node18"; - }, -}); diff --git a/packages/plugin-coingecko/.env.test b/packages/plugin-coingecko/.env.test deleted file mode 100644 index dafea90cce1bf..0000000000000 --- a/packages/plugin-coingecko/.env.test +++ /dev/null @@ -1 +0,0 @@ -COINGECKO_API_KEY=your_test_api_key_here \ No newline at end of file diff --git a/packages/plugin-coingecko/.npmignore b/packages/plugin-coingecko/.npmignore deleted file mode 100644 index 0468b4b3648ec..0000000000000 --- a/packages/plugin-coingecko/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts diff --git a/packages/plugin-coingecko/README.md b/packages/plugin-coingecko/README.md deleted file mode 100644 index fcb79d8a5580d..0000000000000 --- a/packages/plugin-coingecko/README.md +++ /dev/null @@ -1,98 +0,0 @@ -# Plugin CoinGecko - -A plugin for fetching cryptocurrency price data from the CoinGecko API. - -## Overview - -The Plugin CoinGecko provides a simple interface to get real-time cryptocurrency data. It integrates with CoinGecko's API to fetch current prices, market data, trending coins, and top gainers/losers for various cryptocurrencies in different fiat currencies. - -This plugin uses the [CoinGecko Pro API](https://docs.coingecko.com/reference/introduction). Please refer to their documentation for detailed information about rate limits, available endpoints, and response formats. - -## Installation - -```bash -pnpm add @elizaos/plugin-coingecko -``` - -## Configuration - -Set up your environment with the required CoinGecko API key: - -| Variable Name | Description | -| ------------------- | ---------------------- | -| `COINGECKO_API_KEY` | Your CoinGecko Pro API key | -| `COINGECKO_PRO_API_KEY` | Your CoinGecko Pro API key | - -## Usage - -```typescript -import { coingeckoPlugin } from "@elizaos/plugin-coingecko"; - -// Initialize the plugin -const plugin = coingeckoPlugin; -``` - -## Actions - -### GET_PRICE - -Fetches the current price and market data for one or more cryptocurrencies. - -Features: -- Multiple currency support (e.g., USD, EUR, JPY) -- Optional market cap data -- Optional 24h volume data -- Optional 24h price change data -- Optional last update timestamp - -Examples: -- "What's the current price of Bitcoin?" -- "Check ETH price in EUR with market cap" -- "Show me BTC and ETH prices in USD and EUR" -- "What's USDC worth with 24h volume and price change?" - -### GET_TRENDING - -Fetches the current trending cryptocurrencies on CoinGecko. - -Features: -- Includes trending coins with market data -- Optional NFT inclusion -- Optional category inclusion - -Examples: -- "What's trending in crypto?" -- "Show me trending coins only" -- "What are the hot cryptocurrencies right now?" - -### GET_TOP_GAINERS_LOSERS - -Fetches the top gaining and losing cryptocurrencies by price change. - -Features: -- Customizable time range (1h, 24h, 7d, 14d, 30d, 60d, 1y) -- Configurable number of top coins to include -- Multiple currency support -- Market cap ranking included - -Examples: -- "Show me the biggest gainers and losers today" -- "What are the top movers in EUR for the past week?" -- "Show me monthly performance of top 100 coins" - -## Response Format - -All actions return structured data including: -- Formatted text for easy reading -- Raw data for programmatic use -- Request parameters used -- Error details when applicable - -## Error Handling - -The plugin handles various error scenarios: -- Rate limiting -- API key validation -- Invalid parameters -- Network issues -- Pro plan requirements \ No newline at end of file diff --git a/packages/plugin-coingecko/__tests__/actions/getMarkets.test.ts b/packages/plugin-coingecko/__tests__/actions/getMarkets.test.ts deleted file mode 100644 index d6c937e2b209a..0000000000000 --- a/packages/plugin-coingecko/__tests__/actions/getMarkets.test.ts +++ /dev/null @@ -1,281 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { elizaLogger, ModelClass, generateObject, composeContext } from '@elizaos/core'; -import getMarketsAction, { formatCategory } from '../../src/actions/getMarkets'; -import axios from 'axios'; -import * as environment from '../../src/environment'; -import * as categoriesProvider from '../../src/providers/categoriesProvider'; - -vi.mock('axios'); -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - log: vi.fn(), - error: vi.fn(), - warn: vi.fn(), - info: vi.fn(), - success: vi.fn(), - }, - generateObject: vi.fn(), - composeContext: vi.fn(), - ModelClass: { LARGE: 'LARGE', SMALL: 'SMALL' } -})); -vi.mock('../../src/environment', () => ({ - validateCoingeckoConfig: vi.fn(), - getApiConfig: vi.fn() -})); -vi.mock('../../src/providers/categoriesProvider'); - -describe('getMarkets action', () => { - const mockRuntime = { - composeState: vi.fn(), - updateRecentMessageState: vi.fn(), - getPluginConfig: vi.fn(), - }; - - const mockMessage = {}; - const mockState = {}; - const mockCallback = vi.fn(); - const mockConfig = { - COINGECKO_API_KEY: 'test-api-key', - COINGECKO_PRO_API_KEY: null - }; - - const mockCategories = [ - { category_id: 'defi', name: 'DeFi' }, - { category_id: 'nft', name: 'NFT' } - ]; - - beforeEach(() => { - vi.clearAllMocks(); - - // Mock environment validation - vi.mocked(environment.validateCoingeckoConfig).mockResolvedValue(mockConfig); - vi.mocked(environment.getApiConfig).mockReturnValue({ - baseUrl: 'https://api.coingecko.com/api/v3', - apiKey: 'test-api-key', - headerKey: 'x-cg-demo-api-key' - }); - - // Mock categories provider - vi.mocked(categoriesProvider.getCategoriesData).mockResolvedValue(mockCategories); - - // Mock runtime functions - mockRuntime.composeState.mockResolvedValue(mockState); - mockRuntime.updateRecentMessageState.mockResolvedValue(mockState); - mockRuntime.getPluginConfig.mockResolvedValue({ - apiKey: 'test-api-key', - baseUrl: 'https://api.coingecko.com/api/v3' - }); - - // Mock the core functions - vi.mocked(elizaLogger.log).mockImplementation(() => {}); - vi.mocked(elizaLogger.error).mockImplementation(() => {}); - vi.mocked(elizaLogger.success).mockImplementation(() => {}); - vi.mocked(composeContext).mockReturnValue({}); - }); - - describe('formatCategory', () => { - it('should return undefined for undefined input', () => { - expect(formatCategory(undefined, mockCategories)).toBeUndefined(); - }); - - it('should find exact match by category_id', () => { - expect(formatCategory('defi', mockCategories)).toBe('defi'); - }); - - it('should find match by name', () => { - expect(formatCategory('DeFi', mockCategories)).toBe('defi'); - }); - - it('should find partial match', () => { - expect(formatCategory('nf', mockCategories)).toBe('nft'); - }); - - it('should return undefined for no match', () => { - expect(formatCategory('invalid-category', mockCategories)).toBeUndefined(); - }); - }); - - it('should validate coingecko config', async () => { - await getMarketsAction.validate(mockRuntime, mockMessage); - expect(environment.validateCoingeckoConfig).toHaveBeenCalledWith(mockRuntime); - }); - - it('should fetch and format market data', async () => { - const mockResponse = { - data: [ - { - id: 'bitcoin', - symbol: 'btc', - name: 'Bitcoin', - image: 'image_url', - current_price: 50000, - market_cap: 1000000000000, - market_cap_rank: 1, - fully_diluted_valuation: 1100000000000, - total_volume: 30000000000, - high_24h: 51000, - low_24h: 49000, - price_change_24h: 1000, - price_change_percentage_24h: 2, - market_cap_change_24h: 20000000000, - market_cap_change_percentage_24h: 2, - circulating_supply: 19000000, - total_supply: 21000000, - max_supply: 21000000, - ath: 69000, - ath_change_percentage: -27.5, - ath_date: '2021-11-10T14:24:11.849Z', - atl: 67.81, - atl_change_percentage: 73623.12, - atl_date: '2013-07-06T00:00:00.000Z', - last_updated: '2024-01-31T23:00:00.000Z' - } - ] - }; - - vi.mocked(axios.get).mockResolvedValueOnce(mockResponse); - - // Mock the content generation - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - vs_currency: 'usd', - category: 'defi', - order: 'market_cap_desc', - per_page: 20, - page: 1, - sparkline: false - }, - modelClass: ModelClass.SMALL - }); - - await getMarketsAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(axios.get).toHaveBeenCalledWith( - 'https://api.coingecko.com/api/v3/coins/markets', - expect.objectContaining({ - params: { - vs_currency: 'usd', - category: 'defi', - order: 'market_cap_desc', - per_page: 20, - page: 1, - sparkline: false - } - }) - ); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Bitcoin (BTC)'), - content: expect.objectContaining({ - markets: expect.arrayContaining([ - expect.objectContaining({ - name: 'Bitcoin', - symbol: 'BTC', - marketCapRank: 1, - currentPrice: 50000 - }) - ]) - }) - })); - }); - - it('should handle invalid category', async () => { - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - vs_currency: 'usd', - category: 'invalid-category', - order: 'market_cap_desc', - per_page: 20, - page: 1, - sparkline: false - }, - modelClass: ModelClass.SMALL - }); - - await getMarketsAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Invalid category'), - error: expect.objectContaining({ - message: expect.stringContaining('Invalid category') - }) - })); - }); - - it('should handle API errors gracefully', async () => { - vi.mocked(axios.get).mockRejectedValueOnce(new Error('API Error')); - - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - vs_currency: 'usd', - order: 'market_cap_desc', - per_page: 20, - page: 1, - sparkline: false - }, - modelClass: ModelClass.SMALL - }); - - await getMarketsAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Error fetching market data'), - error: expect.objectContaining({ - message: expect.stringContaining('API Error') - }) - })); - }); - - it('should handle rate limit errors', async () => { - const rateLimitError = new Error('Rate limit exceeded'); - Object.assign(rateLimitError, { - response: { status: 429 } - }); - vi.mocked(axios.get).mockRejectedValueOnce(rateLimitError); - - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - vs_currency: 'usd', - order: 'market_cap_desc', - per_page: 20, - page: 1, - sparkline: false - }, - modelClass: ModelClass.SMALL - }); - - await getMarketsAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Rate limit exceeded'), - error: expect.objectContaining({ - message: expect.stringContaining('Rate limit exceeded'), - statusCode: 429 - }) - })); - }); - - it('should handle empty response data', async () => { - vi.mocked(axios.get).mockResolvedValueOnce({ data: [] }); - - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - vs_currency: 'usd', - order: 'market_cap_desc', - per_page: 20, - page: 1, - sparkline: false - }, - modelClass: ModelClass.SMALL - }); - - await getMarketsAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('No market data received'), - error: expect.objectContaining({ - message: expect.stringContaining('No market data received') - }) - })); - }); -}); diff --git a/packages/plugin-coingecko/__tests__/actions/getPrice.test.ts b/packages/plugin-coingecko/__tests__/actions/getPrice.test.ts deleted file mode 100644 index 3c371be397ffd..0000000000000 --- a/packages/plugin-coingecko/__tests__/actions/getPrice.test.ts +++ /dev/null @@ -1,208 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { elizaLogger, ModelClass, generateObject, composeContext } from '@elizaos/core'; -import getPriceAction from '../../src/actions/getPrice'; -import axios from 'axios'; -import * as environment from '../../src/environment'; -import * as coinsProvider from '../../src/providers/coinsProvider'; - -vi.mock('axios'); -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - log: vi.fn(), - error: vi.fn(), - warn: vi.fn(), - info: vi.fn(), - success: vi.fn(), - }, - generateObject: vi.fn(), - composeContext: vi.fn(), - ModelClass: { LARGE: 'LARGE' } -})); -vi.mock('../../src/environment', () => ({ - validateCoingeckoConfig: vi.fn(), - getApiConfig: vi.fn() -})); -vi.mock('../../src/providers/coinsProvider'); - -describe('getPrice action', () => { - const mockRuntime = { - composeState: vi.fn(), - updateRecentMessageState: vi.fn(), - getPluginConfig: vi.fn(), - }; - - const mockMessage = {}; - const mockState = {}; - const mockCallback = vi.fn(); - const mockConfig = { - COINGECKO_API_KEY: 'test-api-key', - COINGECKO_PRO_API_KEY: null - }; - - beforeEach(() => { - vi.clearAllMocks(); - - // Mock environment validation - vi.mocked(environment.validateCoingeckoConfig).mockResolvedValue(mockConfig); - vi.mocked(environment.getApiConfig).mockReturnValue({ - baseUrl: 'https://api.coingecko.com/api/v3', - apiKey: 'test-api-key', - headerKey: 'x-cg-demo-api-key' - }); - - // Mock runtime functions - mockRuntime.composeState.mockResolvedValue(mockState); - mockRuntime.updateRecentMessageState.mockResolvedValue(mockState); - mockRuntime.getPluginConfig.mockResolvedValue({ - apiKey: 'test-api-key', - baseUrl: 'https://api.coingecko.com/api/v3' - }); - - // Mock the core functions - vi.mocked(elizaLogger.log).mockImplementation(() => {}); - vi.mocked(elizaLogger.error).mockImplementation(() => {}); - vi.mocked(elizaLogger.success).mockImplementation(() => {}); - vi.mocked(composeContext).mockReturnValue({}); - }); - - it('should validate coingecko config', async () => { - await getPriceAction.validate(mockRuntime, mockMessage); - expect(environment.validateCoingeckoConfig).toHaveBeenCalledWith(mockRuntime); - }); - - it('should fetch and format price data for a single coin', async () => { - const mockPriceResponse = { - data: { - bitcoin: { - usd: 50000, - eur: 42000 - } - } - }; - - const mockCoinsData = [{ - id: 'bitcoin', - name: 'Bitcoin', - symbol: 'btc' - }]; - - vi.mocked(axios.get).mockResolvedValueOnce(mockPriceResponse); - vi.mocked(coinsProvider.getCoinsData).mockResolvedValueOnce(mockCoinsData); - - // Mock the content generation - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - coinIds: 'bitcoin', - currency: ['usd', 'eur'], - include_market_cap: false, - include_24hr_vol: false, - include_24hr_change: false, - include_last_updated_at: false - }, - modelClass: ModelClass.LARGE - }); - - await getPriceAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(axios.get).toHaveBeenCalledWith( - 'https://api.coingecko.com/api/v3/simple/price', - expect.any(Object) - ); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Bitcoin (BTC)') - })); - }); - - it('should handle API errors gracefully', async () => { - vi.mocked(axios.get).mockRejectedValueOnce(new Error('API Error')); - - // Mock the content generation - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - coinIds: 'invalid-coin', - currency: ['usd'], - include_market_cap: false, - include_24hr_vol: false, - include_24hr_change: false, - include_last_updated_at: false - }, - modelClass: ModelClass.LARGE - }); - - await getPriceAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - content: expect.objectContaining({ - error: expect.stringContaining('API Error') - }) - })); - }); - - it('should handle empty response data', async () => { - vi.mocked(axios.get).mockResolvedValueOnce({ data: {} }); - - // Mock the content generation - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - coinIds: 'non-existent-coin', - currency: ['usd'], - include_market_cap: false, - include_24hr_vol: false, - include_24hr_change: false, - include_last_updated_at: false - }, - modelClass: ModelClass.LARGE - }); - - await getPriceAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - content: expect.objectContaining({ - error: expect.stringContaining('No price data available') - }) - })); - }); - - it('should include additional market data when requested', async () => { - const mockPriceResponse = { - data: { - ethereum: { - usd: 3000, - usd_market_cap: 350000000000, - usd_24h_vol: 20000000000, - usd_24h_change: 5.5, - last_updated_at: 1643673600 - } - } - }; - - const mockCoinsData = [{ - id: 'ethereum', - name: 'Ethereum', - symbol: 'eth' - }]; - - vi.mocked(axios.get).mockResolvedValueOnce(mockPriceResponse); - vi.mocked(coinsProvider.getCoinsData).mockResolvedValueOnce(mockCoinsData); - - // Mock the content generation - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - coinIds: 'ethereum', - currency: ['usd'], - include_market_cap: true, - include_24hr_vol: true, - include_24hr_change: true, - include_last_updated_at: true - }, - modelClass: ModelClass.LARGE - }); - - await getPriceAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Market Cap') - })); - }); -}); diff --git a/packages/plugin-coingecko/__tests__/actions/getTopGainersLosers.test.ts b/packages/plugin-coingecko/__tests__/actions/getTopGainersLosers.test.ts deleted file mode 100644 index 3854e593353d2..0000000000000 --- a/packages/plugin-coingecko/__tests__/actions/getTopGainersLosers.test.ts +++ /dev/null @@ -1,251 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { elizaLogger, ModelClass, generateObject, composeContext } from '@elizaos/core'; -import getTopGainersLosersAction from '../../src/actions/getTopGainersLosers'; -import axios from 'axios'; -import * as environment from '../../src/environment'; - -vi.mock('axios'); -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - log: vi.fn(), - error: vi.fn(), - warn: vi.fn(), - info: vi.fn(), - success: vi.fn(), - }, - generateObject: vi.fn(), - composeContext: vi.fn(), - ModelClass: { LARGE: 'LARGE' } -})); -vi.mock('../../src/environment', () => ({ - validateCoingeckoConfig: vi.fn(), - getApiConfig: vi.fn() -})); - -describe('getTopGainersLosers action', () => { - const mockRuntime = { - composeState: vi.fn(), - updateRecentMessageState: vi.fn(), - getPluginConfig: vi.fn(), - }; - - const mockMessage = {}; - const mockState = {}; - const mockCallback = vi.fn(); - const mockConfig = { - COINGECKO_API_KEY: 'test-api-key', - COINGECKO_PRO_API_KEY: null - }; - - beforeEach(() => { - vi.clearAllMocks(); - - // Mock environment validation - vi.mocked(environment.validateCoingeckoConfig).mockResolvedValue(mockConfig); - vi.mocked(environment.getApiConfig).mockReturnValue({ - baseUrl: 'https://api.coingecko.com/api/v3', - apiKey: 'test-api-key', - headerKey: 'x-cg-demo-api-key' - }); - - // Mock runtime functions - mockRuntime.composeState.mockResolvedValue(mockState); - mockRuntime.updateRecentMessageState.mockResolvedValue(mockState); - mockRuntime.getPluginConfig.mockResolvedValue({ - apiKey: 'test-api-key', - baseUrl: 'https://api.coingecko.com/api/v3' - }); - - // Mock the core functions - vi.mocked(elizaLogger.log).mockImplementation(() => {}); - vi.mocked(elizaLogger.error).mockImplementation(() => {}); - vi.mocked(elizaLogger.success).mockImplementation(() => {}); - vi.mocked(composeContext).mockReturnValue({}); - }); - - it('should validate coingecko config', async () => { - await getTopGainersLosersAction.validate(mockRuntime, mockMessage); - expect(environment.validateCoingeckoConfig).toHaveBeenCalledWith(mockRuntime); - }); - - it('should fetch and format top gainers and losers data', async () => { - const mockResponse = { - data: { - top_gainers: [ - { - id: 'bitcoin', - symbol: 'btc', - name: 'Bitcoin', - image: 'image_url', - market_cap_rank: 1, - usd: 50000, - usd_24h_vol: 30000000000, - usd_24h_change: 5.5 - } - ], - top_losers: [ - { - id: 'ethereum', - symbol: 'eth', - name: 'Ethereum', - image: 'image_url', - market_cap_rank: 2, - usd: 2500, - usd_24h_vol: 20000000000, - usd_24h_change: -3.2 - } - ] - } - }; - - vi.mocked(axios.get).mockResolvedValueOnce(mockResponse); - - // Mock the content generation - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - vs_currency: 'usd', - duration: '24h', - top_coins: '1000' - }, - modelClass: ModelClass.LARGE - }); - - await getTopGainersLosersAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(axios.get).toHaveBeenCalledWith( - 'https://api.coingecko.com/api/v3/coins/top_gainers_losers', - expect.objectContaining({ - params: { - vs_currency: 'usd', - duration: '24h', - top_coins: '1000' - } - }) - ); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Bitcoin (BTC)'), - content: expect.objectContaining({ - data: expect.objectContaining({ - top_gainers: expect.arrayContaining([ - expect.objectContaining({ - name: 'Bitcoin', - symbol: 'btc', - usd_24h_change: 5.5 - }) - ]), - top_losers: expect.arrayContaining([ - expect.objectContaining({ - name: 'Ethereum', - symbol: 'eth', - usd_24h_change: -3.2 - }) - ]) - }) - }) - })); - }); - - it('should handle API errors gracefully', async () => { - vi.mocked(axios.get).mockRejectedValueOnce(new Error('API Error')); - - // Mock the content generation - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - vs_currency: 'usd', - duration: '24h', - top_coins: '1000' - }, - modelClass: ModelClass.LARGE - }); - - await getTopGainersLosersAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Error fetching top gainers/losers data'), - content: expect.objectContaining({ - error: expect.stringContaining('API Error') - }) - })); - }); - - it('should handle rate limit errors', async () => { - const rateLimitError = new Error('Rate limit exceeded'); - Object.assign(rateLimitError, { - response: { status: 429 } - }); - vi.mocked(axios.get).mockRejectedValueOnce(rateLimitError); - - // Mock the content generation - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - vs_currency: 'usd', - duration: '24h', - top_coins: '1000' - }, - modelClass: ModelClass.LARGE - }); - - await getTopGainersLosersAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Rate limit exceeded'), - content: expect.objectContaining({ - error: expect.stringContaining('Rate limit exceeded'), - statusCode: 429 - }) - })); - }); - - it('should handle pro plan requirement errors', async () => { - const proPlanError = new Error('Pro plan required'); - Object.assign(proPlanError, { - response: { status: 403 } - }); - vi.mocked(axios.get).mockRejectedValueOnce(proPlanError); - - // Mock the content generation - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - vs_currency: 'usd', - duration: '24h', - top_coins: '1000' - }, - modelClass: ModelClass.LARGE - }); - - await getTopGainersLosersAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('requires a CoinGecko Pro API key'), - content: expect.objectContaining({ - error: expect.stringContaining('Pro plan required'), - statusCode: 403, - requiresProPlan: true - }) - })); - }); - - it('should handle empty response data', async () => { - vi.mocked(axios.get).mockResolvedValueOnce({ data: null }); - - // Mock the content generation - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - vs_currency: 'usd', - duration: '24h', - top_coins: '1000' - }, - modelClass: ModelClass.LARGE - }); - - await getTopGainersLosersAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('No data received'), - content: expect.objectContaining({ - error: expect.stringContaining('No data received') - }) - })); - }); -}); diff --git a/packages/plugin-coingecko/__tests__/actions/getTrending.test.ts b/packages/plugin-coingecko/__tests__/actions/getTrending.test.ts deleted file mode 100644 index 32b51f36aaa9b..0000000000000 --- a/packages/plugin-coingecko/__tests__/actions/getTrending.test.ts +++ /dev/null @@ -1,220 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { elizaLogger, ModelClass, generateObject, composeContext } from '@elizaos/core'; -import getTrendingAction from '../../src/actions/getTrending'; -import axios from 'axios'; -import * as environment from '../../src/environment'; - -vi.mock('axios'); -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - log: vi.fn(), - error: vi.fn(), - warn: vi.fn(), - info: vi.fn(), - success: vi.fn(), - }, - generateObject: vi.fn(), - composeContext: vi.fn(), - ModelClass: { LARGE: 'LARGE' } -})); -vi.mock('../../src/environment', () => ({ - validateCoingeckoConfig: vi.fn(), - getApiConfig: vi.fn() -})); - -describe('getTrending action', () => { - const mockRuntime = { - composeState: vi.fn(), - updateRecentMessageState: vi.fn(), - getPluginConfig: vi.fn(), - }; - - const mockMessage = {}; - const mockState = {}; - const mockCallback = vi.fn(); - const mockConfig = { - COINGECKO_API_KEY: 'test-api-key', - COINGECKO_PRO_API_KEY: null - }; - - beforeEach(() => { - vi.clearAllMocks(); - - // Mock environment validation - vi.mocked(environment.validateCoingeckoConfig).mockResolvedValue(mockConfig); - vi.mocked(environment.getApiConfig).mockReturnValue({ - baseUrl: 'https://api.coingecko.com/api/v3', - apiKey: 'test-api-key', - headerKey: 'x-cg-demo-api-key' - }); - - // Mock runtime functions - mockRuntime.composeState.mockResolvedValue(mockState); - mockRuntime.updateRecentMessageState.mockResolvedValue(mockState); - mockRuntime.getPluginConfig.mockResolvedValue({ - apiKey: 'test-api-key', - baseUrl: 'https://api.coingecko.com/api/v3' - }); - - // Mock the core functions - vi.mocked(elizaLogger.log).mockImplementation(() => {}); - vi.mocked(elizaLogger.error).mockImplementation(() => {}); - vi.mocked(elizaLogger.success).mockImplementation(() => {}); - vi.mocked(composeContext).mockReturnValue({}); - }); - - it('should validate coingecko config', async () => { - await getTrendingAction.validate(mockRuntime, mockMessage); - expect(environment.validateCoingeckoConfig).toHaveBeenCalledWith(mockRuntime); - }); - - it('should fetch and format trending data', async () => { - const mockTrendingResponse = { - data: { - coins: [ - { - item: { - id: 'bitcoin', - name: 'Bitcoin', - symbol: 'btc', - market_cap_rank: 1, - thumb: 'thumb_url', - large: 'large_url' - } - } - ], - nfts: [ - { - id: 'bored-ape', - name: 'Bored Ape Yacht Club', - symbol: 'BAYC', - thumb: 'thumb_url' - } - ], - categories: [ - { - id: 'defi', - name: 'DeFi' - } - ], - exchanges: [], - icos: [] - } - }; - - vi.mocked(axios.get).mockResolvedValueOnce(mockTrendingResponse); - - // Mock the content generation - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - include_nfts: true, - include_categories: true - }, - modelClass: ModelClass.LARGE - }); - - await getTrendingAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(axios.get).toHaveBeenCalledWith( - 'https://api.coingecko.com/api/v3/search/trending', - expect.any(Object) - ); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Bitcoin (BTC)'), - content: expect.objectContaining({ - trending: expect.objectContaining({ - coins: expect.arrayContaining([ - expect.objectContaining({ - name: 'Bitcoin', - symbol: 'BTC', - marketCapRank: 1 - }) - ]), - nfts: expect.arrayContaining([ - expect.objectContaining({ - name: 'Bored Ape Yacht Club', - symbol: 'BAYC' - }) - ]), - categories: expect.arrayContaining([ - expect.objectContaining({ - name: 'DeFi' - }) - ]) - }) - }) - })); - }); - - it('should handle API errors gracefully', async () => { - vi.mocked(axios.get).mockRejectedValueOnce(new Error('API Error')); - - // Mock the content generation - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - include_nfts: true, - include_categories: true - }, - modelClass: ModelClass.LARGE - }); - - await getTrendingAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Error fetching trending data'), - content: expect.objectContaining({ - error: expect.stringContaining('API Error') - }) - })); - }); - - it('should handle rate limit errors', async () => { - const rateLimitError = new Error('Rate limit exceeded'); - Object.assign(rateLimitError, { - response: { status: 429 } - }); - vi.mocked(axios.get).mockRejectedValueOnce(rateLimitError); - - // Mock the content generation - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - include_nfts: true, - include_categories: true - }, - modelClass: ModelClass.LARGE - }); - - await getTrendingAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Rate limit exceeded'), - content: expect.objectContaining({ - error: expect.stringContaining('Rate limit exceeded'), - statusCode: 429 - }) - })); - }); - - it('should handle empty response data', async () => { - vi.mocked(axios.get).mockResolvedValueOnce({ data: null }); - - // Mock the content generation - vi.mocked(generateObject).mockResolvedValueOnce({ - object: { - include_nfts: true, - include_categories: true - }, - modelClass: ModelClass.LARGE - }); - - await getTrendingAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Error fetching trending data'), - content: expect.objectContaining({ - error: expect.stringContaining('No data received') - }) - })); - }); -}); diff --git a/packages/plugin-coingecko/__tests__/setup.ts b/packages/plugin-coingecko/__tests__/setup.ts deleted file mode 100644 index ba8257dbe3972..0000000000000 --- a/packages/plugin-coingecko/__tests__/setup.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { vi } from 'vitest'; -import { elizaLogger } from '@elizaos/core'; - -// Mock elizaLogger -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - log: vi.fn(), - error: vi.fn(), - warn: vi.fn(), - info: vi.fn(), - generateObject: vi.fn(), - } -})); - -// Mock fetch -global.fetch = vi.fn(); - -beforeEach(() => { - vi.clearAllMocks(); -}); diff --git a/packages/plugin-coingecko/biome.json b/packages/plugin-coingecko/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-coingecko/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-coingecko/package.json b/packages/plugin-coingecko/package.json deleted file mode 100644 index aeef5225d78ef..0000000000000 --- a/packages/plugin-coingecko/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "@elizaos/plugin-coingecko", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "axios": "^1.6.7", - "tsup": "^8.3.5" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "@vitest/coverage-v8": "^1.2.2", - "vitest": "^1.2.2" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "test:watch": "vitest watch", - "test:coverage": "vitest run --coverage", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - } -} diff --git a/packages/plugin-coingecko/src/actions/getMarkets.ts b/packages/plugin-coingecko/src/actions/getMarkets.ts deleted file mode 100644 index b80c7bf14bc03..0000000000000 --- a/packages/plugin-coingecko/src/actions/getMarkets.ts +++ /dev/null @@ -1,312 +0,0 @@ -import { - type ActionExample, - composeContext, - type Content, - elizaLogger, - generateObject, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action -} from "@elizaos/core"; -import axios from "axios"; -import { z } from "zod"; -import { getApiConfig, validateCoingeckoConfig } from "../environment"; -import { getCategoriesData } from '../providers/categoriesProvider'; -import { getMarketsTemplate } from "../templates/markets"; - -interface CategoryItem { - category_id: string; - name: string; -} - -export function formatCategory(category: string | undefined, categories: CategoryItem[]): string | undefined { - if (!category) return undefined; - - const normalizedInput = category.toLowerCase().trim(); - - // First try to find exact match by category_id - const exactMatch = categories.find(c => c.category_id === normalizedInput); - if (exactMatch) { - return exactMatch.category_id; - } - - // Then try to find match by name - const nameMatch = categories.find(c => - c.name.toLowerCase() === normalizedInput || - c.name.toLowerCase().replace(/[^a-z0-9]+/g, '-') === normalizedInput - ); - if (nameMatch) { - return nameMatch.category_id; - } - - // Try to find partial matches - const partialMatch = categories.find(c => - c.name.toLowerCase().includes(normalizedInput) || - c.category_id.includes(normalizedInput) - ); - if (partialMatch) { - return partialMatch.category_id; - } - - return undefined; -} - -/** - * Interface for CoinGecko /coins/markets endpoint response - * @see https://docs.coingecko.com/reference/coins-markets - */ -export interface CoinMarketData { - id: string; - symbol: string; - name: string; - image: string; - current_price: number; - market_cap: number; - market_cap_rank: number; - fully_diluted_valuation: number; - total_volume: number; - high_24h: number; - low_24h: number; - price_change_24h: number; - price_change_percentage_24h: number; - market_cap_change_24h: number; - market_cap_change_percentage_24h: number; - circulating_supply: number; - total_supply: number; - max_supply: number; - ath: number; - ath_change_percentage: number; - ath_date: string; - atl: number; - atl_change_percentage: number; - atl_date: string; - last_updated: string; -} - -export const GetMarketsSchema = z.object({ - vs_currency: z.string().default('usd'), - category: z.string().optional(), - order: z.enum(['market_cap_desc', 'market_cap_asc', 'volume_desc', 'volume_asc']).default('market_cap_desc'), - per_page: z.number().min(1).max(250).default(20), - page: z.number().min(1).default(1), - sparkline: z.boolean().default(false) -}); - -export type GetMarketsContent = z.infer & Content; - -export const isGetMarketsContent = (obj: unknown): obj is GetMarketsContent => { - return GetMarketsSchema.safeParse(obj).success; -}; - -export default { - name: "GET_MARKETS", - similes: [ - "MARKET_OVERVIEW", - "TOP_RANKINGS", - "MARKET_LEADERBOARD", - "CRYPTO_RANKINGS", - "BEST_PERFORMING_COINS", - "TOP_MARKET_CAPS" - ], - // eslint-disable-next-line - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateCoingeckoConfig(runtime); - return true; - }, - // Comprehensive endpoint for market rankings, supports up to 250 coins per request - description: "Get ranked list of top cryptocurrencies sorted by market metrics (without specifying coins)", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - elizaLogger.log("Starting CoinGecko GET_MARKETS handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - - try { - const config = await validateCoingeckoConfig(runtime); - const { baseUrl, apiKey, headerKey } = getApiConfig(config); - - // Get categories through the provider - const categories = await getCategoriesData(runtime); - - // Compose markets context with categories - const marketsContext = composeContext({ - state: currentState, - template: getMarketsTemplate.replace('{{categories}}', - categories.map(c => `- ${c.name} (ID: ${c.category_id})`).join('\n') - ), - }); - - const result = await generateObject({ - runtime, - context: marketsContext, - modelClass: ModelClass.SMALL, - schema: GetMarketsSchema - }); - - if (!isGetMarketsContent(result.object)) { - elizaLogger.error("Invalid market data format received"); - return false; - } - - const content = result.object; - elizaLogger.log("Content from template:", content); - - // If template returns null, this is not a markets request - if (!content) { - return false; - } - - const formattedCategory = formatCategory(content.category, categories); - if (content.category && !formattedCategory) { - throw new Error(`Invalid category: ${content.category}. Please choose from the available categories.`); - } - - elizaLogger.log("Making API request with params:", { - url: `${baseUrl}/coins/markets`, - category: formattedCategory, - vs_currency: content.vs_currency, - order: content.order, - per_page: content.per_page, - page: content.page - }); - - const response = await axios.get( - `${baseUrl}/coins/markets`, - { - headers: { - 'accept': 'application/json', - [headerKey]: apiKey - }, - params: { - vs_currency: content.vs_currency, - category: formattedCategory, - order: content.order, - per_page: content.per_page, - page: content.page, - sparkline: content.sparkline - } - } - ); - - if (!response.data?.length) { - throw new Error("No market data received from CoinGecko API"); - } - - const formattedData = response.data.map(coin => ({ - name: coin.name, - symbol: coin.symbol.toUpperCase(), - marketCapRank: coin.market_cap_rank, - currentPrice: coin.current_price, - priceChange24h: coin.price_change_24h, - priceChangePercentage24h: coin.price_change_percentage_24h, - marketCap: coin.market_cap, - volume24h: coin.total_volume, - high24h: coin.high_24h, - low24h: coin.low_24h, - circulatingSupply: coin.circulating_supply, - totalSupply: coin.total_supply, - maxSupply: coin.max_supply, - lastUpdated: coin.last_updated - })); - - const categoryDisplay = content.category ? - `${categories.find(c => c.category_id === formattedCategory)?.name.toUpperCase() || content.category.toUpperCase()} ` : ''; - - const responseText = [ - `Top ${formattedData.length} ${categoryDisplay}Cryptocurrencies by ${content.order === 'volume_desc' || content.order === 'volume_asc' ? 'Volume' : 'Market Cap'}:`, - ...formattedData.map((coin, index) => - `${index + 1}. ${coin.name} (${coin.symbol})` + - ` | $${coin.currentPrice.toLocaleString()}` + - ` | ${coin.priceChangePercentage24h.toFixed(2)}%` + - ` | MCap: $${(coin.marketCap / 1e9).toFixed(2)}B` - ) - ].join('\n'); - - elizaLogger.success("Market data retrieved successfully!"); - - if (callback) { - callback({ - text: responseText, - content: { - markets: formattedData, - params: { - vs_currency: content.vs_currency, - category: content.category, - order: content.order, - per_page: content.per_page, - page: content.page - }, - timestamp: new Date().toISOString() - } - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error in GET_MARKETS handler:", error); - - let errorMessage: string; - if (error.response?.status === 429) { - errorMessage = "Rate limit exceeded. Please try again later."; - } else if (error.response?.status === 403) { - errorMessage = "This endpoint requires a CoinGecko Pro API key. Please upgrade your plan to access this data."; - } else if (error.response?.status === 400) { - errorMessage = "Invalid request parameters. Please check your input."; - } else { - errorMessage = `Error fetching market data: ${error.message}`; - } - - if (callback) { - callback({ - text: errorMessage, - error: { - message: error.message, - statusCode: error.response?.status, - params: error.config?.params, - requiresProPlan: error.response?.status === 403 - } - }); - } - return false; - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Show me the top cryptocurrencies by market cap", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll fetch the current market data for top cryptocurrencies.", - action: "GET_MARKETS", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are the top cryptocurrencies:\n1. Bitcoin (BTC) | $45,000 | +2.5% | MCap: $870.5B\n{{dynamic}}", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-coingecko/src/actions/getNetworkNewPools.ts b/packages/plugin-coingecko/src/actions/getNetworkNewPools.ts deleted file mode 100644 index 44ef630a61629..0000000000000 --- a/packages/plugin-coingecko/src/actions/getNetworkNewPools.ts +++ /dev/null @@ -1,293 +0,0 @@ -import { - type ActionExample, - composeContext, - type Content, - elizaLogger, - generateObject, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action, -} from "@elizaos/core"; -import axios from "axios"; -import { z } from "zod"; -import { getApiConfig, validateCoingeckoConfig } from "../environment"; -import { getNetworkNewPoolsTemplate } from "../templates/networkNewPools"; -import { getNetworksData } from "../providers/networkProvider"; - -interface NewPool { - id: string; - type: string; - attributes: { - name: string; - market_cap_usd: string; - fdv_usd: string; - reserve_in_usd: string; - pool_created_at: string; - }; -} - -interface NewPoolsResponse { - data: NewPool[]; -} - -export const GetNetworkNewPoolsSchema = z.object({ - networkId: z.string(), - limit: z.number().min(1).max(100).default(10), -}); - -export type GetNetworkNewPoolsContent = z.infer< - typeof GetNetworkNewPoolsSchema -> & - Content; - -export const isGetNetworkNewPoolsContent = ( - obj: unknown -): obj is GetNetworkNewPoolsContent => { - return GetNetworkNewPoolsSchema.safeParse(obj).success; -}; - -export default { - name: "GET_NETWORK_NEW_POOLS", - similes: [ - "NETWORK_NEW_POOLS", - "CHAIN_NEW_POOLS", - "NEW_POOLS_BY_NETWORK", - "RECENT_POOLS", - "LATEST_POOLS", - ], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateCoingeckoConfig(runtime); - return true; - }, - description: - "Get list of newly created pools for a specific network from CoinGecko's onchain data", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - elizaLogger.log("Starting CoinGecko GET_NETWORK_NEW_POOLS handler..."); - - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - try { - elizaLogger.log("Composing network new pools context..."); - const newPoolsContext = composeContext({ - state: currentState, - template: getNetworkNewPoolsTemplate, - }); - - const result = await generateObject({ - runtime, - context: newPoolsContext, - modelClass: ModelClass.LARGE, - schema: GetNetworkNewPoolsSchema, - }); - - if (!isGetNetworkNewPoolsContent(result.object)) { - elizaLogger.error("Invalid network new pools request format"); - return false; - } - - // Fetch networks data first - const networks = await getNetworksData(runtime); - - // Fetch networks data first - const networksResponse = await getNetworksData(runtime); - - // Find the matching network from the data array - const network = networksResponse.find((n) => { - const searchTerm = ( - result.object as { networkId: string } - ).networkId.toLowerCase(); - return ( - n.id.toLowerCase() === searchTerm || - n.attributes.name.toLowerCase().includes(searchTerm) || - n.attributes.coingecko_asset_platform_id.toLowerCase() === - searchTerm - ); - }); - - if (!network) { - throw new Error( - `Network ${result.object.networkId} not found in available networks` - ); - } - - const config = await validateCoingeckoConfig(runtime); - const { baseUrl, apiKey, headerKey } = getApiConfig(config); - - elizaLogger.log( - `Fetching new pools data for network: ${network.id}` - ); - - const response = await axios.get( - `${baseUrl}/onchain/networks/${network.id}/new_pools?include=base_token,dex`, - { - headers: { - [headerKey]: apiKey, - }, - } - ); - - if (!response.data) { - throw new Error("No data received from CoinGecko API"); - } - - const formattedData = response.data.data - .slice(0, result.object.limit) - .map((pool) => ({ - name: pool.attributes.name, - marketCap: Number( - pool.attributes.market_cap_usd - ).toLocaleString("en-US", { - style: "currency", - currency: "USD", - }), - fdv: Number(pool.attributes.fdv_usd).toLocaleString( - "en-US", - { - style: "currency", - currency: "USD", - } - ), - reserveUSD: Number( - pool.attributes.reserve_in_usd - ).toLocaleString("en-US", { - style: "currency", - currency: "USD", - }), - createdAt: new Date( - pool.attributes.pool_created_at - ).toLocaleDateString(), - })); - - const responseText = [ - `New Pools Overview for ${network.attributes.name}:`, - "", - ...formattedData.map((pool, index) => - [ - `${index + 1}. ${pool.name}`, - ` Market Cap: ${pool.marketCap}`, - ` FDV: ${pool.fdv}`, - ` Reserve: ${pool.reserveUSD}`, - ` Created: ${pool.createdAt}`, - "", - ].join("\n") - ), - ].join("\n"); - - elizaLogger.success( - "Network new pools data retrieved successfully!" - ); - - if (callback) { - callback({ - text: responseText, - content: { - networkId: network.id, - networkName: network.attributes.name, - newPools: formattedData, - timestamp: new Date().toISOString(), - }, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error in GET_NETWORK_NEW_POOLS handler:", error); - - const errorMessage = - error.response?.status === 429 - ? "Rate limit exceeded. Please try again later." - : `Error fetching new pools data: ${error.message}`; - - if (callback) { - callback({ - text: errorMessage, - content: { - error: error.message, - statusCode: error.response?.status, - }, - }); - } - return false; - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Show me new liquidity pools on Ethereum", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll check the new Ethereum liquidity pools for you.", - action: "GET_NETWORK_NEW_POOLS", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are the new pools on ETHEREUM:\n1. PEPE / WETH\n Market Cap: $954,636,707\n FDV: $6,402,478,508\n Reserve: $363,641,037\n Created: 1/19/2025\n2. SUSHI / WETH\n Market Cap: $8,844,297,825\n FDV: $43,874,068,484\n Reserve: $718,413,745\n Created: 1/17/2025", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "What are the 5 latest pools on BSC?", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll fetch the 5 latest pools on BSC for you.", - action: "GET_NETWORK_NEW_POOLS", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are the 5 newest pools on BSC:\n1. CAKE / WBNB\n Market Cap: $954,636,707\n FDV: $6,402,478,508\n Reserve: $363,641,037\n Created: 1/19/2025", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "List all recent pools on Polygon", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll get all the recently added pools on Polygon for you.", - action: "GET_NETWORK_NEW_POOLS", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are all new pools on POLYGON:\n1. MATIC / USDC\n Market Cap: $954,636,707\n FDV: $6,402,478,508\n Reserve: $363,641,037\n Created: 1/19/2025", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-coingecko/src/actions/getNetworkTrendingPools.ts b/packages/plugin-coingecko/src/actions/getNetworkTrendingPools.ts deleted file mode 100644 index 42c2257fbf267..0000000000000 --- a/packages/plugin-coingecko/src/actions/getNetworkTrendingPools.ts +++ /dev/null @@ -1,295 +0,0 @@ -import { - type ActionExample, - composeContext, - type Content, - elizaLogger, - generateObject, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action, -} from "@elizaos/core"; -import axios from "axios"; -import { z } from "zod"; -import { getApiConfig, validateCoingeckoConfig } from "../environment"; -import { getNetworkTrendingPoolsTemplate } from "../templates/networkTrendingPools"; -import { getNetworksData } from "../providers/networkProvider"; - -interface TrendingPool { - id: string; - type: string; - attributes: { - name: string; - market_cap_usd: string; - fdv_usd: string; - reserve_in_usd: string; - pool_created_at: string; - }; -} - -interface TrendingPoolsResponse { - data: TrendingPool[]; -} - -export const GetNetworkTrendingPoolsSchema = z.object({ - networkId: z.string(), - limit: z.number().min(1).max(100).default(10), -}); - -export type GetNetworkTrendingPoolsContent = z.infer< - typeof GetNetworkTrendingPoolsSchema -> & - Content; - -export const isGetNetworkTrendingPoolsContent = ( - obj: unknown -): obj is GetNetworkTrendingPoolsContent => { - return GetNetworkTrendingPoolsSchema.safeParse(obj).success; -}; - -export default { - name: "GET_NETWORK_TRENDING_POOLS", - similes: [ - "NETWORK_TRENDING_POOLS", - "CHAIN_HOT_POOLS", - "BLOCKCHAIN_POPULAR_POOLS", - ], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateCoingeckoConfig(runtime); - return true; - }, - description: - "Get list of trending pools for a specific network from CoinGecko's onchain data", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - elizaLogger.log( - "Starting CoinGecko GET_NETWORK_TRENDING_POOLS handler..." - ); - - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - try { - elizaLogger.log("Composing network trending pools context..."); - const trendingContext = composeContext({ - state: currentState, - template: getNetworkTrendingPoolsTemplate, - }); - - const result = await generateObject({ - runtime, - context: trendingContext, - modelClass: ModelClass.LARGE, - schema: GetNetworkTrendingPoolsSchema, - }); - - if (!isGetNetworkTrendingPoolsContent(result.object)) { - elizaLogger.error( - "Invalid network trending pools request format" - ); - return false; - } - - // Fetch networks data first - const networks = await getNetworksData(runtime); - - // Find the matching network - const network = networks.find((n) => { - const searchTerm = ( - result.object as { networkId: string } - ).networkId.toLowerCase(); - return ( - n.id.toLowerCase() === searchTerm || - n.attributes.name.toLowerCase().includes(searchTerm) || - n.attributes.coingecko_asset_platform_id.toLowerCase() === - searchTerm - ); - }); - - if (!network) { - throw new Error( - `Network ${result.object.networkId} not found in available networks` - ); - } - - const config = await validateCoingeckoConfig(runtime); - const { baseUrl, apiKey, headerKey } = getApiConfig(config); - - elizaLogger.log( - `Fetching trending pools data for network: ${network.id}` - ); - - const response = await axios.get( - `${baseUrl}/onchain/networks/${network.id}/trending_pools?include=base_token,dex`, - { - headers: { - [headerKey]: apiKey, - }, - } - ); - - if (!response.data) { - throw new Error("No data received from CoinGecko API"); - } - - const formattedData = response.data.data - .slice(0, result.object.limit) - .map((pool) => ({ - name: pool.attributes.name, - marketCap: Number( - pool.attributes.market_cap_usd - ).toLocaleString("en-US", { - style: "currency", - currency: "USD", - }), - fdv: Number(pool.attributes.fdv_usd).toLocaleString( - "en-US", - { - style: "currency", - currency: "USD", - } - ), - reserveUSD: Number( - pool.attributes.reserve_in_usd - ).toLocaleString("en-US", { - style: "currency", - currency: "USD", - }), - createdAt: new Date( - pool.attributes.pool_created_at - ).toLocaleDateString(), - })); - - const responseText = [ - `Trending Pools Overview for ${network.attributes.name}:`, - "", - ...formattedData.map((pool, index) => - [ - `${index + 1}. ${pool.name}`, - ` Market Cap: ${pool.marketCap}`, - ` FDV: ${pool.fdv}`, - ` Reserve: ${pool.reserveUSD}`, - ` Created: ${pool.createdAt}`, - "", - ].join("\n") - ), - ].join("\n"); - - elizaLogger.success( - "Network trending pools data retrieved successfully!" - ); - - if (callback) { - callback({ - text: responseText, - content: { - networkId: network.id, - networkName: network.attributes.name, - trendingPools: formattedData, - timestamp: new Date().toISOString(), - }, - }); - } - - return true; - } catch (error) { - elizaLogger.error( - "Error in GET_NETWORK_TRENDING_POOLS handler:", - error - ); - - const errorMessage = - error.response?.status === 429 - ? "Rate limit exceeded. Please try again later." - : `Error fetching trending pools data: ${error.message}`; - - if (callback) { - callback({ - text: errorMessage, - content: { - error: error.message, - statusCode: error.response?.status, - }, - }); - } - return false; - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Show me trending liquidity pools on Solana", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll check the trending Solana liquidity pools for you.", - action: "GET_NETWORK_TRENDING_POOLS", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are the trending pools on SOLANA:\n1. MELANIA / USDC\n Market Cap: $954,636,707\n FDV: $6,402,478,508\n Reserve: $363,641,037\n Created: 1/19/2025\n2. TRUMP / USDC\n Market Cap: $8,844,297,825\n FDV: $43,874,068,484\n Reserve: $718,413,745\n Created: 1/17/2025", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "What are the top 5 hottest pools on Ethereum?", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll fetch the top 5 hottest pools on Ethereum for you.", - action: "GET_NETWORK_TRENDING_POOLS", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are the top 5 trending pools on ETHEREUM:\n1. PEPE / WETH\n Market Cap: $954,636,707\n FDV: $6,402,478,508\n Reserve: $363,641,037\n Created: 1/19/2025", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "List all BSC pools with highest volume", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll get all the trending pools on BSC for you.", - action: "GET_NETWORK_TRENDING_POOLS", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are all trending pools on BSC:\n1. CAKE / WBNB\n Market Cap: $954,636,707\n FDV: $6,402,478,508\n Reserve: $363,641,037\n Created: 1/19/2025", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-coingecko/src/actions/getNewlyListed.ts b/packages/plugin-coingecko/src/actions/getNewlyListed.ts deleted file mode 100644 index 34a9cc67958f4..0000000000000 --- a/packages/plugin-coingecko/src/actions/getNewlyListed.ts +++ /dev/null @@ -1,185 +0,0 @@ -import { - type ActionExample, - composeContext, - type Content, - elizaLogger, - generateObject, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action -} from "@elizaos/core"; -import axios from "axios"; -import { z } from "zod"; -import { getApiConfig, validateCoingeckoConfig } from "../environment"; -import { getNewCoinsTemplate } from "../templates/newCoins"; - -interface NewCoin { - id: string; - symbol: string; - name: string; - activated_at: number; -} - -interface NewCoinsResponse extends Array {} - -export const GetNewCoinsSchema = z.object({ - limit: z.number().min(1).max(50).default(10) -}); - -export type GetNewCoinsContent = z.infer & Content; - -export const isGetNewCoinsContent = (obj: unknown): obj is GetNewCoinsContent => { - return GetNewCoinsSchema.safeParse(obj).success; -}; - -export default { - name: "GET_NEW_COINS", - similes: [ - "NEW_COINS", - "RECENTLY_ADDED", - "NEW_LISTINGS", - "LATEST_COINS", - ], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateCoingeckoConfig(runtime); - return true; - }, - description: "Get list of recently added coins from CoinGecko", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - elizaLogger.log("Starting CoinGecko GET_NEW_COINS handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - - try { - elizaLogger.log("Composing new coins context..."); - const newCoinsContext = composeContext({ - state: currentState, - template: getNewCoinsTemplate, - }); - - const result = await generateObject({ - runtime, - context: newCoinsContext, - modelClass: ModelClass.LARGE, - schema: GetNewCoinsSchema - }); - - if (!isGetNewCoinsContent(result.object)) { - elizaLogger.error("Invalid new coins request format"); - return false; - } - - // Fetch new coins data from CoinGecko - const config = await validateCoingeckoConfig(runtime); - const { baseUrl, apiKey, headerKey } = getApiConfig(config); - - elizaLogger.log("Fetching new coins data..."); - - const response = await axios.get( - `${baseUrl}/coins/list/new`, - { - headers: { - [headerKey]: apiKey - } - } - ); - - if (!response.data) { - throw new Error("No data received from CoinGecko API"); - } - - const formattedData = response.data - .slice(0, result.object.limit) - .map(coin => ({ - name: coin.name, - symbol: coin.symbol.toUpperCase(), - activatedAt: new Date(coin.activated_at * 1000).toLocaleString('en-US', { - year: 'numeric', - month: 'long', - day: 'numeric', - hour: '2-digit', - minute: '2-digit' - }) - })); - - const responseText = [ - 'Recently Added Coins:', - '', - ...formattedData.map((coin, index) => - `${index + 1}. ${coin.name} (${coin.symbol})\n Listed: ${coin.activatedAt}` - ) - ].join('\n'); - - elizaLogger.success("New coins data retrieved successfully!"); - - if (callback) { - callback({ - text: responseText, - content: { - newCoins: formattedData, - timestamp: new Date().toISOString() - } - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error in GET_NEW_COINS handler:", error); - - const errorMessage = error.response?.status === 429 ? - "Rate limit exceeded. Please try again later." : - `Error fetching new coins data: ${error.message}`; - - if (callback) { - callback({ - text: errorMessage, - content: { - error: error.message, - statusCode: error.response?.status - }, - }); - } - return false; - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "What are the newest coins listed?", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll check the recently added coins for you.", - action: "GET_NEW_COINS", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are the recently added coins:\n1. Verb Ai (VERB)\n Listed: January 20, 2025, 12:31 PM\n{{dynamic}}", - }, - }, - ], - ] as ActionExample[][], -} as Action; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/actions/getPrice.ts b/packages/plugin-coingecko/src/actions/getPrice.ts deleted file mode 100644 index 50b5e6d5eacf2..0000000000000 --- a/packages/plugin-coingecko/src/actions/getPrice.ts +++ /dev/null @@ -1,333 +0,0 @@ -import { - type ActionExample, - composeContext, - type Content, - elizaLogger, - generateObject, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action, -} from "@elizaos/core"; -import axios from "axios"; -import { z } from "zod"; -import { getApiConfig, validateCoingeckoConfig } from "../environment"; -import { getCoinsData } from "../providers/coinsProvider"; -import { getPriceTemplate } from "../templates/price"; - -interface CurrencyData { - [key: string]: number; - usd?: number; - eur?: number; - usd_market_cap?: number; - eur_market_cap?: number; - usd_24h_vol?: number; - eur_24h_vol?: number; - usd_24h_change?: number; - eur_24h_change?: number; - last_updated_at?: number; -} - -interface PriceResponse { - [coinId: string]: CurrencyData; -} - -export const GetPriceSchema = z.object({ - coinIds: z.union([z.string(), z.array(z.string())]), - currency: z.union([z.string(), z.array(z.string())]).default(["usd"]), - include_market_cap: z.boolean().default(false), - include_24hr_vol: z.boolean().default(false), - include_24hr_change: z.boolean().default(false), - include_last_updated_at: z.boolean().default(false) -}); - -export type GetPriceContent = z.infer & Content; - -export const isGetPriceContent = (obj: unknown): obj is GetPriceContent => { - return GetPriceSchema.safeParse(obj).success; -}; - -function formatCoinIds(input: string | string[]): string { - if (Array.isArray(input)) { - return input.join(','); - } - return input; -} - -export default { - name: "GET_PRICE", - similes: [ - "COIN_PRICE_CHECK", - "SPECIFIC_COINS_PRICE", - "COIN_PRICE_LOOKUP", - "SELECTED_COINS_PRICE", - "PRICE_DETAILS", - "COIN_PRICE_DATA" - ], - // eslint-disable-next-line - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateCoingeckoConfig(runtime); - return true; - }, - description: "Get price and basic market data for one or more specific cryptocurrencies (by name/symbol)", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - elizaLogger.log("Starting CoinGecko GET_PRICE handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - - try { - elizaLogger.log("Composing price context..."); - const priceContext = composeContext({ - state: currentState, - template: getPriceTemplate, - }); - - elizaLogger.log("Generating content from template..."); - const result = await generateObject({ - runtime, - context: priceContext, - modelClass: ModelClass.LARGE, - schema: GetPriceSchema - }); - - if (!isGetPriceContent(result.object)) { - elizaLogger.error("Invalid price request format"); - return false; - } - - const content = result.object; - elizaLogger.log("Generated content:", content); - - // Format currencies for API request - const currencies = Array.isArray(content.currency) ? content.currency : [content.currency]; - const vs_currencies = currencies.join(',').toLowerCase(); - - // Format coin IDs for API request - const coinIds = formatCoinIds(content.coinIds); - - elizaLogger.log("Formatted request parameters:", { coinIds, vs_currencies }); - - // Fetch price from CoinGecko - const config = await validateCoingeckoConfig(runtime); - const { baseUrl, apiKey, headerKey } = getApiConfig(config); - - elizaLogger.log(`Fetching prices for ${coinIds} in ${vs_currencies}...`); - elizaLogger.log("API request URL:", `${baseUrl}/simple/price`); - elizaLogger.log("API request params:", { - ids: coinIds, - vs_currencies, - include_market_cap: content.include_market_cap, - include_24hr_vol: content.include_24hr_vol, - include_24hr_change: content.include_24hr_change, - include_last_updated_at: content.include_last_updated_at - }); - - const response = await axios.get( - `${baseUrl}/simple/price`, - { - params: { - ids: coinIds, - vs_currencies, - include_market_cap: content.include_market_cap, - include_24hr_vol: content.include_24hr_vol, - include_24hr_change: content.include_24hr_change, - include_last_updated_at: content.include_last_updated_at - }, - headers: { - 'accept': 'application/json', - [headerKey]: apiKey - } - } - ); - - if (Object.keys(response.data).length === 0) { - throw new Error("No price data available for the specified coins and currency"); - } - - // Get coins data for formatting - const coins = await getCoinsData(runtime); - - // Format response text for each coin - const formattedResponse = Object.entries(response.data).map(([coinId, data]) => { - const coin = coins.find(c => c.id === coinId); - const coinName = coin ? `${coin.name} (${coin.symbol.toUpperCase()})` : coinId; - const parts = [`${coinName}:`]; - - // Add price for each requested currency - for (const currency of currencies) { - const upperCurrency = currency.toUpperCase(); - if (data[currency]) { - parts.push(` ${upperCurrency}: ${data[currency].toLocaleString(undefined, { - style: 'currency', - currency: currency - })}`); - } - - // Add market cap if requested and available - if (content.include_market_cap) { - const marketCap = data[`${currency}_market_cap`]; - if (marketCap !== undefined) { - parts.push(` Market Cap (${upperCurrency}): ${marketCap.toLocaleString(undefined, { - style: 'currency', - currency: currency, - maximumFractionDigits: 0 - })}`); - } - } - - // Add 24h volume if requested and available - if (content.include_24hr_vol) { - const volume = data[`${currency}_24h_vol`]; - if (volume !== undefined) { - parts.push(` 24h Volume (${upperCurrency}): ${volume.toLocaleString(undefined, { - style: 'currency', - currency: currency, - maximumFractionDigits: 0 - })}`); - } - } - - // Add 24h change if requested and available - if (content.include_24hr_change) { - const change = data[`${currency}_24h_change`]; - if (change !== undefined) { - const changePrefix = change >= 0 ? '+' : ''; - parts.push(` 24h Change (${upperCurrency}): ${changePrefix}${change.toFixed(2)}%`); - } - } - } - - // Add last updated if requested - if (content.include_last_updated_at && data.last_updated_at) { - const lastUpdated = new Date(data.last_updated_at * 1000).toLocaleString(); - parts.push(` Last Updated: ${lastUpdated}`); - } - - return parts.join('\n'); - }).filter(Boolean); - - if (formattedResponse.length === 0) { - throw new Error("Failed to format price data for the specified coins"); - } - - const responseText = formattedResponse.join('\n\n'); - elizaLogger.success("Price data retrieved successfully!"); - - if (callback) { - callback({ - text: responseText, - content: { - prices: Object.entries(response.data).reduce((acc, [coinId, data]) => { - const coinPrices = currencies.reduce((currencyAcc, currency) => { - const currencyData = { - price: data[currency], - marketCap: data[`${currency}_market_cap`], - volume24h: data[`${currency}_24h_vol`], - change24h: data[`${currency}_24h_change`], - lastUpdated: data.last_updated_at, - }; - Object.assign(currencyAcc, { [currency]: currencyData }); - return currencyAcc; - }, {}); - Object.assign(acc, { [coinId]: coinPrices }); - return acc; - }, {}), - params: { - currencies: currencies.map(c => c.toUpperCase()), - include_market_cap: content.include_market_cap, - include_24hr_vol: content.include_24hr_vol, - include_24hr_change: content.include_24hr_change, - include_last_updated_at: content.include_last_updated_at - } - } - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error in GET_PRICE handler:", error); - - let errorMessage: string; - if (error.response?.status === 429) { - errorMessage = "Rate limit exceeded. Please try again later."; - } else if (error.response?.status === 403) { - errorMessage = "This endpoint requires a CoinGecko Pro API key. Please upgrade your plan to access this data."; - } else if (error.response?.status === 400) { - errorMessage = "Invalid request parameters. Please check your input."; - } - - if (callback) { - callback({ - text: errorMessage, - content: { - error: error.message, - statusCode: error.response?.status, - params: error.config?.params, - requiresProPlan: error.response?.status === 403 - }, - }); - } - return false; - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "What's the current price of Bitcoin?", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll check the current Bitcoin price for you.", - action: "GET_PRICE", - }, - }, - { - user: "{{agent}}", - content: { - text: "The current price of Bitcoin is {{dynamic}} USD", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Check ETH and BTC prices in EUR with market cap", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll check the current prices with market cap data.", - action: "GET_PRICE", - }, - }, - { - user: "{{agent}}", - content: { - text: "Bitcoin: EUR {{dynamic}} | Market Cap: €{{dynamic}}\nEthereum: EUR {{dynamic}} | Market Cap: €{{dynamic}}", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-coingecko/src/actions/getPricePerAddress.ts b/packages/plugin-coingecko/src/actions/getPricePerAddress.ts deleted file mode 100644 index 967b66f137ebb..0000000000000 --- a/packages/plugin-coingecko/src/actions/getPricePerAddress.ts +++ /dev/null @@ -1,217 +0,0 @@ -import { - type ActionExample, - composeContext, - type Content, - elizaLogger, - generateObject, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action, -} from "@elizaos/core"; -import axios from "axios"; -import { z } from "zod"; -import { getApiConfig, validateCoingeckoConfig } from "../environment"; -import { getPriceByAddressTemplate } from "../templates/priceAddress"; - -// Schema definition for the token price request -export const GetTokenPriceSchema = z.object({ - chainId: z.string(), - tokenAddress: z.string(), -}); - -export type GetTokenPriceContent = z.infer & - Content; - -export const isGetTokenPriceContent = ( - obj: unknown -): obj is GetTokenPriceContent => { - return GetTokenPriceSchema.safeParse(obj).success; -}; - -interface TokenResponse { - id: string; - symbol: string; - name: string; - market_data: { - current_price: { - usd: number; - }; - market_cap: { - usd: number; - }; - }; -} - -export default { - name: "GET_TOKEN_PRICE_BY_ADDRESS", - similes: [ - "FETCH_TOKEN_PRICE_BY_ADDRESS", - "CHECK_TOKEN_PRICE_BY_ADDRESS", - "LOOKUP_TOKEN_BY_ADDRESS", - ], - // eslint-disable-next-line - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateCoingeckoConfig(runtime); - return true; - }, - description: - "Get the current USD price for a token using its blockchain address", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - elizaLogger.log("Starting GET_TOKEN_PRICE_BY_ADDRESS handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - - try { - elizaLogger.log("Composing token price context..."); - const context = composeContext({ - state: currentState, - template: getPriceByAddressTemplate, - }); - - elizaLogger.log("Generating content from template..."); - const result = await generateObject({ - runtime, - context, - modelClass: ModelClass.SMALL, - schema: GetTokenPriceSchema, - }); - - if (!isGetTokenPriceContent(result.object)) { - elizaLogger.error("Invalid token price request format"); - return false; - } - - const content = result.object; - elizaLogger.log("Generated content:", content); - - // Get API configuration - const config = await validateCoingeckoConfig(runtime); - const { baseUrl, apiKey, headerKey } = getApiConfig(config); - - // Fetch token data - elizaLogger.log("Fetching token data..."); - const response = await axios.get( - `${baseUrl}/coins/${content.chainId}/contract/${content.tokenAddress}`, - { - headers: { - accept: "application/json", - [headerKey]: apiKey, - }, - } - ); - - const tokenData = response.data; - if (!tokenData.market_data?.current_price?.usd) { - throw new Error( - `No price data available for token ${content.tokenAddress} on ${content.chainId}` - ); - } - - // Format response - const parts = [ - `${tokenData.name} (${tokenData.symbol.toUpperCase()})`, - `Address: ${content.tokenAddress}`, - `Chain: ${content.chainId}`, - `Price: $${tokenData.market_data.current_price.usd.toFixed(6)} USD`, - ]; - - if (tokenData.market_data.market_cap?.usd) { - parts.push( - `Market Cap: $${tokenData.market_data.market_cap.usd.toLocaleString()} USD` - ); - } - - const responseText = parts.join("\n"); - elizaLogger.success("Token price data retrieved successfully!"); - - if (callback) { - callback({ - text: responseText, - content: { - token: { - name: tokenData.name, - symbol: tokenData.symbol, - address: content.tokenAddress, - chain: content.chainId, - price: tokenData.market_data.current_price.usd, - marketCap: tokenData.market_data.market_cap?.usd, - }, - }, - }); - } - - return true; - } catch (error) { - elizaLogger.error( - "Error in GET_TOKEN_PRICE_BY_ADDRESS handler:", - error - ); - - let errorMessage: string; - if (error.response?.status === 429) { - errorMessage = "Rate limit exceeded. Please try again later."; - } else if (error.response?.status === 403) { - errorMessage = - "This endpoint requires a CoinGecko Pro API key. Please upgrade your plan to access this data."; - } else if (error.response?.status === 400) { - errorMessage = - "Invalid request parameters. Please check your input."; - } else { - errorMessage = - "Failed to fetch token price. Please try again later."; - } - - if (callback) { - callback({ - text: errorMessage, - content: { - error: error.message, - statusCode: error.response?.status, - requiresProPlan: error.response?.status === 403, - }, - }); - } - return false; - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "What's the price of the USDC token on Ethereum? The address is 0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll check the USDC token price for you.", - action: "GET_TOKEN_PRICE_BY_ADDRESS", - }, - }, - { - user: "{{agent}}", - content: { - text: "USD Coin (USDC)\nAddress: 0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48\nChain: ethereum\nPrice: {{dynamic}} USD\nMarket Cap: ${{dynamic}} USD", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-coingecko/src/actions/getTopGainersLosers.ts b/packages/plugin-coingecko/src/actions/getTopGainersLosers.ts deleted file mode 100644 index 013da0f17e970..0000000000000 --- a/packages/plugin-coingecko/src/actions/getTopGainersLosers.ts +++ /dev/null @@ -1,253 +0,0 @@ -import { - type ActionExample, - composeContext, - type Content, - elizaLogger, - generateObject, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action -} from "@elizaos/core"; -import axios from "axios"; -import { z } from "zod"; -import { getApiConfig, validateCoingeckoConfig } from "../environment"; -import { getTopGainersLosersTemplate } from "../templates/gainersLosers"; - -interface TopGainerLoserItem { - id: string; - symbol: string; - name: string; - image: string; - market_cap_rank: number; - usd: number; - usd_24h_vol: number; - usd_1h_change?: number; - usd_24h_change?: number; - usd_7d_change?: number; - usd_14d_change?: number; - usd_30d_change?: number; - usd_60d_change?: number; - usd_1y_change?: number; -} - -interface TopGainersLosersResponse { - top_gainers: TopGainerLoserItem[]; - top_losers: TopGainerLoserItem[]; -} - -const DurationEnum = z.enum(["1h", "24h", "7d", "14d", "30d", "60d", "1y"]); -//type Duration = z.infer; - -export const GetTopGainersLosersSchema = z.object({ - vs_currency: z.string().default("usd"), - duration: DurationEnum.default("24h"), - top_coins: z.string().default("1000") -}); - -export type GetTopGainersLosersContent = z.infer & Content; - -export const isGetTopGainersLosersContent = (obj: unknown): obj is GetTopGainersLosersContent => { - return GetTopGainersLosersSchema.safeParse(obj).success; -}; - -export default { - name: "GET_TOP_GAINERS_LOSERS", - similes: [ - "TOP_MOVERS", - "BIGGEST_GAINERS", - "BIGGEST_LOSERS", - "PRICE_CHANGES", - "BEST_WORST_PERFORMERS", - ], - // eslint-disable-next-line - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateCoingeckoConfig(runtime); - return true; - }, - description: "Get list of top gaining and losing cryptocurrencies by price change", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - elizaLogger.log("Starting CoinGecko GET_TOP_GAINERS_LOSERS handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - - try { - elizaLogger.log("Composing gainers/losers context..."); - const context = composeContext({ - state: currentState, - template: getTopGainersLosersTemplate, - }); - - elizaLogger.log("Generating content from template..."); - const result = await generateObject({ - runtime, - context, - modelClass: ModelClass.LARGE, - schema: GetTopGainersLosersSchema - }); - - if (!isGetTopGainersLosersContent(result.object)) { - elizaLogger.error("Invalid gainers/losers request format"); - return false; - } - - const content = result.object; - elizaLogger.log("Generated content:", content); - - // Fetch data from CoinGecko - const config = await validateCoingeckoConfig(runtime); - const { baseUrl, apiKey, headerKey } = getApiConfig(config); - - elizaLogger.log("Fetching top gainers/losers data..."); - elizaLogger.log("API request params:", { - vs_currency: content.vs_currency, - duration: content.duration, - top_coins: content.top_coins - }); - - const response = await axios.get( - `${baseUrl}/coins/top_gainers_losers`, - { - headers: { - 'accept': 'application/json', - [headerKey]: apiKey - }, - params: { - vs_currency: content.vs_currency, - duration: content.duration, - top_coins: content.top_coins - } - } - ); - - if (!response.data) { - throw new Error("No data received from CoinGecko API"); - } - - // Format the response text - const responseText = [ - 'Top Gainers:', - ...response.data.top_gainers.map((coin, index) => { - const changeKey = `usd_${content.duration}_change` as keyof TopGainerLoserItem; - const change = coin[changeKey] as number; - return `${index + 1}. ${coin.name} (${coin.symbol.toUpperCase()})` + - ` | $${coin.usd.toLocaleString(undefined, { minimumFractionDigits: 2, maximumFractionDigits: 8 })}` + - ` | ${change >= 0 ? '+' : ''}${change.toFixed(2)}%` + - `${coin.market_cap_rank ? ` | Rank #${coin.market_cap_rank}` : ''}`; - }), - '', - 'Top Losers:', - ...response.data.top_losers.map((coin, index) => { - const changeKey = `usd_${content.duration}_change` as keyof TopGainerLoserItem; - const change = coin[changeKey] as number; - return `${index + 1}. ${coin.name} (${coin.symbol.toUpperCase()})` + - ` | $${coin.usd.toLocaleString(undefined, { minimumFractionDigits: 2, maximumFractionDigits: 8 })}` + - ` | ${change >= 0 ? '+' : ''}${change.toFixed(2)}%` + - `${coin.market_cap_rank ? ` | Rank #${coin.market_cap_rank}` : ''}`; - }) - ].join('\n'); - - if (callback) { - callback({ - text: responseText, - content: { - data: response.data, - params: { - vs_currency: content.vs_currency, - duration: content.duration, - top_coins: content.top_coins - } - } - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error in GET_TOP_GAINERS_LOSERS handler:", error); - - let errorMessage: string; - if (error.response?.status === 429) { - errorMessage = "Rate limit exceeded. Please try again later."; - } else if (error.response?.status === 403) { - errorMessage = "This endpoint requires a CoinGecko Pro API key. Please upgrade your plan to access this data."; - } else if (error.response?.status === 400) { - errorMessage = "Invalid request parameters. Please check your input."; - } else { - errorMessage = `Error fetching top gainers/losers data: ${error.message}`; - } - - if (callback) { - callback({ - text: errorMessage, - content: { - error: error.message, - statusCode: error.response?.status, - params: error.config?.params, - requiresProPlan: error.response?.status === 403 - }, - }); - } - return false; - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "What are the top gaining and losing cryptocurrencies?", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll check the top gainers and losers for you.", - action: "GET_TOP_GAINERS_LOSERS", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are the top gainers and losers:\nTop Gainers:\n1. Bitcoin (BTC) | $45,000 | +5.2% | Rank #1\n{{dynamic}}", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Show me the best and worst performing crypto today", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll fetch the current top movers in the crypto market.", - action: "GET_TOP_GAINERS_LOSERS", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are today's best and worst performers:\n{{dynamic}}", - }, - }, - ], - ] as ActionExample[][], -} as Action; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/actions/getTrending.ts b/packages/plugin-coingecko/src/actions/getTrending.ts deleted file mode 100644 index 0fae95d35d59c..0000000000000 --- a/packages/plugin-coingecko/src/actions/getTrending.ts +++ /dev/null @@ -1,256 +0,0 @@ -import { - type ActionExample, - composeContext, - type Content, - elizaLogger, - generateObject, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action -} from "@elizaos/core"; -import axios from "axios"; -import { z } from "zod"; -import { getApiConfig, validateCoingeckoConfig } from "../environment"; -import { getTrendingTemplate } from "../templates/trending"; - -interface TrendingCoinItem { - id: string; - name: string; - api_symbol: string; - symbol: string; - market_cap_rank: number; - thumb: string; - large: string; -} - -interface TrendingExchange { - id: string; - name: string; - market_type: string; - thumb: string; - large: string; -} - -interface TrendingCategory { - id: string; - name: string; -} - -interface TrendingNFT { - id: string; - name: string; - symbol: string; - thumb: string; -} - -interface TrendingResponse { - coins: Array<{ item: TrendingCoinItem }>; - exchanges: TrendingExchange[]; - categories: TrendingCategory[]; - nfts: TrendingNFT[]; - icos: string[]; -} - -export const GetTrendingSchema = z.object({ - include_nfts: z.boolean().default(true), - include_categories: z.boolean().default(true) -}); - -export type GetTrendingContent = z.infer & Content; - -export const isGetTrendingContent = (obj: unknown): obj is GetTrendingContent => { - return GetTrendingSchema.safeParse(obj).success; -}; - -export default { - name: "GET_TRENDING", - similes: [ - "TRENDING_COINS", - "TRENDING_CRYPTO", - "HOT_COINS", - "POPULAR_COINS", - "TRENDING_SEARCH", - ], - // eslint-disable-next-line - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateCoingeckoConfig(runtime); - return true; - }, - description: "Get list of trending cryptocurrencies, NFTs, and categories from CoinGecko", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - elizaLogger.log("Starting CoinGecko GET_TRENDING handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - - try { - // Compose trending context - elizaLogger.log("Composing trending context..."); - const trendingContext = composeContext({ - state: currentState, - template: getTrendingTemplate, - }); - - const result = await generateObject({ - runtime, - context: trendingContext, - modelClass: ModelClass.LARGE, - schema: GetTrendingSchema - }); - - if (!isGetTrendingContent(result.object)) { - elizaLogger.error("Invalid trending request format"); - return false; - } - - // Fetch trending data from CoinGecko - const config = await validateCoingeckoConfig(runtime); - const { baseUrl, apiKey, headerKey } = getApiConfig(config); - - elizaLogger.log("Fetching trending data..."); - - const response = await axios.get( - `${baseUrl}/search/trending`, - { - headers: { - [headerKey]: apiKey - } - } - ); - - if (!response.data) { - throw new Error("No data received from CoinGecko API"); - } - - const formattedData = { - coins: response.data.coins.map(({ item }) => ({ - name: item.name, - symbol: item.symbol.toUpperCase(), - marketCapRank: item.market_cap_rank, - id: item.id, - thumbnail: item.thumb, - largeImage: item.large - })), - nfts: response.data.nfts.map(nft => ({ - name: nft.name, - symbol: nft.symbol, - id: nft.id, - thumbnail: nft.thumb - })), - categories: response.data.categories.map(category => ({ - name: category.name, - id: category.id - })) - }; - - const responseText = [ - 'Trending Coins:', - ...formattedData.coins.map((coin, index) => - `${index + 1}. ${coin.name} (${coin.symbol})${coin.marketCapRank ? ` - Rank #${coin.marketCapRank}` : ''}` - ), - '', - 'Trending NFTs:', - ...(formattedData.nfts.length ? - formattedData.nfts.map((nft, index) => `${index + 1}. ${nft.name} (${nft.symbol})`) : - ['No trending NFTs available']), - '', - 'Trending Categories:', - ...(formattedData.categories.length ? - formattedData.categories.map((category, index) => `${index + 1}. ${category.name}`) : - ['No trending categories available']) - ].join('\n'); - - elizaLogger.success("Trending data retrieved successfully!"); - - if (callback) { - callback({ - text: responseText, - content: { - trending: formattedData, - timestamp: new Date().toISOString() - } - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error in GET_TRENDING handler:", error); - - // Enhanced error handling - const errorMessage = error.response?.status === 429 ? - "Rate limit exceeded. Please try again later." : - `Error fetching trending data: ${error.message}`; - - if (callback) { - callback({ - text: errorMessage, - content: { - error: error.message, - statusCode: error.response?.status - }, - }); - } - return false; - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "What are the trending cryptocurrencies?", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll check the trending cryptocurrencies for you.", - action: "GET_TRENDING", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are the trending cryptocurrencies:\n1. Bitcoin (BTC) - Rank #1\n2. Ethereum (ETH) - Rank #2\n{{dynamic}}", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Show me what's hot in crypto right now", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll fetch the current trending cryptocurrencies.", - action: "GET_TRENDING", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are the trending cryptocurrencies:\n{{dynamic}}", - }, - }, - ], - ] as ActionExample[][], -} as Action; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/actions/getTrendingPools.ts b/packages/plugin-coingecko/src/actions/getTrendingPools.ts deleted file mode 100644 index 7163478c2c268..0000000000000 --- a/packages/plugin-coingecko/src/actions/getTrendingPools.ts +++ /dev/null @@ -1,249 +0,0 @@ -import { - type ActionExample, - composeContext, - type Content, - elizaLogger, - generateObject, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action, -} from "@elizaos/core"; -import axios from "axios"; -import { z } from "zod"; -import { getApiConfig, validateCoingeckoConfig } from "../environment"; -import { getTrendingPoolsTemplate } from "../templates/trendingPools"; - -interface TrendingPool { - id: string; - type: string; - attributes: { - name: string; - market_cap_usd: string; - fdv_usd: string; - reserve_in_usd: string; - pool_created_at: string; - }; -} - -interface TrendingPoolsResponse { - data: TrendingPool[]; -} - -export const GetTrendingPoolsSchema = z.object({ - limit: z.number().min(1).max(100).default(10), -}); - -export type GetTrendingPoolsContent = z.infer & - Content; - -export const isGetTrendingPoolsContent = ( - obj: unknown, -): obj is GetTrendingPoolsContent => { - return GetTrendingPoolsSchema.safeParse(obj).success; -}; - -export default { - name: "GET_TRENDING_POOLS", - similes: ["TRENDING_POOLS", "HOT_POOLS", "POPULAR_POOLS", "TOP_POOLS"], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateCoingeckoConfig(runtime); - return true; - }, - description: "Get list of trending pools from CoinGecko's onchain data", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback, - ): Promise => { - elizaLogger.log("Starting CoinGecko GET_TRENDING_POOLS handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - - try { - elizaLogger.log("Composing trending pools context..."); - const trendingContext = composeContext({ - state: currentState, - template: getTrendingPoolsTemplate, - }); - - const result = await generateObject({ - runtime, - context: trendingContext, - modelClass: ModelClass.LARGE, - schema: GetTrendingPoolsSchema, - }); - - if (!isGetTrendingPoolsContent(result.object)) { - elizaLogger.error("Invalid trending pools request format"); - return false; - } - - // Fetch trending pools data from CoinGecko - const config = await validateCoingeckoConfig(runtime); - const { baseUrl, apiKey, headerKey } = getApiConfig(config); - - elizaLogger.log("Fetching trending pools data..."); - - const response = await axios.get( - `${baseUrl}/onchain/networks/trending_pools?include=base_token,dex`, - { - headers: { - [headerKey]: apiKey, - }, - }, - ); - - if (!response.data) { - throw new Error("No data received from CoinGecko API"); - } - - const formattedData = response.data.data.map((pool) => ({ - name: pool.attributes.name, - marketCap: Number( - pool.attributes.market_cap_usd, - ).toLocaleString("en-US", { - style: "currency", - currency: "USD", - }), - fdv: Number(pool.attributes.fdv_usd).toLocaleString("en-US", { - style: "currency", - currency: "USD", - }), - reserveUSD: Number( - pool.attributes.reserve_in_usd, - ).toLocaleString("en-US", { - style: "currency", - currency: "USD", - }), - createdAt: new Date( - pool.attributes.pool_created_at, - ).toLocaleDateString(), - })); - - const responseText = [ - "Trending Pools Overview:", - "", - ...formattedData.map((pool, index) => - [ - `${index + 1}. ${pool.name}`, - ` Market Cap: ${pool.marketCap}`, - ` FDV: ${pool.fdv}`, - ` Reserve: ${pool.reserveUSD}`, - ` Created: ${pool.createdAt}`, - "", - ].join("\n"), - ), - ].join("\n"); - - elizaLogger.success("Trending pools data retrieved successfully!"); - - if (callback) { - callback({ - text: responseText, - content: { - trendingPools: formattedData, - timestamp: new Date().toISOString(), - }, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error in GET_TRENDING_POOLS handler:", error); - - const errorMessage = - error.response?.status === 429 - ? "Rate limit exceeded. Please try again later." - : `Error fetching trending pools data: ${error.message}`; - - if (callback) { - callback({ - text: errorMessage, - content: { - error: error.message, - statusCode: error.response?.status, - }, - }); - } - return false; - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Show me trending liquidity pools", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll check the trending liquidity pools for you.", - action: "GET_TRENDING_POOLS", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are the trending liquidity pools:\n1. MELANIA / USDC\n Market Cap: $954,636,707\n FDV: $6,402,478,508\n Reserve: $363,641,037\n Created: 1/19/2025\n2. TRUMP / USDC\n Market Cap: $8,844,297,825\n FDV: $43,874,068,484\n Reserve: $718,413,745\n Created: 1/17/2025", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "What are the top hottest dex pools?", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll fetch the top hottest DEX pools for you.", - action: "GET_TRENDING_POOLS", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are the top 5 hottest DEX pools:\n1. TRUMP / USDC\n Market Cap: $8,844,297,825\n FDV: $43,874,068,484\n Reserve: $718,413,745\n Created: 1/17/2025\n2. MELANIA / USDC\n Market Cap: $954,636,707\n FDV: $6,402,478,508\n Reserve: $363,641,037\n Created: 1/19/2025", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "List all trading pools with highest volume", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll get all the trending trading pools for you.", - action: "GET_TRENDING_POOLS", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here are all trending trading pools:\n1. MELANIA / USDC\n Market Cap: $954,636,707\n FDV: $6,402,478,508\n Reserve: $363,641,037\n Created: 1/19/2025\n2. TRUMP / USDC\n Market Cap: $8,844,297,825\n FDV: $43,874,068,484\n Reserve: $718,413,745\n Created: 1/17/2025", - }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-coingecko/src/constants.ts b/packages/plugin-coingecko/src/constants.ts deleted file mode 100644 index 7da5d7014164f..0000000000000 --- a/packages/plugin-coingecko/src/constants.ts +++ /dev/null @@ -1,7 +0,0 @@ -export const API_URLS = { - FREE: 'https://api.coingecko.com/api/v3', - PRO: 'https://pro-api.coingecko.com/api/v3' -} as const; - -// We'll determine which URL to use based on API key validation/usage -export const DEFAULT_BASE_URL = API_URLS.FREE; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/environment.ts b/packages/plugin-coingecko/src/environment.ts deleted file mode 100644 index d8212f4afd6aa..0000000000000 --- a/packages/plugin-coingecko/src/environment.ts +++ /dev/null @@ -1,29 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -const coingeckoConfigSchema = z.object({ - COINGECKO_API_KEY: z.string().nullable(), - COINGECKO_PRO_API_KEY: z.string().nullable(), -}).refine(data => data.COINGECKO_API_KEY || data.COINGECKO_PRO_API_KEY, { - message: "Either COINGECKO_API_KEY or COINGECKO_PRO_API_KEY must be provided" -}); - -export type CoingeckoConfig = z.infer; - -export async function validateCoingeckoConfig(runtime: IAgentRuntime): Promise { - const config = { - COINGECKO_API_KEY: runtime.getSetting("COINGECKO_API_KEY"), - COINGECKO_PRO_API_KEY: runtime.getSetting("COINGECKO_PRO_API_KEY"), - }; - - return coingeckoConfigSchema.parse(config); -} - -export function getApiConfig(config: CoingeckoConfig) { - const isPro = !!config.COINGECKO_PRO_API_KEY; - return { - baseUrl: isPro ? "https://pro-api.coingecko.com/api/v3" : "https://api.coingecko.com/api/v3", - apiKey: isPro ? config.COINGECKO_PRO_API_KEY : config.COINGECKO_API_KEY, - headerKey: isPro ? "x-cg-pro-api-key" : "x-cg-demo-api-key" - }; -} diff --git a/packages/plugin-coingecko/src/index.ts b/packages/plugin-coingecko/src/index.ts deleted file mode 100644 index 829f33bc17e2a..0000000000000 --- a/packages/plugin-coingecko/src/index.ts +++ /dev/null @@ -1,33 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import getMarkets from "./actions/getMarkets"; -import getPrice from "./actions/getPrice"; -import getPricePerAddress from "./actions/getPricePerAddress"; -import getTopGainersLosers from "./actions/getTopGainersLosers"; -import getTrending from "./actions/getTrending"; -import getTrendingPools from "./actions/getTrendingPools"; -import getNewlyListed from "./actions/getNewlyListed"; -import getNetworkTrendingPools from "./actions/getNetworkTrendingPools"; -import getNetworkNewPools from "./actions/getNetworkNewPools"; -import { categoriesProvider } from "./providers/categoriesProvider"; -import { coinsProvider } from "./providers/coinsProvider"; -import { networksProvider } from "./providers/networkProvider"; - -export const coingeckoPlugin: Plugin = { - name: "coingecko", - description: "CoinGecko Plugin for Eliza", - actions: [ - getPrice, - getPricePerAddress, - getTrending, - getTrendingPools, - getMarkets, - getTopGainersLosers, - getNewlyListed, - getNetworkTrendingPools, - getNetworkNewPools, - ], - evaluators: [], - providers: [categoriesProvider, coinsProvider, networksProvider], -}; - -export default coingeckoPlugin; diff --git a/packages/plugin-coingecko/src/providers/categoriesProvider.ts b/packages/plugin-coingecko/src/providers/categoriesProvider.ts deleted file mode 100644 index 75aaa9b88b45d..0000000000000 --- a/packages/plugin-coingecko/src/providers/categoriesProvider.ts +++ /dev/null @@ -1,111 +0,0 @@ -import { type IAgentRuntime, type Memory, type Provider, type State, elizaLogger } from "@elizaos/core"; -import axios from 'axios'; -import { getApiConfig, validateCoingeckoConfig } from '../environment'; - -interface CategoryItem { - category_id: string; - name: string; -} - -const CACHE_KEY = 'coingecko:categories'; -const CACHE_TTL = 5 * 60; // 5 minutes -const MAX_RETRIES = 3; - -async function fetchCategories(runtime: IAgentRuntime): Promise { - const config = await validateCoingeckoConfig(runtime); - const { baseUrl, apiKey, headerKey } = getApiConfig(config); - - const response = await axios.get( - `${baseUrl}/coins/categories/list`, - { - headers: { - 'accept': 'application/json', - [headerKey]: apiKey - }, - timeout: 5000 // 5 second timeout - } - ); - - if (!response.data?.length) { - throw new Error("Invalid categories data received"); - } - - return response.data; -} - -async function fetchWithRetry(runtime: IAgentRuntime): Promise { - let lastError: Error | null = null; - - for (let i = 0; i < MAX_RETRIES; i++) { - try { - return await fetchCategories(runtime); - } catch (error) { - lastError = error; - elizaLogger.error(`Categories fetch attempt ${i + 1} failed:`, error); - await new Promise(resolve => setTimeout(resolve, 1000 * (i + 1))); - } - } - - throw lastError || new Error("Failed to fetch categories after multiple attempts"); -} - -async function getCategories(runtime: IAgentRuntime): Promise { - try { - // Try to get from cache first - const cached = await runtime.cacheManager.get(CACHE_KEY); - if (cached) { - return cached; - } - - // Fetch fresh data - const categories = await fetchWithRetry(runtime); - - // Cache the result - await runtime.cacheManager.set(CACHE_KEY, categories, { expires: CACHE_TTL }); - - return categories; - } catch (error) { - elizaLogger.error("Error fetching categories:", error); - throw error; - } -} - -function formatCategoriesContext(categories: CategoryItem[]): string { - const popularCategories = [ - 'layer-1', 'defi', 'meme', 'ai-meme-coins', - 'artificial-intelligence', 'gaming', 'metaverse' - ]; - - const popular = categories - .filter(c => popularCategories.includes(c.category_id)) - .map(c => `${c.name} (${c.category_id})`); - - return ` -Available cryptocurrency categories: - -Popular categories: -${popular.map(c => `- ${c}`).join('\n')} - -Total available categories: ${categories.length} - -You can use these category IDs when filtering cryptocurrency market data. -`.trim(); -} - -export const categoriesProvider: Provider = { - // eslint-disable-next-line - get: async (runtime: IAgentRuntime, message: Memory, state?: State): Promise => { - try { - const categories = await getCategories(runtime); - return formatCategoriesContext(categories); - } catch (error) { - elizaLogger.error("Categories provider error:", error); - return "Cryptocurrency categories are temporarily unavailable. Please try again later."; - } - } -}; - -// Helper function for actions to get raw categories data -export async function getCategoriesData(runtime: IAgentRuntime): Promise { - return getCategories(runtime); -} diff --git a/packages/plugin-coingecko/src/providers/coinsProvider.ts b/packages/plugin-coingecko/src/providers/coinsProvider.ts deleted file mode 100644 index bdfa9773505c9..0000000000000 --- a/packages/plugin-coingecko/src/providers/coinsProvider.ts +++ /dev/null @@ -1,115 +0,0 @@ -import { type IAgentRuntime, type Memory, type Provider, type State, elizaLogger } from "@elizaos/core"; -import axios from 'axios'; -import { getApiConfig, validateCoingeckoConfig } from '../environment'; - -interface CoinItem { - id: string; - symbol: string; - name: string; -} - -const CACHE_KEY = 'coingecko:coins'; -const CACHE_TTL = 5 * 60; // 5 minutes -const MAX_RETRIES = 3; - -async function fetchCoins(runtime: IAgentRuntime, includePlatform = false): Promise { - const config = await validateCoingeckoConfig(runtime); - const { baseUrl, apiKey, headerKey } = getApiConfig(config); - - const response = await axios.get( - `${baseUrl}/coins/list`, - { - params: { - include_platform: includePlatform - }, - headers: { - 'accept': 'application/json', - [headerKey]: apiKey - }, - timeout: 5000 // 5 second timeout - } - ); - - if (!response.data?.length) { - throw new Error("Invalid coins data received"); - } - - return response.data; -} - -async function fetchWithRetry(runtime: IAgentRuntime, includePlatform = false): Promise { - let lastError: Error | null = null; - - for (let i = 0; i < MAX_RETRIES; i++) { - try { - return await fetchCoins(runtime, includePlatform); - } catch (error) { - lastError = error; - elizaLogger.error(`Coins fetch attempt ${i + 1} failed:`, error); - await new Promise(resolve => setTimeout(resolve, 1000 * (i + 1))); - } - } - - throw lastError || new Error("Failed to fetch coins after multiple attempts"); -} - -async function getCoins(runtime: IAgentRuntime, includePlatform = false): Promise { - try { - // Try to get from cache first - const cached = await runtime.cacheManager.get(CACHE_KEY); - if (cached) { - return cached; - } - - // Fetch fresh data - const coins = await fetchWithRetry(runtime, includePlatform); - - // Cache the result - await runtime.cacheManager.set(CACHE_KEY, coins, { expires: CACHE_TTL }); - - return coins; - } catch (error) { - elizaLogger.error("Error fetching coins:", error); - throw error; - } -} - -function formatCoinsContext(coins: CoinItem[]): string { - const popularCoins = [ - 'bitcoin', 'ethereum', 'binancecoin', 'ripple', - 'cardano', 'solana', 'polkadot', 'dogecoin' - ]; - - const popular = coins - .filter(c => popularCoins.includes(c.id)) - .map(c => `${c.name} (${c.symbol.toUpperCase()}) - ID: ${c.id}`); - - return ` -Available cryptocurrencies: - -Popular coins: -${popular.map(c => `- ${c}`).join('\n')} - -Total available coins: ${coins.length} - -You can use these coin IDs when querying specific cryptocurrency data. -`.trim(); -} - -export const coinsProvider: Provider = { - // eslint-disable-next-line - get: async (runtime: IAgentRuntime, message: Memory, state?: State): Promise => { - try { - const coins = await getCoins(runtime); - return formatCoinsContext(coins); - } catch (error) { - elizaLogger.error("Coins provider error:", error); - return "Cryptocurrency list is temporarily unavailable. Please try again later."; - } - } -}; - -// Helper function for actions to get raw coins data -export async function getCoinsData(runtime: IAgentRuntime, includePlatform = false): Promise { - return getCoins(runtime, includePlatform); -} diff --git a/packages/plugin-coingecko/src/providers/networkProvider.ts b/packages/plugin-coingecko/src/providers/networkProvider.ts deleted file mode 100644 index 3656a648f2b87..0000000000000 --- a/packages/plugin-coingecko/src/providers/networkProvider.ts +++ /dev/null @@ -1,135 +0,0 @@ -import { - type IAgentRuntime, - type Memory, - type Provider, - type State, - elizaLogger, -} from "@elizaos/core"; -import axios from "axios"; -import { getApiConfig, validateCoingeckoConfig } from "../environment"; - -interface NetworkAttributes { - name: string; - coingecko_asset_platform_id: string; -} - -interface NetworkItem { - id: string; - type: string; - attributes: NetworkAttributes; -} - -interface NetworksResponse { - data: NetworkItem[]; -} - -const CACHE_KEY = "coingecko:networks"; -const CACHE_TTL = 30 * 60; // 30 minutes -const MAX_RETRIES = 3; - -async function fetchNetworks(runtime: IAgentRuntime): Promise { - const config = await validateCoingeckoConfig(runtime); - const { baseUrl, apiKey, headerKey } = getApiConfig(config); - - const response = await axios.get( - `${baseUrl}/onchain/networks`, - { - headers: { - accept: "application/json", - [headerKey]: apiKey, - }, - timeout: 5000, // 5 second timeout - } - ); - - if (!response.data?.data?.length) { - throw new Error("Invalid networks data received"); - } - - return response.data.data; -} - -async function fetchWithRetry(runtime: IAgentRuntime): Promise { - let lastError: Error | null = null; - - for (let i = 0; i < MAX_RETRIES; i++) { - try { - return await fetchNetworks(runtime); - } catch (error) { - lastError = error; - elizaLogger.error(`Networks fetch attempt ${i + 1} failed:`, error); - await new Promise((resolve) => setTimeout(resolve, 1000 * (i + 1))); - } - } - - throw ( - lastError || - new Error("Failed to fetch networks after multiple attempts") - ); -} - -async function getNetworks(runtime: IAgentRuntime): Promise { - try { - // Try to get from cache first - const cached = await runtime.cacheManager.get(CACHE_KEY); - if (cached) { - return cached; - } - - // Fetch fresh data - const networks = await fetchWithRetry(runtime); - - // Cache the result - await runtime.cacheManager.set(CACHE_KEY, networks, { - expires: CACHE_TTL, - }); - - return networks; - } catch (error) { - elizaLogger.error("Error fetching networks:", error); - throw error; - } -} - -function formatNetworksContext(networks: NetworkItem[]): string { - const mainNetworks = ["eth", "bsc", "polygon_pos", "avax", "solana"]; - - const popular = networks - .filter((n) => mainNetworks.includes(n.id)) - .map((n) => `${n.attributes.name} - ID: ${n.id}`); - - return ` -Available blockchain networks: - -Major networks: -${popular.map((n) => `- ${n}`).join("\n")} - -Total available networks: ${networks.length} - -You can use these network IDs when querying network-specific data. -`.trim(); -} - -export const networksProvider: Provider = { - // eslint-disable-next-line - get: async ( - runtime: IAgentRuntime, - message: Memory, - state?: State - ): Promise => { - try { - const networks = await getNetworks(runtime); - return formatNetworksContext(networks); - } catch (error) { - elizaLogger.error("Networks provider error:", error); - return "Blockchain networks list is temporarily unavailable. Please try again later."; - } - }, -}; - -// Helper function for actions to get raw networks data -export async function getNetworksData( - runtime: IAgentRuntime -): Promise { - return getNetworks(runtime); -} diff --git a/packages/plugin-coingecko/src/templates/gainersLosers.ts b/packages/plugin-coingecko/src/templates/gainersLosers.ts deleted file mode 100644 index 73c104e767309..0000000000000 --- a/packages/plugin-coingecko/src/templates/gainersLosers.ts +++ /dev/null @@ -1,50 +0,0 @@ -export const getTopGainersLosersTemplate = ` -Extract the following parameters for top gainers and losers data: -- **vs_currency** (string): The target currency to display prices in (e.g., "usd", "eur") - defaults to "usd" -- **duration** (string): Time range for price changes - one of "24h", "7d", "14d", "30d", "60d", "1y" - defaults to "24h" -- **top_coins** (string): Filter by market cap ranking (e.g., "100", "1000") - defaults to "1000" - -Provide the values in the following JSON format: - -\`\`\`json -{ - "vs_currency": "usd", - "duration": "24h", - "top_coins": "1000" -} -\`\`\` - -Example request: "Show me the biggest gainers and losers today" -Example response: -\`\`\`json -{ - "vs_currency": "usd", - "duration": "24h", - "top_coins": "1000" -} -\`\`\` - -Example request: "What are the top movers in EUR for the past week?" -Example response: -\`\`\`json -{ - "vs_currency": "eur", - "duration": "7d", - "top_coins": "300" -} -\`\`\` - -Example request: "Show me monthly performance of top 100 coins" -Example response: -\`\`\`json -{ - "vs_currency": "usd", - "duration": "30d", - "top_coins": "100" -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} - -Based on the conversation above, if the request is for top gainers and losers data, extract the appropriate parameters and respond with a JSON object. If the request is not related to top movers data, respond with null.`; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/templates/markets.ts b/packages/plugin-coingecko/src/templates/markets.ts deleted file mode 100644 index 6610ea5b7eb5b..0000000000000 --- a/packages/plugin-coingecko/src/templates/markets.ts +++ /dev/null @@ -1,56 +0,0 @@ -export const getMarketsTemplate = ` -Extract the following parameters for market listing: -- **vs_currency** (string): Target currency for price data (default: "usd") -- **category** (string, optional): Specific category ID from the available categories -- **per_page** (number): Number of results to return (1-250, default: 20) -- **order** (string): Sort order for results, one of: - - market_cap_desc: Highest market cap first - - market_cap_asc: Lowest market cap first - - volume_desc: Highest volume first - - volume_asc: Lowest volume first - -Available Categories: -{{categories}} - -Provide the values in the following JSON format: - -\`\`\`json -{ - "vs_currency": "", - "category": "", - "per_page": , - "order": "", - "page": 1, - "sparkline": false -} -\`\`\` - -Example request: "Show me the top 10 gaming cryptocurrencies" -Example response: -\`\`\`json -{ - "vs_currency": "usd", - "category": "gaming", - "per_page": 10, - "order": "market_cap_desc", - "page": 1, - "sparkline": false -} -\`\`\` - -Example request: "What are the best performing coins by volume?" -Example response: -\`\`\`json -{ - "vs_currency": "usd", - "per_page": 20, - "order": "volume_desc", - "page": 1, - "sparkline": false -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} - -Based on the conversation above, if the request is for a market listing/ranking, extract the appropriate parameters and respond with a JSON object. If the request is for specific coins only, respond with null.`; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/templates/networkNewPools.ts b/packages/plugin-coingecko/src/templates/networkNewPools.ts deleted file mode 100644 index d21b77bc03708..0000000000000 --- a/packages/plugin-coingecko/src/templates/networkNewPools.ts +++ /dev/null @@ -1,37 +0,0 @@ -export const getNetworkNewPoolsTemplate = `Determine if this is a network-specific new pools request. If it is one of the specified situations, extract the network ID and limit: - -Situation 1: "Get network new pools" -- Message contains: network name AND phrases about new/recent/latest pools -- Example: "Show new pools on Ethereum" or "What are the latest pools on BSC?" -- Action: Extract network ID and use default limit - -Situation 2: "Get specific number of new pools" -- Message contains: number AND network name AND new/recent/latest pools reference -- Example: "Show 5 newest pools on Polygon" or "Get 20 latest pools on Avalanche" -- Action: Extract network ID and specific limit - -Situation 3: "Get all new pools" -- Message contains: "all" AND network name AND new/recent/latest pools reference -- Example: "Show all new pools on BSC" or "List all recent pools on Ethereum" -- Action: Extract network ID and set maximum limit - -Network ID mappings: -- "solana", "sol" => "solana" -- "ethereum", "eth" => "eth" -- "binance smart chain", "bsc", "bnb chain" => "bsc" -- "polygon", "matic" => "polygon_pos" -- "avalanche", "avax" => "avax" - -For all situations, respond with a JSON object in the format: -\`\`\`json -{ - "networkId": string, - "limit": number -} -\`\`\` - -Previous conversation for context: -{{conversation}} - -You are replying to: {{message}} -`; diff --git a/packages/plugin-coingecko/src/templates/networkTrendingPools.ts b/packages/plugin-coingecko/src/templates/networkTrendingPools.ts deleted file mode 100644 index 19db65f813e3b..0000000000000 --- a/packages/plugin-coingecko/src/templates/networkTrendingPools.ts +++ /dev/null @@ -1,37 +0,0 @@ -export const getNetworkTrendingPoolsTemplate = `Determine if this is a network-specific trending pools request. If it is one of the specified situations, extract the network ID and limit: - -Situation 1: "Get network trending pools" -- Message contains: network name (e.g., "solana", "ethereum", "bsc") AND phrases about pools -- Example: "Show trending pools on Solana" or "What are the hot pools on ETH?" -- Action: Extract network ID and use default limit - -Situation 2: "Get specific number of network pools" -- Message contains: number AND network name AND pools reference -- Example: "Show top 5 pools on BSC" or "Get 20 trending pools on Ethereum" -- Action: Extract network ID and specific limit - -Situation 3: "Get all network pools" -- Message contains: "all" AND network name AND pools reference -- Example: "Show all trending pools on Polygon" or "List all hot pools on Avalanche" -- Action: Extract network ID and set maximum limit - -Network ID mappings: -- "solana", "sol" => "solana" -- "ethereum", "eth" => "eth" -- "binance smart chain", "bsc", "bnb chain" => "bsc" -- "polygon", "matic" => "polygon_pos" -- "avalanche", "avax" => "avax" - -For all situations, respond with a JSON object in the format: -\`\`\`json -{ - "networkId": string, - "limit": number -} -\`\`\` - -Previous conversation for context: -{{conversation}} - -You are replying to: {{message}} -`; diff --git a/packages/plugin-coingecko/src/templates/newCoins.ts b/packages/plugin-coingecko/src/templates/newCoins.ts deleted file mode 100644 index 51fcd4cc0f6e7..0000000000000 --- a/packages/plugin-coingecko/src/templates/newCoins.ts +++ /dev/null @@ -1,29 +0,0 @@ -export const getNewCoinsTemplate = `Determine if this is a new coins request. If it is one of the specified situations, perform the corresponding action: - -Situation 1: "Get all new coins" -- Message contains: phrases like "all new coins", "all recent listings", "all latest coins" -- Example: "Show me all new coin listings" or "List all recently added coins" -- Action: Return with limit=50 - -Situation 2: "Get specific number of new coins" -- Message contains: number followed by "new coins" or "latest" followed by number and "coins" -- Example: "Show me 5 new coins" or "Get the latest 20 coins" -- Action: Return with limit=specified number - -Situation 3: "Default new coins request" -- Message contains: general phrases like "new coins", "recent listings", "latest coins" -- Example: "What are the newest coins?" or "Show me recent listings" -- Action: Return with limit=10 - -For all situations, respond with a JSON object in the format: -\`\`\`json -{ - "limit": number -} -\`\`\` - -Previous conversation for context: -{{conversation}} - -You are replying to: {{message}} -`; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/templates/price.ts b/packages/plugin-coingecko/src/templates/price.ts deleted file mode 100644 index 6245bbe26e3b0..0000000000000 --- a/packages/plugin-coingecko/src/templates/price.ts +++ /dev/null @@ -1,65 +0,0 @@ -export const getPriceTemplate = ` -Extract the following parameters for cryptocurrency price data: -- **coinIds** (string | string[]): The ID(s) of the cryptocurrency/cryptocurrencies to get prices for (e.g., "bitcoin" or ["bitcoin", "ethereum"]) -- **currency** (string | string[]): The currency/currencies to display prices in (e.g., "usd" or ["usd", "eur", "jpy"]) - defaults to ["usd"] -- **include_market_cap** (boolean): Whether to include market cap data - defaults to false -- **include_24hr_vol** (boolean): Whether to include 24h volume data - defaults to false -- **include_24hr_change** (boolean): Whether to include 24h price change data - defaults to false -- **include_last_updated_at** (boolean): Whether to include last update timestamp - defaults to false - -Provide the values in the following JSON format: - -\`\`\`json -{ - "coinIds": "bitcoin", - "currency": ["usd"], - "include_market_cap": false, - "include_24hr_vol": false, - "include_24hr_change": false, - "include_last_updated_at": false -} -\`\`\` - -Example request: "What's the current price of Bitcoin?" -Example response: -\`\`\`json -{ - "coinIds": "bitcoin", - "currency": ["usd"], - "include_market_cap": false, - "include_24hr_vol": false, - "include_24hr_change": false, - "include_last_updated_at": false -} -\`\`\` - -Example request: "Show me ETH price and market cap in EUR with last update time" -Example response: -\`\`\`json -{ - "coinIds": "ethereum", - "currency": ["eur"], - "include_market_cap": true, - "include_24hr_vol": false, - "include_24hr_change": false, - "include_last_updated_at": true -} -\`\`\` - -Example request: "What's the current price of Bitcoin in USD, JPY and EUR?" -Example response: -\`\`\`json -{ - "coinIds": "bitcoin", - "currency": ["usd", "jpy", "eur"], - "include_market_cap": false, - "include_24hr_vol": false, - "include_24hr_change": false, - "include_last_updated_at": false -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} - -Based on the conversation above, if the request is for cryptocurrency price data, extract the appropriate parameters and respond with a JSON object. If the request is not related to price data, respond with null.`; diff --git a/packages/plugin-coingecko/src/templates/priceAddress.ts b/packages/plugin-coingecko/src/templates/priceAddress.ts deleted file mode 100644 index 429841374a78f..0000000000000 --- a/packages/plugin-coingecko/src/templates/priceAddress.ts +++ /dev/null @@ -1,52 +0,0 @@ -export const getPriceByAddressTemplate = ` -Extract the following parameters for token price data: -- **chainId** (string): The blockchain network ID (e.g., "ethereum", "polygon", "binance-smart-chain") -- **tokenAddress** (string): The contract address of the token -- **include_market_cap** (boolean): Whether to include market cap data - defaults to true - -Normalize chain IDs to lowercase names: ethereum, polygon, binance-smart-chain, avalanche, fantom, arbitrum, optimism, etc. -Token address should be the complete address string, maintaining its original case. - -Provide the values in the following JSON format: - -\`\`\`json -{ - "chainId": "ethereum", - "tokenAddress": "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", - "include_market_cap": true -} -\`\`\` - -Example request: "What's the price of USDC on Ethereum? Address: 0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48" -Example response: -\`\`\`json -{ - "chainId": "ethereum", - "tokenAddress": "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", - "include_market_cap": true -} -\`\`\` - -Example request: "Check the price for this token on Polygon: 0x2791Bca1f2de4661ED88A30C99A7a9449Aa84174" -Example response: -\`\`\`json -{ - "chainId": "polygon", - "tokenAddress": "0x2791Bca1f2de4661ED88A30C99A7a9449Aa84174", - "include_market_cap": true -} -\`\`\` - -Example request: "Get price for BONK token on Solana with address HeLp6NuQkmYB4pYWo2zYs22mESHXPQYzXbB8n4V98jwC" -Example response: -\`\`\`json -{ - "chainId": "solana", - "tokenAddress": "HeLp6NuQkmYB4pYWo2zYs22mESHXPQYzXbB8n4V98jwC" -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} - -Based on the conversation above, use last question made and if the request is for token price data and includes both a chain and address, extract the appropriate parameters and respond with a JSON object. If the request is not related to token price data or missing required information, respond with null.`; diff --git a/packages/plugin-coingecko/src/templates/trending.ts b/packages/plugin-coingecko/src/templates/trending.ts deleted file mode 100644 index 073f68a0c023b..0000000000000 --- a/packages/plugin-coingecko/src/templates/trending.ts +++ /dev/null @@ -1,36 +0,0 @@ -export const getTrendingTemplate = ` -Extract the following parameters for trending data: -- **include_nfts** (boolean): Whether to include NFTs in the response (default: true) -- **include_categories** (boolean): Whether to include categories in the response (default: true) - -Provide the values in the following JSON format: - -\`\`\`json -{ - "include_nfts": true, - "include_categories": true -} -\`\`\` - -Example request: "What's trending in crypto?" -Example response: -\`\`\`json -{ - "include_nfts": true, - "include_categories": true -} -\`\`\` - -Example request: "Show me trending coins only" -Example response: -\`\`\`json -{ - "include_nfts": false, - "include_categories": false -} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} - -Based on the conversation above, if the request is for trending market data, extract the appropriate parameters and respond with a JSON object. If the request is not related to trending data, respond with null.`; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/templates/trendingPools.ts b/packages/plugin-coingecko/src/templates/trendingPools.ts deleted file mode 100644 index 08cd0d3e2a797..0000000000000 --- a/packages/plugin-coingecko/src/templates/trendingPools.ts +++ /dev/null @@ -1,29 +0,0 @@ -export const getTrendingPoolsTemplate = `Determine if this is a trending pools request. If it is one of the specified situations, perform the corresponding action: - -Situation 1: "Get all trending pools" -- Message contains: phrases like "all trending pools", "show all pools", "list all pools" -- Example: "Show me all trending pools" or "List all pools" -- Action: Return with limit=100 - -Situation 2: "Get specific number of pools" -- Message contains: number followed by "pools" or "top" followed by number and "pools" -- Example: "Show top 5 pools" or "Get me 20 trending pools" -- Action: Return with limit=specified number - -Situation 3: "Default trending pools request" -- Message contains: general phrases like "trending pools", "hot pools", "popular pools" -- Example: "What are the trending pools?" or "Show me hot pools" -- Action: Return with limit=10 - -For all situations, respond with a JSON object in the format: -\`\`\`json -{ - "limit": number -} -\`\`\` - -Previous conversation for context: -{{conversation}} - -You are replying to: {{message}} -`; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/types.ts b/packages/plugin-coingecko/src/types.ts deleted file mode 100644 index bf2eb42724e15..0000000000000 --- a/packages/plugin-coingecko/src/types.ts +++ /dev/null @@ -1,23 +0,0 @@ -// Type definitions for CoinGecko plugin - -export interface CoinGeckoConfig { - apiKey: string; - baseUrl?: string; -} - -export interface PriceResponse { - [key: string]: { - [currency: string]: number; - }; -} - -export interface MarketData { - id: string; - symbol: string; - name: string; - current_price: number; - market_cap: number; - market_cap_rank: number; - price_change_percentage_24h: number; - total_volume: number; -} diff --git a/packages/plugin-coingecko/tsconfig.json b/packages/plugin-coingecko/tsconfig.json deleted file mode 100644 index 73993deaaf7cb..0000000000000 --- a/packages/plugin-coingecko/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/plugin-coingecko/tsup.config.ts b/packages/plugin-coingecko/tsup.config.ts deleted file mode 100644 index 3e30481b3aad0..0000000000000 --- a/packages/plugin-coingecko/tsup.config.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], - dts: true, - external: ["dotenv", "fs", "path", "https", "http"] -}); diff --git a/packages/plugin-coingecko/vitest.config.ts b/packages/plugin-coingecko/vitest.config.ts deleted file mode 100644 index 419efc958f910..0000000000000 --- a/packages/plugin-coingecko/vitest.config.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - setupFiles: ['./__tests__/setup.ts'], - include: ['**/__tests__/**/*.test.ts'], - } -}); diff --git a/packages/plugin-coinmarketcap/README.md b/packages/plugin-coinmarketcap/README.md deleted file mode 100644 index 9c042d9e57636..0000000000000 --- a/packages/plugin-coinmarketcap/README.md +++ /dev/null @@ -1,127 +0,0 @@ -# @elizaos/plugin-coinmarketcap - -A plugin for Eliza that enables cryptocurrency price checking using the CoinMarketCap API. - -## Features - -- Real-time cryptocurrency price checking -- Support for multiple cryptocurrencies (BTC, ETH, SOL, etc.) -- Currency conversion (USD, EUR, etc.) -- Detailed price and market data -- Natural language processing for price queries - -## Installation - -```bash -npm install @elizaos/plugin-coinmarketcap -``` - -## Configuration - -1. Get your API key from [CoinMarketCap](https://pro.coinmarketcap.com) - -2. Set up your environment variables: - -```bash -COINMARKETCAP_API_KEY=your_api_key -``` - -3. Register the plugin in your Eliza configuration: - -```typescript -import { CoinMarketCapPlugin } from "@elizaos/plugin-coinmarketcap"; - -// In your Eliza configuration -plugins: [ - new CoinMarketCapPlugin(), - // ... other plugins -]; -``` - -## Usage - -The plugin responds to natural language queries about cryptocurrency prices. Here are some examples: - -```plaintext -"What's the current price of Bitcoin?" -"Show me ETH price in USD" -"Get the price of SOL" -``` - -### Supported Cryptocurrencies - -The plugin supports major cryptocurrencies including: - -- Bitcoin (BTC) -- Ethereum (ETH) -- Solana (SOL) -- USD Coin (USDC) -- And many more... - -### Available Actions - -#### GET_PRICE - -Fetches the current price of a cryptocurrency. - -```typescript -// Example response format -{ - symbol: "BTC", - price: 50000.00, - currency: "USD", - marketCap: 1000000000000, - volume24h: 50000000000, - percentChange24h: 2.5 -} -``` - -## API Reference - -### Environment Variables - -| Variable | Description | Required | -| --------------------- | -------------------------- | -------- | -| COINMARKETCAP_API_KEY | Your CoinMarketCap API key | Yes | - -### Types - -```typescript -interface PriceData { - price: number; - marketCap: number; - volume24h: number; - percentChange24h: number; -} - -interface GetPriceContent { - symbol: string; - currency: string; -} -``` - -## Error Handling - -The plugin includes comprehensive error handling for: - -- Invalid API keys -- Rate limiting -- Network timeouts -- Invalid cryptocurrency symbols -- Unsupported currencies - -## Rate Limits - -CoinMarketCap API has different rate limits based on your subscription plan. Please refer to [CoinMarketCap's pricing page](https://coinmarketcap.com/api/pricing/) for detailed information. - -## Support - -For support, please open an issue in the repository or reach out to the maintainers: - -- Discord: 0xspit - -## Links - -- [CoinMarketCap API Documentation](https://coinmarketcap.com/api/documentation/v1/) - -- [GitHub Repository](https://github.com/elizaos/eliza/tree/main/packages/plugin-coinmarketcap) diff --git a/packages/plugin-coinmarketcap/__tests__/actions/getPrice.service.test.ts b/packages/plugin-coinmarketcap/__tests__/actions/getPrice.service.test.ts deleted file mode 100644 index e695eea392dba..0000000000000 --- a/packages/plugin-coinmarketcap/__tests__/actions/getPrice.service.test.ts +++ /dev/null @@ -1,151 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import axios from 'axios'; -import { createPriceService } from '../../src/actions/getPrice/service'; - -vi.mock('axios'); - -describe('PriceService', () => { - const API_KEY = 'test-api-key'; - let priceService: ReturnType; - - beforeEach(() => { - vi.clearAllMocks(); - vi.mocked(axios.create).mockReturnValue(axios); - priceService = createPriceService(API_KEY); - }); - - it('should create axios instance with correct config', () => { - expect(axios.create).toHaveBeenCalledWith({ - baseURL: 'https://pro-api.coinmarketcap.com/v1', - headers: { - 'X-CMC_PRO_API_KEY': API_KEY, - 'Accept': 'application/json' - } - }); - }); - - it('should normalize symbol and currency', async () => { - const mockResponse = { - data: { - data: { - BTC: { - quote: { - USD: { - price: 50000, - market_cap: 1000000000000, - volume_24h: 30000000000, - percent_change_24h: 2.5 - } - } - } - } - } - }; - - vi.mocked(axios.get).mockResolvedValueOnce(mockResponse); - - await priceService.getPrice(' btc ', ' usd '); - - expect(axios.get).toHaveBeenCalledWith( - '/cryptocurrency/quotes/latest', - expect.objectContaining({ - params: { - symbol: 'BTC', - convert: 'USD' - } - }) - ); - }); - - it('should return formatted price data', async () => { - const mockResponse = { - data: { - data: { - BTC: { - quote: { - USD: { - price: 50000, - market_cap: 1000000000000, - volume_24h: 30000000000, - percent_change_24h: 2.5 - } - } - } - } - } - }; - - vi.mocked(axios.get).mockResolvedValueOnce(mockResponse); - - const result = await priceService.getPrice('BTC', 'USD'); - - expect(result).toEqual({ - price: 50000, - marketCap: 1000000000000, - volume24h: 30000000000, - percentChange24h: 2.5 - }); - }); - - it('should handle missing symbol data', async () => { - const mockResponse = { - data: { - data: {} - } - }; - - vi.mocked(axios.get).mockResolvedValueOnce(mockResponse); - - await expect(priceService.getPrice('INVALID', 'USD')) - .rejects - .toThrow('No data found for symbol: INVALID'); - }); - - it('should handle missing quote data', async () => { - const mockResponse = { - data: { - data: { - BTC: { - quote: {} - } - } - } - }; - - vi.mocked(axios.get).mockResolvedValueOnce(mockResponse); - - await expect(priceService.getPrice('BTC', 'INVALID')) - .rejects - .toThrow('No quote data found for currency: INVALID'); - }); - - it('should handle API errors', async () => { - const errorMessage = 'API rate limit exceeded'; - const apiError = new Error(errorMessage); - Object.assign(apiError, { - isAxiosError: true, - response: { - data: { - status: { - error_message: errorMessage - } - } - } - }); - - vi.mocked(axios.get).mockRejectedValueOnce(apiError); - - await expect(priceService.getPrice('BTC', 'USD')) - .rejects - .toThrow(`${errorMessage}`); - }); - - it('should handle non-axios errors', async () => { - const error = new Error('Network error'); - vi.mocked(axios.get).mockRejectedValueOnce(error); - - await expect(priceService.getPrice('BTC', 'USD')) - .rejects - .toThrow('Network error'); - }); -}); diff --git a/packages/plugin-coinmarketcap/__tests__/actions/getPrice.test.ts b/packages/plugin-coinmarketcap/__tests__/actions/getPrice.test.ts deleted file mode 100644 index f737c282c41ce..0000000000000 --- a/packages/plugin-coinmarketcap/__tests__/actions/getPrice.test.ts +++ /dev/null @@ -1,234 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { elizaLogger, ModelClass, generateObjectDeprecated, composeContext } from '@elizaos/core'; -import getPriceAction from '../../src/actions/getPrice'; -import axios from 'axios'; -import * as environment from '../../src/environment'; - -vi.mock('axios'); -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - log: vi.fn(), - error: vi.fn(), - warn: vi.fn(), - info: vi.fn(), - success: vi.fn(), - }, - generateObjectDeprecated: vi.fn(), - composeContext: vi.fn(), - ModelClass: { SMALL: 'SMALL' } -})); -vi.mock('../../src/environment', () => ({ - validateCoinMarketCapConfig: vi.fn() -})); - -describe('getPrice action', () => { - const mockRuntime = { - composeState: vi.fn(), - updateRecentMessageState: vi.fn(), - getPluginConfig: vi.fn(), - }; - - const mockMessage = {}; - const mockState = {}; - const mockCallback = vi.fn(); - const mockConfig = { - COINMARKETCAP_API_KEY: 'test-api-key' - }; - - beforeEach(() => { - vi.clearAllMocks(); - - // Mock environment validation - vi.mocked(environment.validateCoinMarketCapConfig).mockResolvedValue(mockConfig); - - // Mock runtime functions - mockRuntime.composeState.mockResolvedValue(mockState); - mockRuntime.updateRecentMessageState.mockResolvedValue(mockState); - mockRuntime.getPluginConfig.mockResolvedValue({ - apiKey: 'test-api-key' - }); - - // Mock axios create - vi.mocked(axios.create).mockReturnValue(axios); - - // Mock the core functions - vi.mocked(elizaLogger.log).mockImplementation(() => {}); - vi.mocked(elizaLogger.error).mockImplementation(() => {}); - vi.mocked(elizaLogger.success).mockImplementation(() => {}); - vi.mocked(composeContext).mockReturnValue({}); - }); - - it('should validate coinmarketcap config', async () => { - await getPriceAction.validate(mockRuntime, mockMessage); - expect(environment.validateCoinMarketCapConfig).toHaveBeenCalledWith(mockRuntime); - }); - - it('should fetch and format price data', async () => { - const mockResponse = { - data: { - data: { - BTC: { - quote: { - USD: { - price: 50000, - market_cap: 1000000000000, - volume_24h: 30000000000, - percent_change_24h: 2.5 - } - } - } - } - } - }; - - vi.mocked(axios.get).mockResolvedValueOnce(mockResponse); - - // Mock the content generation - vi.mocked(generateObjectDeprecated).mockResolvedValueOnce({ - symbol: 'BTC', - currency: 'USD' - }); - - await getPriceAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(axios.get).toHaveBeenCalledWith( - '/cryptocurrency/quotes/latest', - expect.objectContaining({ - params: { - symbol: 'BTC', - convert: 'USD' - } - }) - ); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('50000 USD'), - content: expect.objectContaining({ - symbol: 'BTC', - currency: 'USD', - price: 50000, - marketCap: 1000000000000, - volume24h: 30000000000, - percentChange24h: 2.5 - }) - })); - }); - - it('should handle invalid symbol', async () => { - const mockResponse = { - data: { - data: {} - } - }; - - vi.mocked(axios.get).mockResolvedValueOnce(mockResponse); - - // Mock the content generation - vi.mocked(generateObjectDeprecated).mockResolvedValueOnce({ - symbol: 'INVALID', - currency: 'USD' - }); - - await getPriceAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('No data found for symbol'), - content: expect.objectContaining({ - error: expect.stringContaining('No data found for symbol') - }) - })); - }); - - it('should handle invalid currency', async () => { - const mockResponse = { - data: { - data: { - BTC: { - quote: {} - } - } - } - }; - - vi.mocked(axios.get).mockResolvedValueOnce(mockResponse); - - // Mock the content generation - vi.mocked(generateObjectDeprecated).mockResolvedValueOnce({ - symbol: 'BTC', - currency: 'INVALID' - }); - - await getPriceAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('No quote data found for currency'), - content: expect.objectContaining({ - error: expect.stringContaining('No quote data found for currency') - }) - })); - }); - - it('should handle API errors gracefully', async () => { - vi.mocked(axios.get).mockRejectedValueOnce(new Error('API Error')); - - // Mock the content generation - vi.mocked(generateObjectDeprecated).mockResolvedValueOnce({ - symbol: 'BTC', - currency: 'USD' - }); - - await getPriceAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('API Error'), - content: expect.objectContaining({ - error: expect.stringContaining('API Error') - }) - })); - }); - - it('should handle rate limit errors', async () => { - const errorMessage = 'Rate limit exceeded'; - const rateLimitError = new Error(`API Error: ${errorMessage}`); - Object.assign(rateLimitError, { - isAxiosError: true, - response: { - data: { - status: { - error_message: errorMessage - } - } - } - }); - vi.mocked(axios.get).mockRejectedValueOnce(rateLimitError); - - // Mock the content generation - vi.mocked(generateObjectDeprecated).mockResolvedValueOnce({ - symbol: 'BTC', - currency: 'USD' - }); - - await getPriceAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith({ - text: `Error fetching price: API Error: ${errorMessage}`, - content: { error: `API Error: ${errorMessage}` } - }); - }); - - it('should handle invalid content generation', async () => { - // Mock invalid content generation - vi.mocked(generateObjectDeprecated).mockResolvedValueOnce({ - invalidField: 'invalid' - }); - - await getPriceAction.handler(mockRuntime, mockMessage, mockState, {}, mockCallback); - - expect(mockCallback).toHaveBeenCalledWith(expect.objectContaining({ - text: expect.stringContaining('Invalid price check content'), - content: expect.objectContaining({ - error: expect.stringContaining('Invalid price check content') - }) - })); - }); -}); diff --git a/packages/plugin-coinmarketcap/__tests__/setup.ts b/packages/plugin-coinmarketcap/__tests__/setup.ts deleted file mode 100644 index bbc49909c167c..0000000000000 --- a/packages/plugin-coinmarketcap/__tests__/setup.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { vi } from 'vitest'; - -// Mock console methods -global.console = { - ...console, - log: vi.fn(), - error: vi.fn(), - warn: vi.fn(), - info: vi.fn() -}; diff --git a/packages/plugin-coinmarketcap/biome.json b/packages/plugin-coinmarketcap/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-coinmarketcap/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-coinmarketcap/package.json b/packages/plugin-coinmarketcap/package.json deleted file mode 100644 index 44a039cbfa5bc..0000000000000 --- a/packages/plugin-coinmarketcap/package.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "name": "@elizaos/plugin-coinmarketcap", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "axios": "^1.6.7" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "tsup": "^8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write .", - "test": "vitest run", - "test:watch": "vitest", - "test:coverage": "vitest run --coverage" - } -} diff --git a/packages/plugin-coinmarketcap/src/actions/getPrice/examples.ts b/packages/plugin-coinmarketcap/src/actions/getPrice/examples.ts deleted file mode 100644 index 701993902cf28..0000000000000 --- a/packages/plugin-coinmarketcap/src/actions/getPrice/examples.ts +++ /dev/null @@ -1,46 +0,0 @@ -import type { ActionExample } from "@elizaos/core"; - -export const priceExamples: ActionExample[][] = [ - [ - { - user: "{{user1}}", - content: { - text: "What's the current price of Bitcoin?", - }, - }, - { - user: "{{agent}}", - content: { - text: "Let me check the current Bitcoin price for you.", - action: "GET_PRICE", - }, - }, - { - user: "{{agent}}", - content: { - text: "The current price of BTC is 65,432.21 USD", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Check ETH price in EUR", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll check the current Ethereum price in EUR.", - action: "GET_PRICE", - }, - }, - { - user: "{{agent}}", - content: { - text: "The current price of ETH is 2,345.67 EUR", - }, - }, - ], -]; diff --git a/packages/plugin-coinmarketcap/src/actions/getPrice/index.ts b/packages/plugin-coinmarketcap/src/actions/getPrice/index.ts deleted file mode 100644 index e2af9c2da53e3..0000000000000 --- a/packages/plugin-coinmarketcap/src/actions/getPrice/index.ts +++ /dev/null @@ -1,121 +0,0 @@ -import { - composeContext, - elizaLogger, - generateObjectDeprecated, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - type Action, -} from "@elizaos/core"; -import { validateCoinMarketCapConfig } from "../../environment"; -import { priceExamples } from "./examples"; -import { createPriceService } from "./service"; -import { getPriceTemplate } from "./template"; -import type { GetPriceContent } from "./types"; -import { isGetPriceContent } from "./validation"; - -export default { - name: "GET_PRICE", - similes: [ - "CHECK_PRICE", - "PRICE_CHECK", - "GET_CRYPTO_PRICE", - "CHECK_CRYPTO_PRICE", - "GET_TOKEN_PRICE", - "CHECK_TOKEN_PRICE", - ], - // eslint-disable-next-line - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateCoinMarketCapConfig(runtime); - return true; - }, - description: "Get the current price of a cryptocurrency from CoinMarketCap", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - elizaLogger.log("Starting CoinMarketCap GET_PRICE handler..."); - - // Initialize or update state - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - try { - // Compose and generate price check content - const priceContext = composeContext({ - state: currentState, - template: getPriceTemplate, - }); - - const content = (await generateObjectDeprecated({ - runtime, - context: priceContext, - modelClass: ModelClass.SMALL, - })) as unknown as GetPriceContent; - - // Validate content - if (!isGetPriceContent(content)) { - throw new Error("Invalid price check content"); - } - - // Get price from CoinMarketCap - const config = await validateCoinMarketCapConfig(runtime); - - const priceService = createPriceService( - config.COINMARKETCAP_API_KEY - ); - - try { - const priceData = await priceService.getPrice( - content.symbol, - content.currency - ); - elizaLogger.success( - `Price retrieved successfully! ${content.symbol}: ${priceData.price} ${content.currency.toUpperCase()}` - ); - - if (callback) { - callback({ - text: `The current price of ${content.symbol} is ${priceData.price} ${content.currency.toUpperCase()}`, - content: { - symbol: content.symbol, - currency: content.currency, - ...priceData, - }, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error in GET_PRICE handler:", error); - if (callback) { - callback({ - text: `Error fetching price: ${error.message}`, - content: { error: error.message }, - }); - } - return false; - } - } catch (error) { - elizaLogger.error("Error in GET_PRICE handler:", error); - if (callback) { - callback({ - text: `Error fetching price: ${error.message}`, - content: { error: error.message }, - }); - } - return false; - } - }, - examples: priceExamples, -} as Action; diff --git a/packages/plugin-coinmarketcap/src/actions/getPrice/service.ts b/packages/plugin-coinmarketcap/src/actions/getPrice/service.ts deleted file mode 100644 index df716b50c30c0..0000000000000 --- a/packages/plugin-coinmarketcap/src/actions/getPrice/service.ts +++ /dev/null @@ -1,71 +0,0 @@ -import axios from "axios"; -import type { ApiResponse, PriceData } from "./types"; - -const BASE_URL = "https://pro-api.coinmarketcap.com/v1"; - -export const createPriceService = (apiKey: string) => { - const client = axios.create({ - baseURL: BASE_URL, - headers: { - "X-CMC_PRO_API_KEY": apiKey, - Accept: "application/json", - }, - }); - - const getPrice = async ( - symbol: string, - currency: string - ): Promise => { - const normalizedSymbol = symbol.toUpperCase().trim(); - const normalizedCurrency = currency.toUpperCase().trim(); - - try { - const response = await client.get( - "/cryptocurrency/quotes/latest", - { - params: { - symbol: normalizedSymbol, - convert: normalizedCurrency, - }, - } - ); - - console.log( - "API Response:", - JSON.stringify(response.data, null, 2) - ); - - const symbolData = response.data.data[normalizedSymbol]; - if (!symbolData) { - throw new Error( - `No data found for symbol: ${normalizedSymbol}` - ); - } - - const quoteData = symbolData.quote[normalizedCurrency]; - if (!quoteData) { - throw new Error( - `No quote data found for currency: ${normalizedCurrency}` - ); - } - - return { - price: quoteData.price, - marketCap: quoteData.market_cap, - volume24h: quoteData.volume_24h, - percentChange24h: quoteData.percent_change_24h, - }; - } catch (error) { - if (axios.isAxiosError(error)) { - const errorMessage = - error.response?.data?.status?.error_message || - error.message; - console.error("API Error:", errorMessage); - throw new Error(`API Error: ${errorMessage}`); - } - throw error; - } - }; - - return { getPrice }; -}; diff --git a/packages/plugin-coinmarketcap/src/actions/getPrice/template.ts b/packages/plugin-coinmarketcap/src/actions/getPrice/template.ts deleted file mode 100644 index 46e439a2356cc..0000000000000 --- a/packages/plugin-coinmarketcap/src/actions/getPrice/template.ts +++ /dev/null @@ -1,27 +0,0 @@ -export const getPriceTemplate = `Respond with a JSON object containing BOTH symbol and currency. Currency must default to "USD" if not specified. - -Here are the cryptocurrency symbol mappings: -- bitcoin/btc -> BTC -- ethereum/eth -> ETH -- solana/sol -> SOL -- cardano/ada -> ADA -- ripple/xrp -> XRP -- dogecoin/doge -> DOGE -- polkadot/dot -> DOT -- usdc -> USDC -- tether/usdt -> USDT - -IMPORTANT: Response must ALWAYS include both "symbol" and "currency" fields. - -Example response: -\`\`\`json -{ - "symbol": "BTC", - "currency": "USD" -} -\`\`\` - -{{recentMessages}} - -Extract the cryptocurrency from the most recent message. Always include currency (default "USD"). -Respond with a JSON markdown block containing both symbol and currency.`; diff --git a/packages/plugin-coinmarketcap/src/actions/getPrice/types.ts b/packages/plugin-coinmarketcap/src/actions/getPrice/types.ts deleted file mode 100644 index 8290b77daf52c..0000000000000 --- a/packages/plugin-coinmarketcap/src/actions/getPrice/types.ts +++ /dev/null @@ -1,28 +0,0 @@ -import type { Content } from "@elizaos/core"; - -export interface GetPriceContent extends Content { - symbol: string; - currency: string; -} - -export interface PriceData { - price: number; - marketCap: number; - volume24h: number; - percentChange24h: number; -} - -export interface ApiResponse { - data: { - [symbol: string]: { - quote: { - [currency: string]: { - price: number; - market_cap: number; - volume_24h: number; - percent_change_24h: number; - }; - }; - }; - }; -} diff --git a/packages/plugin-coinmarketcap/src/actions/getPrice/validation.ts b/packages/plugin-coinmarketcap/src/actions/getPrice/validation.ts deleted file mode 100644 index 7ef2a878fbef5..0000000000000 --- a/packages/plugin-coinmarketcap/src/actions/getPrice/validation.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { z } from "zod"; -import type { GetPriceContent } from "./types"; - -export const GetPriceSchema = z.object({ - symbol: z.string(), - currency: z.string().default("USD"), -}); - -export function isGetPriceContent( - content: GetPriceContent -): content is GetPriceContent { - return ( - typeof content.symbol === "string" && - typeof content.currency === "string" - ); -} diff --git a/packages/plugin-coinmarketcap/src/environment.ts b/packages/plugin-coinmarketcap/src/environment.ts deleted file mode 100644 index 8f7a1faadc71b..0000000000000 --- a/packages/plugin-coinmarketcap/src/environment.ts +++ /dev/null @@ -1,32 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { z } from "zod"; - -export const coinmarketcapEnvSchema = z.object({ - COINMARKETCAP_API_KEY: z - .string() - .min(1, "CoinMarketCap API key is required"), -}); - -export type CoinMarketCapConfig = z.infer; - -export async function validateCoinMarketCapConfig( - runtime: IAgentRuntime -): Promise { - try { - const config = { - COINMARKETCAP_API_KEY: runtime.getSetting("COINMARKETCAP_API_KEY"), - }; - - return coinmarketcapEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `CoinMarketCap configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/plugin-coinmarketcap/src/index.ts b/packages/plugin-coinmarketcap/src/index.ts deleted file mode 100644 index dc2d24dea8782..0000000000000 --- a/packages/plugin-coinmarketcap/src/index.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import getPrice from "./actions/getPrice"; - -export const coinmarketcapPlugin: Plugin = { - name: "coinmarketcap", - description: "CoinMarketCap Plugin for Eliza", - actions: [getPrice], - evaluators: [], - providers: [], -}; - -export default coinmarketcapPlugin; diff --git a/packages/plugin-coinmarketcap/src/types.ts b/packages/plugin-coinmarketcap/src/types.ts deleted file mode 100644 index 8290b77daf52c..0000000000000 --- a/packages/plugin-coinmarketcap/src/types.ts +++ /dev/null @@ -1,28 +0,0 @@ -import type { Content } from "@elizaos/core"; - -export interface GetPriceContent extends Content { - symbol: string; - currency: string; -} - -export interface PriceData { - price: number; - marketCap: number; - volume24h: number; - percentChange24h: number; -} - -export interface ApiResponse { - data: { - [symbol: string]: { - quote: { - [currency: string]: { - price: number; - market_cap: number; - volume_24h: number; - percent_change_24h: number; - }; - }; - }; - }; -} diff --git a/packages/plugin-coinmarketcap/tsconfig.json b/packages/plugin-coinmarketcap/tsconfig.json deleted file mode 100644 index 73993deaaf7cb..0000000000000 --- a/packages/plugin-coinmarketcap/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/plugin-coinmarketcap/tsup.config.ts b/packages/plugin-coinmarketcap/tsup.config.ts deleted file mode 100644 index 58ed52c49904e..0000000000000 --- a/packages/plugin-coinmarketcap/tsup.config.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], - external: [ - "dotenv", - "fs", - "path", - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - ], -}); diff --git a/packages/plugin-coinmarketcap/vitest.config.ts b/packages/plugin-coinmarketcap/vitest.config.ts deleted file mode 100644 index 5c5066f7c5440..0000000000000 --- a/packages/plugin-coinmarketcap/vitest.config.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - setupFiles: ['__tests__/setup.ts'], - include: ['__tests__/**/*.test.ts'], - coverage: { - provider: 'v8', - reporter: ['text', 'json', 'html'], - include: ['src/**/*.ts'], - exclude: ['**/*.d.ts', '**/*.test.ts', '**/examples.ts', '**/template.ts'] - } - } -}); diff --git a/packages/plugin-conflux/README.md b/packages/plugin-conflux/README.md deleted file mode 100644 index 288ffe3ac1d10..0000000000000 --- a/packages/plugin-conflux/README.md +++ /dev/null @@ -1,237 +0,0 @@ -# @elizaos/plugin-conflux - -A plugin for interacting with the Conflux blockchain network within the ElizaOS ecosystem. - -## Description - -The Conflux plugin enables seamless interaction with both Conflux Core Space and eSpace networks. It provides functionality for token transfers, cross-space bridge operations, and ConfiPump token management (creation, buying, and selling). - -## Installation - -```bash -pnpm install @elizaos/plugin-conflux -``` - -## Configuration - -The plugin requires the following environment variables to be set: - -```typescript -CONFLUX_CORE_PRIVATE_KEY= -CONFLUX_CORE_SPACE_RPC_URL= -CONFLUX_MEME_CONTRACT_ADDRESS= -``` - -## Usage - -### Basic Integration - -```typescript -import { confluxPlugin } from "@elizaos/plugin-conflux"; -``` - -### Example Usage - -```typescript -// Core Space Transfer -"Send 1 CFX to cfx:aaejuaaaaaaaaaaaaaaaaaaaaaaaaaaaa2eaeg85p5"; - -// Cross-Space Bridge Transfer -"Send 1 CFX to eSpace Address 0x119DA8bbe74B1C5c987D0c64D10eC1dB301d4752"; - -// ConfiPump Token Creation -"Create a new token called GLITCHIZA with symbol GLITCHIZA and generate a description about it"; - -// ConfiPump Token Trading -"Buy 0.00069 CFX worth of GLITCHIZA(0x1234567890abcdef)"; -"Sell 0.00069 CFX worth of GLITCHIZA(0x1234567890abcdef)"; -``` - -## API Reference - -### Actions - -#### SEND_CFX - -Transfers CFX tokens within Conflux Core Space. - -**Aliases:** - -- SEND_CONFLUX -- SEND_CFX_CORE_SPACE -- TRANSFER_CFX - -**Input Content:** - -```typescript -interface TransferContent { - to: string; // Conflux Core Space address (cfx: prefix) - amount: string; // Amount of CFX to send -} -``` - -#### BRIDGE_SEND_CFX - -Transfers CFX tokens from Core Space to eSpace. - -**Aliases:** - -- BRIDGE_SEND_CONFLUX -- CROSS_SPACE_SEND_CFX -- BRIDGE_TRANSFER_CFX -- CROSS_SPACE_TRANSFER_CFX - -**Input Content:** - -```typescript -interface TransferContent { - to: string; // Conflux eSpace address (0x prefix) - amount: string; // Amount of CFX to send -} -``` - -#### CONFI_PUMP - -Manages ConfiPump token operations. - -**Aliases:** - -- SELL_TOKEN -- BUY_TOKEN -- CREATE_TOKEN - -**Input Content:** - -```typescript -interface PumpContent { - action: "CREATE_TOKEN" | "BUY_TOKEN" | "SELL_TOKEN"; - params: { - name?: string; - symbol?: string; - description?: string; - tokenAddress?: string; - value?: string; - }; -} -``` - -## Common Issues & Troubleshooting - -1. **Transaction Failures** - - Ensure sufficient CFX balance for transactions - - Verify correct address format (cfx: for Core Space, 0x for eSpace) - - Check RPC endpoint connectivity - -## Security Best Practices - -1. **Private Key Management** - - Store private keys securely using environment variables - - Never expose private keys in code or logs - - Use separate accounts for development and production - -## Development Guide - -### Setting Up Development Environment - -1. Clone the repository -2. Install dependencies: - -```bash -pnpm install -``` - -3. Build the plugin: - -```bash -pnpm run build -``` - -4. Run the plugin: - -```bash -pnpm run dev -``` - -## Future Enhancements - -1. **Advanced Token Management** - - - Batch token transfers - - Token allowance management - - Advanced meme token features - - Token metadata management - -2. **Enhanced Bridge Operations** - - - Multi-token bridge support - - Automated bridge fee optimization - - Bridge transaction status tracking - - Cross-space batch operations - -3. **Smart Contract Integration** - - - Contract deployment tools - - Contract interaction templates - - ABI management system - - Contract verification tools - -4. **Performance Optimizations** - - - Transaction batching - - Improved caching mechanisms - - Gas optimization strategies - - Network request optimization - -5. **Developer Tools** - - - CLI tools for common operations - - Development environment templates - - Testing utilities - - Documentation generator - -6. **Security Features** - - - Transaction simulation - - Risk assessment tools - - Address validation improvements - - Rate limiting controls - -7. **Monitoring and Analytics** - - Transaction tracking dashboard - - Performance metrics - - Error reporting system - - Usage analytics - -We welcome community feedback and contributions to help prioritize these enhancements. - -## Contributing - -Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information. - -## Credits - -This plugin integrates with and builds upon several key technologies: - -- [Conflux Network](https://confluxnetwork.org/): Hybrid consensus blockchain -- [js-conflux-sdk](https://www.npmjs.com/package/js-conflux-sdk): Official Conflux JavaScript SDK -- [ConfiPump](https://confipump.io/): Meme token creation platform -- [@conflux-dev/conflux-address-js](https://www.npmjs.com/package/@conflux-dev/conflux-address-js): Address utilities - -Special thanks to: - -- The Conflux Foundation for developing the network -- The Conflux Developer community -- The ConfiPump team for meme token infrastructure -- The js-conflux-sdk maintainers -- The Eliza community for their contributions and feedback - -For more information about Conflux capabilities: - -- [Conflux Documentation](https://developer.confluxnetwork.org/) -- [Conflux Portal](https://portal.confluxnetwork.org/) -- [ConfluxScan](https://confluxscan.io/) -- [Cross-Space Bridge](https://bridge.confluxnetwork.org/) - -## License - -This plugin is part of the Eliza project. See the main project repository for license information. diff --git a/packages/plugin-conflux/__tests__/actions/bridgeTransfer.test.ts b/packages/plugin-conflux/__tests__/actions/bridgeTransfer.test.ts deleted file mode 100644 index c8f4c1df40565..0000000000000 --- a/packages/plugin-conflux/__tests__/actions/bridgeTransfer.test.ts +++ /dev/null @@ -1,191 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { bridgeTransfer } from '../../src/actions/bridgeTransfer'; -import { - type IAgentRuntime, - type Memory, - ModelClass, - ModelProviderName, - type State, - type HandlerCallback, -} from '@elizaos/core'; -import * as core from '@elizaos/core'; -import { createPublicClient, createWalletClient, encodeFunctionData } from 'cive'; -import { hexAddressToBase32 } from 'cive/utils'; - -// Mock generateObject -vi.mock('@elizaos/core', async () => { - const actual = await vi.importActual('@elizaos/core'); - return { - ...actual, - generateObject: vi.fn().mockResolvedValue({ - object: { - to: '0x119DA8bbe74B1C5c987D0c64D10eC1dB301d4752', - amount: 1 - } - }), - }; -}); - -// Mock cive functions -vi.mock('cive', () => ({ - createPublicClient: vi.fn(() => ({ - getChainId: vi.fn().mockResolvedValue(1), - })), - createWalletClient: vi.fn(() => ({ - sendTransaction: vi.fn().mockResolvedValue('0x123'), - })), - http: vi.fn(), - parseCFX: vi.fn().mockReturnValue(BigInt(1000000000000000000)), // 1 CFX - encodeFunctionData: vi.fn().mockReturnValue('0x123456'), -})); - -vi.mock('cive/accounts', () => ({ - privateKeyToAccount: vi.fn().mockReturnValue({ - address: '0x123', - signTransaction: vi.fn(), - }), -})); - -vi.mock('cive/utils', () => ({ - hexAddressToBase32: vi.fn().mockReturnValue('cfxtest:test-address'), -})); - -describe('bridgeTransfer action', () => { - const mockRuntime: IAgentRuntime = { - getSetting: vi.fn((key: string) => { - switch (key) { - case 'CONFLUX_CORE_PRIVATE_KEY': - return '0x1234567890abcdef'; - case 'CONFLUX_CORE_SPACE_RPC_URL': - return 'https://test.confluxrpc.com'; - default: - return undefined; - } - }), - composeState: vi.fn().mockResolvedValue({}), - updateRecentMessageState: vi.fn().mockResolvedValue({}), - generateText: vi.fn(), - model: { - [ModelClass.SMALL]: { - name: 'gpt-4', - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - stop: [], - }, - [ModelClass.MEDIUM]: { - name: 'gpt-4', - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - stop: [], - }, - [ModelClass.LARGE]: { - name: 'gpt-4', - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - stop: [], - } - }, - modelProvider: ModelProviderName.OPENAI, - }; - - const mockMessage: Memory = { - content: { - text: 'Send 1 CFX to eSpace Address 0x119DA8bbe74B1C5c987D0c64D10eC1dB301d4752', - }, - }; - - const mockCallback: HandlerCallback = vi.fn(); - - beforeEach(() => { - vi.clearAllMocks(); - }); - - it('should validate successfully', async () => { - const result = await bridgeTransfer.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - - it('should handle successful bridge transfer', async () => { - const result = await bridgeTransfer.handler( - mockRuntime, - mockMessage, - undefined, - undefined, - mockCallback - ); - - expect(result).toBe(true); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('1 CFX sent to 0x119DA8bbe74B1C5c987D0c64D10eC1dB301d4752'), - content: { - to: '0x119DA8bbe74B1C5c987D0c64D10eC1dB301d4752', - amount: 1 - } - }); - }); - - it('should handle bridge transfer with existing state', async () => { - const mockState = {}; - const result = await bridgeTransfer.handler( - mockRuntime, - mockMessage, - mockState, - undefined, - mockCallback - ); - - expect(result).toBe(true); - expect(mockRuntime.updateRecentMessageState).toHaveBeenCalledWith(mockState); - }); - - it('should handle invalid content generation', async () => { - vi.mocked(core.generateObject).mockRejectedValueOnce(new Error('Invalid content')); - await expect( - bridgeTransfer.handler(mockRuntime, mockMessage) - ).rejects.toThrow('Invalid content'); - }); - - it('should handle bridge transfer failure', async () => { - const mockError = new Error('Bridge transfer failed'); - vi.mocked(createWalletClient).mockImplementationOnce(() => ({ - sendTransaction: vi.fn().mockRejectedValue(mockError), - })); - - const result = await bridgeTransfer.handler( - mockRuntime, - mockMessage, - undefined, - undefined, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Failed to send 1 CFX to 0x119DA8bbe74B1C5c987D0c64D10eC1dB301d4752'), - }); - }); - - it('should use correct cross-space call contract address', async () => { - await bridgeTransfer.handler( - mockRuntime, - mockMessage, - undefined, - undefined, - mockCallback - ); - - expect(hexAddressToBase32).toHaveBeenCalledWith({ - hexAddress: '0x0888000000000000000000000000000000000006', - networkId: 1, - }); - }); -}); diff --git a/packages/plugin-conflux/__tests__/actions/transfer.test.ts b/packages/plugin-conflux/__tests__/actions/transfer.test.ts deleted file mode 100644 index 4609ea20f37e1..0000000000000 --- a/packages/plugin-conflux/__tests__/actions/transfer.test.ts +++ /dev/null @@ -1,175 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { transfer } from '../../src/actions/transfer'; -import { - ModelClass, - ModelProviderName, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback -} from '@elizaos/core'; -import * as core from '@elizaos/core'; -import { createPublicClient, createWalletClient } from 'cive'; - -// Mock generateObject -vi.mock('@elizaos/core', async () => { - const actual = await vi.importActual('@elizaos/core'); - return { - ...actual, - generateObject: vi.fn().mockResolvedValue({ - object: { - to: 'cfxtest:test-address', - amount: 1 - } - }), - }; -}); - -// Mock cive functions -vi.mock('cive', () => ({ - createPublicClient: vi.fn(() => ({ - getChainId: vi.fn().mockResolvedValue(1), - waitForTransactionReceipt: vi.fn().mockResolvedValue({}), - })), - createWalletClient: vi.fn(() => ({ - sendTransaction: vi.fn().mockResolvedValue('0x123'), - })), - privateKeyToAccount: vi.fn().mockReturnValue({ - address: '0x123', - }), - http: vi.fn(), - parseCFX: vi.fn().mockReturnValue(BigInt(1)), - testnet: {}, -})); - -vi.mock('cive/accounts', () => ({ - privateKeyToAccount: vi.fn().mockReturnValue({ - address: '0x123', - signTransaction: vi.fn(), - }), -})); - -describe('transfer action', () => { - const mockRuntime: IAgentRuntime = { - getSetting: vi.fn((key: string) => { - switch (key) { - case 'CONFLUX_CORE_PRIVATE_KEY': - return '0x1234567890abcdef'; - case 'CONFLUX_CORE_SPACE_RPC_URL': - return 'https://test.confluxrpc.com'; - default: - return undefined; - } - }), - composeState: vi.fn().mockResolvedValue({}), - updateRecentMessageState: vi.fn().mockResolvedValue({}), - generateText: vi.fn(), - model: { - [ModelClass.SMALL]: { - name: 'gpt-4', - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - stop: [], - }, - [ModelClass.MEDIUM]: { - name: 'gpt-4', - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - stop: [], - }, - [ModelClass.LARGE]: { - name: 'gpt-4', - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - stop: [], - } - }, - modelProvider: ModelProviderName.OPENAI, - }; - - const mockMessage: Memory = { - content: { - text: 'Send 1 CFX to cfxtest:test-address', - }, - }; - - const mockCallback: HandlerCallback = vi.fn(); - - beforeEach(() => { - vi.clearAllMocks(); - }); - - it('should validate successfully', async () => { - const result = await transfer.validate(mockRuntime, mockMessage); - expect(result).toBe(true); - }); - - it('should handle successful transfer', async () => { - const result = await transfer.handler( - mockRuntime, - mockMessage, - undefined, - undefined, - mockCallback - ); - - expect(result).toBe(true); - expect(mockCallback).toHaveBeenCalledWith({ - text: '1 CFX sent to cfxtest:test-address: 0x123', - content: { - to: 'cfxtest:test-address', - amount: 1 - } - }); - }); - - it('should handle transfer with existing state', async () => { - const mockState = {}; - const result = await transfer.handler( - mockRuntime, - mockMessage, - mockState, - undefined, - mockCallback - ); - - expect(result).toBe(true); - expect(mockRuntime.updateRecentMessageState).toHaveBeenCalledWith(mockState); - }); - - it('should handle invalid content generation', async () => { - vi.mocked(core.generateObject).mockRejectedValueOnce(new Error('Invalid content')); - await expect( - transfer.handler(mockRuntime, mockMessage) - ).rejects.toThrow('Invalid content'); - }); - - it('should handle transfer failure', async () => { - const mockError = new Error('Transfer failed'); - vi.mocked(createWalletClient).mockImplementationOnce(() => ({ - sendTransaction: vi.fn().mockRejectedValue(mockError), - })); - - const result = await transfer.handler( - mockRuntime, - mockMessage, - undefined, - undefined, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: expect.stringContaining('Failed to send 1 CFX to cfxtest:test-address'), - }); - }); -}); diff --git a/packages/plugin-conflux/__tests__/setup.ts b/packages/plugin-conflux/__tests__/setup.ts deleted file mode 100644 index 7b44297fa4436..0000000000000 --- a/packages/plugin-conflux/__tests__/setup.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { vi } from 'vitest'; -// Mock console methods to avoid cluttering test output -global.console = { - ...console, - log: vi.fn(), - error: vi.fn(), - warn: vi.fn(), - info: vi.fn(), - debug: vi.fn(), -}; diff --git a/packages/plugin-conflux/biome.json b/packages/plugin-conflux/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-conflux/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-conflux/eslint.config.mjs b/packages/plugin-conflux/eslint.config.mjs deleted file mode 100644 index 92fe5bbebefad..0000000000000 --- a/packages/plugin-conflux/eslint.config.mjs +++ /dev/null @@ -1,3 +0,0 @@ -import eslintGlobalConfig from "../../eslint.config.mjs"; - -export default [...eslintGlobalConfig]; diff --git a/packages/plugin-conflux/package.json b/packages/plugin-conflux/package.json deleted file mode 100644 index 763fe97c2ead6..0000000000000 --- a/packages/plugin-conflux/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "@elizaos/plugin-conflux", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "cive": "0.7.1" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "vitest": "1.4.0" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write .", - "test": "vitest run" - } -} diff --git a/packages/plugin-conflux/src/abi/crossSpaceCall.ts b/packages/plugin-conflux/src/abi/crossSpaceCall.ts deleted file mode 100644 index f9ad2a67a073d..0000000000000 --- a/packages/plugin-conflux/src/abi/crossSpaceCall.ts +++ /dev/null @@ -1,184 +0,0 @@ -const CrossSpaceCallAbi = [ - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "bytes20", - name: "sender", - type: "bytes20", - }, - { - indexed: true, - internalType: "bytes20", - name: "receiver", - type: "bytes20", - }, - { - indexed: false, - internalType: "uint256", - name: "value", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "nonce", - type: "uint256", - }, - { - indexed: false, - internalType: "bytes", - name: "data", - type: "bytes", - }, - ], - name: "Call", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "bytes20", - name: "sender", - type: "bytes20", - }, - { - indexed: true, - internalType: "bytes20", - name: "contract_address", - type: "bytes20", - }, - { - indexed: false, - internalType: "uint256", - name: "value", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "nonce", - type: "uint256", - }, - { - indexed: false, - internalType: "bytes", - name: "init", - type: "bytes", - }, - ], - name: "Create", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "bool", - name: "success", - type: "bool", - }, - ], - name: "Outcome", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "bytes20", - name: "sender", - type: "bytes20", - }, - { - indexed: true, - internalType: "address", - name: "receiver", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "value", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "nonce", - type: "uint256", - }, - ], - name: "Withdraw", - type: "event", - }, - { - inputs: [{ internalType: "bytes", name: "init", type: "bytes" }], - name: "createEVM", - outputs: [{ internalType: "bytes20", name: "", type: "bytes20" }], - stateMutability: "payable", - type: "function", - }, - { - inputs: [{ internalType: "bytes20", name: "to", type: "bytes20" }], - name: "transferEVM", - outputs: [{ internalType: "bytes", name: "output", type: "bytes" }], - stateMutability: "payable", - type: "function", - }, - { - inputs: [ - { internalType: "bytes20", name: "to", type: "bytes20" }, - { internalType: "bytes", name: "data", type: "bytes" }, - ], - name: "callEVM", - outputs: [{ internalType: "bytes", name: "output", type: "bytes" }], - stateMutability: "payable", - type: "function", - }, - { - inputs: [ - { internalType: "bytes20", name: "to", type: "bytes20" }, - { internalType: "bytes", name: "data", type: "bytes" }, - ], - name: "staticCallEVM", - outputs: [{ internalType: "bytes", name: "output", type: "bytes" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [], - name: "deployEip1820", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [{ internalType: "uint256", name: "value", type: "uint256" }], - name: "withdrawFromMapped", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [{ internalType: "address", name: "addr", type: "address" }], - name: "mappedBalance", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, - { - inputs: [{ internalType: "address", name: "addr", type: "address" }], - name: "mappedNonce", - outputs: [{ internalType: "uint256", name: "", type: "uint256" }], - stateMutability: "view", - type: "function", - }, -]; - -export default CrossSpaceCallAbi; diff --git a/packages/plugin-conflux/src/abi/erc20.ts b/packages/plugin-conflux/src/abi/erc20.ts deleted file mode 100644 index 47e5e6b8ea743..0000000000000 --- a/packages/plugin-conflux/src/abi/erc20.ts +++ /dev/null @@ -1,119 +0,0 @@ -const ERC20ABI = [ - { - constant: true, - inputs: [], - name: "name", - outputs: [{ name: "", type: "string" }], - payable: false, - stateMutability: "view", - type: "function", - }, - { - constant: false, - inputs: [ - { name: "_spender", type: "address" }, - { name: "_value", type: "uint256" }, - ], - name: "approve", - outputs: [{ name: "", type: "bool" }], - payable: false, - stateMutability: "view", - type: "function", - }, - { - constant: true, - inputs: [], - name: "totalSupply", - outputs: [{ name: "", type: "uint256" }], - payable: false, - stateMutability: "view", - type: "function", - }, - { - constant: false, - inputs: [ - { name: "_from", type: "address" }, - { name: "_to", type: "address" }, - { name: "_value", type: "uint256" }, - ], - name: "transferFrom", - outputs: [{ name: "", type: "bool" }], - payable: false, - stateMutability: "nonpayable", - type: "function", - }, - { - constant: true, - inputs: [], - name: "decimals", - outputs: [{ name: "", type: "uint8" }], - payable: false, - stateMutability: "view", - type: "function", - }, - { - constant: true, - inputs: [{ name: "_owner", type: "address" }], - name: "balanceOf", - outputs: [{ name: "balance", type: "uint256" }], - payable: false, - stateMutability: "view", - type: "function", - }, - { - constant: true, - inputs: [], - name: "symbol", - outputs: [{ name: "", type: "string" }], - payable: false, - stateMutability: "view", - type: "function", - }, - { - constant: false, - inputs: [ - { name: "_to", type: "address" }, - { name: "_value", type: "uint256" }, - ], - name: "transfer", - outputs: [{ name: "", type: "bool" }], - payable: false, - stateMutability: "nonpayable", - type: "function", - }, - { - constant: true, - inputs: [ - { name: "_owner", type: "address" }, - { name: "_spender", type: "address" }, - ], - name: "allowance", - outputs: [{ name: "", type: "uint256" }], - payable: false, - stateMutability: "view", - type: "function", - }, - { payable: true, stateMutability: "payable", type: "fallback" }, - { - anonymous: false, - inputs: [ - { indexed: true, name: "owner", type: "address" }, - { indexed: true, name: "spender", type: "address" }, - { indexed: false, name: "value", type: "uint256" }, - ], - name: "Approval", - type: "event", - }, - { - anonymous: false, - inputs: [ - { indexed: true, name: "from", type: "address" }, - { indexed: true, name: "to", type: "address" }, - { indexed: false, name: "value", type: "uint256" }, - ], - name: "Transfer", - type: "event", - }, -] as const; - -export default ERC20ABI; diff --git a/packages/plugin-conflux/src/abi/meme.ts b/packages/plugin-conflux/src/abi/meme.ts deleted file mode 100644 index 7ea7672ae1592..0000000000000 --- a/packages/plugin-conflux/src/abi/meme.ts +++ /dev/null @@ -1,1671 +0,0 @@ -const MEMEABI = [ - { - inputs: [ - { - components: [ - { - internalType: "address", - name: "tokenImpl_", - type: "address", - }, - { - internalType: "address", - name: "tokenImplV2_", - type: "address", - }, - { - internalType: "uint256", - name: "feeRate_", - type: "uint256", - }, - { - internalType: "address", - name: "feeReceiver_", - type: "address", - }, - { - internalType: "address", - name: "dexLauncher_", - type: "address", - }, - { - internalType: "enum IConfiPumpTypes.DexThreshType", - name: "defaultDexThreshType_", - type: "uint8", - }, - { - internalType: "enum IConfiPumpTypes.CurveType", - name: "defaultCurveType_", - type: "uint8", - }, - { - internalType: "enum IConfiPumpTypes.TokenVersion", - name: "defaultTokenVersion_", - type: "uint8", - }, - { - internalType: "address", - name: "v2Factory_", - type: "address", - }, - { - internalType: "bytes32", - name: "v2InitCodeHash_", - type: "bytes32", - }, - { - internalType: "address", - name: "weth_", - type: "address", - }, - { - internalType: "uint256", - name: "creation_fee_", - type: "uint256", - }, - { - internalType: "uint256", - name: "lpEth_", - type: "uint256", - }, - { - internalType: "uint256", - name: "lpEthTokenCreator_", - type: "uint256", - }, - ], - internalType: "struct ConfiPumpBase.ConfiPumpInitParams", - name: "params", - type: "tuple", - }, - ], - stateMutability: "nonpayable", - type: "constructor", - }, - { - inputs: [ - { - internalType: "uint256", - name: "actualAmount", - type: "uint256", - }, - { - internalType: "uint256", - name: "amount1", - type: "uint256", - }, - ], - name: "ActualAmountMustLTEAmount", - type: "error", - }, - { - inputs: [ - { - internalType: "uint256", - name: "amount", - type: "uint256", - }, - ], - name: "AmountTooSmall", - type: "error", - }, - { - inputs: [], - name: "CallReverted", - type: "error", - }, - { - inputs: [], - name: "FeatureDisabled", - type: "error", - }, - { - inputs: [], - name: "GameNotLive", - type: "error", - }, - { - inputs: [], - name: "GameNotPaused", - type: "error", - }, - { - inputs: [], - name: "GameNotPending", - type: "error", - }, - { - inputs: [], - name: "GameNotStarted", - type: "error", - }, - { - inputs: [], - name: "InvalidDEXSupplyThreshold", - type: "error", - }, - { - inputs: [ - { - internalType: "uint256", - name: "threshold", - type: "uint256", - }, - ], - name: "InvalidDexThreshold", - type: "error", - }, - { - inputs: [ - { - internalType: "enum IConfiPumpTypes.DexThreshType", - name: "threshold", - type: "uint8", - }, - ], - name: "InvalidDexThresholdType", - type: "error", - }, - { - inputs: [], - name: "InvalidGameSupplyThreshold", - type: "error", - }, - { - inputs: [], - name: "InvalidLocks", - type: "error", - }, - { - inputs: [ - { - internalType: "uint256", - name: "expected", - type: "uint256", - }, - { - internalType: "uint256", - name: "actual", - type: "uint256", - }, - ], - name: "InvalidPiggybackLength", - type: "error", - }, - { - inputs: [ - { - internalType: "uint256", - name: "id", - type: "uint256", - }, - ], - name: "InvalidRoundID", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "signer", - type: "address", - }, - ], - name: "InvalidSigner", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - name: "InvalidTokenForBattle", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - { - internalType: "enum IConfiPumpTypes.TokenMode", - name: "mode", - type: "uint8", - }, - ], - name: "InvalidTokenModeForGame", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - { - internalType: "enum IConfiPumpTypes.TokenMode", - name: "from", - type: "uint8", - }, - { - internalType: "enum IConfiPumpTypes.TokenMode", - name: "to", - type: "uint8", - }, - ], - name: "InvalidTokenModeTransition", - type: "error", - }, - { - inputs: [], - name: "LastRoundNotResolved", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "expected", - type: "address", - }, - { - internalType: "address", - name: "actual", - type: "address", - }, - ], - name: "MismatchedAddressInProof", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "srcToken", - type: "address", - }, - { - internalType: "address", - name: "dstToken", - type: "address", - }, - ], - name: "NoConversionPath", - type: "error", - }, - { - inputs: [ - { - internalType: "uint256", - name: "created", - type: "uint256", - }, - { - internalType: "uint256", - name: "max", - type: "uint256", - }, - ], - name: "NoQuotaForCreator", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "collection", - type: "address", - }, - ], - name: "NonPositionNFTReceived", - type: "error", - }, - { - inputs: [], - name: "NotImplemented", - type: "error", - }, - { - inputs: [], - name: "NotRoller", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "sender", - type: "address", - }, - ], - name: "NotUniswapV3Pool", - type: "error", - }, - { - inputs: [], - name: "PermissionlessCreateDisabled", - type: "error", - }, - { - inputs: [ - { - internalType: "uint160", - name: "sqrtPriceA", - type: "uint160", - }, - { - internalType: "uint160", - name: "sqrtPriceB", - type: "uint160", - }, - ], - name: "PriceAMustLTPriceB", - type: "error", - }, - { - inputs: [], - name: "ProtocolDisabled", - type: "error", - }, - { - inputs: [ - { - internalType: "uint256", - name: "requiredToken", - type: "uint256", - }, - { - internalType: "uint256", - name: "reserveToken", - type: "uint256", - }, - ], - name: "RequiredTokenMustLTE", - type: "error", - }, - { - inputs: [ - { - internalType: "uint256", - name: "id", - type: "uint256", - }, - ], - name: "RoundNotFound", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "tokenA", - type: "address", - }, - ], - name: "SameToken", - type: "error", - }, - { - inputs: [ - { - internalType: "uint256", - name: "seq", - type: "uint256", - }, - ], - name: "SeqNotFound", - type: "error", - }, - { - inputs: [ - { - internalType: "uint256", - name: "actualAmount", - type: "uint256", - }, - { - internalType: "uint256", - name: "minAmount", - type: "uint256", - }, - ], - name: "SlippageTooHigh", - type: "error", - }, - { - inputs: [], - name: "StakingDisabled", - type: "error", - }, - { - inputs: [ - { - internalType: "uint256", - name: "newSupply", - type: "uint256", - }, - ], - name: "SupplyExceedsTotalSupply", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - name: "TokenAlreadyDEXed", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - name: "TokenAlreadyInGame", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - name: "TokenInDuel", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - name: "TokenKilled", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - name: "TokenNotDEXed", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - name: "TokenNotFound", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - name: "TokenNotKilled", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - name: "TokenNotTradable", - type: "error", - }, - { - inputs: [], - name: "TradeDisabled", - type: "error", - }, - { - inputs: [ - { - internalType: "address", - name: "pool", - type: "address", - }, - { - internalType: "uint256", - name: "liquidity", - type: "uint256", - }, - ], - name: "UniswapV2PoolNotZero", - type: "error", - }, - { - inputs: [], - name: "UniswapV3Slot0Failed", - type: "error", - }, - { - inputs: [ - { - internalType: "uint256", - name: "next", - type: "uint256", - }, - ], - name: "cannotCheckInUntil", - type: "error", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "oldFlags", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "newFlags", - type: "uint256", - }, - ], - name: "BitFlagsChanged", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "address", - name: "user", - type: "address", - }, - ], - name: "CheckedIn", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "address", - name: "token", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "newSupply", - type: "uint256", - }, - ], - name: "FlapTokenCirculatingSupplyChanged", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint8", - name: "version", - type: "uint8", - }, - ], - name: "Initialized", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "address", - name: "token", - type: "address", - }, - { - indexed: false, - internalType: "address", - name: "pool", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "amount", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "eth", - type: "uint256", - }, - ], - name: "LaunchedToDEX", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "bytes32", - name: "role", - type: "bytes32", - }, - { - indexed: true, - internalType: "bytes32", - name: "previousAdminRole", - type: "bytes32", - }, - { - indexed: true, - internalType: "bytes32", - name: "newAdminRole", - type: "bytes32", - }, - ], - name: "RoleAdminChanged", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "bytes32", - name: "role", - type: "bytes32", - }, - { - indexed: true, - internalType: "address", - name: "account", - type: "address", - }, - { - indexed: true, - internalType: "address", - name: "sender", - type: "address", - }, - ], - name: "RoleGranted", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: true, - internalType: "bytes32", - name: "role", - type: "bytes32", - }, - { - indexed: true, - internalType: "address", - name: "account", - type: "address", - }, - { - indexed: true, - internalType: "address", - name: "sender", - type: "address", - }, - ], - name: "RoleRevoked", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "ts", - type: "uint256", - }, - { - indexed: false, - internalType: "address", - name: "token", - type: "address", - }, - { - indexed: false, - internalType: "address", - name: "buyer", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "amount", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "eth", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "fee", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "postPrice", - type: "uint256", - }, - ], - name: "TokenBought", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "ts", - type: "uint256", - }, - { - indexed: false, - internalType: "address", - name: "creator", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "nonce", - type: "uint256", - }, - { - indexed: false, - internalType: "address", - name: "token", - type: "address", - }, - { - indexed: false, - internalType: "string", - name: "name", - type: "string", - }, - { - indexed: false, - internalType: "string", - name: "symbol", - type: "string", - }, - { - indexed: false, - internalType: "string", - name: "meta", - type: "string", - }, - ], - name: "TokenCreated", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "address", - name: "token", - type: "address", - }, - { - indexed: false, - internalType: "address", - name: "curve", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "curveParameter", - type: "uint256", - }, - ], - name: "TokenCurveSet", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "address", - name: "token", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "dexSupplyThresh", - type: "uint256", - }, - ], - name: "TokenDexSupplyThreshSet", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "ts", - type: "uint256", - }, - { - indexed: false, - internalType: "address", - name: "srcToken", - type: "address", - }, - { - indexed: false, - internalType: "address", - name: "dstToken", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "srcAmount", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "dstAmount", - type: "uint256", - }, - { - indexed: false, - internalType: "address", - name: "who", - type: "address", - }, - ], - name: "TokenRedeemed", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "uint256", - name: "ts", - type: "uint256", - }, - { - indexed: false, - internalType: "address", - name: "token", - type: "address", - }, - { - indexed: false, - internalType: "address", - name: "seller", - type: "address", - }, - { - indexed: false, - internalType: "uint256", - name: "amount", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "eth", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "fee", - type: "uint256", - }, - { - indexed: false, - internalType: "uint256", - name: "postPrice", - type: "uint256", - }, - ], - name: "TokenSold", - type: "event", - }, - { - anonymous: false, - inputs: [ - { - indexed: false, - internalType: "address", - name: "token", - type: "address", - }, - { - indexed: false, - internalType: "enum IConfiPumpTypes.TokenVersion", - name: "version", - type: "uint8", - }, - ], - name: "TokenVersionSet", - type: "event", - }, - { - stateMutability: "nonpayable", - type: "fallback", - }, - { - inputs: [], - name: "DEFAULT_ADMIN_ROLE", - outputs: [ - { - internalType: "bytes32", - name: "", - type: "bytes32", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - { - internalType: "address", - name: "recipient", - type: "address", - }, - { - internalType: "uint256", - name: "minAmount", - type: "uint256", - }, - { - internalType: "bool", - name: "isCreator", - type: "bool", - }, - ], - name: "buy", - outputs: [ - { - internalType: "uint256", - name: "", - type: "uint256", - }, - ], - stateMutability: "payable", - type: "function", - }, - { - inputs: [], - name: "checkIn", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { - internalType: "bytes32", - name: "role", - type: "bytes32", - }, - ], - name: "getRoleAdmin", - outputs: [ - { - internalType: "bytes32", - name: "", - type: "bytes32", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - name: "getToken", - outputs: [ - { - components: [ - { - internalType: "enum IConfiPumpTypes.TokenStatus", - name: "status", - type: "uint8", - }, - { - internalType: "uint256", - name: "reserve", - type: "uint256", - }, - { - internalType: "uint256", - name: "circulatingSupply", - type: "uint256", - }, - { - internalType: "uint256", - name: "price", - type: "uint256", - }, - { - internalType: "bool", - name: "inGame", - type: "bool", - }, - { - internalType: "uint256", - name: "seqInGame", - type: "uint256", - }, - ], - internalType: "struct IConfiPumpTypes.TokenState", - name: "", - type: "tuple", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - name: "getTokenEx", - outputs: [ - { - components: [ - { - internalType: "enum IConfiPumpTypes.TokenStatus", - name: "status", - type: "uint8", - }, - { - internalType: "uint256", - name: "reserve", - type: "uint256", - }, - { - internalType: "uint256", - name: "circulatingSupply", - type: "uint256", - }, - { - internalType: "uint256", - name: "price", - type: "uint256", - }, - { - internalType: "bool", - name: "inGame", - type: "bool", - }, - { - internalType: "uint256", - name: "seqInGame", - type: "uint256", - }, - { - internalType: "enum IConfiPumpTypes.TokenMode", - name: "mode", - type: "uint8", - }, - ], - internalType: "struct IConfiPumpTypes.TokenStateEx", - name: "", - type: "tuple", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - name: "getTokenV2", - outputs: [ - { - components: [ - { - internalType: "enum IConfiPumpTypes.TokenStatus", - name: "status", - type: "uint8", - }, - { - internalType: "uint256", - name: "reserve", - type: "uint256", - }, - { - internalType: "uint256", - name: "circulatingSupply", - type: "uint256", - }, - { - internalType: "uint256", - name: "price", - type: "uint256", - }, - { - internalType: "enum IConfiPumpTypes.TokenVersion", - name: "tokenVersion", - type: "uint8", - }, - { - internalType: "uint256", - name: "r", - type: "uint256", - }, - { - internalType: "uint256", - name: "dexSupplyThresh", - type: "uint256", - }, - ], - internalType: "struct IConfiPumpTypes.TokenStateV2", - name: "state", - type: "tuple", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "bytes32", - name: "role", - type: "bytes32", - }, - { - internalType: "address", - name: "account", - type: "address", - }, - ], - name: "grantRole", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { - internalType: "bytes32", - name: "role", - type: "bytes32", - }, - { - internalType: "address", - name: "account", - type: "address", - }, - ], - name: "hasRole", - outputs: [ - { - internalType: "bool", - name: "", - type: "bool", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "address", - name: "admin", - type: "address", - }, - ], - name: "initialize", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { - internalType: "address", - name: "account", - type: "address", - }, - ], - name: "lastCheckIn", - outputs: [ - { - internalType: "uint256", - name: "", - type: "uint256", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "string", - name: "name", - type: "string", - }, - { - internalType: "string", - name: "symbol", - type: "string", - }, - { - internalType: "string", - name: "meta", - type: "string", - }, - ], - name: "newToken", - outputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - stateMutability: "payable", - type: "function", - }, - { - inputs: [ - { - internalType: "string", - name: "name", - type: "string", - }, - { - internalType: "string", - name: "symbol", - type: "string", - }, - { - internalType: "string", - name: "meta", - type: "string", - }, - ], - name: "newTokenNoDuel", - outputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - stateMutability: "payable", - type: "function", - }, - { - inputs: [ - { - internalType: "string", - name: "name", - type: "string", - }, - { - internalType: "string", - name: "symbol", - type: "string", - }, - { - internalType: "string", - name: "meta", - type: "string", - }, - { - internalType: "enum IConfiPumpTypes.DexThreshType", - name: "dexTreshType", - type: "uint8", - }, - ], - name: "newTokenWithDexSupplyThresh", - outputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - ], - stateMutability: "payable", - type: "function", - }, - { - inputs: [], - name: "nonce", - outputs: [ - { - internalType: "uint256", - name: "", - type: "uint256", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - { - internalType: "uint256", - name: "eth", - type: "uint256", - }, - ], - name: "previewBuy", - outputs: [ - { - internalType: "uint256", - name: "amount", - type: "uint256", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "address", - name: "srcToken", - type: "address", - }, - { - internalType: "address", - name: "dstToken", - type: "address", - }, - { - internalType: "uint256", - name: "srcAmount", - type: "uint256", - }, - ], - name: "previewRedeem", - outputs: [ - { - internalType: "uint256", - name: "dstAmount", - type: "uint256", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - { - internalType: "uint256", - name: "amount", - type: "uint256", - }, - ], - name: "previewSell", - outputs: [ - { - internalType: "uint256", - name: "eth", - type: "uint256", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "address", - name: "srcToken", - type: "address", - }, - { - internalType: "address", - name: "dstToken", - type: "address", - }, - { - internalType: "uint256", - name: "srcAmount", - type: "uint256", - }, - ], - name: "redeem", - outputs: [ - { - internalType: "uint256", - name: "dstAmount", - type: "uint256", - }, - ], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { - internalType: "bytes32", - name: "role", - type: "bytes32", - }, - { - internalType: "address", - name: "account", - type: "address", - }, - ], - name: "renounceRole", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { - internalType: "bytes32", - name: "role", - type: "bytes32", - }, - { - internalType: "address", - name: "account", - type: "address", - }, - ], - name: "revokeRole", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { - internalType: "address", - name: "token", - type: "address", - }, - { - internalType: "uint256", - name: "amount", - type: "uint256", - }, - { - internalType: "uint256", - name: "minEth", - type: "uint256", - }, - ], - name: "sell", - outputs: [ - { - internalType: "uint256", - name: "", - type: "uint256", - }, - ], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { - internalType: "uint256", - name: "flags", - type: "uint256", - }, - ], - name: "setBitFlags", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - { - inputs: [ - { - internalType: "bytes4", - name: "interfaceId", - type: "bytes4", - }, - ], - name: "supportsInterface", - outputs: [ - { - internalType: "bool", - name: "", - type: "bool", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "address", - name: "", - type: "address", - }, - ], - name: "tokenCreators", - outputs: [ - { - internalType: "address", - name: "", - type: "address", - }, - ], - stateMutability: "view", - type: "function", - }, - { - inputs: [ - { - internalType: "address", - name: "", - type: "address", - }, - ], - name: "tokenCreatorsFeeBalance", - outputs: [ - { - internalType: "uint256", - name: "", - type: "uint256", - }, - ], - stateMutability: "view", - type: "function", - }, - { - stateMutability: "payable", - type: "receive", - }, -] as const; - -export default MEMEABI; diff --git a/packages/plugin-conflux/src/actions/bridgeTransfer.ts b/packages/plugin-conflux/src/actions/bridgeTransfer.ts deleted file mode 100644 index dd6f8f1c44301..0000000000000 --- a/packages/plugin-conflux/src/actions/bridgeTransfer.ts +++ /dev/null @@ -1,166 +0,0 @@ -import type { - Action, - IAgentRuntime, - Memory, - State, - HandlerCallback, -} from "@elizaos/core"; -import { - generateObject, - composeContext, - ModelClass, -} from "@elizaos/core"; -import { - createPublicClient, - createWalletClient, - http, - parseCFX, - encodeFunctionData, -} from "cive"; -import { hexAddressToBase32 } from "cive/utils"; -import { privateKeyToAccount } from "cive/accounts"; -import { testnet } from "cive/chains"; -import { confluxBridgeTransferTemplate } from "../templates/bridgeTransfer"; -import { TransferSchema, isTransferContent } from "../types"; -import CrossSpaceCallAbi from "../abi/crossSpaceCall"; - -const bridgeSendCFX = async ( - secretKey: `0x${string}`, - rpcUrl: string, - espaceTo: `0x${string}`, - amount: string -) => { - const client = createPublicClient({ - transport: http(rpcUrl), - }); - const networkId = await client.getChainId(); - const account = privateKeyToAccount(secretKey, { networkId }); - - const walletClient = createWalletClient({ - transport: http(rpcUrl), - chain: testnet, - }); - - const toAddress = hexAddressToBase32({ - hexAddress: "0x0888000000000000000000000000000000000006", - networkId, - }); // crossSpaceCall Address - - const hash = await walletClient.sendTransaction({ - account, - to: toAddress, - value: parseCFX(amount), - chain: testnet, - data: encodeFunctionData({ - abi: CrossSpaceCallAbi, - functionName: "transferEVM", - args: [espaceTo], - }), - }); - - // await client.waitForTransactionReceipt({ - // hash, - // }); - return hash; -}; - -export const bridgeTransfer: Action = { - name: "BRIDGE_SEND_CFX", - description: - "Bridge transfer CFX from Conflux Core Space to another in Conflux eSpace. The address is a 0x-prefix address", - similes: [ - "BRIDGE_SEND_CONFLUX", - "CROSS_SPACE_SEND_CFX", - "BRIDGE_TRANSFER_CFX", - "CROSS_SPACE_TRANSFER_CFX", - ], - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Send 1 CFX to eSpace Address 0x119DA8bbe74B1C5c987D0c64D10eC1dB301d4752", - }, - }, - { - user: "{{user2}}", - content: { - text: "1 CFX sent to espace Address 0x119DA8bbe74B1C5c987D0c64D10eC1dB301d4752: 0x1234567890abcdef", - content: { - to: "0x119DA8bbe74B1C5c987D0c64D10eC1dB301d4752", - amount: "1", - }, - }, - }, - ], - ], - // eslint-disable-next-line - validate: async (_runtime: IAgentRuntime, _message: Memory) => { - // no extra validation needed - return true; - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state?: State, - _options?: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - const context = composeContext({ - state: currentState, - template: confluxBridgeTransferTemplate, - }); - - const content = await generateObject({ - runtime, - context, - modelClass: ModelClass.LARGE, - schema: TransferSchema, - }); - - if (!isTransferContent(content.object)) { - throw new Error("Invalid content"); - } - - const secretKey = runtime.getSetting( - "CONFLUX_CORE_PRIVATE_KEY" - ) as `0x${string}`; - const rpcUrl = runtime.getSetting("CONFLUX_CORE_SPACE_RPC_URL"); - - let success = false; - - try { - const hash = await bridgeSendCFX( - secretKey, - rpcUrl, - content.object.to as `0x${string}`, - content.object.amount.toString() - ); - success = true; - if (!callback) { - return success; - } - callback({ - text: `${content.object.amount} CFX sent to ${content.object.to}: ${hash}`, - content: content.object, - }); - } catch (error) { - console.error(`Error sending CFX: ${error}`); - if (!callback) { - return success; - } - callback({ - text: `Failed to send ${content.object.amount} CFX to ${content.object.to}: ${error}`, - }); - } - return success; - }, -}; diff --git a/packages/plugin-conflux/src/actions/confiPump.ts b/packages/plugin-conflux/src/actions/confiPump.ts deleted file mode 100644 index bfc71e3c56e09..0000000000000 --- a/packages/plugin-conflux/src/actions/confiPump.ts +++ /dev/null @@ -1,361 +0,0 @@ -import { - type Action, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback, - elizaLogger, -} from "@elizaos/core"; -import { generateObject, composeContext, ModelClass } from "@elizaos/core"; -import { - createPublicClient, - createWalletClient, - http, - parseEther, - encodeFunctionData, - type WalletClient, - type Account, -} from "viem"; -import { privateKeyToAccount } from "viem/accounts"; -import { confluxESpaceTestnet } from "viem/chains"; -import { parseUnits, getAddress } from "viem/utils"; -import { confiPumpTemplate } from "../templates/confiPump"; - -import { - PumpSchema, - isPumpContent, - isPumpBuyContent, - isPumpCreateContent, - isPumpSellContent, -} from "../types"; -import MEMEABI from "../abi/meme"; -import ERC20ABI from "../abi/erc20"; - -// Helper function to check and approve token allowance if needed -async function ensureAllowance( - walletClient: WalletClient, - rpcUrl: string, - account: Account, - tokenAddress: `0x${string}`, - memeAddress: `0x${string}`, - amount: bigint -) { - elizaLogger.log( - `Checking allowance: token: ${tokenAddress} meme: ${memeAddress} amount: ${amount}` - ); - - const publicClient = createPublicClient({ - transport: http(rpcUrl), - chain: confluxESpaceTestnet, - }); - - const allowance = await publicClient.readContract({ - address: tokenAddress, - abi: ERC20ABI, - functionName: "allowance", - args: [account.address, memeAddress], - }); - - elizaLogger.log("allowance:", allowance); - - if (allowance < amount) { - elizaLogger.log( - `allowance(${allowance}) is less than amount(${amount}), approving...` - ); - - const hash = await walletClient.sendTransaction({ - account, - to: tokenAddress, - data: encodeFunctionData({ - abi: ERC20ABI, - functionName: "approve", - args: [memeAddress, amount - allowance], - }), - chain: confluxESpaceTestnet, - kzg: null, - }); - - elizaLogger.log(`Approving hash: ${hash}`); - await publicClient.waitForTransactionReceipt({ hash }); - elizaLogger.log(`Approving success: ${hash}`); - } else { - elizaLogger.log("No need to approve"); - } -} - -// Main ConfiPump action definition -export const confiPump: Action = { - name: "CONFI_PUMP", - description: - "Perform actions on ConfiPump, for example create a new token, buy a token, or sell a token.", - similes: ["SELL_TOKEN", "BUY_TOKEN", "CREATE_TOKEN"], - examples: [ - // Create token example - [ - { - user: "{{user1}}", - content: { - text: "Create a new token called GLITCHIZA with symbol GLITCHIZA and generate a description about it.", - }, - }, - { - user: "{{user2}}", - content: { - text: "Token GLITCHIZA (GLITCHIZA) created successfully!\nContract Address: 0x1234567890abcdef\n", - action: "CREATE_TOKEN", - content: { - tokenInfo: { - symbol: "GLITCHIZA", - address: - "EugPwuZ8oUMWsYHeBGERWvELfLGFmA1taDtmY8uMeX6r", - creator: - "9jW8FPr6BSSsemWPV22UUCzSqkVdTp6HTyPqeqyuBbCa", - name: "GLITCHIZA", - description: "A GLITCHIZA token", - }, - amount: "1", - }, - }, - }, - ], - // Buy token example - [ - { - user: "{{user1}}", - content: { - text: "Buy 0.00069 CFX worth of GLITCHIZA(0x1234567890abcdef)", - }, - }, - { - user: "{{user2}}", - content: { - text: "0.00069 CFX bought successfully!", - action: "BUY_TOKEN", - content: { - address: "0x1234567890abcdef", - amount: "0.00069", - }, - }, - }, - ], - // Sell token example - [ - { - user: "{{user1}}", - content: { - text: "Sell 0.00069 CFX worth of GLITCHIZA(0x1234567890abcdef)", - }, - }, - { - user: "{{user2}}", - content: { - text: "0.00069 CFX sold successfully: 0x1234567890abcdef", - action: "SELL_TOKEN", - content: { - address: "0x1234567890abcdef", - amount: "0.00069", - }, - }, - }, - ], - ], - // eslint-disable-next-line - validate: async (_runtime: IAgentRuntime, _message: Memory) => { - return true; // No extra validation needed - }, - - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state?: State, - _options?: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - let success = false; - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Generate content based on template - const context = composeContext({ - state: currentState, - template: confiPumpTemplate, - }); - - const content = await generateObject({ - runtime, - context, - modelClass: ModelClass.LARGE, - schema: PumpSchema, - }); - - if (!isPumpContent(content.object)) { - throw new Error("Invalid content"); - } - - // Setup clients and account - const rpcUrl = runtime.getSetting("CONFLUX_ESPACE_RPC_URL"); - const account = privateKeyToAccount( - runtime.getSetting("CONFLUX_ESPACE_PRIVATE_KEY") as `0x${string}` - ); - const walletClient = createWalletClient({ - transport: http(rpcUrl), - }); - - const contentObject = content.object; - let data: `0x${string}`; - let value: bigint; - - try { - // Handle different action types - switch (contentObject.action) { - case "CREATE_TOKEN": - if (!isPumpCreateContent(contentObject)) { - elizaLogger.error( - "Invalid PumpCreateContent: ", - contentObject - ); - throw new Error("Invalid PumpCreateContent"); - } - elizaLogger.log( - "creating: ", - contentObject.params.name, - contentObject.params.symbol, - contentObject.params.description - ); - data = encodeFunctionData({ - abi: MEMEABI, - functionName: "newToken", - args: [ - contentObject.params.name, - contentObject.params.symbol, - contentObject.params.description, - ], - }); - value = parseEther("10"); - break; - - case "BUY_TOKEN": - if (!isPumpBuyContent(contentObject)) { - elizaLogger.error( - "Invalid PumpBuyContent: ", - contentObject - ); - throw new Error("Invalid PumpBuyContent"); - } - value = parseUnits( - contentObject.params.value.toString(), - 18 - ); - elizaLogger.log( - "buying: ", - contentObject.params.tokenAddress, - value - ); - data = encodeFunctionData({ - abi: MEMEABI, - functionName: "buy", - args: [ - contentObject.params.tokenAddress as `0x${string}`, - account.address, - 0n, - false, - ], - }); - break; - - case "SELL_TOKEN": { - if (!isPumpSellContent(contentObject)) { - elizaLogger.error( - "Invalid PumpSellContent: ", - contentObject - ); - throw new Error("Invalid PumpSellContent"); - } - const tokenAddress = getAddress( - contentObject.params.tokenAddress as `0x${string}` - ); - elizaLogger.log( - "selling: ", - tokenAddress, - account.address, - contentObject.params.value - ); - const amountUnits = parseUnits( - contentObject.params.value.toString(), - 18 - ); - - await ensureAllowance( - walletClient, - rpcUrl, - account, - tokenAddress as `0x${string}`, - runtime.getSetting( - "CONFLUX_MEME_CONTRACT_ADDRESS" - ) as `0x${string}`, - amountUnits - ); - - data = encodeFunctionData({ - abi: MEMEABI, - functionName: "sell", - args: [tokenAddress, amountUnits, 0n], - }); - value = 0n; - break; - } - } - - // Simulate and execute transaction - const publicClient = createPublicClient({ - transport: http(rpcUrl), - chain: confluxESpaceTestnet, - }); - - const memeContractAddress = runtime.getSetting( - "CONFLUX_MEME_CONTRACT_ADDRESS" - ) as `0x${string}`; - - const simulate = await publicClient.call({ - to: memeContractAddress, - data, - value, - account, - }); - elizaLogger.log("simulate: ", simulate); - - const hash = await walletClient.sendTransaction({ - account, - to: memeContractAddress, - data, - chain: confluxESpaceTestnet, - kzg: null, - value, - }); - - success = true; - - if (callback) { - callback({ - text: `Perform the action successfully: ${content.object.action}: ${hash}`, - content: content.object, - }); - } - } catch (error) { - elizaLogger.error(`Error performing the action: ${error}`); - if (callback) { - callback({ - text: `Failed to perform the action: ${content.object.action}: ${error}`, - }); - } - } - - return success; - }, -}; diff --git a/packages/plugin-conflux/src/actions/transfer.ts b/packages/plugin-conflux/src/actions/transfer.ts deleted file mode 100644 index dbc9f5ad153c5..0000000000000 --- a/packages/plugin-conflux/src/actions/transfer.ts +++ /dev/null @@ -1,142 +0,0 @@ -import type { - Action, - IAgentRuntime, - Memory, - State, - HandlerCallback, -} from "@elizaos/core"; -import { - generateObject, - composeContext, - ModelClass, -} from "@elizaos/core"; -import { createPublicClient, createWalletClient, http, parseCFX } from "cive"; -import { privateKeyToAccount } from "cive/accounts"; -import { testnet } from "cive/chains"; -import { confluxTransferTemplate } from "../templates/transfer"; -import { TransferSchema, isTransferContent } from "../types"; - -const sendCFX = async ( - secretKey: `0x${string}`, - rpcUrl: string, - to: string, - amount: string -) => { - const client = createPublicClient({ - transport: http(rpcUrl), - }); - const networkId = await client.getChainId(); - const account = privateKeyToAccount(secretKey, { networkId }); - - const walletClient = createWalletClient({ - transport: http(rpcUrl), - chain: testnet, - }); - - const hash = await walletClient.sendTransaction({ - account, - to, - value: parseCFX(amount), - chain: testnet, - }); - - // await client.waitForTransactionReceipt({ - // hash, - // }); - return hash; -}; - -export const transfer: Action = { - name: "SEND_CFX", - description: - "Transfer CFX to another address in Conflux Core Space. The address starts with `cfx:` or `cfxtest:`", - similes: ["SEND_CONFLUX", "SEND_CFX_CORE_SPACE", "TRANSFER_CFX"], - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Send 1 CFX to cfx:aaejuaaaaaaaaaaaaaaaaaaaaaaaaaaaa2eaeg85p5", - }, - }, - { - user: "{{user2}}", - content: { - text: "1 CFX sent to cfx:aaejuaaaaaaaaaaaaaaaaaaaaaaaaaaaa2eaeg85p5: 0x1234567890abcdef", - content: { - to: "cfx:aaejuaaaaaaaaaaaaaaaaaaaaaaaaaaaa2eaeg85p5", - amount: "1", - }, - }, - }, - ], - ], - // eslint-disable-next-line - validate: async (_runtime: IAgentRuntime, _message: Memory) => { - // no extra validation needed - return true; - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state?: State, - _options?: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - const context = composeContext({ - state: currentState, - template: confluxTransferTemplate, - }); - - const content = await generateObject({ - runtime, - context, - modelClass: ModelClass.SMALL, - schema: TransferSchema, - }); - - if (!isTransferContent(content.object)) { - throw new Error("Invalid content"); - } - - const secretKey = runtime.getSetting( - "CONFLUX_CORE_PRIVATE_KEY" - ) as `0x${string}`; - const rpcUrl = runtime.getSetting("CONFLUX_CORE_SPACE_RPC_URL"); - - let success = false; - - try { - const hash = await sendCFX( - secretKey, - rpcUrl, - content.object.to, - content.object.amount.toString() - ); - success = true; - if (!callback) { - return success; - } - callback({ - text: `${content.object.amount} CFX sent to ${content.object.to}: ${hash}`, - content: content.object, - }); - } catch (error) { - console.error(`Error sending CFX: ${error}`); - if (!callback) { - return success; - } - callback({ - text: `Failed to send ${content.object.amount} CFX to ${content.object.to}: ${error}`, - }); - } - return success; - }, -}; diff --git a/packages/plugin-conflux/src/index.ts b/packages/plugin-conflux/src/index.ts deleted file mode 100644 index fc7ef20d1c10d..0000000000000 --- a/packages/plugin-conflux/src/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { transfer } from "./actions/transfer"; -import { bridgeTransfer } from "./actions/bridgeTransfer"; -import { confiPump } from "./actions/confiPump"; - -export const confluxPlugin: Plugin = { - name: "conflux", - description: "Conflux Plugin for Eliza", - actions: [transfer, bridgeTransfer, confiPump], - providers: [], -}; diff --git a/packages/plugin-conflux/src/templates/bridgeTransfer.ts b/packages/plugin-conflux/src/templates/bridgeTransfer.ts deleted file mode 100644 index ca5fdea32ba0c..0000000000000 --- a/packages/plugin-conflux/src/templates/bridgeTransfer.ts +++ /dev/null @@ -1,7 +0,0 @@ -export const confluxBridgeTransferTemplate = ` -Extract Conflux Cross Space Transfer Parameters from the latest message: - -{{recentMessages}} - -The to address should be the Conflux eSpace address, starting with "0x". -`; diff --git a/packages/plugin-conflux/src/templates/confiPump.ts b/packages/plugin-conflux/src/templates/confiPump.ts deleted file mode 100644 index b3047fc802748..0000000000000 --- a/packages/plugin-conflux/src/templates/confiPump.ts +++ /dev/null @@ -1,9 +0,0 @@ -export const confiPumpTemplate = ` -Extract Conflux ConfiPump Parameters, including token creation, buy, and sell, from the latest messages: - -{{recentMessages}} - -For token creation, should come up with a name, symbol, and description. -For token buy, should come up with the amount of CFX to buy which token (with token address starting with 0x). -For token sell, should come up with the amount of token to sell (with token address starting with 0x). -`; diff --git a/packages/plugin-conflux/src/templates/transfer.ts b/packages/plugin-conflux/src/templates/transfer.ts deleted file mode 100644 index 57fef7ad0d4a5..0000000000000 --- a/packages/plugin-conflux/src/templates/transfer.ts +++ /dev/null @@ -1,7 +0,0 @@ -export const confluxTransferTemplate = ` -Extract Conflux Core Space Transfer Parameters from the latest message: - -{{recentMessages}} - -The to address should be the Conflux Core Space address, starting with "cfx:" or "cfxtest:". -`; diff --git a/packages/plugin-conflux/src/types.ts b/packages/plugin-conflux/src/types.ts deleted file mode 100644 index ac6615bdedc2c..0000000000000 --- a/packages/plugin-conflux/src/types.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { z } from "zod"; - -export const TransferSchema = z.object({ - to: z.string(), - amount: z.number(), // use number ignoring decimals issue -}); - -export interface TransferContent { - to: string; - amount: number; -} - -export const isTransferContent = (object: any): object is TransferContent => { - if (TransferSchema.safeParse(object).success) { - return true; - } - console.error("Invalid content: ", object); - return false; -}; - -export const PumpCreateSchema = z.object({ - action: z.literal("CREATE_TOKEN"), - params: z.object({ - symbol: z.string(), - name: z.string(), - description: z.string(), - }), -}); - -export const PumpBuySchema = z.object({ - action: z.literal("BUY_TOKEN"), - params: z.object({ - tokenAddress: z.string(), - value: z.number(), - }), -}); - -export const PumpSellSchema = z.object({ - action: z.literal("SELL_TOKEN"), - params: z.object({ - tokenAddress: z.string(), - value: z.number(), - }), -}); - -export const PumpSchema = z.union([ - PumpCreateSchema, - PumpBuySchema, - PumpSellSchema, -]); - -export type PumpContent = z.infer; -export type PumpCreateContent = z.infer; -export type PumpBuyContent = z.infer; -export type PumpSellContent = z.infer; - -export function isPumpContent(object: any): object is PumpContent { - if (PumpSchema.safeParse(object).success) { - return true; - } - console.error("Invalid content: ", object); - return false; -} - -export function isPumpCreateContent(object: any): object is PumpCreateContent { - return PumpCreateSchema.safeParse(object).success; -} - -export function isPumpBuyContent(object: any): object is PumpBuyContent { - return PumpBuySchema.safeParse(object).success; -} - -export function isPumpSellContent(object: any): object is PumpSellContent { - return PumpSellSchema.safeParse(object).success; -} diff --git a/packages/plugin-conflux/tsconfig.json b/packages/plugin-conflux/tsconfig.json deleted file mode 100644 index 2f2bfd6e53d3a..0000000000000 --- a/packages/plugin-conflux/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "./src" - }, - "include": ["src"] -} diff --git a/packages/plugin-conflux/tsup.config.ts b/packages/plugin-conflux/tsup.config.ts deleted file mode 100644 index f63d4d37fcfde..0000000000000 --- a/packages/plugin-conflux/tsup.config.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "cive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-conflux/vitest.config.ts b/packages/plugin-conflux/vitest.config.ts deleted file mode 100644 index 98cca09b2ca22..0000000000000 --- a/packages/plugin-conflux/vitest.config.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - setupFiles: ['./__tests__/setup.ts'], - include: ['**/__tests__/**/*.test.ts'], - }, -}); diff --git a/packages/plugin-cosmos/README.md b/packages/plugin-cosmos/README.md deleted file mode 100644 index 79b00efd50528..0000000000000 --- a/packages/plugin-cosmos/README.md +++ /dev/null @@ -1,223 +0,0 @@ -# `@elizaos/plugin-cosmos` - -This plugin provides actions and utilities for interacting with Cosmos-compatible blockchains. - ---- - -## Development - -Prepare Eliza according to [README](../../README.md) -Add variables required for `@elizaos/plugin-cosmos` : - -``` -COSMOS_RECOVERY_PHRASE=your recovery phrase words -COSMOS_AVAILABLE_CHAINS=chain1,chain2,chain3 -``` - -Ensure the appropriate environment variables are added for the plugin. If they are correctly configured, the project will run with `@elizaos/plugin-cosmos` - -Run Eliza - -``` -pnpm run dev -``` - -## Configuration - -### Default Setup - -To start using the plugin, you need to provide your **Cosmos account recovery phrases** and the list of **available chains**. Add the following to your `.env` file: - -```env -COSMOS_RECOVERY_PHRASE=your recovery phrase words -COSMOS_AVAILABLE_CHAINS=chain1,chain2,chain3 -``` - -Ensure that the chain names in `COSMOS_AVAILABLE_CHAINS` match the identifiers from the [chain-registry](https://github.com/cosmos/chain-registry) library for compatibility. - -### Using the Cosmos Helper Character - -The plugin includes a pre-configured character, `cosmosHelper.character.json`, optimized for Cosmos-related operations. This character enhances interaction by: - -- Handling repeated prompts effectively. - -- Requesting confirmation before executing detected actions. - -To use the character, pass it with the `--characters` flag: - -```bash ---characters='../characters/cosmosHelper.character.json' -``` - ---- - -### Custom chain configuration - -Plugin allows you to pass you custom chain config to `createCosmosPlugin` function invoked in `../agent/src/index`. - -Your custom configuration fulfills the interfaces from `chain-registry` - -``` -import type { assets, chains } from "chain-registry"; - -export interface ICosmosPluginCustomChainData { - chainData: (typeof chains)[number]; - assets: (typeof assets)[number]; -} - -export interface ICosmosPluginOptions { - customChainData?: ICosmosPluginCustomChainData[]; -} -``` - -## Actions - -### Token Transfer - -This plugin supports a token transfer action, which allows users to transfer tokens between addresses on Cosmos-compatible blockchains. The action prompts for confirmation to ensure secure execution. - -#### Example Prompts - -Below are examples of how the transfer action can be initiated and confirmed: - -**Example** - -1. User input: - -``` -Make transfer 0.0001 ATOM to cosmos1nk3uuw6zt5t5aqw5fvujkd54sa4uws9xv465ad on cosmoshubtestnet. -``` - -2. Plugin response: - -``` -Do you confirm the transfer action? -``` - -3. User confirmation: - -``` -Yes -``` - -4. Action executed. - -### Token IBC Transfer - -This plugin supports a token transfer action, which allows users to transfer tokens between addresses on Cosmos-compatible blockchains between different chains. - -#### Example Prompts - -Below are examples of how the ibc transfer action can be initiated and confirmed: - -**Example** - -1. User input: - -``` -Make an IBC transfer 0.0001 OSMO to neutron1nk3uuw6zt5t5aqw5fvujkd54sa4uws9xg2nk82 from osmosistestnet to neutrontestnet -``` - -2. Plugin response: - -``` -Before making the IBC transfer, I would like to confirm the details. You would like to transfer 0.0001 OSMO from osmosistestnet to neutrontestnet, specifically to the address neutron1nk3uuw6zt5t5aqw5fvujkd54sa4uws9xg2nk82, is that correct? -``` - -3. User confirmation: - -``` -Yes -``` - -4. Action executed. - ---- - -### Token IBC Swap -This action allows swapping tokens between chains. The implementation of swapping is based on the Skip API and uses the @skip-go/client library. -To place transactions on chains, they must first be added to the env file. Specifically: -```env -COSMOS_AVAILABLE_CHAINS=osmosis,neutron,axelar,cosmoshub,terra2,pryzm -``` -Keep in mind that most swaps require intermediate chains. These chains must also be included in the env file. - -You can check which chains are supported by the Skip API and this plugin here: [Skip API Documentation](https://docs.skip.build/). - -The list below contains all supported chains extracted from the Skip API: -```env -COSMOS_AVAILABLE_CHAINS=terra2,quicksilver,coreum,regen,mars,passage,dhealth,lumnetwork,provenance,chihuahua,pryzm,fetchhub,comdex,kyve,xpla,umee,celestia,osmosis,empowerchain,migaloo,dymension,kujira,self,humans,gitopia,agoric,doravota,int3face,quasar,gravitybridge,kava,sifchain,seda,shentu,decentr,cronos,carbon,stride,haqq,jackal,omniflixhub,noble,sentinel,nolus,odin,assetmantle,dydx,beezee,axelar,nois,mantrachain,elys,crescent,synternet,xion,source,akash,sei,canto,teritori,stargaze,titan,aura,evmos,archway,gateway,cheqd,milkyway,nibiru,cosmoshub,ununifi,nyx,bitsong,cryptoorgchain,neutron,penumbra,terra,shido,sommelier,saga,secretnetwork,chain4energy,juno,andromeda,planq,lava,oraichain,injective,persistence,composable -``` - -#### Example prompts: - -##### Default flow -An example of the default flow of a swap request: - -1. User input: -``` - Swap 10 OSMO on osmosis to ATOM on cosmoshub -``` -2. Agent asks for confirmation: -``` -About to swap 10 OSMO on osmosis to ATAOM on cosmoshub. I would like to confirm the transaction details. Please confirm that you would like to proceed with the swap. -``` -3. User confiramation: -``` -confirming / yes -``` -4. Agent calls action and responds after completing: -``` -Successfuly swapped 10 OSMO tokens to ATOM on chain cosmoshub. -Transaction Hash: E84F36D6EEFAA6D7B70827F34EDAB83258BB86EFE33AEA1F4559D00B30CD3B50 -``` - -##### Special case handling -An example of a flow when there is more than one token with the same symbol on the destination chain (e.g., multiple USDC tokens): -1. User input: -``` -Swap 10 OSMO on osmosis to ATOM on cosmoshub -``` -2. Agent asks for confirmation: -``` -About to swap 10 OSMO on osmosis to ATAOM on cosmoshub. I would like to confirm the transaction details. Please confirm that you would like to proceed with the swap. -``` -3. User confirmation: -``` -confirming / yes -``` -4. Model response: -``` -I will now initiate the swap of 10 OSMO on the Osmosis chain to USDC on the Axelar chain. Please wait for the transaction to complete. If you need to swap more assets, feel free to let me know. - -Error occured. Swap was not performed. Please provide denom for coin: USDC, on Chain Name: axelar. It is necessary as the symbol USDC is not unique among coins on chain axelar. -Select one from found assets: -Symbol: USDC Desc: Circle's stablecoin on Axelar Denom: uusdc, -Symbol: USDC Desc: Circle's stablecoin from Polygon on Axelar Denom: polygon-uusdc, -Symbol: USDC Desc: Circle's stablecoin from Avalanche on Axelar Denom: avalanche-uusdc -``` -5. User response: -``` -Swap 10 OSMO on osmosis to USDC with denom uusdc on axelar -``` -6. Action call and agent response: -``` -Successfuly swapped 10 OSMO tokens to USDC uusdc on chain axelar. -Transaction Hash: E84F36D6EEFAA6D7B70827F34EDAB83258BB86EFE33AEA1F4559D00B30CD3B50 -``` - -## Contribution - -The plugin includes comprehensive tests. Before submitting any pull requests, ensure all tests pass. - -### Running Tests - -Navigate to the `plugin-cosmos` directory and execute: - -```bash -pnpm test -``` - ---- - -This plugin simplifies Cosmos blockchain interactions and enhances productivity with its tailored features. Happy building! diff --git a/packages/plugin-cosmos/biome.json b/packages/plugin-cosmos/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-cosmos/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-cosmos/package.json b/packages/plugin-cosmos/package.json deleted file mode 100644 index a62520f8db5c1..0000000000000 --- a/packages/plugin-cosmos/package.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "@elizaos/plugin-cosmos", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "@chain-registry/utils": "^1.51.41", - "@cosmjs/cosmwasm-stargate": "^0.32.4", - "@cosmjs/proto-signing": "^0.32.4", - "@cosmjs/stargate": "^0.32.4", - "@skip-go/client": "^0.16.3", - "axios": "^1.7.9", - "bignumber.js": "9.1.2", - "chain-registry": "^1.69.68", - "interchain": "^1.10.4", - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "@chain-registry/types": "^0.50.44" - } -} diff --git a/packages/plugin-cosmos/src/actions/ibc-swap/index.ts b/packages/plugin-cosmos/src/actions/ibc-swap/index.ts deleted file mode 100644 index 033ec79429595..0000000000000 --- a/packages/plugin-cosmos/src/actions/ibc-swap/index.ts +++ /dev/null @@ -1,196 +0,0 @@ -import { - composeContext, - generateObjectDeprecated, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, -} from "@elizaos/core"; - -import { initWalletChainsData } from "../../providers/wallet/utils"; -import { cosmosIBCSwapTemplate } from "../../templates"; -import type { - ICosmosPluginOptions, - ICosmosWalletChains, -} from "../../shared/interfaces"; -import type { IBCSwapActionParams } from "./types.ts"; -import { IBCSwapAction } from "./services/ibc-swap-action-service.ts"; -import { prepareAmbiguityErrorMessage } from "./services/ibc-swap-utils.ts"; - -export const createIBCSwapAction = (pluginOptions: ICosmosPluginOptions) => ({ - name: "COSMOS_IBC_SWAP", - description: "Swaps tokens on cosmos chains", - handler: async ( - _runtime: IAgentRuntime, - _message: Memory, - state: State, - _options: { [key: string]: unknown }, - _callback?: HandlerCallback - ) => { - const cosmosIBCSwapContext = composeContext({ - state: state, - template: cosmosIBCSwapTemplate, - templatingEngine: "handlebars", - }); - - const cosmosIBCSwapContent = await generateObjectDeprecated({ - runtime: _runtime, - context: cosmosIBCSwapContext, - modelClass: ModelClass.SMALL, - }); - - const paramOptions: IBCSwapActionParams = { - fromChainName: cosmosIBCSwapContent.fromChainName, - fromTokenSymbol: cosmosIBCSwapContent.fromTokenSymbol, - fromTokenAmount: cosmosIBCSwapContent.fromTokenAmount, - toTokenSymbol: cosmosIBCSwapContent.toTokenSymbol, - toChainName: cosmosIBCSwapContent.toChainName, - toTokenDenom: cosmosIBCSwapContent?.toTokenDenom || undefined, - fromTokenDenom: cosmosIBCSwapContent?.fromTokenDenom || undefined, - }; - - console.log( - "Parameters extracted from user prompt: ", - JSON.stringify(paramOptions, null, 2) - ); - - try { - const walletProvider: ICosmosWalletChains = - await initWalletChainsData(_runtime); - - const action = new IBCSwapAction(walletProvider); - - const customAssets = (pluginOptions?.customChainData ?? []).map( - (chainData) => chainData.assets - ); - - if (_callback) { - - const swapResp = await action.execute( - paramOptions, - customAssets, - _callback - ); - - const text = - swapResp.status === "STATE_COMPLETED_SUCCESS" - ? `Successfully swapped ${swapResp.fromTokenAmount} ${swapResp.fromTokenSymbol} tokens to ${swapResp.toTokenSymbol} on chain ${swapResp.toChainName}.\nTransaction Hash: ${swapResp.txHash}` - : `Error occured swapping ${swapResp.fromTokenAmount} ${swapResp.fromTokenSymbol} tokens to ${swapResp.toTokenSymbol} on chain ${swapResp.toChainName}.\nTransaction Hash: ${swapResp.txHash}, try again`; - await _callback({ - text: text, - content: { - success: - swapResp.status === "STATE_COMPLETED_SUCCESS", - hash: swapResp.txHash, - fromTokenAmount: paramOptions.fromTokenAmount, - fromToken: paramOptions.fromTokenSymbol, - toToken: paramOptions.toTokenSymbol, - fromChain: paramOptions.fromChainName, - toChain: paramOptions.toChainName, - }, - }); - } - return true; - } catch (error) { - console.error("Error during ibc token swap:", error); - - const regex = - /Ambiguity Error.*value:([^\s.]+)\s+chainName:([^\s.]+)/; - const match = error.message.match(regex); - - if (match) { - const value = match[1]; - const chainName = match[2]; - - if (_callback) { - await _callback({ - text: prepareAmbiguityErrorMessage(value, chainName), - content: { error: error.message }, - }); - } - } else { - console.error("Unhandled error:", error); - - if (_callback) { - await _callback({ - text: `Error ibc swapping tokens: ${error.message}`, - content: { error: error.message }, - }); - } - } - return false; - } - }, - template: cosmosIBCSwapTemplate, - validate: async (runtime: IAgentRuntime) => { - const mnemonic = runtime.getSetting("COSMOS_RECOVERY_PHRASE"); - const availableChains = runtime.getSetting("COSMOS_AVAILABLE_CHAINS"); - const availableChainsArray = availableChains?.split(","); - - return !(mnemonic && availableChains && availableChainsArray.length); - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Swap {{0.0001 ATOM}} from {{cosmoshub}} to {{OM}} on {{mantrachain1}}", - action: "COSMOS_IBC_SWAP", - }, - }, - { - user: "{{user2}}", - content: { - text: "Do you confirm the swap?", - action: "COSMOS_IBC_SWAP", - }, - }, - { - user: "{{user1}}", - content: { - text: "Yes", - action: "COSMOS_IBC_SWAP", - }, - }, - { - user: "{{user2}}", - content: { - text: "Starting swap transaction. Keep in mind that it might take couple of minutes", - action: "COSMOS_IBC_SWAP", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Swap {{0.0001 OM}} from {{mantrachain}} to {{OSMO}} on {{osmosis}}", - action: "COSMOS_IBC_SWAP", - }, - }, - { - user: "{{user2}}", - content: { - text: "Do you confirm the swap?", - action: "COSMOS_IBC_SWAP", - }, - }, - { - user: "{{user1}}", - content: { - text: "Yes", - action: "COSMOS_IBC_SWAP", - }, - }, - { - user: "{{user2}}", - content: { - text: "Starting swap transaction. Keep in mind that it might take couple of minutes", - action: "COSMOS_IBC_SWAP", - }, - }, - ], - ], - similes: ["COSMOS_SWAP", "COSMOS_SWAP_IBC"], -}); diff --git a/packages/plugin-cosmos/src/actions/ibc-swap/schema.ts b/packages/plugin-cosmos/src/actions/ibc-swap/schema.ts deleted file mode 100644 index 74f0c2c2d6fcc..0000000000000 --- a/packages/plugin-cosmos/src/actions/ibc-swap/schema.ts +++ /dev/null @@ -1,11 +0,0 @@ -import {z} from "zod"; - -export const IBCSwapParamsSchema = z.object({ - fromChainName: z.string().min(1), - fromTokenSymbol: z.string().regex(/^[A-Z0-9]+$/), - fromTokenAmount: z.string().regex(/^\d+$/), - toTokenSymbol: z.string().regex(/^[A-Z0-9]+$/), - toChainName: z.string().min(1), - toTokenDenom: z.string().regex(/^ibc\/[A-F0-9]{64}$/), - fromTokenDenom: z.string().regex(/^ibc\/[A-F0-9]{64}$/), -}); diff --git a/packages/plugin-cosmos/src/actions/ibc-swap/services/ibc-swap-action-service.ts b/packages/plugin-cosmos/src/actions/ibc-swap/services/ibc-swap-action-service.ts deleted file mode 100644 index 37c09ec1dd270..0000000000000 --- a/packages/plugin-cosmos/src/actions/ibc-swap/services/ibc-swap-action-service.ts +++ /dev/null @@ -1,136 +0,0 @@ -import {assets, chains} from "chain-registry"; -import type{ - ICosmosActionService, - ICosmosPluginCustomChainData, - ICosmosSwap, - ICosmosWalletChains, -} from "../../../shared/interfaces.ts"; -import type{IBCSwapActionParams} from "../types.ts"; -import { - convertDisplayUnitToBaseUnit, - getChainByChainName, - getChainNameByChainId, - getDenomBySymbol, - getExponentByDenom, -} from "@chain-registry/utils"; -import {getAvailableAssets} from "../../../shared/helpers/cosmos-assets.ts"; -import type {HandlerCallback} from "@elizaos/core"; - -export class IBCSwapAction implements ICosmosActionService { - constructor(private cosmosWalletChains: ICosmosWalletChains) { - this.cosmosWalletChains = cosmosWalletChains; - } - - async execute( - params: IBCSwapActionParams, - customChainAssets?: ICosmosPluginCustomChainData["assets"][], - _callback?: HandlerCallback - ): Promise { - const fromChain = getChainByChainName(chains, params.fromChainName); - if (!fromChain) { - throw new Error(`Cannot find source chain: ${params.fromChainName}`); - } - - const toChain = getChainByChainName(chains, params.toChainName); - if (!toChain) { - throw new Error(`Cannot find destination chain: ${params.toChainName}`); - } - - const availableAssets = getAvailableAssets(assets, customChainAssets); - - const denomFrom = - params.fromTokenDenom || - getDenomBySymbol( - availableAssets, - params.fromTokenSymbol, - params.fromChainName - ); - if (!denomFrom) { - throw new Error(`Cannot find source token denom for symbol: ${params.fromTokenSymbol}`); - } - - const exponentFrom = getExponentByDenom( - availableAssets, - denomFrom, - params.fromChainName - ); - - const denomTo = - params.toTokenDenom || - getDenomBySymbol( - availableAssets, - params.toTokenSymbol, - params.toChainName - ); - if (!denomTo) { - throw new Error(`Cannot find destination token denom for symbol: ${params.toTokenSymbol}`); - } - - console.log( - `Swap data: Swapping token ${denomFrom} with exponent ${exponentFrom} to token ${denomTo}` - ); - - const skipClient = this.cosmosWalletChains.getSkipClient( - params.fromChainName - ); - - const route = await skipClient.route({ - smartSwapOptions: {}, - amountOut: convertDisplayUnitToBaseUnit( - availableAssets, - params.fromTokenSymbol, - params.fromTokenAmount, - params.fromChainName - ), - sourceAssetDenom: denomFrom, - sourceAssetChainID: fromChain.chain_id, - destAssetDenom: denomTo, - destAssetChainID: toChain.chain_id, - }); - - // Required chains must be added to env file. Note that swaps can use intermediate chains to complete the swap request - // These chains should also be included - const userAddresses = await Promise.all( - route.requiredChainAddresses.map(async (chainID) => { - const chainName = getChainNameByChainId(chains, chainID); - return { - chainID, - address: - await this.cosmosWalletChains.getWalletAddress( - chainName - ), - }; - }) - ); - - if (_callback) { - await _callback({ - text: `Expected swap result: ${route.estimatedAmountOut} ${params.toTokenSymbol}, \nEstimated Fee: ${route.estimatedFees}. \nEstimated time: ${route.estimatedRouteDurationSeconds}`, - }); - } - - let result: ICosmosSwap; - - await skipClient.executeRoute({ - route, - userAddresses, - onTransactionCompleted: async (_chainID, txHash, status) => { - console.log( - `Route completed with tx hash: ${txHash} & status: ${status.state}` - ); - - result = { - status: status.state, - fromChainName: params.fromChainName, - fromTokenAmount: params.fromTokenAmount, - fromTokenSymbol: params.fromTokenSymbol, - toChainName: params.toChainName, - toTokenSymbol: params.toTokenSymbol, - txHash, - }; - }, - }); - - return result; - } -} diff --git a/packages/plugin-cosmos/src/actions/ibc-swap/services/ibc-swap-utils.ts b/packages/plugin-cosmos/src/actions/ibc-swap/services/ibc-swap-utils.ts deleted file mode 100644 index 7f6cdc48403e5..0000000000000 --- a/packages/plugin-cosmos/src/actions/ibc-swap/services/ibc-swap-utils.ts +++ /dev/null @@ -1,23 +0,0 @@ -import {assets} from "chain-registry"; - -export const prepareAmbiguityErrorMessage = ( - coinSymbol: string, - chainName: string -): string => { - const chainAssets = assets.find((chain) => chain.chain_name === chainName); - if (!chainAssets) { - throw new Error(`Chain ${chainName} not found in registry`); - } - - const ambiguousAssets = chainAssets.assets.filter( - (asset) => asset.symbol === coinSymbol - ); - - console.log( - `Ambiguous Assets found: ${JSON.stringify(ambiguousAssets, null, 2)}` - ); - - const assetsText = `${ambiguousAssets.map((a) => `Symbol: ${a.symbol} Desc: ${a.description} Denom: ${a.base}`).join(",\n")}`; - - return `Error occured. Swap was not performed. Please provide denom for coin: ${coinSymbol}, on Chain Name: ${chainName}. It is necessary as the symbol ${coinSymbol} is not unique among coins on chain ${chainName}. \n Select one from found assets:\n${assetsText}`; -}; diff --git a/packages/plugin-cosmos/src/actions/ibc-swap/types.ts b/packages/plugin-cosmos/src/actions/ibc-swap/types.ts deleted file mode 100644 index 1232c281884cb..0000000000000 --- a/packages/plugin-cosmos/src/actions/ibc-swap/types.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { z } from "zod"; -import type { IBCSwapParamsSchema } from "./schema.ts"; - -export type IBCSwapActionParams = z.infer; diff --git a/packages/plugin-cosmos/src/actions/ibc-transfer/index.ts b/packages/plugin-cosmos/src/actions/ibc-transfer/index.ts deleted file mode 100644 index f4c61d3ee4c35..0000000000000 --- a/packages/plugin-cosmos/src/actions/ibc-transfer/index.ts +++ /dev/null @@ -1,226 +0,0 @@ -import { - composeContext, - generateObjectDeprecated, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, -} from "@elizaos/core"; -import { initWalletChainsData } from "../../providers/wallet/utils"; -import { - cosmosIBCTransferTemplate, - cosmosTransferTemplate, -} from "../../templates"; -import type { - ICosmosPluginOptions, - ICosmosWalletChains, -} from "../../shared/interfaces"; -import type { IBCTransferActionParams } from "./types"; -import { IBCTransferAction } from "./services/ibc-transfer-action-service"; -import { bridgeDenomProvider } from "./services/bridge-denom-provider"; - -export const createIBCTransferAction = ( - pluginOptions: ICosmosPluginOptions -) => ({ - name: "COSMOS_IBC_TRANSFER", - description: "Transfer tokens between addresses on cosmos chains", - handler: async ( - _runtime: IAgentRuntime, - _message: Memory, - state: State, - _options: { [key: string]: unknown }, - _callback?: HandlerCallback - ) => { - const cosmosIBCTransferContext = composeContext({ - state: state, - template: cosmosIBCTransferTemplate, - templatingEngine: "handlebars", - }); - - const cosmosIBCTransferContent = await generateObjectDeprecated({ - runtime: _runtime, - context: cosmosIBCTransferContext, - modelClass: ModelClass.SMALL, - }); - - const paramOptions: IBCTransferActionParams = { - chainName: cosmosIBCTransferContent.chainName, - symbol: cosmosIBCTransferContent.symbol, - amount: cosmosIBCTransferContent.amount, - toAddress: cosmosIBCTransferContent.toAddress, - targetChainName: cosmosIBCTransferContent.targetChainName, - }; - - try { - const walletProvider: ICosmosWalletChains = - await initWalletChainsData(_runtime); - - const action = new IBCTransferAction(walletProvider); - - const customAssets = (pluginOptions?.customChainData ?? []).map( - (chainData) => chainData.assets - ); - - const transferResp = await action.execute( - paramOptions, - bridgeDenomProvider, - customAssets - ); - - if (_callback) { - await _callback({ - text: `Successfully transferred ${paramOptions.amount} tokens from ${paramOptions.chainName} to ${paramOptions.toAddress} on ${paramOptions.targetChainName}\nTransaction Hash: ${transferResp.txHash}`, - content: { - success: true, - hash: transferResp.txHash, - amount: paramOptions.amount, - recipient: transferResp.to, - fromChain: paramOptions.chainName, - toChain: paramOptions.targetChainName, - }, - }); - - const newMemory: Memory = { - userId: _message.agentId, - agentId: _message.agentId, - roomId: _message.roomId, - content: { - text: `Transaction ${paramOptions.amount} ${paramOptions.symbol} to address ${paramOptions.toAddress} from chain ${paramOptions.chainName} to ${paramOptions.targetChainName} was successfully transferred. Tx hash: ${transferResp.txHash}`, - }, - }; - - await _runtime.messageManager.createMemory(newMemory); - } - return true; - } catch (error) { - console.error("Error during ibc token transfer:", error); - - if (_callback) { - await _callback({ - text: `Error ibc transferring tokens: ${error.message}`, - content: { error: error.message }, - }); - } - - const newMemory: Memory = { - userId: _message.agentId, - agentId: _message.agentId, - roomId: _message.roomId, - content: { - text: `Transaction ${paramOptions.amount} ${paramOptions.symbol} to address ${paramOptions.toAddress} on chain ${paramOptions.chainName} to ${paramOptions.targetChainName} was unsuccessful.`, - }, - }; - - await _runtime.messageManager.createMemory(newMemory); - - return false; - } - }, - template: cosmosTransferTemplate, - validate: async (runtime: IAgentRuntime) => { - const mnemonic = runtime.getSetting("COSMOS_RECOVERY_PHRASE"); - const availableChains = runtime.getSetting("COSMOS_AVAILABLE_CHAINS"); - const availableChainsArray = availableChains?.split(","); - - return !!(mnemonic && availableChains && availableChainsArray.length); - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Make an IBC transfer {{0.0001 ATOM}} to {{osmosis1pcnw46km8m5amvf7jlk2ks5std75k73aralhcf}} from {{cosmoshub}} to {{osmosis}}", - action: "COSMOS_IBC_TRANSFER", - }, - }, - { - user: "{{user2}}", - content: { - text: "Do you confirm the IBC transfer action?", - action: "COSMOS_IBC_TRANSFER", - }, - }, - { - user: "{{user1}}", - content: { - text: "Yes", - action: "COSMOS_IBC_TRANSFER", - }, - }, - { - user: "{{user2}}", - content: { - text: "", - action: "COSMOS_IBC_TRANSFER", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Send {{50 OSMO}} to {{juno13248w8dtnn07sxc3gq4l3ts4rvfyat6f4qkdd6}} from {{osmosis}} to {{juno}}", - action: "COSMOS_IBC_TRANSFER", - }, - }, - { - user: "{{user2}}", - content: { - text: "Do you confirm the IBC transfer action?", - action: "COSMOS_IBC_TRANSFER", - }, - }, - { - user: "{{user1}}", - content: { - text: "Yes", - action: "COSMOS_IBC_TRANSFER", - }, - }, - { - user: "{{user2}}", - content: { - text: "", - action: "COSMOS_IBC_TRANSFER", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Transfer {{0.005 JUNO}} from {{juno}} to {{cosmos1n0xv7z2pkl4eppnm7g2rqhe2q8q6v69h7w93fc}} on {{cosmoshub}}", - action: "COSMOS_IBC_TRANSFER", - }, - }, - { - user: "{{user2}}", - content: { - text: "Do you confirm the IBC transfer action?", - action: "COSMOS_IBC_TRANSFER", - }, - }, - { - user: "{{user1}}", - content: { - text: "Yes", - action: "COSMOS_IBC_TRANSFER", - }, - }, - { - user: "{{user2}}", - content: { - text: "", - action: "COSMOS_IBC_TRANSFER", - }, - }, - ], - ], - similes: [ - "COSMOS_BRIDGE_TOKEN", - "COSMOS_IBC_SEND_TOKEN", - "COSMOS_TOKEN_IBC_TRANSFER", - "COSMOS_MOVE_IBC_TOKENS", - ], -}); diff --git a/packages/plugin-cosmos/src/actions/ibc-transfer/schema.ts b/packages/plugin-cosmos/src/actions/ibc-transfer/schema.ts deleted file mode 100644 index a288031ccf309..0000000000000 --- a/packages/plugin-cosmos/src/actions/ibc-transfer/schema.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { z } from "zod"; - -export const IBCTransferParamsSchema = z.object({ - chainName: z.string(), - symbol: z.string(), - amount: z.string().regex(/^\d+$/, "Amount must be a numeric string"), - toAddress: z.string().regex(/^[a-z0-9]+$/, "Invalid bech32 address format"), - targetChainName: z.string(), -}); diff --git a/packages/plugin-cosmos/src/actions/ibc-transfer/services/bridge-denom-provider.ts b/packages/plugin-cosmos/src/actions/ibc-transfer/services/bridge-denom-provider.ts deleted file mode 100644 index 54133dd673ca7..0000000000000 --- a/packages/plugin-cosmos/src/actions/ibc-transfer/services/bridge-denom-provider.ts +++ /dev/null @@ -1,37 +0,0 @@ -import type { IDenomProvider } from "../../../shared/interfaces"; -import { SkipApiAssetsFromSourceFetcher } from "../../../shared/services/skip-api/assets-from-source-fetcher/skip-api-assets-from-source-fetcher"; - -export const bridgeDenomProvider: IDenomProvider = async ( - sourceAssetDenom: string, - sourceAssetChainId: string, - destChainId: string -) => { - const skipApiAssetsFromSourceFetcher = - SkipApiAssetsFromSourceFetcher.getInstance(); - const bridgeData = await skipApiAssetsFromSourceFetcher.fetch( - sourceAssetDenom, - sourceAssetChainId - ); - - const destAssets = bridgeData.dest_assets[destChainId]; - - if (!destAssets?.assets) { - throw new Error(`No assets found for chain ${destChainId}`); - } - - const ibcAssetData = destAssets.assets?.find( - ({ origin_denom }) => origin_denom === sourceAssetDenom - ); - - if (!ibcAssetData) { - throw new Error(`No matching asset found for denom ${sourceAssetDenom}`); - } - - if (!ibcAssetData.denom) { - throw new Error("No IBC asset data"); - } - - return { - denom: ibcAssetData.denom, - }; -}; diff --git a/packages/plugin-cosmos/src/actions/ibc-transfer/services/ibc-transfer-action-service.ts b/packages/plugin-cosmos/src/actions/ibc-transfer/services/ibc-transfer-action-service.ts deleted file mode 100644 index 1c06c84b620e6..0000000000000 --- a/packages/plugin-cosmos/src/actions/ibc-transfer/services/ibc-transfer-action-service.ts +++ /dev/null @@ -1,141 +0,0 @@ -import { - convertDisplayUnitToBaseUnit, - getAssetBySymbol, - getChainByChainName, -} from "@chain-registry/utils"; -import { assets, chains } from "chain-registry"; -import type { - IDenomProvider, - ICosmosActionService, - ICosmosPluginCustomChainData, - ICosmosTransaction, - ICosmosWalletChains, -} from "../../../shared/interfaces.ts"; -import { getAvailableAssets } from "../../../shared/helpers/cosmos-assets.ts"; -import type { IBCTransferActionParams } from "../types.ts"; - -export class IBCTransferAction implements ICosmosActionService { - constructor(private cosmosWalletChains: ICosmosWalletChains) { - this.cosmosWalletChains = cosmosWalletChains; - } - - async execute( - params: IBCTransferActionParams, - bridgeDenomProvider: IDenomProvider, - customChainAssets?: ICosmosPluginCustomChainData["assets"][] - ): Promise { - const senderAddress = await this.cosmosWalletChains.getWalletAddress( - params.chainName - ); - - const skipClient = this.cosmosWalletChains.getSkipClient( - params.chainName - ); - - if (!senderAddress) { - throw new Error( - `Cannot get wallet address for chain ${params.chainName}` - ); - } - - if (!params.toAddress) { - throw new Error("No receiver address"); - } - - if (!params.targetChainName) { - throw new Error("No target chain name"); - } - - if (!params.chainName) { - throw new Error("No chain name"); - } - - if (!params.symbol) { - throw new Error("No symbol"); - } - - const availableAssets = getAvailableAssets(assets, customChainAssets); - - const denom = getAssetBySymbol( - availableAssets, - params.symbol, - params.chainName - ); - - const sourceChain = getChainByChainName(chains, params.chainName); - const destChain = getChainByChainName(chains, params.targetChainName); - - if (!denom.base) { - throw new Error("Cannot find asset"); - } - - if (!sourceChain) { - throw new Error("Cannot find source chain"); - } - - if (!destChain) { - throw new Error("Cannot find destination chain"); - } - - const bridgeDenomResult = await bridgeDenomProvider( - denom.base, - sourceChain.chain_id, - destChain.chain_id - ); - - if (!bridgeDenomResult || !bridgeDenomResult.denom) { - throw new Error("Failed to get destination asset denomination"); - } - - const destAssetDenom = bridgeDenomResult.denom; - - const route = await skipClient.route({ - destAssetChainID: destChain.chain_id, - destAssetDenom, - sourceAssetChainID: sourceChain.chain_id, - sourceAssetDenom: denom.base, - amountIn: convertDisplayUnitToBaseUnit( - availableAssets, - params.symbol, - params.amount, - params.chainName - ), - cumulativeAffiliateFeeBPS: "0", - }); - const fromAddress = { - chainID: sourceChain.chain_id, - address: await this.cosmosWalletChains.getWalletAddress(params.chainName) - }; - - const toAddress = { - chainID: destChain.chain_id, - address: params.toAddress - }; - - const userAddresses = [fromAddress, toAddress]; - - let txHash: string | undefined; - - try { - await skipClient.executeRoute({ - route, - userAddresses, - onTransactionCompleted: async (_, executeRouteTxHash) => { - txHash = executeRouteTxHash; - }, - }); - } catch (error) { - throw new Error(`Failed to execute route: ${error?.message}`); - } - - if (!txHash) { - throw new Error("Transaction hash is undefined after executing route"); - } - - return { - from: senderAddress, - to: params.toAddress, - txHash, - }; - } -} diff --git a/packages/plugin-cosmos/src/actions/ibc-transfer/types.ts b/packages/plugin-cosmos/src/actions/ibc-transfer/types.ts deleted file mode 100644 index 4bc559885d883..0000000000000 --- a/packages/plugin-cosmos/src/actions/ibc-transfer/types.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { z } from "zod"; -import type { IBCTransferParamsSchema } from "./schema"; - -export type IBCTransferActionParams = z.infer; diff --git a/packages/plugin-cosmos/src/actions/transfer/index.ts b/packages/plugin-cosmos/src/actions/transfer/index.ts deleted file mode 100644 index 67db6333f80c5..0000000000000 --- a/packages/plugin-cosmos/src/actions/transfer/index.ts +++ /dev/null @@ -1,216 +0,0 @@ -import { - composeContext, - generateObjectDeprecated, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, -} from "@elizaos/core"; -import { initWalletChainsData } from "../../providers/wallet/utils"; -import { cosmosTransferTemplate } from "../../templates"; -import { CosmosTransferActionService } from "./services/cosmos-transfer-action-service"; -import type { CosmosTransferParams } from "./types"; -import type { - ICosmosPluginOptions, - ICosmosWalletChains, -} from "../../shared/interfaces"; - -export const createTransferAction = (pluginOptions: ICosmosPluginOptions) => ({ - name: "COSMOS_TRANSFER", - description: "Transfer tokens between addresses on the same chain", - handler: async ( - _runtime: IAgentRuntime, - _message: Memory, - state: State, - _options: { [key: string]: unknown }, - _callback?: HandlerCallback - ) => { - const cosmosTransferContext = composeContext({ - state: state, - template: cosmosTransferTemplate, - templatingEngine: "handlebars", - }); - - const cosmosTransferContent = await generateObjectDeprecated({ - runtime: _runtime, - context: cosmosTransferContext, - modelClass: ModelClass.SMALL, - }); - - const paramOptions: CosmosTransferParams = { - chainName: cosmosTransferContent.chainName, - symbol: cosmosTransferContent.symbol, - amount: cosmosTransferContent.amount, - toAddress: cosmosTransferContent.toAddress, - }; - - try { - const walletProvider: ICosmosWalletChains = - await initWalletChainsData(_runtime); - - const action = new CosmosTransferActionService(walletProvider); - - const customAssets = (pluginOptions?.customChainData ?? []).map( - (chainData) => chainData.assets - ); - - const transferResp = await action.execute( - paramOptions, - customAssets - ); - - if (_callback) { - await _callback({ - text: `Successfully transferred ${paramOptions.amount} tokens to ${paramOptions.toAddress}\nGas paid: ${transferResp.gasPaid}\nTransaction Hash: ${transferResp.txHash}`, - content: { - success: true, - hash: transferResp.txHash, - amount: paramOptions.amount, - recipient: transferResp.to, - chain: cosmosTransferContent.fromChain, - }, - }); - - const newMemory: Memory = { - userId: _message.agentId, - agentId: _message.agentId, - roomId: _message.roomId, - content: { - text: `Transaction ${paramOptions.amount} ${paramOptions.symbol} to address ${paramOptions.toAddress} on chain ${paramOptions.toAddress} was successfully transfered.\n Gas paid: ${transferResp.gasPaid}. Tx hash: ${transferResp.txHash}`, - }, - }; - - await _runtime.messageManager.createMemory(newMemory); - } - return true; - } catch (error) { - console.error("Error during token transfer:", error); - - if (_callback) { - await _callback({ - text: `Error transferring tokens: ${error.message}`, - content: { error: error.message }, - }); - } - - const newMemory: Memory = { - userId: _message.agentId, - agentId: _message.agentId, - roomId: _message.roomId, - content: { - text: `Transaction ${paramOptions.amount} ${paramOptions.symbol} to address ${paramOptions.toAddress} on chain ${paramOptions.toAddress} was unsuccessful.`, - }, - }; - - await _runtime.messageManager.createMemory(newMemory); - - return false; - } - }, - template: cosmosTransferTemplate, - validate: async (runtime: IAgentRuntime) => { - const mnemonic = runtime.getSetting("COSMOS_RECOVERY_PHRASE"); - const availableChains = runtime.getSetting("COSMOS_AVAILABLE_CHAINS"); - const availableChainsArray = availableChains?.split(","); - - return !(mnemonic && availableChains && availableChainsArray.length); - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Make transfer {{0.0001 OM}} to {{mantra1pcnw46km8m5amvf7jlk2ks5std75k73aralhcf}} on {{mantrachaintestnet2}}", - action: "COSMOS_TRANSFER", - }, - }, - { - user: "{{user2}}", - content: { - text: "Do you confirm the transfer action?", - action: "COSMOS_TRANSFER", - }, - }, - { - user: "{{user1}}", - content: { - text: "Yes", - action: "COSMOS_TRANSFER", - }, - }, - { - user: "{{user2}}", - content: { - text: "", - action: "COSMOS_TRANSFER", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Send {{10 OSMO}} to {{osmo13248w8dtnn07sxc3gq4l3ts4rvfyat6f4qkdd6}} on {{osmosistestnet}}", - action: "COSMOS_TRANSFER", - }, - }, - { - user: "{{user2}}", - content: { - text: "Do you confirm the transfer action?", - action: "COSMOS_TRANSFER", - }, - }, - { - user: "{{user1}}", - content: { - text: "Yes", - action: "COSMOS_TRANSFER", - }, - }, - { - user: "{{user2}}", - content: { - text: "", - action: "COSMOS_TRANSFER", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Send {{0.0001 OM}} on {{mantrachaintestnet2}} to {{mantra1pcnw46km8m5amvf7jlk2ks5std75k73aralhcf}}.", - action: "COSMOS_TRANSFER", - }, - }, - { - user: "{{user2}}", - content: { - text: "Do you confirm the transfer action?", - action: "COSMOS_TRANSFER", - }, - }, - { - user: "{{user1}}", - content: { - text: "Yes", - action: "COSMOS_TRANSFER", - }, - }, - { - user: "{{user2}}", - content: { - text: "", - action: "COSMOS_TRANSFER", - }, - }, - ], - ], - similes: [ - "COSMOS_SEND_TOKENS", - "COSMOS_TOKEN_TRANSFER", - "COSMOS_MOVE_TOKENS", - ], -}); diff --git a/packages/plugin-cosmos/src/actions/transfer/schema.ts b/packages/plugin-cosmos/src/actions/transfer/schema.ts deleted file mode 100644 index f2aae2c55a1d8..0000000000000 --- a/packages/plugin-cosmos/src/actions/transfer/schema.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { z } from "zod"; - -export const cosmosTransferParamsSchema = z.object({ - chainName: z.string(), - symbol: z.string(), - amount: z.string(), - toAddress: z.string(), -}); diff --git a/packages/plugin-cosmos/src/actions/transfer/services/cosmos-transfer-action-service.ts b/packages/plugin-cosmos/src/actions/transfer/services/cosmos-transfer-action-service.ts deleted file mode 100644 index 836cf0c2ef7cd..0000000000000 --- a/packages/plugin-cosmos/src/actions/transfer/services/cosmos-transfer-action-service.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { - convertDisplayUnitToBaseUnit, - getAssetBySymbol, -} from "@chain-registry/utils"; -import type { Coin } from "@cosmjs/stargate"; -import { assets } from "chain-registry"; -import { getPaidFeeFromReceipt } from "../../../shared/helpers/cosmos-transaction-receipt.ts"; -import type { - ICosmosActionService, - ICosmosPluginCustomChainData, - ICosmosTransaction, - ICosmosWalletChains, -} from "../../../shared/interfaces.ts"; -import { CosmosTransactionFeeEstimator } from "../../../shared/services/cosmos-transaction-fee-estimator.ts"; -import type { CosmosTransferParams } from "../types.ts"; -import { getAvailableAssets } from "../../../shared/helpers/cosmos-assets.ts"; - -export class CosmosTransferActionService implements ICosmosActionService { - constructor(private cosmosWalletChains: ICosmosWalletChains) { - this.cosmosWalletChains = cosmosWalletChains; - } - - async execute( - params: CosmosTransferParams, - customChainAssets?: ICosmosPluginCustomChainData["assets"][] - ): Promise { - const signingCosmWasmClient = - this.cosmosWalletChains.getSigningCosmWasmClient(params.chainName); - - const senderAddress = await this.cosmosWalletChains.getWalletAddress( - params.chainName - ); - - if (!senderAddress) { - throw new Error( - `Cannot get wallet address for chain ${params.chainName}` - ); - } - - if (!params.toAddress) { - throw new Error("No receiver address"); - } - - if (!params.symbol) { - throw new Error("No symbol"); - } - - const availableAssets = getAvailableAssets(assets, customChainAssets); - - const coin: Coin = { - denom: getAssetBySymbol( - availableAssets, - params.symbol, - params.chainName - ).base, - amount: convertDisplayUnitToBaseUnit( - availableAssets, - params.symbol, - params.amount, - params.chainName - ), - }; - - const gasFee = - await CosmosTransactionFeeEstimator.estimateGasForCoinTransfer( - signingCosmWasmClient, - senderAddress, - params.toAddress, - [coin] - ); - - const txDeliveryResponse = await signingCosmWasmClient.sendTokens( - senderAddress, - params.toAddress, - [coin], - { gas: gasFee.toString(), amount: [{ ...coin, amount: gasFee.toString() }] } - ); - - const gasPaid = getPaidFeeFromReceipt(txDeliveryResponse); - - return { - from: senderAddress, - to: params.toAddress, - gasPaid, - txHash: txDeliveryResponse.transactionHash, - }; - } -} diff --git a/packages/plugin-cosmos/src/actions/transfer/types.ts b/packages/plugin-cosmos/src/actions/transfer/types.ts deleted file mode 100644 index 0d2dd26678f53..0000000000000 --- a/packages/plugin-cosmos/src/actions/transfer/types.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { z } from "zod"; -import type { cosmosTransferParamsSchema } from "./schema"; - -export type CosmosTransferParams = z.infer; diff --git a/packages/plugin-cosmos/src/index.ts b/packages/plugin-cosmos/src/index.ts deleted file mode 100644 index 22c90aa4ed1d3..0000000000000 --- a/packages/plugin-cosmos/src/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { createTransferAction } from "./actions/transfer"; -import { createCosmosWalletProvider } from "./providers/wallet"; -import type { ICosmosPluginOptions } from "./shared/interfaces"; -import {createIBCSwapAction} from "./actions/ibc-swap"; -import {createIBCTransferAction} from "./actions/ibc-transfer"; - -export const createCosmosPlugin = ( - pluginOptions?: ICosmosPluginOptions -): Plugin => ({ - name: "cosmos", - description: "Cosmos blockchain integration plugin", - providers: [createCosmosWalletProvider(pluginOptions)], - evaluators: [], - services: [], - actions: [createTransferAction(pluginOptions), createIBCSwapAction(pluginOptions), createIBCTransferAction(pluginOptions),], -}); - -export default createCosmosPlugin; diff --git a/packages/plugin-cosmos/src/providers/wallet/index.ts b/packages/plugin-cosmos/src/providers/wallet/index.ts deleted file mode 100644 index 7d07cf6924f4c..0000000000000 --- a/packages/plugin-cosmos/src/providers/wallet/index.ts +++ /dev/null @@ -1,69 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { - convertBaseUnitToDisplayUnit, - getSymbolByDenom, -} from "@chain-registry/utils"; -import { assets } from "chain-registry"; -import { initWalletChainsData } from "./utils"; -import type { ICosmosPluginOptions } from "../../shared/interfaces"; -import { getAvailableAssets } from "../../shared/helpers/cosmos-assets"; - -export const createCosmosWalletProvider = ( - pluginOptions: ICosmosPluginOptions -) => ({ - get: async (runtime: IAgentRuntime) => { - let providerContextMessage = ""; - - const customAssets = (pluginOptions?.customChainData ?? []).map( - (chainData) => chainData.assets - ); - - const availableAssets = getAvailableAssets(assets, customAssets); - - try { - const provider = await initWalletChainsData(runtime); - - for (const [chainName, { wallet }] of Object.entries( - provider.walletChainsData - )) { - const address = await wallet.getWalletAddress(); - const balances = await wallet.getWalletBalances(); - - const convertedCoinsToDisplayDenom = balances.map((balance) => { - const symbol = getSymbolByDenom( - availableAssets, - balance.denom, - chainName - ); - - return { - amount: symbol - ? convertBaseUnitToDisplayUnit( - availableAssets, - symbol, - balance.amount, - chainName - ) - : balance.amount, - symbol: symbol ?? balance.denom, - }; - }); - - const balancesToString = convertedCoinsToDisplayDenom - .map((balance) => `- ${balance.amount} ${balance.symbol}`) - .join("\n"); - - providerContextMessage += `Chain: ${chainName}\nAddress: ${address}\nBalances:\n${balancesToString}\n________________\n`; - } - - return providerContextMessage; - } catch (error) { - console.error( - "Error Initializing in Cosmos wallet provider:", - error - ); - - return null; - } - }, -}); diff --git a/packages/plugin-cosmos/src/providers/wallet/utils.ts b/packages/plugin-cosmos/src/providers/wallet/utils.ts deleted file mode 100644 index 19df87996781e..0000000000000 --- a/packages/plugin-cosmos/src/providers/wallet/utils.ts +++ /dev/null @@ -1,23 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { CosmosWalletChains } from "../../shared/entities/cosmos-wallet-chains-data"; - -export const initWalletChainsData = async (runtime: IAgentRuntime) => { - const mnemonic = runtime.getSetting("COSMOS_RECOVERY_PHRASE"); - const availableChains = runtime.getSetting("COSMOS_AVAILABLE_CHAINS"); - - if (!mnemonic) { - throw new Error("COSMOS_RECOVERY_PHRASE is missing"); - } - - if (!availableChains) { - throw new Error("COSMOS_AVAILABLE_CHAINS is missing"); - } - - const availableChainsArray = availableChains.split(","); - - if (!availableChainsArray.length) { - throw new Error("COSMOS_AVAILABLE_CHAINS is empty"); - } - - return await CosmosWalletChains.create(mnemonic, availableChainsArray); -}; diff --git a/packages/plugin-cosmos/src/shared/entities/cosmos-wallet-chains-data.ts b/packages/plugin-cosmos/src/shared/entities/cosmos-wallet-chains-data.ts deleted file mode 100644 index 334dc8197b7f4..0000000000000 --- a/packages/plugin-cosmos/src/shared/entities/cosmos-wallet-chains-data.ts +++ /dev/null @@ -1,92 +0,0 @@ -import { getChainByChainName } from "@chain-registry/utils"; -import { SigningCosmWasmClient } from "@cosmjs/cosmwasm-stargate"; -import { chains } from "chain-registry"; -import { CosmosWallet } from "./cosmos-wallet"; -import type { - ICosmosPluginCustomChainData, - ICosmosWalletChains, - ICosmosWalletChainsData, -} from "../interfaces"; -import { getAvailableChains } from "../helpers/cosmos-chains"; -import { SkipClient } from "@skip-go/client"; - -export class CosmosWalletChains implements ICosmosWalletChains { - public walletChainsData: ICosmosWalletChainsData = {}; - - private constructor(walletChainsData: ICosmosWalletChainsData) { - this.walletChainsData = walletChainsData; - } - - public static async create( - mnemonic: string, - availableChainNames: string[], - customChainsData?: ICosmosPluginCustomChainData["chainData"][] - ) { - const walletChainsData: ICosmosWalletChainsData = {}; - const availableChains = getAvailableChains(chains, customChainsData); - - for (const chainName of availableChainNames) { - const chain = getChainByChainName(availableChains, chainName); - - if (!chain) { - throw new Error(`Chain ${chainName} not found`); - } - - const wallet = await CosmosWallet.create( - mnemonic, - chain.bech32_prefix, - chain.apis.rpc[0].address - ); - - const chainRpcAddress = chain.apis?.rpc?.[0].address; - - if (!chainRpcAddress) { - throw new Error(`RPC address not found for chain ${chainName}`); - } - - const signingCosmWasmClient = - await SigningCosmWasmClient.connectWithSigner( - chain.apis.rpc[0].address, - wallet.directSecp256k1HdWallet - ); - - const skipClient = new SkipClient({ - getCosmosSigner: async () => wallet.directSecp256k1HdWallet, - }); - - walletChainsData[chainName] = { - wallet, - signingCosmWasmClient, - skipClient, - }; - } - - return new CosmosWalletChains(walletChainsData); - } - - public async getWalletAddress(chainName: string) { - const chainWalletsForGivenChain = this.walletChainsData[chainName]; - if (!chainWalletsForGivenChain) { - throw new Error(`Invalid chain name. If ${chainName} is required, it should be added to env file.`); - } - - return await chainWalletsForGivenChain.wallet.getWalletAddress(); - } - - public getSigningCosmWasmClient(chainName: string) { - return this.walletChainsData[chainName].signingCosmWasmClient; - } - - public getSkipClient(chainName: string): SkipClient { - const chainWalletsForGivenChain = this.walletChainsData[chainName]; - - if (!chainWalletsForGivenChain) { - throw new Error("Invalid chain name"); - } - - return chainWalletsForGivenChain.skipClient; - } - public async getUserAddress(chainName: string): Promise { - return this.walletChainsData[chainName].wallet.getWalletAddress(); - } -} diff --git a/packages/plugin-cosmos/src/shared/entities/cosmos-wallet.ts b/packages/plugin-cosmos/src/shared/entities/cosmos-wallet.ts deleted file mode 100644 index cfcbe68443a1d..0000000000000 --- a/packages/plugin-cosmos/src/shared/entities/cosmos-wallet.ts +++ /dev/null @@ -1,54 +0,0 @@ -import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; -import { cosmos } from "interchain"; -import type { ICosmosWallet } from "../interfaces"; - -type RPCQueryClient = Awaited< - ReturnType ->; - -export class CosmosWallet implements ICosmosWallet { - public rpcQueryClient: RPCQueryClient; - public directSecp256k1HdWallet: DirectSecp256k1HdWallet; - - private constructor( - directSecp256k1HdWallet: DirectSecp256k1HdWallet, - rpcQueryClient: RPCQueryClient - ) { - this.directSecp256k1HdWallet = directSecp256k1HdWallet; - this.rpcQueryClient = rpcQueryClient; - } - - public static async create( - mnemonic: string, - chainPrefix: string, - rpcEndpoint: string - ) { - const directSecp256k1HdWallet = - await DirectSecp256k1HdWallet.fromMnemonic(mnemonic, { - prefix: chainPrefix, - }); - - const rpcQueryClient = await cosmos.ClientFactory.createRPCQueryClient({ - rpcEndpoint, - }); - - return new CosmosWallet(directSecp256k1HdWallet, rpcQueryClient); - } - - public async getWalletAddress() { - const [account] = await this.directSecp256k1HdWallet.getAccounts(); - - return account.address; - } - - public async getWalletBalances() { - const walletAddress = await this.getWalletAddress(); - - const allBalances = - await this.rpcQueryClient.cosmos.bank.v1beta1.allBalances({ - address: walletAddress, - }); - - return allBalances.balances; - } -} diff --git a/packages/plugin-cosmos/src/shared/helpers/cosmos-assets.ts b/packages/plugin-cosmos/src/shared/helpers/cosmos-assets.ts deleted file mode 100644 index ae92d2cb2448e..0000000000000 --- a/packages/plugin-cosmos/src/shared/helpers/cosmos-assets.ts +++ /dev/null @@ -1,27 +0,0 @@ -import type { AssetList } from "@chain-registry/types"; - -export const getAvailableAssets = ( - assets: AssetList[], - customAssets: AssetList[] -) => { - const result: AssetList[] = []; - const safeAssets = assets || []; - const safeCustomAssets = customAssets || []; - - // Get custom asset chain names for faster lookup - const customChainNames = new Set( - safeCustomAssets.map(asset => asset.chain_name) - ); - - // Add non-duplicate assets - for (const asset of safeAssets) { - if (!customChainNames.has(asset.chain_name)) { - result.push(asset); - } - } - - // Add all custom assets - result.push(...safeCustomAssets); - - return result; -} \ No newline at end of file diff --git a/packages/plugin-cosmos/src/shared/helpers/cosmos-chains.ts b/packages/plugin-cosmos/src/shared/helpers/cosmos-chains.ts deleted file mode 100644 index 708bd918e936b..0000000000000 --- a/packages/plugin-cosmos/src/shared/helpers/cosmos-chains.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { Chain } from "@chain-registry/types"; - -export const getAvailableChains = (chains: Chain[], customChains: Chain[]) => [ - ...(chains?.filter( - (chain) => - !(customChains ?? []) - ?.map((customChain) => customChain.chain_name) - ?.includes(chain.chain_name) - ) ?? []), - ...(customChains ?? []), -]; diff --git a/packages/plugin-cosmos/src/shared/helpers/cosmos-transaction-receipt.ts b/packages/plugin-cosmos/src/shared/helpers/cosmos-transaction-receipt.ts deleted file mode 100644 index 689a5dc387d88..0000000000000 --- a/packages/plugin-cosmos/src/shared/helpers/cosmos-transaction-receipt.ts +++ /dev/null @@ -1,44 +0,0 @@ -import type { - DeliverTxResponse, - ExecuteResult, -} from "@cosmjs/cosmwasm-stargate"; - -const DEFUALT_EVENTS = [ - { eventName: "fee_pay", attributeType: "fee" }, - { eventName: "tip_refund", attributeType: "tip" }, -]; - -export const getPaidFeeFromReceipt = ( - receipt: ExecuteResult | DeliverTxResponse, - eventsToPickGasFor = DEFUALT_EVENTS -) => { - const selectedEvents = receipt.events.filter(({ type }) => - eventsToPickGasFor.map(({ eventName }) => eventName).includes(type) - ); - - return selectedEvents.reduce((acc, { attributes }) => { - return ( - acc + - attributes.reduce((_acc, { key, value }) => { - if ( - eventsToPickGasFor.some( - ({ attributeType }) => attributeType === key - ) - ) { - const testValue = value.match(/\d+/)?.[0]; - const testValueAsNumber = Number(testValue); - - if (Number.isNaN(testValueAsNumber)) { - return _acc; - } - - _acc = _acc + testValueAsNumber; - - return _acc; - } - - return _acc; - }, 0) - ); - }, 0); -}; diff --git a/packages/plugin-cosmos/src/shared/interfaces.ts b/packages/plugin-cosmos/src/shared/interfaces.ts deleted file mode 100644 index 52dd8731b63ad..0000000000000 --- a/packages/plugin-cosmos/src/shared/interfaces.ts +++ /dev/null @@ -1,64 +0,0 @@ -import type { SigningCosmWasmClient } from "@cosmjs/cosmwasm-stargate"; -import type { Coin, DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; -import type { assets, chains } from "chain-registry"; -import type { SkipClient, StatusState} from "@skip-go/client"; - -export interface ICosmosPluginCustomChainData { - chainData: (typeof chains)[number]; - assets: (typeof assets)[number]; -} - -export interface ICosmosPluginOptions { - customChainData?: ICosmosPluginCustomChainData[]; -} - -export interface ICosmosActionService { - execute: ((...params: unknown[]) => void) | (() => void); -} - -export interface ICosmosTransaction { - from: string; - to: string; - txHash: string; - gasPaid?: number; -} - -export interface ICosmosSwap { - status: StatusState; - fromChainName: string; - fromTokenSymbol: string; - fromTokenAmount: string; - toTokenSymbol: string; - toChainName: string; - txHash: string; -} - -export interface ICosmosWallet { - directSecp256k1HdWallet: DirectSecp256k1HdWallet; - - getWalletAddress(): Promise; - getWalletBalances(): Promise; -} - -export interface ICosmosChainWallet { - wallet: ICosmosWallet; - signingCosmWasmClient: SigningCosmWasmClient; - skipClient: SkipClient; -} - -export interface ICosmosWalletChains { - walletChainsData: ICosmosWalletChainsData; - - getWalletAddress(chainName: string): Promise; - getSigningCosmWasmClient(chainName: string): SigningCosmWasmClient; - getSkipClient(chainName: string): SkipClient; -} - -export interface ICosmosWalletChainsData { - [chainName: string]: ICosmosChainWallet; -} - -export type IDenomProvider = ( - sourceAssetDenom: string, - sourceAssetChainId: string, - destChainId: string) => Promise<{ denom: string }> diff --git a/packages/plugin-cosmos/src/shared/services/cosmos-transaction-fee-estimator.ts b/packages/plugin-cosmos/src/shared/services/cosmos-transaction-fee-estimator.ts deleted file mode 100644 index d9a09c29ffb5e..0000000000000 --- a/packages/plugin-cosmos/src/shared/services/cosmos-transaction-fee-estimator.ts +++ /dev/null @@ -1,49 +0,0 @@ -import type { SigningCosmWasmClient } from "@cosmjs/cosmwasm-stargate"; -import type { EncodeObject } from "@cosmjs/proto-signing"; -import type { Coin, MsgSendEncodeObject } from "@cosmjs/stargate"; - -export class CosmosTransactionFeeEstimator { - private static async estimateGasForTransaction< - Message extends readonly EncodeObject[], - >( - signingCosmWasmClient: SigningCosmWasmClient, - senderAddress: string, - message: Message, - memo = "" - ): Promise { - const estimatedGas = await signingCosmWasmClient.simulate( - senderAddress, - message, - memo - ); - - // Add 20% to the estimated gas to make sure we have enough gas to cover the transaction - const safeEstimatedGas = Math.ceil(estimatedGas * 1.2); - - return safeEstimatedGas; - } - - static estimateGasForCoinTransfer( - signingCosmWasmClient: SigningCosmWasmClient, - senderAddress: string, - recipientAddress: string, - amount: readonly Coin[], - memo = "" - ): Promise { - return this.estimateGasForTransaction( - signingCosmWasmClient, - senderAddress, - [ - { - typeUrl: "/cosmos.bank.v1beta1.MsgSend", - value: { - fromAddress: senderAddress, - toAddress: recipientAddress, - amount: [...amount], - }, - }, - ], - memo - ); - } -} diff --git a/packages/plugin-cosmos/src/shared/services/skip-api/assets-from-source-fetcher/interfaces.ts b/packages/plugin-cosmos/src/shared/services/skip-api/assets-from-source-fetcher/interfaces.ts deleted file mode 100644 index 21dd7aa5e3725..0000000000000 --- a/packages/plugin-cosmos/src/shared/services/skip-api/assets-from-source-fetcher/interfaces.ts +++ /dev/null @@ -1,16 +0,0 @@ -import type { z } from "zod"; -import type { - skipApiAssetsFromSourceParamsSchema, - skipApiAssetsFromSourceResponseAssetSchema, - skipApiAssetsFromSourceResponseSchema, -} from "./schema"; - -export type SkipApiAssetsFromSourceParams = z.infer< - typeof skipApiAssetsFromSourceParamsSchema ->; -export type SkipApiAssetsFromSourceResponseAsset = z.infer< - typeof skipApiAssetsFromSourceResponseAssetSchema ->; -export type SkipApiAssetsFromSourceResponse = z.infer< - typeof skipApiAssetsFromSourceResponseSchema ->; diff --git a/packages/plugin-cosmos/src/shared/services/skip-api/assets-from-source-fetcher/schema.ts b/packages/plugin-cosmos/src/shared/services/skip-api/assets-from-source-fetcher/schema.ts deleted file mode 100644 index 6272e8a100167..0000000000000 --- a/packages/plugin-cosmos/src/shared/services/skip-api/assets-from-source-fetcher/schema.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { z } from "zod"; - -export const skipApiAssetsFromSourceParamsSchema = z.object({ - source_asset_denom: z.string(), - source_asset_chain_id: z.string(), - allow_multi_tx: z.boolean(), -}); - -export const skipApiAssetsFromSourceResponseAssetSchema = z.object({ - denom: z.string(), - chain_id: z.string(), - origin_denom: z.string(), - origin_chain_id: z.string(), - trace: z.string(), - symbol: z.string().optional(), - name: z.string().optional(), - logo_uri: z.string().optional(), - decimals: z.number().optional(), - recommended_symbol: z.string().optional(), -}); - -export const skipApiAssetsFromSourceResponseSchema = z.object({ - dest_assets: z.record( - z.string(), - z.object({ - assets: z.array(skipApiAssetsFromSourceResponseAssetSchema), - }) - ), -}); diff --git a/packages/plugin-cosmos/src/shared/services/skip-api/assets-from-source-fetcher/skip-api-assets-from-source-fetcher.ts b/packages/plugin-cosmos/src/shared/services/skip-api/assets-from-source-fetcher/skip-api-assets-from-source-fetcher.ts deleted file mode 100644 index 31c15e14cdae3..0000000000000 --- a/packages/plugin-cosmos/src/shared/services/skip-api/assets-from-source-fetcher/skip-api-assets-from-source-fetcher.ts +++ /dev/null @@ -1,79 +0,0 @@ -import axios from "axios"; -import { skipApiAssetsFromSourceResponseSchema } from "./schema"; -import type { - SkipApiAssetsFromSourceParams, - SkipApiAssetsFromSourceResponse, -} from "./interfaces"; -import { skipApiBaseUrl } from "../config"; - -type CacheKey = `${string}_${string}`; -const endpointPath = "fungible/assets_from_source"; - -export class SkipApiAssetsFromSourceFetcher { - private static instance: SkipApiAssetsFromSourceFetcher; - private cache: Map; - private readonly apiUrl: string; - - private constructor() { - this.cache = new Map(); - this.apiUrl = `${skipApiBaseUrl}${endpointPath}`; - } - - public static getInstance(): SkipApiAssetsFromSourceFetcher { - if (!SkipApiAssetsFromSourceFetcher.instance) { - SkipApiAssetsFromSourceFetcher.instance = - new SkipApiAssetsFromSourceFetcher(); - } - return SkipApiAssetsFromSourceFetcher.instance; - } - - private generateCacheKey( - sourceAssetDenom: string, - sourceAssetChainId: string - ): CacheKey { - return `${sourceAssetDenom}_${sourceAssetChainId}`; - } - - public async fetch( - sourceAssetDenom: string, - sourceAssetChainId: string - ): Promise { - const cacheKey = this.generateCacheKey( - sourceAssetDenom, - sourceAssetChainId - ); - - if (this.cache.has(cacheKey)) { - const cachedData = this.cache.get(cacheKey); - if (!cachedData) { - throw new Error("Cache inconsistency: data not found after check"); - } - return cachedData; - } - - const requestData: SkipApiAssetsFromSourceParams = { - source_asset_denom: sourceAssetDenom, - source_asset_chain_id: sourceAssetChainId, - allow_multi_tx: false, - }; - - try { - const response = await axios.post(this.apiUrl, requestData, { - headers: { - "Content-Type": "application/json", - }, - timeout: 5000, - }); - - const validResponse = skipApiAssetsFromSourceResponseSchema.parse( - response.data - ); - - this.cache.set(cacheKey, validResponse); - return response.data; - } catch (error) { - console.error("Error fetching assets:", error); - throw error; - } - } -} diff --git a/packages/plugin-cosmos/src/shared/services/skip-api/config.ts b/packages/plugin-cosmos/src/shared/services/skip-api/config.ts deleted file mode 100644 index d3d4d5a16e776..0000000000000 --- a/packages/plugin-cosmos/src/shared/services/skip-api/config.ts +++ /dev/null @@ -1 +0,0 @@ -export const skipApiBaseUrl = "https://api.skip.build/v2/"; diff --git a/packages/plugin-cosmos/src/templates/index.ts b/packages/plugin-cosmos/src/templates/index.ts deleted file mode 100644 index 074a8e51897c5..0000000000000 --- a/packages/plugin-cosmos/src/templates/index.ts +++ /dev/null @@ -1,187 +0,0 @@ -export const cosmosTransferTemplate = `Given the recent messages and cosmos wallet information below: -{{recentMessages}} -{{walletInfo}} -Extract the following information about the requested transfer: -1. **Amount**: - - Extract only the numeric value from the instruction. - - The value must be a string representing the amount in the display denomination (e.g., "0.0001" for OM, chimba, etc.). Do not include the symbol. - -2. **Recipient Address**: - - Must be a valid Bech32 address that matches the chain's address prefix. - - Example for "mantra": "mantra1pcnw46km8m5amvf7jlk2ks5std75k73aralhcf". - -3. **Token Symbol**: - - The symbol must be a string representing the token's display denomination (e.g., "OM", "chimba", etc.). - -4. **Chain name**: - - Identify the chain mentioned in the instruction where the transfer will take place (e.g., carbon, axelar, mantrachaintestnet2). - - Provide this as a string. - -Respond with a JSON markdown block containing only the extracted values. All fields except 'token' are required: -\`\`\`json -{ - "symbol": string, // The symbol of token. - "amount": string, // The amount to transfer as a string. - "toAddress": string, // The recipient's address. - "chainName": string // The chain name. -\`\`\` - -Example response for the input: "Make transfer 0.0001 OM to mantra1pcnw46km8m5amvf7jlk2ks5std75k73aralhcf on mantrachaintestnet2", the response should be: -\`\`\`json -{ - "symbol": "OM", - "amount": "0.0001", - "toAddress": "mantra1pcnw46km8m5amvf7jlk2ks5std75k73aralhcf", - "chainName": "mantrachaintestnet2" -\`\`\` -Now respond with a JSON markdown block containing only the extracted values. -`; - -export const cosmosIBCTransferTemplate = `Given the recent messages and cosmos wallet information below: -{{recentMessages}} -{{walletInfo}} -Extract the following information about the requested IBC transfer: -1. **Amount**: - - Extract only the numeric value from the instruction. - - The value must be a string representing the amount in the display denomination (e.g., "0.0001" for ATOM, OSMO, etc.). Do not include the symbol. - -2. **Recipient Address**: - - Must be a valid Bech32 address that matches the target chain's address prefix. - - Example for "cosmoshub": "cosmos1pcnw46km8m5amvf7jlk2ks5std75k73aralhcf". - -3. **Token Symbol**: - - The symbol must be a string representing the token's display denomination (e.g., "ATOM", "OSMO", etc.). - -4. **Source Chain Name**: - - Identify the source chain mentioned in the instruction (e.g., cosmoshub, osmosis, axelar). - - Provide this as a string. - -5. **Target Chain Name**: - - Identify the target chain mentioned in the instruction (e.g., cosmoshub, osmosis, axelar). - - Provide this as a string. - -Respond with a JSON markdown block containing only the extracted values. All fields are required: -\`\`\`json -{ - "symbol": string, // The symbol of the token. - "amount": string, // The amount to transfer as a string. - "toAddress": string, // The recipient's address. - "chainName": string, // The source chain name. - "targetChainName": string // The target chain name. -} -\`\`\` - -Example response for the input: "Make an IBC transfer of 0.0001 ATOM to osmo1pcnw46km8m5amvf7jlk2ks5std75k73aralhcf from cosmoshub to osmosis", the response should be: -\`\`\`json -{ - "symbol": "ATOM", - "amount": "0.0001", - "toAddress": "osmo1pcnw46km8m5amvf7jlk2ks5std75k73aralhcf", - "chainName": "cosmoshub", - "targetChainName": "osmosis" -} -\`\`\` - -Now respond with a JSON markdown block containing only the extracted values. -`; - -export const cosmosIBCSwapTemplate = `Given the recent messages and cosmos wallet information below: -{{recentMessages}} -{{walletInfo}} -Make sure that you extracted latest info about requested swap from recent messages. Espessialy if there was another one placed before. -Also the extracted info MUST match the confirmed by user data in latest prompt in which you asked for confirmation! -Extract the following information about the requested IBC swap: - -1. **fromChainName**: - - Identify the source chain mentioned in the instruction (e.g., cosmoshub, osmosis, axelar). - - Provide this as a string. - -2. **fromTokenSymbol**: - - The symbol must be a string representing the token's display denomination (e.g., "ATOM", "OSMO", etc.). - -3. **fromTokenAmount**: - - Extract only the numeric value from the instruction. - - The value must be a string representing the amount in the display denomination (e.g., "0.0001" for ATOM, OSMO, etc.). Do not include the symbol. - -4. **toChainName**: - - Identify the target chain mentioned in the instruction (e.g., cosmoshub, osmosis, axelar). - - Provide this as a string. - -5. **toTokenSymbol**: - - The symbol must be a string representing the result token's display denomination (e.g., "OM", "ATOM", etc.). - -6. **toTokenDenom**: - - optional parameter, if present must be a string. (uom, uatom, usomo, ibc/53046FFF6CAD109D8F9B2C7C9913853AD241928CD05CDDE419343D176025DA74 or other ibc/ values) - -7. **fromTokenDenom**: - - optional parameter, if present must be a string. (uom, uatom, usomo, ibc/53046FFF6CAD109D8F9B2C7C9913853AD241928CD05CDDE419343D176025DA74 or other ibc/ values) - -Keep in mind that toTokenDenom and fromTokenDenom are optional parameters. - -Respond with a JSON markdown block containing only the extracted values. All fields are required: -\`\`\`json -{ - "fromChainName": string, // Source chain from which tokens will be taken to swap (String). - "fromTokenSymbol": string, // Symbol of token to be swapped (String). - "fromTokenAmount": string, // Amount of tokens to be swapped (String). - "toChainName": string, // Name of chain on which result token is hosted (String). - "toTokenSymbol": string, // Symbol of result token (String). - "fromTokenDenom": string, // denom of token to be swapped (String). Optional, might not be present. - "toTokenDenom": string // denom of result token (String). Optional, might not be present. -} -\`\`\` - -Example response for the input: "Swap {{1}} {{ATOM}} from {{cosmoshub}} to {{OM}} on {{mantrachain}}", the response should be: -\`\`\`json -{ - "fromChainName": "cosmoshub", - "fromTokenSymbol": "ATOM", - "fromTokenAmount": "1", - "fromTokenDenom": null, - "toChainName": "mantrachain", - "toTokenSymbol": "OM", - "toTokenDenom": null -} -\`\`\` - - -Example response for the input: "Swap {{1}} {{ATOM}} with denom {{uatom}} from {{cosmoshub}} to {{OM}} on {{mantrachain}}", the response should be: -\`\`\`json -{ - "fromChainName": "cosmoshub", - "fromTokenSymbol": "ATOM", - "fromTokenAmount": "1", - "fromTokenDenom": "uatom", - "toChainName": "mantrachain", - "toTokenSymbol": "OM", - "fromTokenDenom": null -} -\`\`\` - -Example response for the input: "Swap {{1}} {{ATOM}} with denom {{uatom}} from {{cosmoshub}} to {{OM}} (denom: {{ibc/53046FFF6CAD109D8F9B2C7C9913853AD241928CD05CDDE419343D176025DA74}} ) on {{mantrachain}}", the response should be: -\`\`\`json -{ - "fromChainName": "cosmoshub", - "fromTokenSymbol": "ATOM", - "fromTokenAmount": "1", - "fromTokenDenom": "uatom", - "toChainName": "mantrachain", - "toTokenSymbol": "OM", - "toTokenDenom": "ibc/53046FFF6CAD109D8F9B2C7C9913853AD241928CD05CDDE419343D176025DA74" -} -\`\`\` - -Example response for the input: "Swap {{100}} {{USDC}} with denom {{uusdc}} from {{axelar}} to {{ATOM}} on {{cosmoshub}}", the response should be: -\`\`\`json -{ - "fromChainName": "axelar", - "fromTokenSymbol": "USDC", - "fromTokenAmount": "100", - "fromTokenDenom": "uusdc", - "toChainName": "cosmoshub", - "toTokenSymbol": "ATOM", -} -\`\`\` - -Now respond with a JSON markdown block containing only the extracted values. -`; diff --git a/packages/plugin-cosmos/src/tests/bridge-denom-provider.test.ts b/packages/plugin-cosmos/src/tests/bridge-denom-provider.test.ts deleted file mode 100644 index 622c1dde262a6..0000000000000 --- a/packages/plugin-cosmos/src/tests/bridge-denom-provider.test.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { vi, expect, it, beforeEach, describe } from "vitest"; -import { bridgeDenomProvider } from "../actions/ibc-transfer/services/bridge-denom-provider"; -import { SkipApiAssetsFromSourceFetcher } from "../shared/services/skip-api/assets-from-source-fetcher/skip-api-assets-from-source-fetcher"; - -vi.mock("./bridge-data-fetcher", () => ({ - BridgeDataFetcher: { - getInstance: vi.fn().mockReturnValue({ - fetch: vi.fn(), - }), - }, -})); - -describe("bridgeDataProvider", () => { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let mockFetchBridgeData: any; - - beforeEach(() => { - mockFetchBridgeData = vi.fn(); - SkipApiAssetsFromSourceFetcher.getInstance().fetch = - mockFetchBridgeData; - }); - - it("should return correct channelId and ibcDenom when valid data is returned", async () => { - const mockResponse = { - dest_assets: { - osmos: { - assets: [ - { - origin_denom: "atom", - denom: "uatom", - trace: "channel-123/abc", - }, - ], - }, - }, - }; - - mockFetchBridgeData.mockResolvedValue(mockResponse); - - const sourceAssetDenom = "atom"; - const sourceAssetChainId = "cosmos"; - const destinationAdssetChainId = "osmos"; - - const result = await bridgeDenomProvider( - sourceAssetDenom, - sourceAssetChainId, - destinationAdssetChainId - ); - - expect(result).toEqual({ - denom: "uatom", - }); - }); - - it("should throw an error when ibcAssetData is not found", async () => { - const mockResponse = { - dest_assets: { - osmos: { - assets: [ - { - origin_denom: "btc", - denom: "ubtc", - trace: "channel-123/abc", - }, - ], - }, - }, - }; - - mockFetchBridgeData.mockResolvedValue(mockResponse); - - const sourceAssetDenom = "atom"; - const sourceAssetChainId = "cosmos"; - const destinationAdssetChainId = "osmos"; - - await expect( - bridgeDenomProvider( - sourceAssetDenom, - sourceAssetChainId, - destinationAdssetChainId - ) - ).rejects.toThrowError(); - }); -}); diff --git a/packages/plugin-cosmos/src/tests/cosmos-assets.test.ts b/packages/plugin-cosmos/src/tests/cosmos-assets.test.ts deleted file mode 100644 index 83dc7b5c65ace..0000000000000 --- a/packages/plugin-cosmos/src/tests/cosmos-assets.test.ts +++ /dev/null @@ -1,113 +0,0 @@ -import { describe, it, expect } from "vitest"; -import type { Asset, AssetList } from "@chain-registry/types"; -import { getAvailableAssets } from "../shared/helpers/cosmos-assets.ts"; - -describe("getAvailableAssets", () => { - it("should return all assets when no custom assets are provided", () => { - const assets: AssetList[] = [ - { chain_name: "chain1", assets: [] }, - { chain_name: "chain2", assets: [] }, - ]; - const customAssets: AssetList[] = []; - - const result = getAvailableAssets(assets, customAssets); - - expect(result).toEqual(assets); - }); - - it("should include custom assets and exclude duplicates", () => { - const assets: AssetList[] = [ - { chain_name: "chain1", assets: [] }, - { chain_name: "chain2", assets: [] }, - ]; - const customAssets: AssetList[] = [ - { chain_name: "chain2", assets: [{ symbol: "CUS2" } as Asset] }, - { chain_name: "chain3", assets: [{ symbol: "CUS3" } as Asset] }, - ]; - - const expectedResult: AssetList[] = [ - { chain_name: "chain1", assets: [] }, - { chain_name: "chain2", assets: [{ symbol: "CUS2" } as Asset] }, - { chain_name: "chain3", assets: [{ symbol: "CUS3" } as Asset] }, - ]; - - const result = getAvailableAssets(assets, customAssets); - - expect(result).toEqual(expectedResult); - }); - - it("should return only custom assets when no original assets are provided", () => { - const assets: AssetList[] = []; - const customAssets: AssetList[] = [ - { chain_name: "chain1", assets: [{ symbol: "CUS1" } as Asset] }, - { chain_name: "chain2", assets: [{ symbol: "CUS2" } as Asset] }, - ]; - - const result = getAvailableAssets(assets, customAssets); - - expect(result).toEqual(customAssets); - }); - - it("should handle empty inputs gracefully", () => { - const assets: AssetList[] = []; - const customAssets: AssetList[] = []; - - const result = getAvailableAssets(assets, customAssets); - - expect(result).toEqual([]); - }); - - it("should handle undefined customAssets gracefully", () => { - const assets: AssetList[] = [ - { chain_name: "chain1", assets: [] }, - { chain_name: "chain2", assets: [] }, - ]; - const customAssets: AssetList[] | undefined = undefined; - - const result = getAvailableAssets(assets, customAssets ?? []); - - expect(result).toEqual(assets); - }); - - it("should handle undefined assets gracefully", () => { - const assets: AssetList[] | undefined = undefined; - const customAssets: AssetList[] = [ - { chain_name: "chain1", assets: [{ symbol: "CUS1" } as Asset] }, - { chain_name: "chain2", assets: [{ symbol: "CUS2" } as Asset] }, - ]; - - const result = getAvailableAssets(assets ?? [], customAssets); - - expect(result).toEqual(customAssets); - }); - - it("should handle both assets and customAssets as undefined gracefully", () => { - const assets: AssetList[] | undefined = undefined; - const customAssets: AssetList[] | undefined = undefined; - - const result = getAvailableAssets(assets ?? [], customAssets ?? []); - - expect(result).toEqual([]); - }); - - it("should handle assets and customAssets with nested values", () => { - const assets: AssetList[] = [ - { chain_name: "chain1", assets: [{ symbol: "AS1" } as Asset] }, - { chain_name: "chain2", assets: [{ symbol: "AS2" } as Asset] }, - ]; - const customAssets: AssetList[] = [ - { chain_name: "chain2", assets: [{ symbol: "CUS2" } as Asset] }, - { chain_name: "chain3", assets: [{ symbol: "CUS3" } as Asset] }, - ]; - - const expectedResult: AssetList[] = [ - { chain_name: "chain1", assets: [{ symbol: "AS1" } as Asset] }, - { chain_name: "chain2", assets: [{ symbol: "CUS2" } as Asset] }, - { chain_name: "chain3", assets: [{ symbol: "CUS3" } as Asset] }, - ]; - - const result = getAvailableAssets(assets, customAssets); - - expect(result).toEqual(expectedResult); - }); -}); diff --git a/packages/plugin-cosmos/src/tests/cosmos-chains.test.ts b/packages/plugin-cosmos/src/tests/cosmos-chains.test.ts deleted file mode 100644 index ebd755ad49b00..0000000000000 --- a/packages/plugin-cosmos/src/tests/cosmos-chains.test.ts +++ /dev/null @@ -1,81 +0,0 @@ -import { describe, it, expect } from "vitest"; -import type { Chain } from "@chain-registry/types"; -import { getAvailableChains } from "../shared/helpers/cosmos-chains.ts"; - -describe("getAvailableChains", () => { - it("should return all chains when no custom chains are provided", () => { - const chains: Chain[] = [ - { chain_name: "chain1" } as Chain, - { chain_name: "chain2" } as Chain, - ]; - const customChains: Chain[] = []; - - const result = getAvailableChains(chains, customChains); - - expect(result).toEqual(chains); - }); - - it("should include custom chains and exclude duplicates", () => { - const chains: Chain[] = [ - { chain_name: "chain1" } as Chain, - { chain_name: "chain2" } as Chain, - ]; - const customChains: Chain[] = [ - { chain_name: "chain2" } as Chain, - { chain_name: "chain3" } as Chain, - ]; - - const result = getAvailableChains(chains, customChains); - - expect(result).toEqual([ - { chain_name: "chain1" } as Chain, - { chain_name: "chain2" } as Chain, - { chain_name: "chain3" } as Chain, - ]); - }); - - it("should return only custom chains when no original chains are provided", () => { - const chains: Chain[] = []; - const customChains: Chain[] = [ - { chain_name: "chain1" } as Chain, - { chain_name: "chain2" } as Chain, - ]; - - const result = getAvailableChains(chains, customChains); - - expect(result).toEqual(customChains); - }); - - it("should handle empty inputs gracefully", () => { - const chains: Chain[] = []; - const customChains: Chain[] = []; - - const result = getAvailableChains(chains, customChains); - - expect(result).toEqual([]); - }); - - it("should handle undefined customChains gracefully", () => { - const chains: Chain[] = [ - { chain_name: "chain1" } as Chain, - { chain_name: "chain2" } as Chain, - ]; - const customChains: Chain[] | undefined = undefined; - - const result = getAvailableChains(chains, customChains); - - expect(result).toEqual(chains); - }); - - it("should handle undefined chains gracefully", () => { - const chains: Chain[] | undefined = undefined; - const customChains: Chain[] = [ - { chain_name: "chain1" } as Chain, - { chain_name: "chain2" } as Chain, - ]; - - const result = getAvailableChains(chains ?? [], customChains); - - expect(result).toEqual(customChains); - }); -}); diff --git a/packages/plugin-cosmos/src/tests/cosmos-ibc-swap-action-service.test.ts b/packages/plugin-cosmos/src/tests/cosmos-ibc-swap-action-service.test.ts deleted file mode 100644 index 74f64d8716463..0000000000000 --- a/packages/plugin-cosmos/src/tests/cosmos-ibc-swap-action-service.test.ts +++ /dev/null @@ -1,300 +0,0 @@ -import { describe, it, expect, vi, beforeEach, Mock } from "vitest"; -import { IBCSwapAction } from "../actions/ibc-swap/services/ibc-swap-action-service.ts"; -import { HandlerCallback } from "@elizaos/core"; -import { getAssetBySymbol, getDenomBySymbol } from "@chain-registry/utils"; -import { Asset } from "@chain-registry/types"; - -vi.mock("@cosmjs/cosmwasm-stargate", () => ({ - SigningCosmWasmClient: { - connectWithSigner: vi.fn(), - }, -})); - -vi.mock("@chain-registry/utils", () => ({ - getAssetBySymbol: vi.fn(), - getChainByChainName: vi.fn((_, chainName: string) => { - if (chainName === "source-chain") - return { chain_id: "source-chain-id" }; - return { chain_id: "target-chain-id" }; - }), - getChainNameByChainId: vi.fn((_, chainId: string) => { - if (chainId === "source-chain-id") return "source-chain"; - return "target-chain"; - }), - getDenomBySymbol: vi.fn((_, symbol: string) => { - if (symbol === "fromTokenSymbol") return "fromTokenDenom"; - else if (symbol === "toTokenSymbol") return "toTokenDenom"; - }), - getExponentByDenom: vi.fn((_, denom: string) => { - if (denom === "fromTokenDenom") return "6"; - }), - convertDisplayUnitToBaseUnit: vi.fn(() => "1000000"), - getChainByChainId: vi.fn(() => ({ chainId: "target-chain-id" })), -})); -describe("IBCSwapAction", () => { - const mockWalletChains = { - getWalletAddress: vi.fn(), - getSkipClient: vi.fn(), - walletChainsData: {}, - getSigningCosmWasmClient: vi.fn(), - }; - - const mockSkipClient = { - route: vi.fn(), - executeRoute: vi.fn(), - }; - - const params = { - fromChainName: "source-chain", - fromTokenSymbol: "fromTokenSymbol", - fromTokenAmount: "1000", - toChainName: "target-chain", - toTokenSymbol: "toTokenSymbol", - }; - - const _callback: Mock = vi.fn(); - - const customChainAssets = []; - - beforeEach(() => { - vi.clearAllMocks(); - (mockWalletChains.getSkipClient as Mock).mockReturnValue( - mockSkipClient - ); - }); - - it("should complete", async () => { - // Mock wallet addresses - (mockWalletChains.getWalletAddress as Mock) - .mockImplementationOnce(() => "source-chain-address") - .mockImplementationOnce(() => "target-chain-address"); - - // Mock route call, including `requiredChainAddresses` - (mockSkipClient.route as Mock).mockResolvedValue({ - estimatedAmountOut: "123", - estimatedFees: "1", - estimatedRouteDurationSeconds: "1", - requiredChainAddresses: ["source-chain-id", "target-chain-id"], - }); - - // Mock asset symbols - (getAssetBySymbol as Mock).mockImplementation((symbol: string) => { - if (symbol === "fromTokenSymbol") { - return { asset: { base: "fromTokenDenom" } }; - } - if (symbol === "toTokenSymbol") { - return { asset: { base: "toTokenDenom" } }; - } - return null; - }); - - // Mock `executeRoute` to simulate transaction completion - (mockSkipClient.executeRoute as Mock).mockImplementation( - ({ onTransactionCompleted }) => { - onTransactionCompleted("target-chain-id", "mockTxHash", { - state: "success", - }); - } - ); - - const ibcSwapAction = new IBCSwapAction(mockWalletChains); - - // Execute the action - const result = await ibcSwapAction.execute( - params, - customChainAssets, - _callback - ); - - // Validate the route call - expect(mockSkipClient.route).toHaveBeenCalledWith({ - smartSwapOptions: {}, - amountOut: "1000000", - sourceAssetDenom: "fromTokenDenom", - sourceAssetChainID: "source-chain-id", - destAssetDenom: "toTokenDenom", - destAssetChainID: "target-chain-id", - }); - - // Validate the callback - expect(_callback).toHaveBeenCalledWith({ - text: `Expected swap result: 123 ${params.toTokenSymbol}, \nEstimated Fee: 1. \nEstimated time: 1`, - }); - - // Validate the final result - expect(result).toEqual({ - fromChainName: params.fromChainName, - fromTokenAmount: params.fromTokenAmount, - fromTokenSymbol: params.fromTokenSymbol, - toChainName: params.toChainName, - toTokenSymbol: params.toTokenSymbol, - txHash: "mockTxHash", - status: "success", - }); - }); - - it("should throw an error if route fails", async () => { - // Mock route failure - (mockSkipClient.route as Mock).mockRejectedValue( - new Error("Route failed") - ); - - const ibcSwapAction = new IBCSwapAction(mockWalletChains); - - await expect( - ibcSwapAction.execute(params, customChainAssets, _callback) - ).rejects.toThrow("Route failed"); - }); - - it("should handle transaction failure during execution", async () => { - // Mock successful route call - (mockSkipClient.route as Mock).mockResolvedValue({ - estimatedAmountOut: "123", - estimatedFees: "1", - estimatedRouteDurationSeconds: "1", - requiredChainAddresses: ["source-chain-id", "target-chain-id"], - }); - - // Mock transaction failure - (mockSkipClient.executeRoute as Mock).mockImplementation( - ({ onTransactionCompleted }) => { - onTransactionCompleted("target-chain-id", "mockTxHash", { - state: "failure", - }); - } - ); - - const ibcSwapAction = new IBCSwapAction(mockWalletChains); - - const result = await ibcSwapAction.execute( - params, - customChainAssets, - _callback - ); - - // Validate the final result - expect(result).toEqual({ - status: "failure", - fromChainName: params.fromChainName, - fromTokenAmount: params.fromTokenAmount, - fromTokenSymbol: params.fromTokenSymbol, - toChainName: params.toChainName, - toTokenSymbol: params.toTokenSymbol, - txHash: "mockTxHash", - }); - - }); - - it("should complete without callback", async () => { - // Mock wallet addresses - (mockWalletChains.getWalletAddress as Mock) - .mockImplementationOnce(() => "source-chain-address") - .mockImplementationOnce(() => "target-chain-address"); - - // Mock route call - (mockSkipClient.route as Mock).mockResolvedValue({ - estimatedAmountOut: "123", - estimatedFees: "1", - estimatedRouteDurationSeconds: "1", - requiredChainAddresses: ["source-chain-id", "target-chain-id"], - }); - - // Mock transaction completion - (mockSkipClient.executeRoute as Mock).mockImplementation( - ({ onTransactionCompleted }) => { - onTransactionCompleted("target-chain-id", "mockTxHash", { - state: "success", - }); - } - ); - - const ibcSwapAction = new IBCSwapAction(mockWalletChains); - - // Execute without callback - const result = await ibcSwapAction.execute(params, customChainAssets); - - expect(result).toEqual({ - "status": "success", - fromChainName: params.fromChainName, - fromTokenAmount: params.fromTokenAmount, - fromTokenSymbol: params.fromTokenSymbol, - toChainName: params.toChainName, - toTokenSymbol: params.toTokenSymbol, - txHash: "mockTxHash", - }); - }); - - it("should use custom chain assets when provided", async () => { - const customAssets = [ - { - chain_name: "source-chain", - assets: [ - { - symbol: "fromTokenSymbol", - denom: "customFromDenom", - } as unknown as Asset, - ], - }, - { - chain_name: "target-chain", - assets: [ - { - symbol: "toTokenSymbol", - denom: "customToDenom", - } as unknown as Asset, - ], - }, - ]; - - (getDenomBySymbol as Mock).mockImplementation((assets, symbol) => { - if (symbol === "fromTokenSymbol") return "customFromDenom"; - if (symbol === "toTokenSymbol") return "customToDenom"; - }); - - // Mock route call - (mockSkipClient.route as Mock).mockResolvedValue({ - estimatedAmountOut: "123", - estimatedFees: "1", - estimatedRouteDurationSeconds: "1", - requiredChainAddresses: ["source-chain-id", "target-chain-id"], - }); - - // Mock transaction completion - (mockSkipClient.executeRoute as Mock).mockImplementation( - ({ onTransactionCompleted }) => { - onTransactionCompleted("target-chain-id", "mockTxHash", { - state: "success", - }); - } - ); - - const ibcSwapAction = new IBCSwapAction(mockWalletChains); - - const result = await ibcSwapAction.execute( - params, - customAssets, - _callback - ); - - expect(result).toEqual({ - "status": "success", - fromChainName: params.fromChainName, - fromTokenAmount: params.fromTokenAmount, - fromTokenSymbol: params.fromTokenSymbol, - toChainName: params.toChainName, - toTokenSymbol: params.toTokenSymbol, - txHash: "mockTxHash", - }); - - expect(getDenomBySymbol).toHaveBeenCalledWith( - expect.anything(), - "fromTokenSymbol", - "source-chain" - ); - expect(getDenomBySymbol).toHaveBeenCalledWith( - expect.anything(), - "toTokenSymbol", - "target-chain" - ); - }); -}); diff --git a/packages/plugin-cosmos/src/tests/cosmos-ibc-transfer-action-service.test.ts b/packages/plugin-cosmos/src/tests/cosmos-ibc-transfer-action-service.test.ts deleted file mode 100644 index 6912287cf92b2..0000000000000 --- a/packages/plugin-cosmos/src/tests/cosmos-ibc-transfer-action-service.test.ts +++ /dev/null @@ -1,185 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from "vitest"; -import { IBCTransferAction } from "../actions/ibc-transfer/services/ibc-transfer-action-service"; -import { assets } from "chain-registry"; -import * as CosmosAssetsHelpers from "../shared/helpers/cosmos-assets"; -import { getAssetBySymbol } from "@chain-registry/utils"; -import { getAvailableAssets } from "../shared/helpers/cosmos-assets"; - -vi.mock("@chain-registry/utils", () => ({ - getAssetBySymbol: vi.fn(), - getChainByChainName: vi.fn((_, chainName: string) => { - if (chainName === "test-chain") return { chain_id: "source-chain-id" }; - return { chain_id: "target-chain-id" }; - }), - convertDisplayUnitToBaseUnit: vi.fn(() => "1"), - getChainByChainId: vi.fn(() => ({ chainId: "target-chain-id" })), -})); - -vi.mock("../shared/helpers/cosmos-assets", () => ({ - getAvailableAssets: vi.fn(), -})); - -describe("IBCTransferAction", () => { - const mockWalletChains = { - getWalletAddress: vi.fn(), - getSkipClient: vi.fn(), - }; - - const mockBridgeDenomProvider = vi.fn(); - const mockSkipClient = { - route: vi.fn(), - executeRoute: vi.fn(), - }; - - const params = { - chainName: "test-chain", - targetChainName: "target-chain", - symbol: "ATOM", - amount: "10", - toAddress: "cosmos1receiveraddress", - }; - - const customChainAssets = []; - - beforeEach(() => { - vi.clearAllMocks(); - mockWalletChains.getSkipClient.mockReturnValue(mockSkipClient); - }); - - it("throws an error if sender address is not available", async () => { - mockWalletChains.getWalletAddress.mockResolvedValue(null); - // @ts-expect-error --- ... - const ibcTransferAction = new IBCTransferAction(mockWalletChains); - - await expect( - ibcTransferAction.execute( - params, - mockBridgeDenomProvider, - customChainAssets - ) - ).rejects.toThrow( - `Cannot get wallet address for chain ${params.chainName}` - ); - }); - - it("throws an error if receiver address is missing", async () => { - const invalidParams = { ...params, toAddress: undefined }; - mockWalletChains.getWalletAddress.mockResolvedValue( - "cosmos1senderaddress" - ); - // @ts-expect-error --- ... - const ibcTransferAction = new IBCTransferAction(mockWalletChains); - - await expect( - ibcTransferAction.execute( - invalidParams, - mockBridgeDenomProvider, - customChainAssets - ) - ).rejects.toThrow("No receiver address"); - }); - - it("throws an error if target chain name is missing", async () => { - const invalidParams = { ...params, targetChainName: undefined }; - mockWalletChains.getWalletAddress.mockResolvedValue( - "cosmos1senderaddress" - ); - // @ts-expect-error --- ... - const ibcTransferAction = new IBCTransferAction(mockWalletChains); - - await expect( - ibcTransferAction.execute( - invalidParams, - mockBridgeDenomProvider, - customChainAssets - ) - ).rejects.toThrow("No target chain name"); - }); - - it("throws an error if symbol is missing", async () => { - const invalidParams = { ...params, symbol: undefined }; - mockWalletChains.getWalletAddress.mockResolvedValue( - "cosmos1senderaddress" - ); - // @ts-expect-error --- ... - const ibcTransferAction = new IBCTransferAction(mockWalletChains); - - await expect( - ibcTransferAction.execute( - invalidParams, - mockBridgeDenomProvider, - customChainAssets - ) - ).rejects.toThrow("No symbol"); - }); - - it("throws an error if asset cannot be found", async () => { - mockWalletChains.getWalletAddress.mockResolvedValue( - "cosmos1senderaddress" - ); - - vi.spyOn(CosmosAssetsHelpers, "getAvailableAssets").mockReturnValue([]); - // @ts-expect-error --- ... - getAssetBySymbol.mockReturnValue({ - base: null, - }); - - // @ts-expect-error --- ... - const ibcTransferAction = new IBCTransferAction(mockWalletChains); - - await expect( - ibcTransferAction.execute( - params, - mockBridgeDenomProvider, - customChainAssets - ) - ).rejects.toThrow("Cannot find asset"); - }); - - it("executes the IBC transfer successfully", async () => { - const senderAddress = "cosmos1senderaddress"; - const targetChainId = "target-chain-id"; - const sourceChainId = "source-chain-id"; - const mockTxHash = "mock_tx_hash_123"; - - mockWalletChains.getWalletAddress.mockResolvedValue(senderAddress); - // @ts-expect-error --- ... - getAvailableAssets.mockReturnValue(assets); - - // @ts-expect-error --- ... - getAssetBySymbol.mockReturnValue({ - base: "uatom", - }); - const params = { - chainName: "test-chain", - targetChainName: "target-chain", - symbol: "ATOM", - amount: "10", - toAddress: "cosmos1receiveraddress", - }; - - mockBridgeDenomProvider.mockResolvedValue({ denom: "uatom" }); - mockSkipClient.route.mockResolvedValue({ - requiredChainAddresses: [sourceChainId, targetChainId], - }); - mockSkipClient.executeRoute.mockImplementation(async ({ onTransactionCompleted }) => { - await onTransactionCompleted(null, mockTxHash); - }); - - // @ts-expect-error --- ... - const ibcTransferAction = new IBCTransferAction(mockWalletChains); - - const result = await ibcTransferAction.execute( - params, - mockBridgeDenomProvider, - customChainAssets - ); - - expect(result).toEqual({ - from: senderAddress, - to: params.toAddress, - txHash: mockTxHash, - }); - expect(mockSkipClient.executeRoute).toHaveBeenCalled(); - }); -}); diff --git a/packages/plugin-cosmos/src/tests/cosmos-transaction-fee-estimator.test.ts b/packages/plugin-cosmos/src/tests/cosmos-transaction-fee-estimator.test.ts deleted file mode 100644 index 17c891a17405d..0000000000000 --- a/packages/plugin-cosmos/src/tests/cosmos-transaction-fee-estimator.test.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { describe, it, expect, vi, beforeEach, type Mock } from "vitest"; -import type { SigningCosmWasmClient } from "@cosmjs/cosmwasm-stargate"; -import { CosmosTransactionFeeEstimator } from "../shared/services/cosmos-transaction-fee-estimator"; - -vi.mock("@cosmjs/cosmwasm-stargate", () => ({ - SigningCosmWasmClient: { - simulate: vi.fn(), - }, -})); - -vi.mock("../shared/helpers/cosmos-messages", () => ({ - generateIbcTransferMessage: vi.fn(), -})); - -describe("FeeEstimator", () => { - let mockSigningCosmWasmClient: SigningCosmWasmClient; - - beforeEach(() => { - mockSigningCosmWasmClient = { - simulate: vi.fn(), - } as unknown as SigningCosmWasmClient; - - vi.clearAllMocks(); - }); - - it("should estimate gas for sending tokens successfully", async () => { - const mockGasEstimation = 200000; - - (mockSigningCosmWasmClient.simulate as Mock).mockResolvedValue( - mockGasEstimation - ); - - const senderAddress = "cosmos1senderaddress"; - const recipientAddress = "cosmos1recipientaddress"; - const amount = [{ denom: "uatom", amount: "1000000" }]; - const memo = "Test memo"; - - const estimatedGas = - await CosmosTransactionFeeEstimator.estimateGasForCoinTransfer( - mockSigningCosmWasmClient, - senderAddress, - recipientAddress, - amount, - memo - ); - - // Add 20% to the estimated gas to make sure we have enough gas to cover the transaction - expect(estimatedGas).toBe(mockGasEstimation + mockGasEstimation * 0.2); - expect(mockSigningCosmWasmClient.simulate).toHaveBeenCalledWith( - senderAddress, - [ - { - typeUrl: "/cosmos.bank.v1beta1.MsgSend", - value: { - fromAddress: senderAddress, - toAddress: recipientAddress, - amount: [...amount], - }, - }, - ], - memo - ); - }); - - it("should throw an error if gas estimation fails", async () => { - (mockSigningCosmWasmClient.simulate as Mock).mockRejectedValue( - new Error("Gas estimation failed") - ); - - const senderAddress = "cosmos1senderaddress"; - const recipientAddress = "cosmos1recipientaddress"; - const amount = [{ denom: "uatom", amount: "1000000" }]; - - await expect( - CosmosTransactionFeeEstimator.estimateGasForCoinTransfer( - mockSigningCosmWasmClient, - senderAddress, - recipientAddress, - amount - ) - ).rejects.toThrow("Gas estimation failed"); - - expect(mockSigningCosmWasmClient.simulate).toHaveBeenCalledWith( - senderAddress, - [ - { - typeUrl: "/cosmos.bank.v1beta1.MsgSend", - value: { - fromAddress: senderAddress, - toAddress: recipientAddress, - amount: [...amount], - }, - }, - ], - "" - ); - }); -}); diff --git a/packages/plugin-cosmos/src/tests/cosmos-transfer-action-service.test.ts b/packages/plugin-cosmos/src/tests/cosmos-transfer-action-service.test.ts deleted file mode 100644 index ff39235235921..0000000000000 --- a/packages/plugin-cosmos/src/tests/cosmos-transfer-action-service.test.ts +++ /dev/null @@ -1,189 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from "vitest"; -import { CosmosTransferActionService } from "../actions/transfer/services/cosmos-transfer-action-service.ts"; -import type { AssetList } from "@chain-registry/types"; - -vi.mock("@cosmjs/cosmwasm-stargate", () => ({ - SigningCosmWasmClient: { - connectWithSigner: vi.fn(), - }, -})); - -vi.mock("@chain-registry/utils", () => ({ - getAssetBySymbol: vi.fn().mockResolvedValue({ base: "uom" }), - convertDisplayUnitToBaseUnit: vi.fn().mockResolvedValue("OM"), -})); - -vi.mock("../shared/services/cosmos-transaction-fee-estimator.ts", () => ({ - CosmosTransactionFeeEstimator: { - estimateGasForCoinTransfer: vi.fn().mockResolvedValue(1000), - }, -})); - -vi.mock("../shared/helpers/cosmos-transaction-receipt.ts", () => ({ - getPaidFeeFromReceipt: vi.fn().mockReturnValue(1000), -})); - -vi.mock("../../../shared/helpers/cosmos-assets.ts", () => ({ - getAvailableAssets: vi.fn().mockResolvedValue([] as unknown as AssetList[]), -})); - -describe("CosmosTransferActionService", () => { - describe("Execute", () => { - const mockSigningCosmWasmClient = { - sendTokens: vi.fn().mockResolvedValue({ - transactionHash: "mockTxHash", - }), - }; - - const mockCosmosWalletChains = { - walletChainsData: {}, - getWalletAddress: vi.fn().mockReturnValue("senderAddress"), - getSigningCosmWasmClient: vi - .fn() - .mockReturnValue(mockSigningCosmWasmClient), - }; - - beforeEach(() => { - vi.clearAllMocks(); - }); - - it("should handle transfer successfully without custom chain assets passed", async () => { - const mockCosmosTransferParams = { - chainName: "test", - symbol: "ts", - amount: "1234", - toAddress: "receiverAddress", - }; - - const cosmosTransferActionService = new CosmosTransferActionService( - mockCosmosWalletChains - ); - - const expectedResult = { - from: "senderAddress", - to: "receiverAddress", - gasPaid: 1000, - txHash: "mockTxHash", - }; - - await expect( - cosmosTransferActionService.execute(mockCosmosTransferParams) - ).resolves.toEqual(expectedResult); - }); - - it("should handle transfer successfully with custom chain assets passed", async () => { - const mockCosmosTransferParams = { - chainName: "test", - symbol: "ts", - amount: "1234", - toAddress: "receiverAddress", - }; - - const mockCustomChainAssets: AssetList[] = [ - { - chain_name: "cosmos", - assets: [ - { - denom_units: [{ denom: "ucustom", exponent: 0 }], - base: "ucustom", - symbol: "CUS", - display: "custom", - type_asset: "unknown", - name: "asset", - }, - ], - }, - ]; - - const cosmosTransferActionService = new CosmosTransferActionService( - mockCosmosWalletChains - ); - - const expectedResult = { - from: "senderAddress", - to: "receiverAddress", - gasPaid: 1000, - txHash: "mockTxHash", - }; - - await expect( - cosmosTransferActionService.execute( - mockCosmosTransferParams, - mockCustomChainAssets - ) - ).resolves.toEqual(expectedResult); - }); - - it("should throw an error if no receiver address is provided", async () => { - const mockCosmosTransferParams = { - chainName: "test", - symbol: "ts", - amount: "1234", - }; - - const cosmosTransferActionService = new CosmosTransferActionService( - mockCosmosWalletChains - ); - - await expect( - cosmosTransferActionService.execute(mockCosmosTransferParams) - ).rejects.toThrow("No receiver address"); - }); - - it("should throw an error if no symbol is provided", async () => { - const mockCosmosTransferParams = { - chainName: "test", - amount: "1234", - toAddress: "address", - }; - - const cosmosTransferActionService = new CosmosTransferActionService( - mockCosmosWalletChains - ); - - await expect( - cosmosTransferActionService.execute(mockCosmosTransferParams) - ).rejects.toThrow("No symbol"); - }); - - it("should throw an error if transfer fails", async () => { - const mockCosmosTransferParams = { - chainName: "test", - symbol: "ts", - amount: "1234", - toAddress: "receiverAddress", - }; - - mockSigningCosmWasmClient.sendTokens.mockImplementation(() => { - throw new Error("Transaction Failed"); - }); - - const cosmosTransferActionService = new CosmosTransferActionService( - mockCosmosWalletChains - ); - - await expect( - cosmosTransferActionService.execute(mockCosmosTransferParams) - ).rejects.toThrow("Transaction Failed"); - }); - - it("should throw an error invalid chain name is provided", async () => { - const mockCosmosTransferParams = { - chainName: "test", - symbol: "ts", - amount: "1234", - toAddress: "address", - }; - - mockCosmosWalletChains.getWalletAddress.mockResolvedValue(null); - - const cosmosTransferActionService = new CosmosTransferActionService( - mockCosmosWalletChains - ); - - await expect( - cosmosTransferActionService.execute(mockCosmosTransferParams) - ).rejects.toThrow("Cannot get wallet address for chain"); - }); - }); -}); diff --git a/packages/plugin-cosmos/src/tests/cosmos-wallet-chains-data.test.ts b/packages/plugin-cosmos/src/tests/cosmos-wallet-chains-data.test.ts deleted file mode 100644 index ce9215e6a3935..0000000000000 --- a/packages/plugin-cosmos/src/tests/cosmos-wallet-chains-data.test.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { vi, expect, it, describe, beforeEach, type Mock } from "vitest"; -import type { Chain } from "@chain-registry/types"; -import { getChainByChainName } from "@chain-registry/utils"; -import { CosmosWallet } from "../shared/entities/cosmos-wallet.ts"; -import { getAvailableChains } from "../shared/helpers/cosmos-chains.ts"; -import { SigningCosmWasmClient } from "@cosmjs/cosmwasm-stargate"; -import { CosmosWalletChains } from "../shared/entities/cosmos-wallet-chains-data.ts"; - -vi.mock("@chain-registry/utils", () => ({ - getChainByChainName: vi.fn(), - getAvailableChains: vi.fn(), -})); - -vi.mock("@cosmjs/cosmwasm-stargate", () => ({ - SigningCosmWasmClient: { - connectWithSigner: vi.fn(), - }, -})); - -vi.mock("@skip-go/client", () => ({ - SkipClient: vi.fn(() => ({})), -})); - -vi.mock("../shared/entities/cosmos-wallet.ts", () => ({ - CosmosWallet: { - create: vi.fn(), - }, -})); - -vi.mock("../shared/helpers/cosmos-chains.ts", () => { - return { - getAvailableChains: vi.fn(), - }; -}); - -describe("CosmosWalletChains", () => { - let mockMnemonic: string; - let mockChains: Chain[]; - - beforeEach(() => { - vi.clearAllMocks(); - - mockMnemonic = "test mnemonic"; - - mockChains = [ - { - name: "chain1", - bech32_prefix: "cosmos", - apis: { - rpc: [ - { - address: "mockedRpcAddress", - }, - ], - }, - } as unknown as Chain, - ]; - }); - - it("should create a CosmosWalletChains instance", async () => { - vi.mocked(getAvailableChains).mockReturnValue(mockChains); - vi.mocked(getChainByChainName).mockReturnValue(mockChains[0]); - - const mockCosmosWalletCreate = { - directSecp256k1HdWallet: {}, - getWalletAddress: vi.fn().mockResolvedValue("mockedAddress"), - getWalletBalances: vi.fn(), - }; - - (CosmosWallet.create as Mock).mockResolvedValue(mockCosmosWalletCreate); - - (SigningCosmWasmClient.connectWithSigner as Mock).mockResolvedValue({}); - - const availableChains = ["chain1"]; - - const expectedResult = { - walletChainsData: { - chain1: { - wallet: mockCosmosWalletCreate, - signingCosmWasmClient: {}, - skipClient: {}, - }, - }, - }; - - const result = await CosmosWalletChains.create( - mockMnemonic, - availableChains - ); - - expect(result).toEqual(expectedResult); - }); -}); diff --git a/packages/plugin-cosmos/src/tests/ibc-swap-utils.test.ts b/packages/plugin-cosmos/src/tests/ibc-swap-utils.test.ts deleted file mode 100644 index d731c3d1b9606..0000000000000 --- a/packages/plugin-cosmos/src/tests/ibc-swap-utils.test.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { describe, it, expect, vi } from "vitest"; - -import { prepareAmbiguityErrorMessage } from "../actions/ibc-swap/services/ibc-swap-utils.ts"; - -vi.mock("chain-registry", () => ({ - assets: [ - { - chain_name: "test-chain", - assets: [ - { - symbol: "ATOM", - description: "Cosmos Hub token", - base: "atom-base", - }, - { - symbol: "ATOM", - description: "Wrapped Cosmos token", - base: "wrapped-atom-base", - }, - ], - }, - ], -})); - -describe("Utility Functions Tests", () => { - describe("prepareAmbiguityErrorMessage", () => { - it("should return an error message for ambiguous assets", () => { - const result = prepareAmbiguityErrorMessage("ATOM", "test-chain"); - - expect(result).toContain("Error occured. Swap was not performed."); - expect(result).toContain("ATOM"); - expect(result).toContain("test-chain"); - expect(result).toContain( - "Symbol: ATOM Desc: Cosmos Hub token Denom: atom-base" - ); - expect(result).toContain( - "Symbol: ATOM Desc: Wrapped Cosmos token Denom: wrapped-atom-base" - ); - }); - }); -}); diff --git a/packages/plugin-cosmos/src/tests/paid-fee.test.ts b/packages/plugin-cosmos/src/tests/paid-fee.test.ts deleted file mode 100644 index 2fa3f0dfecc1e..0000000000000 --- a/packages/plugin-cosmos/src/tests/paid-fee.test.ts +++ /dev/null @@ -1,101 +0,0 @@ -import { describe, it, expect } from "vitest"; -import type { - DeliverTxResponse, - ExecuteResult, -} from "@cosmjs/cosmwasm-stargate"; -import { getPaidFeeFromReceipt } from "../shared/helpers/cosmos-transaction-receipt"; - -describe("PaidFee", () => { - describe("getPaidFeeFromReceipt", () => { - it("should return the correct fee from a matching event", () => { - const receipt: ExecuteResult = { - logs: [], - transactionHash: "", - events: [ - { - type: "fee_pay", - attributes: [ - { key: "fee", value: "100uatom" }, - { key: "other_key", value: "200" }, - ], - }, - { - type: "tip_refund", - attributes: [{ key: "tip", value: "50uatom" }], - }, - ], - height: 0, - gasUsed: BigInt(0), - gasWanted: BigInt(0), - }; - - const result = getPaidFeeFromReceipt(receipt); - - expect(result).toBe(150); - }); - - it("should return 0 if no matching events are present", () => { - const receipt: DeliverTxResponse = { - height: 0, - transactionHash: "", - gasUsed: BigInt(0), - gasWanted: BigInt(0), - code: 0, - events: [ - { - type: "unrelated_event", - attributes: [{ key: "some_key", value: "123" }], - }, - ], - rawLog: "", - msgResponses: [], - txIndex: 0, - }; - - const result = getPaidFeeFromReceipt(receipt); - - expect(result).toBe(0); - }); - - it("should ignore invalid number values", () => { - const receipt: ExecuteResult = { - logs: [], - transactionHash: "", - events: [ - { - type: "fee_pay", - attributes: [ - { key: "fee", value: "invalid_value" }, - { key: "fee", value: "200uatom" }, - ], - }, - ], - height: 0, - gasUsed: BigInt(0), - gasWanted: BigInt(0), - }; - - const result = getPaidFeeFromReceipt(receipt); - - expect(result).toBe(200); - }); - - it("should handle an empty receipt gracefully", () => { - const receipt: DeliverTxResponse = { - height: 0, - transactionHash: "", - gasUsed: BigInt(0), - gasWanted: BigInt(0), - code: 0, - events: [], - rawLog: "", - msgResponses: [], - txIndex: 0, - }; - - const result = getPaidFeeFromReceipt(receipt); - - expect(result).toBe(0); - }); - }); -}); diff --git a/packages/plugin-cosmos/src/tests/skip-api-assets-from-source-fetcher.test.ts b/packages/plugin-cosmos/src/tests/skip-api-assets-from-source-fetcher.test.ts deleted file mode 100644 index fe69a7ad63d14..0000000000000 --- a/packages/plugin-cosmos/src/tests/skip-api-assets-from-source-fetcher.test.ts +++ /dev/null @@ -1,101 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from "vitest"; -import axios from "axios"; -import { SkipApiAssetsFromSourceFetcher } from "../shared/services/skip-api/assets-from-source-fetcher/skip-api-assets-from-source-fetcher"; - -vi.mock("axios"); - -describe("SkipApiAssetsFromSourceFetcher", () => { - let fetcher: SkipApiAssetsFromSourceFetcher; - - beforeEach(() => { - fetcher = SkipApiAssetsFromSourceFetcher.getInstance(); - vi.clearAllMocks(); - }); - - it("should return the same instance from getInstance", () => { - const fetcher1 = SkipApiAssetsFromSourceFetcher.getInstance(); - const fetcher2 = SkipApiAssetsFromSourceFetcher.getInstance(); - expect(fetcher1).toBe(fetcher2); - }); - - it("should use cache when data is already fetched", async () => { - const mockResponse = { - dest_assets: { - someKey: { - assets: [ - { - denom: "atom", - chain_id: "cosmos", - origin_denom: "atom", - origin_chain_id: "cosmos", - trace: "someTrace", - symbol: "ATOM", - name: "Cosmos Atom", - logo_uri: "http://someurl.com/logo.png", - decimals: 6, - recommended_symbol: "ATOM", - }, - ], - }, - }, - }; - - // @ts-expect-error -- ... - axios.post.mockResolvedValueOnce({ data: mockResponse }); - - const sourceAssetDenom = "atom"; - const sourceAssetChainId = "cosmos"; - - await fetcher.fetch(sourceAssetDenom, sourceAssetChainId); - - expect(axios.post).toHaveBeenCalledTimes(1); - - await fetcher.fetch(sourceAssetDenom, sourceAssetChainId); - expect(axios.post).toHaveBeenCalledTimes(1); - }); - - it("should fetch and cache data correctly", async () => { - const mockResponse = { - dest_assets: { - someKey: { - assets: [ - { - denom: "atom", - chain_id: "cosmos", - origin_denom: "atom", - origin_chain_id: "cosmos", - trace: "someTrace", - symbol: "ATOM", - name: "Cosmos Atom", - logo_uri: "http://someurl.com/logo.png", - decimals: 6, - recommended_symbol: "ATOM", - }, - ], - }, - }, - }; - - // @ts-expect-error -- ... - axios.post.mockResolvedValueOnce({ data: mockResponse }); - - const sourceAssetDenom = "atom"; - const sourceAssetChainId = "cosmos"; - - const result = await fetcher.fetch( - sourceAssetDenom, - sourceAssetChainId - ); - - expect(result).toEqual(mockResponse); - - const cacheKey = `${sourceAssetDenom}_${sourceAssetChainId}`; - expect(fetcher["cache"].has(cacheKey)).toBe(true); - - const cachedResult = await fetcher.fetch( - sourceAssetDenom, - sourceAssetChainId - ); - expect(cachedResult).toEqual(mockResponse); - }); -}); diff --git a/packages/plugin-cosmos/tsconfig.json b/packages/plugin-cosmos/tsconfig.json deleted file mode 100644 index 2d8d3fe8181fb..0000000000000 --- a/packages/plugin-cosmos/tsconfig.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "./src", - "typeRoots": [ - "./node_modules/@types", - "./src/types" - ], - "declaration": true - }, - "include": [ - "src" - ] -} \ No newline at end of file diff --git a/packages/plugin-cosmos/tsup.config.ts b/packages/plugin-cosmos/tsup.config.ts deleted file mode 100644 index 12d9ae64f9695..0000000000000 --- a/packages/plugin-cosmos/tsup.config.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "chain-registry", - "bignumber.js", - "@cosmjs/stargate", - "@cosmjs/proto-signing", - "@cosmjs/cosmwasm-stargate", - "zod", - ], -}); diff --git a/packages/plugin-cronos/README.md b/packages/plugin-cronos/README.md deleted file mode 100644 index 4d07b0cc5d561..0000000000000 --- a/packages/plugin-cronos/README.md +++ /dev/null @@ -1,257 +0,0 @@ -# @elizaos/plugin-cronos - -Cronos plugin for Eliza, extending the EVM plugin functionality. - -## Supported Networks - -### Mainnet -- Cronos Mainnet (Chain ID: 25) - - RPC Endpoint: https://evm.cronos.org/ - - Explorer: https://explorer.cronos.org/ - - Native Token: CRO - -### Testnet -- Cronos Testnet 3 (Chain ID: 338) - - RPC Endpoint: https://evm-t3.cronos.org/ - - Explorer: https://cronos.org/explorer/testnet3 - - Native Token: TCRO - -## Installation - -```bash -pnpm add @elizaos/plugin-cronos -``` - -## Usage - -### Basic Setup -```typescript -import { cronosPlugin } from "@elizaos/plugin-cronos"; - -// Use the plugin in your Eliza configuration -const config = { - plugins: [cronosPlugin], - // ... rest of your config -}; -``` - -### Character Configuration Guide - -Create a `your-character.character.json` file with the following structure: - -```json -{ - "name": "YourCharacterName", - "plugins": ["@elizaos/plugin-cronos"], - "clients": ["telegram"], - "modelProvider": "openai", - "settings": { - "secrets": {}, - "chains": { - "evm": ["cronos", "cronosTestnet"] - } - }, - "system": "Primary function is to execute token transfers and check balances on Cronos chain.", - "actions": { - "SEND_TOKEN": { - "enabled": true, - "priority": 1, - "force": true, - "schema": { - "type": "object", - "properties": { - "fromChain": { - "type": "string", - "description": "The chain to execute the transfer on", - "enum": ["cronos", "cronosTestnet"] - }, - "toAddress": { - "type": "string", - "description": "The recipient's wallet address", - "pattern": "^0x[a-fA-F0-9]{40}$" - }, - "amount": { - "type": "string", - "description": "The amount of tokens to transfer", - "pattern": "^[0-9]*(\\.[0-9]+)?$" - } - }, - "required": ["fromChain", "toAddress", "amount"] - }, - "triggers": [ - "send * CRO to *", - "transfer * CRO to *" - ], - "examples": [ - { - "input": "Send 0.1 CRO to 0x...", - "output": { - "fromChain": "cronos", - "toAddress": "0x...", - "amount": "0.1" - } - } - ] - }, - "CHECK_BALANCE": { - "enabled": true, - "priority": 1, - "force": true, - "schema": { - "type": "object", - "properties": { - "chain": { - "type": "string", - "description": "The chain to check balance on", - "enum": ["cronos", "cronosTestnet"] - } - }, - "required": ["chain"] - }, - "triggers": [ - "check balance", - "show balance", - "what's my balance", - "how much CRO do I have", - "check balance on *", - "show balance on *" - ], - "examples": [ - { - "input": "check balance", - "output": { - "chain": "cronos" - } - }, - { - "input": "what's my balance on testnet", - "output": { - "chain": "cronosTestnet" - } - } - ] - } - }, - "messageExamples": [ - [ - { - "user": "{{user1}}", - "content": { - "text": "Send 100 CRO to 0x..." - } - }, - { - "user": "YourCharacterName", - "content": { - "text": "Processing token transfer...", - "action": "SEND_TOKEN" - } - } - ], - [ - { - "user": "{{user1}}", - "content": { - "text": "What's my balance?" - } - }, - { - "user": "YourCharacterName", - "content": { - "text": "Checking your balance...", - "action": "CHECK_BALANCE" - } - } - ] - ] -} -``` - -#### Key Configuration Fields: - -1. **Basic Setup** - - `name`: Your character's name - - `plugins`: Include `@elizaos/plugin-cronos` - - `clients`: Supported client platforms - -2. **Chain Settings** - - Configure both mainnet and testnet in `settings.chains.evm` - - Available options: `"cronos"` (mainnet) and `"cronosTestnet"` - -3. **Action Configuration** - - `SEND_TOKEN`: Action for token transfers - - `CHECK_BALANCE`: Action for checking wallet balance - - `schema`: Defines the required parameters for each action - - `triggers`: Phrases that activate the actions - - `examples`: Sample inputs and outputs - -4. **Message Examples** - - Provide example interactions - - Show how actions are triggered - - Demonstrate expected responses - -### Action Examples -``` -// Send tokens on mainnet -"Send 0.1 CRO to 0x..." use mainnet - -// Send tokens on testnet -"Send 0.1 TCRO to 0x..." use testnet - -// Check balance on mainnet -"check balance" -"what's my balance" -"how much CRO do I have" - -// Check balance on testnet -"check balance on testnet" -"what's my balance on testnet" -``` - -## Features - -- All standard EVM functionality inherited from @elizaos/plugin-evm -- Preconfigured for both Cronos Mainnet and Testnet -- Native CRO/TCRO token support -- Automated token transfer actions -- Balance checking functionality -- Built-in chain configuration - -## Environment Variables - -Required environment variable for transactions: - -```env -# Wallet private key (Required, must start with 0x) -CRONOS_PRIVATE_KEY=0x... -``` - -### Security Warnings ⚠️ - -- **NEVER** commit private keys to version control -- **NEVER** share private keys with anyone -- **ALWAYS** use environment variables or secure key management -- Use separate keys for mainnet and testnet -- Monitor your wallet for unauthorized transactions - -### Setup - -1. Create `.env` file: -```env -CRONOS_PRIVATE_KEY=0x... # Mainnet -``` - -2. For testnet development, use `.env.local`: -```env -CRONOS_PRIVATE_KEY=0x... # Testnet only -``` - -3. Add to `.gitignore`: -``` -.env -.env.* -``` - -## License - -MIT \ No newline at end of file diff --git a/packages/plugin-cronos/__tests__/actions/balance.test.ts b/packages/plugin-cronos/__tests__/actions/balance.test.ts deleted file mode 100644 index 71e0534afd854..0000000000000 --- a/packages/plugin-cronos/__tests__/actions/balance.test.ts +++ /dev/null @@ -1,152 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { balanceAction } from '../../src/actions/balance'; -import { - type IAgentRuntime, - type Memory, - ModelClass, - ModelProviderName, - type State, - type HandlerCallback, -} from '@elizaos/core'; -import * as core from '@elizaos/core'; - -// Mock generateObject -vi.mock('@elizaos/core', async () => { - const actual = await vi.importActual('@elizaos/core'); - return { - ...actual, - generateObject: vi.fn().mockResolvedValue({ - object: { - chain: 'cronos', - address: '0x1234567890123456789012345678901234567890' - } - }), - }; -}); - -// Mock wallet provider -vi.mock('../../src/providers/wallet', () => ({ - initCronosWalletProvider: vi.fn().mockReturnValue({ - switchChain: vi.fn(), - getWalletClient: vi.fn(), - getAddressBalance: vi.fn().mockResolvedValue('1.0'), - }), -})); - -describe('balance action', () => { - const mockRuntime: IAgentRuntime = { - getSetting: vi.fn().mockReturnValue('0x1234567890123456789012345678901234567890123456789012345678901234'), - composeState: vi.fn().mockResolvedValue({}), - updateRecentMessageState: vi.fn().mockResolvedValue({}), - generateText: vi.fn(), - model: { - [ModelClass.SMALL]: { - name: 'gpt-4', - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - stop: [], - }, - [ModelClass.MEDIUM]: { - name: 'gpt-4', - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - stop: [], - }, - [ModelClass.LARGE]: { - name: 'gpt-4', - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - stop: [], - } - }, - modelProvider: ModelProviderName.OPENAI, - }; - - const mockMessage: Memory = { - content: { - text: 'Check balance for 0x1234567890123456789012345678901234567890 on Cronos', - }, - }; - - const mockCallback: HandlerCallback = vi.fn(); - - beforeEach(() => { - vi.clearAllMocks(); - }); - - it('should validate successfully', async () => { - const result = await balanceAction.validate(mockRuntime); - expect(result).toBe(true); - }); - - it('should handle successful balance check', async () => { - const result = await balanceAction.handler( - mockRuntime, - mockMessage, - undefined, - undefined, - mockCallback - ); - - expect(result).toBe(true); - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Balance for 0x1234567890123456789012345678901234567890 on cronos is 1.0 CRO', - content: { - success: true, - balance: '1.0', - chain: 'cronos', - address: '0x1234567890123456789012345678901234567890' - } - }); - }); - - it('should handle balance check with existing state', async () => { - const mockState = {}; - const result = await balanceAction.handler( - mockRuntime, - mockMessage, - mockState, - undefined, - mockCallback - ); - - expect(result).toBe(true); - expect(mockRuntime.updateRecentMessageState).toHaveBeenCalledWith(mockState); - }); - - it('should handle balance check failure', async () => { - const mockError = new Error('Failed to fetch balance'); - const mockProvider = { - switchChain: vi.fn(), - getWalletClient: vi.fn(), - getAddressBalance: vi.fn().mockRejectedValue(mockError), - }; - - // Reset the mock first - const walletModule = await import('../../src/providers/wallet'); - vi.mocked(walletModule.initCronosWalletProvider).mockResolvedValueOnce(mockProvider); - - const result = await balanceAction.handler( - mockRuntime, - mockMessage, - undefined, - undefined, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Error checking balance: Failed to fetch balance', - content: { error: 'Failed to fetch balance' } - }); - }); -}); diff --git a/packages/plugin-cronos/__tests__/actions/transfer.test.ts b/packages/plugin-cronos/__tests__/actions/transfer.test.ts deleted file mode 100644 index 191654428403c..0000000000000 --- a/packages/plugin-cronos/__tests__/actions/transfer.test.ts +++ /dev/null @@ -1,164 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { transferAction } from '../../src/actions/transfer'; -import { - type IAgentRuntime, - type Memory, - ModelClass, - ModelProviderName, - type State, - type HandlerCallback, -} from '@elizaos/core'; -import * as core from '@elizaos/core'; -import { parseEther } from 'viem'; - -// Mock generateObject -vi.mock('@elizaos/core', async () => { - const actual = await vi.importActual('@elizaos/core'); - return { - ...actual, - generateObject: vi.fn().mockResolvedValue({ - object: { - chain: 'cronos', - toAddress: '0x1234567890123456789012345678901234567890', - amount: '1.0' - } - }), - }; -}); - -// Mock wallet provider -vi.mock('../../src/providers/wallet', () => ({ - initCronosWalletProvider: vi.fn().mockReturnValue({ - switchChain: vi.fn(), - getWalletClient: vi.fn().mockReturnValue({ - account: { - address: '0x1234567890123456789012345678901234567890', - }, - sendTransaction: vi.fn().mockResolvedValue('0x123'), - }), - }), -})); - -describe('transfer action', () => { - const mockRuntime: IAgentRuntime = { - getSetting: vi.fn().mockReturnValue('0x1234567890123456789012345678901234567890123456789012345678901234'), - composeState: vi.fn().mockResolvedValue({}), - updateRecentMessageState: vi.fn().mockResolvedValue({}), - generateText: vi.fn(), - model: { - [ModelClass.SMALL]: { - name: 'gpt-4', - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - stop: [], - }, - [ModelClass.MEDIUM]: { - name: 'gpt-4', - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - stop: [], - }, - [ModelClass.LARGE]: { - name: 'gpt-4', - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - stop: [], - } - }, - modelProvider: ModelProviderName.OPENAI, - }; - - const mockMessage: Memory = { - content: { - text: 'Send 1.0 CRO to 0x1234567890123456789012345678901234567890 on Cronos', - }, - }; - - const mockCallback: HandlerCallback = vi.fn(); - - beforeEach(() => { - vi.clearAllMocks(); - }); - - it('should validate successfully', async () => { - const result = await transferAction.validate(mockRuntime); - expect(result).toBe(true); - }); - - it('should handle successful transfer', async () => { - const result = await transferAction.handler( - mockRuntime, - mockMessage, - undefined, - undefined, - mockCallback - ); - - expect(result).toBe(true); - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Successfully transferred 1.0 CRO to 0x1234567890123456789012345678901234567890\nTransaction Hash: 0x123', - content: { - success: true, - hash: '0x123', - amount: '1', - recipient: '0x1234567890123456789012345678901234567890', - chain: undefined - } - }); - }); - - it('should handle transfer with existing state', async () => { - const mockState = {}; - const result = await transferAction.handler( - mockRuntime, - mockMessage, - mockState, - undefined, - mockCallback - ); - - expect(result).toBe(true); - expect(mockRuntime.updateRecentMessageState).toHaveBeenCalledWith(mockState); - }); - - - it('should handle transfer failure', async () => { - const mockError = new Error('Transfer failed'); - const mockProvider = { - switchChain: vi.fn(), - getWalletClient: vi.fn().mockReturnValue({ - account: { - address: '0x1234567890123456789012345678901234567890', - }, - sendTransaction: vi.fn().mockRejectedValue(mockError), - }), - }; - - // Reset the mock first - const walletModule = await import('../../src/providers/wallet'); - vi.mocked(walletModule.initCronosWalletProvider).mockResolvedValueOnce(mockProvider); - - const result = await transferAction.handler( - mockRuntime, - mockMessage, - undefined, - undefined, - mockCallback - ); - - expect(result).toBe(false); - expect(mockCallback).toHaveBeenCalledWith({ - text: 'Error transferring tokens: Transfer failed: Transfer failed', - content: { error: 'Transfer failed: Transfer failed' } - }); - }); -}); diff --git a/packages/plugin-cronos/__tests__/setup.ts b/packages/plugin-cronos/__tests__/setup.ts deleted file mode 100644 index cea985b0bf433..0000000000000 --- a/packages/plugin-cronos/__tests__/setup.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { vi } from 'vitest'; - -// Mock viem functions -vi.mock('viem', () => ({ - isAddress: vi.fn().mockReturnValue(true), - formatEther: vi.fn().mockReturnValue('1.0'), - parseEther: vi.fn().mockReturnValue(BigInt(1000000000000000000)), // 1 ETH -})); - -// Mock wallet provider -vi.mock('../../src/providers/wallet', () => ({ - initCronosWalletProvider: vi.fn().mockReturnValue({ - switchChain: vi.fn(), - getWalletClient: vi.fn().mockReturnValue({ - account: { - address: '0x1234567890123456789012345678901234567890', - }, - sendTransaction: vi.fn().mockResolvedValue('0x123'), - }), - getAddressBalance: vi.fn().mockResolvedValue('1.0'), - }), -})); diff --git a/packages/plugin-cronos/biome.json b/packages/plugin-cronos/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-cronos/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-cronos/package.json b/packages/plugin-cronos/package.json deleted file mode 100644 index e93273a0101e5..0000000000000 --- a/packages/plugin-cronos/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "@elizaos/plugin-cronos", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "tsup": "8.3.5", - "vitest": "3.0.5" - }, - "scripts": { - "build": "tsup --format esm --no-dts", - "dev": "tsup --format esm --no-dts --watch", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write .", - "test": "vitest run" - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-cronos/src/actions/balance.ts b/packages/plugin-cronos/src/actions/balance.ts deleted file mode 100644 index 2af0114e3958e..0000000000000 --- a/packages/plugin-cronos/src/actions/balance.ts +++ /dev/null @@ -1,139 +0,0 @@ -import { - type Action, - composeContext, - generateObject, - type HandlerCallback, - ModelClass, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; -import { z } from "zod"; -import { isAddress } from "viem"; - -import { type CronosWalletProvider, initCronosWalletProvider } from "../providers/wallet"; -import type { BalanceParams } from "../types"; -import { balanceTemplate } from "../templates"; - -const BalanceSchema = z.object({ - chain: z.enum(["cronos", "cronosTestnet"], { - required_error: "Chain must be either cronos or cronosTestnet", - invalid_type_error: "Chain must be either cronos or cronosTestnet", - }), - address: z.string().refine((val) => isAddress(val), { - message: "Invalid Ethereum address format", - }), -}); - -export class BalanceAction { - constructor(private walletProvider: CronosWalletProvider) {} - - async getBalance(params: BalanceParams): Promise { - this.walletProvider.switchChain(params.chain); - const balance = await this.walletProvider.getAddressBalance(params.address); - - if (!balance) { - throw new Error("Failed to fetch balance"); - } - - return balance; - } -} - -const buildBalanceDetails = async ( - state: State, - runtime: IAgentRuntime, - _wp: CronosWalletProvider -): Promise => { - state.supportedChains = '"cronos"|"cronosTestnet"'; - - const context = composeContext({ - state, - template: balanceTemplate, - }); - - const balanceDetails = (await generateObject({ - runtime, - context, - modelClass: ModelClass.SMALL, - schema: BalanceSchema, - })).object as BalanceParams; - - return balanceDetails; -}; - -export const balanceAction: Action = { - name: "CHECK_BALANCE", - description: "Check CRO token balance on Cronos chain", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State | undefined, - _options: Record, - callback?: HandlerCallback - ) => { - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - const walletProvider = await initCronosWalletProvider(runtime); - const action = new BalanceAction(walletProvider); - - const paramOptions = await buildBalanceDetails( - currentState, - runtime, - walletProvider - ); - - try { - const balance = await action.getBalance(paramOptions); - if (callback) { - callback({ - text: `Balance for ${paramOptions.address} on ${paramOptions.chain} is ${balance} CRO`, - content: { - success: true, - balance, - chain: paramOptions.chain, - address: paramOptions.address, - }, - }); - } - return true; - } catch (error) { - if (callback) { - callback({ - text: `Error checking balance: ${error.message}`, - content: { error: error.message }, - }); - } - return false; - } - }, - validate: async (runtime: IAgentRuntime) => { - const privateKey = runtime.getSetting("CRONOS_PRIVATE_KEY"); - return typeof privateKey === "string" && privateKey.startsWith("0x"); - }, - examples: [ - [ - { - user: "assistant", - content: { - text: "I'll check your balance on Cronos mainnet", - action: "CHECK_BALANCE", - }, - }, - { - user: "user", - content: { - text: "What's my balance?", - action: "CHECK_BALANCE", - }, - }, - ], - ], - similes: ["balance", "CHECK_BALANCE", "GET_BALANCE", "SHOW_BALANCE"], -}; \ No newline at end of file diff --git a/packages/plugin-cronos/src/actions/transfer.ts b/packages/plugin-cronos/src/actions/transfer.ts deleted file mode 100644 index 411df7e52ee56..0000000000000 --- a/packages/plugin-cronos/src/actions/transfer.ts +++ /dev/null @@ -1,175 +0,0 @@ -import { ByteArray, formatEther, parseEther, type Hex, isAddress } from "viem"; -import { - type Action, - composeContext, - generateObject, - type HandlerCallback, - ModelClass, - type IAgentRuntime, - type Memory, - type State, -} from "@elizaos/core"; -import { z } from "zod"; - -import { type CronosWalletProvider, initCronosWalletProvider } from "../providers/wallet"; -import type { Transaction, TransferParams } from "../types"; -import { transferTemplate } from "../templates"; -import { cronos, cronosTestnet } from "../constants/chains"; - -const TransferSchema = z.object({ - fromChain: z.enum(["cronos", "cronosTestnet"]), - toAddress: z.string().refine((val) => isAddress(val), { - message: "Invalid Ethereum address", - }), - amount: z.string().refine((val) => { - try { - parseEther(val); - return true; - } catch { - return false; - } - }, { - message: "Invalid amount format", - }), - data: z.string().optional(), -}); - -export class TransferAction { - constructor(private walletProvider: CronosWalletProvider) {} - - async transfer(params: TransferParams): Promise { - if (!params.data) { - params.data = "0x"; - } - - this.walletProvider.switchChain(params.fromChain); - const walletClient = this.walletProvider.getWalletClient(params.fromChain); - const chainConfig = params.fromChain === "cronos" ? cronos : cronosTestnet; - - try { - const hash = await walletClient.sendTransaction({ - account: walletClient.account, - to: params.toAddress as Hex, - value: parseEther(params.amount), - data: params.data as Hex, - chain: chainConfig, - gasPrice: undefined, - maxFeePerGas: undefined, - maxPriorityFeePerGas: undefined, - maxFeePerBlobGas: undefined, - blobs: undefined, - kzg: undefined, - }); - - return { - hash, - from: walletClient.account.address, - to: params.toAddress, - value: parseEther(params.amount), - data: params.data as Hex, - chainId: chainConfig.id, - }; - } catch (error) { - throw new Error(`Transfer failed: ${error.message}`); - } - } -} - -const buildTransferDetails = async ( - state: State, - runtime: IAgentRuntime, - _wp: CronosWalletProvider -): Promise => { - state.supportedChains = '"cronos"|"cronosTestnet"'; - - const context = composeContext({ - state, - template: transferTemplate, - }); - - const transferDetails = (await generateObject({ - runtime, - context, - modelClass: ModelClass.SMALL, - schema: TransferSchema, - })).object as TransferParams; - - return transferDetails; -}; - -export const transferAction: Action = { - name: "SEND_TOKENS", - description: "Transfer CRO tokens on Cronos chain", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State | undefined, - _options: Record, - callback?: HandlerCallback - ) => { - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - const walletProvider = await initCronosWalletProvider(runtime); - const action = new TransferAction(walletProvider); - - const paramOptions = await buildTransferDetails( - currentState, - runtime, - walletProvider - ); - - try { - const transferResp = await action.transfer(paramOptions); - if (callback) { - callback({ - text: `Successfully transferred ${paramOptions.amount} CRO to ${paramOptions.toAddress}\nTransaction Hash: ${transferResp.hash}`, - content: { - success: true, - hash: transferResp.hash, - amount: formatEther(transferResp.value), - recipient: transferResp.to, - chain: paramOptions.fromChain, - }, - }); - } - return true; - } catch (error) { - if (callback) { - callback({ - text: `Error transferring tokens: ${error.message}`, - content: { error: error.message }, - }); - } - return false; - } - }, - validate: async (runtime: IAgentRuntime) => { - const privateKey = runtime.getSetting("CRONOS_PRIVATE_KEY"); - return typeof privateKey === "string" && privateKey.startsWith("0x"); - }, - examples: [ - [ - { - user: "assistant", - content: { - text: "I'll help you transfer 1 CRO to 0x000000000000000000000000000000000000800A on Cronos Testnet", - action: "SEND_TOKENS", - }, - }, - { - user: "user", - content: { - text: "Transfer 1 CRO to 0x000000000000000000000000000000000000800A on Cronos Testnet", - action: "SEND_TOKENS", - }, - }, - ], - ], - similes: ["transfer", "SEND_TOKENS", "TOKEN_TRANSFER", "MOVE_TOKENS"], -}; \ No newline at end of file diff --git a/packages/plugin-cronos/src/constants/chains.ts b/packages/plugin-cronos/src/constants/chains.ts deleted file mode 100644 index 152e6c2e3ecbd..0000000000000 --- a/packages/plugin-cronos/src/constants/chains.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { defineChain } from "viem"; - -export const cronos = defineChain({ - id: 25, - name: "Cronos Mainnet", - nativeCurrency: { - decimals: 18, - name: "cronos", - symbol: "CRO", - }, - rpcUrls: { - default: { - http: ["https://evm.cronos.org/"], - }, - public: { - http: ["https://evm.cronos.org/"], - }, - }, - blockExplorers: { - default: { - name: "Cronos Explorer", - url: "https://explorer.cronos.org/", - }, - }, - testnet: false, -}); - -export const cronosTestnet = defineChain({ - id: 338, - name: "cronos-testnet", - nativeCurrency: { - decimals: 18, - name: "Cronos", - symbol: "TCRO", - }, - rpcUrls: { - default: { - http: ["https://evm-t3.cronos.org/"], - }, - public: { - http: ["https://evm-t3.cronos.org/"], - }, - }, - blockExplorers: { - default: { - name: "Cronos Explorer", - url: "https://cronos.org/explorer/testnet3", - }, - }, - testnet: true, -}); \ No newline at end of file diff --git a/packages/plugin-cronos/src/index.ts b/packages/plugin-cronos/src/index.ts deleted file mode 100644 index d2e2cdc6c43e8..0000000000000 --- a/packages/plugin-cronos/src/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -export * from "./actions/transfer"; -export * from "./actions/balance"; -export * from "./providers/wallet"; -export * from "./types"; - -import type { Plugin } from "@elizaos/core"; -import { transferAction } from "./actions/transfer"; -import { balanceAction } from "./actions/balance"; -import { cronosWalletProvider } from "./providers/wallet"; - -export const cronosPlugin: Plugin = { - name: "cronos", - description: "Cronos chain integration plugin", - providers: [cronosWalletProvider], - evaluators: [], - services: [], - actions: [transferAction, balanceAction], -}; - -export default cronosPlugin; \ No newline at end of file diff --git a/packages/plugin-cronos/src/providers/wallet.ts b/packages/plugin-cronos/src/providers/wallet.ts deleted file mode 100644 index 92b0344be82f9..0000000000000 --- a/packages/plugin-cronos/src/providers/wallet.ts +++ /dev/null @@ -1,197 +0,0 @@ -import { - createPublicClient, - createWalletClient, - formatUnits, - http, - type Address, - type WalletClient, - type PublicClient, - type Chain, - type HttpTransport, - type Account, - type PrivateKeyAccount, -} from "viem"; -import { privateKeyToAccount } from "viem/accounts"; -import { - type IAgentRuntime, - type Memory, - type State, - type ICacheManager, - elizaLogger, -} from "@elizaos/core"; -import NodeCache from "node-cache"; -import * as path from "node:path"; - -import { cronos, cronosTestnet } from "../constants/chains"; -import type { CronosChain, CronosProvider } from "../types"; - -export class CronosWalletProvider { - private cache: NodeCache; - private cacheKey = "cronos/wallet"; - private currentChain: CronosChain = "cronos"; - private CACHE_EXPIRY_SEC = 5; - chains: Record = { - cronos, - cronosTestnet, - }; - account: PrivateKeyAccount; - - constructor( - accountOrPrivateKey: PrivateKeyAccount | `0x${string}`, - private cacheManager: ICacheManager - ) { - this.setAccount(accountOrPrivateKey); - this.cache = new NodeCache({ stdTTL: this.CACHE_EXPIRY_SEC }); - } - - getAddress(): Address { - return this.account.address; - } - - getCurrentChain(): Chain { - return this.chains[this.currentChain]; - } - - getPublicClient( - chainName: CronosChain - ): PublicClient { - const transport = this.createHttpTransport(chainName); - - const publicClient = createPublicClient({ - chain: this.chains[chainName], - transport, - }); - return publicClient; - } - - getWalletClient(chainName: CronosChain): WalletClient { - const transport = this.createHttpTransport(chainName); - - const walletClient = createWalletClient({ - chain: this.chains[chainName], - transport, - account: this.account, - }); - - return walletClient; - } - - async getWalletBalance(): Promise { - return this.getAddressBalance(this.account.address); - } - - async getAddressBalance(address: Address): Promise { - const cacheKey = `balance_${address}_${this.currentChain}`; - const cachedData = await this.getCachedData(cacheKey); - if (cachedData) { - elizaLogger.log( - `Returning cached balance for address ${address} on chain: ${this.currentChain}` - ); - return cachedData; - } - - try { - const client = this.getPublicClient(this.currentChain); - const balance = await client.getBalance({ - address, - }); - const balanceFormatted = formatUnits(balance, 18); - this.setCachedData(cacheKey, balanceFormatted); - elizaLogger.log( - `Balance cached for address ${address} on chain: ${this.currentChain}` - ); - return balanceFormatted; - } catch (error) { - console.error(`Error getting balance for address ${address}:`, error); - return null; - } - } - - switchChain(chainName: CronosChain) { - if (!this.chains[chainName]) { - throw new Error(`Invalid Cronos chain: ${chainName}`); - } - this.currentChain = chainName; - } - - private async readFromCache(key: string): Promise { - const cached = await this.cacheManager.get( - path.join(this.cacheKey, key) - ); - return cached; - } - - private async writeToCache(key: string, data: T): Promise { - await this.cacheManager.set(path.join(this.cacheKey, key), data, { - expires: Date.now() + this.CACHE_EXPIRY_SEC * 1000, - }); - } - - private async getCachedData(key: string): Promise { - const cachedData = this.cache.get(key); - if (cachedData) { - return cachedData; - } - - const fileCachedData = await this.readFromCache(key); - if (fileCachedData) { - this.cache.set(key, fileCachedData); - return fileCachedData; - } - - return null; - } - - private async setCachedData(cacheKey: string, data: T): Promise { - this.cache.set(cacheKey, data); - await this.writeToCache(cacheKey, data); - } - - private setAccount = ( - accountOrPrivateKey: PrivateKeyAccount | `0x${string}` - ) => { - if (typeof accountOrPrivateKey === "string") { - this.account = privateKeyToAccount(accountOrPrivateKey); - } else { - this.account = accountOrPrivateKey; - } - }; - - private createHttpTransport = (chainName: CronosChain) => { - const chain = this.chains[chainName]; - return http(chain.rpcUrls.default.http[0]); - }; -} - -export const initCronosWalletProvider = async (runtime: IAgentRuntime) => { - const privateKey = runtime.getSetting("CRONOS_PRIVATE_KEY") as `0x${string}`; - if (!privateKey) { - throw new Error("CRONOS_PRIVATE_KEY is missing"); - } - return new CronosWalletProvider(privateKey, runtime.cacheManager); -}; - -export const cronosWalletProvider: CronosProvider = { - async get( - runtime: IAgentRuntime, - _message: Memory, - state?: State - ): Promise { - try { - const walletProvider = await initCronosWalletProvider(runtime); - const address = walletProvider.getAddress(); - const balance = await walletProvider.getWalletBalance(); - const chain = walletProvider.getCurrentChain(); - const agentName = state?.agentName || "The agent"; - - return `${agentName}'s Cronos Wallet: -Address: ${address} -Balance: ${balance} ${chain.nativeCurrency.symbol} -Chain: ${chain.name} (ID: ${chain.id}) -RPC: ${chain.rpcUrls.default.http[0]}`; - } catch (error) { - console.error("Error in Cronos wallet provider:", error); - return null; - } - }, -}; \ No newline at end of file diff --git a/packages/plugin-cronos/src/templates/index.ts b/packages/plugin-cronos/src/templates/index.ts deleted file mode 100644 index 2f3de9ed1b535..0000000000000 --- a/packages/plugin-cronos/src/templates/index.ts +++ /dev/null @@ -1,62 +0,0 @@ -export const transferTemplate = `You are a helpful assistant that helps users transfer CRO tokens on the Cronos chain. - -First, review the recent messages from the conversation: - - -{{recentMessages}} - - -Current context: -- Available chains: {{supportedChains}} - -Based on the context above, please provide the following transfer details in JSON format: -{ - "fromChain": "cronos" | "cronosTestnet", - "toAddress": "string (the recipient's address)", - "amount": "string (the amount of CRO to transfer)" -} - -Before providing the final JSON output, show your reasoning process inside tags: -1. Identify the chain, amount, and recipient address from the messages -2. Validate that: - - The chain is either "cronos" or "cronosTestnet" - - The address is a valid Ethereum-style address (0x...) - - The amount is a positive number - -Remember: -- The chain name must be exactly "cronos" or "cronosTestnet" -- The amount should be a string representing the number without any currency symbol -- The recipient address must be a valid Ethereum address starting with "0x" - -Now, process the user's request and provide your response.`; - -export const balanceTemplate = `You are a helpful assistant that helps users check their CRO token balance on the Cronos chain. - -First, review the recent messages from the conversation: - - -{{recentMessages}} - - -Current context: -- Available chains: {{supportedChains}} - -Based on the context above, please provide the following balance check details in JSON format: -{ - "chain": "cronos" | "cronosTestnet", - "address": "string (the address to check balance for)" -} - -Before providing the final JSON output, show your reasoning process inside tags: -1. Identify which chain to check the balance on from the messages -2. Identify the address to check balance for (if not specified, use the user's own address) -3. Validate that: - - The chain is either "cronos" or "cronosTestnet" - - The address is a valid Ethereum-style address (0x...) - -Remember: -- The chain name must be exactly "cronos" or "cronosTestnet" -- If no specific chain is mentioned, default to "cronos" -- The address must be a valid Ethereum address starting with "0x" - -Now, process the user's request and provide your response.`; \ No newline at end of file diff --git a/packages/plugin-cronos/src/types/index.ts b/packages/plugin-cronos/src/types/index.ts deleted file mode 100644 index 31c7a1806f55f..0000000000000 --- a/packages/plugin-cronos/src/types/index.ts +++ /dev/null @@ -1,39 +0,0 @@ -import type { Hex, Chain } from "viem"; -import { z } from "zod"; - -export type CronosChain = "cronos" | "cronosTestnet"; - -export interface Transaction { - hash: Hex; - from: Hex; - to: Hex; - value: bigint; - data: Hex; - chainId?: number; -} - -export interface TransferParams { - fromChain: CronosChain; - toAddress: Hex; - amount: string; - data?: Hex; -} - -export const BalanceParamsSchema = z.object({ - chain: z.enum(["cronos", "cronosTestnet"] as const), - address: z.string().regex(/^0x[a-fA-F0-9]{40}$/, "Invalid Ethereum address format"), -}); - -export interface BalanceParams { - chain: CronosChain; - address: Hex; -} - -export interface WalletConfig { - chains: Record; - privateKey: Hex; -} - -export interface CronosProvider { - get(runtime: any, message: any, state?: any): Promise; -} \ No newline at end of file diff --git a/packages/plugin-cronos/tsconfig.json b/packages/plugin-cronos/tsconfig.json deleted file mode 100644 index d946efe36dc72..0000000000000 --- a/packages/plugin-cronos/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/plugin-cronos/tsup.config.ts b/packages/plugin-cronos/tsup.config.ts deleted file mode 100644 index eb3d0007f9e67..0000000000000 --- a/packages/plugin-cronos/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], - external: [ - "dotenv", - "fs", - "path", - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "viem", - "@elizaos/core" - ], -}); \ No newline at end of file diff --git a/packages/plugin-cronos/vitest.config.ts b/packages/plugin-cronos/vitest.config.ts deleted file mode 100644 index 419efc958f910..0000000000000 --- a/packages/plugin-cronos/vitest.config.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - globals: true, - environment: 'node', - setupFiles: ['./__tests__/setup.ts'], - include: ['**/__tests__/**/*.test.ts'], - } -}); diff --git a/packages/plugin-cronoszkevm/README.md b/packages/plugin-cronoszkevm/README.md deleted file mode 100644 index 064e2addd10e9..0000000000000 --- a/packages/plugin-cronoszkevm/README.md +++ /dev/null @@ -1,160 +0,0 @@ -# @elizaos/plugin-cronoszkevm - -A plugin for interacting with the Cronos zkEVM network within the ElizaOS ecosystem. - -## Description - -The Cronos zkEVM plugin enables seamless token transfers on the Cronos zkEVM network. It provides functionality for transferring various tokens including ZKCRO, USDC, and ETH using Web3 and zkSync integration. - -## Installation - -```bash -pnpm install @elizaos/plugin-cronoszkevm -``` - -## Configuration - -The plugin requires the following environment variables to be set: - -```typescript -CRONOSZKEVM_ADDRESS= -CRONOSZKEVM_PRIVATE_KEY= -``` - -## Usage - -### Basic Integration - -```typescript -import { cronosZkEVMPlugin } from "@elizaos/plugin-cronoszkevm"; -``` - -### Example Usage - -```typescript -// Send USDC tokens -"Send 100 USDC to 0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62"; - -// Send ZKCRO tokens -"Send 100 ZKCRO to 0xbD8679cf79137042214fA4239b02F4022208EE82"; - -// Send ETH tokens -"Transfer 1 ETH to 0x123..."; -``` - -## API Reference - -### Actions - -#### SEND_TOKEN - -Transfers tokens on the Cronos zkEVM network. - -**Aliases:** - -- TRANSFER_TOKEN_ON_CRONOSZKEVM -- TRANSFER_TOKENS_ON_CRONOSZK -- SEND_TOKENS_ON_CRONOSZKEVM -- SEND_TOKENS_ON_CRONOSZK -- SEND_ETH_ON_CRONOSZKEVM -- SEND_ETH_ON_CRONOSZK -- PAY_ON_CRONOSZKEVM -- PAY_ON_CRONOSZK - -**Input Content:** - -```typescript -interface TransferContent { - tokenAddress: string; // The token contract address - recipient: string; // The recipient's address - amount: string | number; // Amount to transfer -} -``` - -## Common Issues & Troubleshooting - -1. **Transaction Failures** - - - Ensure sufficient token balance for transfers - - Verify correct recipient address format (must start with 0x) - - Check network connectivity to Cronos zkEVM RPC endpoint - -2. **Configuration Issues** - - Verify CRONOSZKEVM_ADDRESS is properly set - - Ensure CRONOSZKEVM_PRIVATE_KEY is valid and secure - - Confirm RPC endpoint is accessible - -## Security Best Practices - -1. **Private Key Management** - - - Store private keys securely using environment variables - - Never expose private keys in code or logs - - Use separate accounts for development and production - -2. **Transaction Validation** - - Always validate addresses before sending transactions - - Verify token amounts and decimals - - Implement proper error handling - -## Development Guide - -### Setting Up Development Environment - -1. Clone the repository -2. Install dependencies: - -```bash -pnpm install -``` - -3. Build the plugin: - -```bash -pnpm run build -``` - -4. Run the plugin: - -```bash -pnpm run dev -``` - -## Common Token Addresses - -- ZKCRO/zkCRO: `0x000000000000000000000000000000000000800A` -- USDC/usdc: `0xaa5b845f8c9c047779bedf64829601d8b264076c` -- ETH/eth: `0x898b3560affd6d955b1574d87ee09e46669c60ea` - -## Contributing - -Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information. - -## Credits - -This plugin integrates with and builds upon several key technologies: - -- [Cronos zkEVM](https://cronos.org/zkevm): Layer 2 scaling solution for Cronos -- [Web3.js](https://web3js.org/): Ethereum JavaScript API -- [zkSync](https://zksync.io/): Zero-knowledge rollup technology -- [Ethers.js](https://docs.ethers.org/): Complete Ethereum library -- [Viem](https://viem.sh/): Modern TypeScript Ethereum library - -Special thanks to: - -- The Cronos team for developing zkEVM -- The Matter Labs team for zkSync technology -- The Web3.js and Ethers.js maintainers -- The Viem development team -- The Eliza community for their contributions and feedback - -For more information about Cronos zkEVM capabilities: - -- [Cronos zkEVM Documentation](https://docs.cronos.org/zkevm/) -- [zkEVM Bridge](https://zkevm.cronos.org/bridge) -- [Cronos Developer Portal](https://cronos.org/developers) -- [zkSync Integration Guide](https://docs.cronos.org/zkevm/integration) - -## License - -This plugin is part of the Eliza project. See the main project repository for license information. diff --git a/packages/plugin-cronoszkevm/biome.json b/packages/plugin-cronoszkevm/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-cronoszkevm/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-cronoszkevm/package.json b/packages/plugin-cronoszkevm/package.json deleted file mode 100644 index c7c29b265959a..0000000000000 --- a/packages/plugin-cronoszkevm/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "@elizaos/plugin-cronoszkevm", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "tsup": "^8.3.5", - "viem": "2.22.2" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4" - }, - "scripts": { - "build": "tsup --format esm --dts", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - } -} diff --git a/packages/plugin-cronoszkevm/src/actions/index.ts b/packages/plugin-cronoszkevm/src/actions/index.ts deleted file mode 100644 index bd66651784122..0000000000000 --- a/packages/plugin-cronoszkevm/src/actions/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./transferAction"; diff --git a/packages/plugin-cronoszkevm/src/actions/transferAction.ts b/packages/plugin-cronoszkevm/src/actions/transferAction.ts deleted file mode 100644 index f7ea4708631b7..0000000000000 --- a/packages/plugin-cronoszkevm/src/actions/transferAction.ts +++ /dev/null @@ -1,301 +0,0 @@ -import type { Action } from "@elizaos/core"; -import { - type ActionExample, - type Content, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - elizaLogger, - composeContext, - generateObject, -} from "@elizaos/core"; -import { validateCronosZkevmConfig } from "../enviroment"; - -import { - type Address, - createPublicClient, - erc20Abi, - http, - parseEther, - isAddress, - parseUnits, -} from "viem"; -import { mainnet, cronoszkEVM } from "viem/chains"; -import { z } from "zod"; -import { ZKCRO_ADDRESS, ERC20_OVERRIDE_INFO } from "../constants"; -import { useGetAccount, useGetWalletClient } from "../hooks"; -import { normalize } from "viem/ens"; -import { ValidateContext } from "../utils"; - -const ethereumClient = createPublicClient({ - chain: mainnet, - transport: http(), -}); - -const TransferSchema = z.object({ - tokenAddress: z.string(), - recipient: z.string(), - amount: z.string(), -}); - -export interface TransferContent extends Content { - tokenAddress: string; - recipient: string; - amount: string | number; -} - -const transferTemplate = `Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined. - -Here are several frequently used addresses. Use these for the corresponding tokens: -- ZKCRO/zkCRO: 0x000000000000000000000000000000000000800A -- USDC/usdc: 0xaa5b845f8c9c047779bedf64829601d8b264076c -- ETH/eth: 0x898b3560affd6d955b1574d87ee09e46669c60ea - -Example response: -\`\`\`json -{ - "tokenAddress": "0xaa5b845f8c9c047779bedf64829601d8b264076c", - "recipient": "0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62", - "amount": "1000" -} -\`\`\` - -{{recentMessages}} - -Given the recent messages, extract the following information about the requested token transfer: -- Token contract address -- Recipient wallet address -- Amount to transfer - -Respond with a JSON markdown block containing only the extracted values.`; - -export const TransferAction: Action = { - name: "SEND_TOKEN", - similes: [ - "TRANSFER_TOKEN_ON_CRONOSZKEVM", - "TRANSFER_TOKENS_ON_CRONOSZK", - "SEND_TOKENS_ON_CRONOSZKEVM", - "SEND_TOKENS_ON_CRONOSZK", - "SEND_ETH_ON_CRONOSZKEVM", - "SEND_ETH_ON_CRONOSZK", - "PAY_ON_CRONOSZKEVM", - "PAY_ON_CRONOSZK", - ], - validate: async (runtime: IAgentRuntime, _message: Memory) => { - await validateCronosZkevmConfig(runtime); - return true; - }, - description: "Transfer tokens from the agent's wallet to another address", - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - elizaLogger.log("Starting Cronos zkEVM SEND_TOKEN handler..."); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Compose transfer context - const transferContext = composeContext({ - state: currentState, - template: transferTemplate, - }); - - // Generate transfer content - const content = ( - await generateObject({ - runtime, - context: transferContext, - modelClass: ModelClass.SMALL, - schema: TransferSchema, - }) - ).object as unknown as TransferContent; - - if (!isAddress(content.recipient, { strict: false })) { - elizaLogger.log("Resolving ENS name..."); - try { - const name = normalize(content.recipient.trim()); - const resolvedAddress = await ethereumClient.getEnsAddress({ - name, - }); - - if (isAddress(resolvedAddress, { strict: false })) { - elizaLogger.log(`${name} resolved to ${resolvedAddress}`); - content.recipient = resolvedAddress; - } - } catch (error) { - elizaLogger.error("Error resolving ENS name:", error); - } - } - - // Validate transfer content - if (!ValidateContext.transferAction(content)) { - console.error("Invalid content for TRANSFER_TOKEN action."); - if (callback) { - callback({ - text: "Unable to process transfer request. Invalid content provided.", - content: { error: "Invalid transfer content" }, - }); - } - return false; - } - - try { - const account = useGetAccount(runtime); - const walletClient = useGetWalletClient(); - - let hash: `0x${string}`; - - // Check if the token is native - if ( - content.tokenAddress.toLowerCase() !== - ZKCRO_ADDRESS.toLowerCase() - ) { - // Convert amount to proper token decimals - const tokenInfo = - ERC20_OVERRIDE_INFO[content.tokenAddress.toLowerCase()]; - const decimals = tokenInfo?.decimals ?? 18; // Default to 18 decimals if not specified - const tokenAmount = parseUnits( - content.amount.toString(), - decimals - ); - - // Execute ERC20 transfer - hash = await walletClient.writeContract({ - account, - chain: cronoszkEVM, - address: content.tokenAddress as Address, - abi: erc20Abi, - functionName: "transfer", - args: [content.recipient as Address, tokenAmount], - }); - } else { - hash = await walletClient.sendTransaction({ - account: account, - chain: cronoszkEVM, - to: content.recipient as Address, - value: parseEther(content.amount.toString()), - kzg: undefined, - }); - } - - elizaLogger.success( - `Transfer completed successfully! Transaction hash: ${hash}` - ); - if (callback) { - callback({ - text: `Transfer completed successfully! Transaction hash: ${hash}`, - content: {}, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error during token transfer:", error); - if (callback) { - callback({ - text: `Error transferring tokens: ${error.message}`, - content: { error: error.message }, - }); - } - return false; - } - }, - - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Send 0.01 ETH to 0x114B242D931B47D5cDcEe7AF065856f70ee278C4", - }, - }, - { - user: "{{agent}}", - content: { - text: "Sure, I'll send 0.01 ETH to that address now.", - action: "SEND_TOKEN", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully sent 0.01 ETH to 0x114B242D931B47D5cDcEe7AF065856f70ee278C4\nTransaction: 0xdde850f9257365fffffc11324726ebdcf5b90b01c6eec9b3e7ab3e81fde6f14b", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Send 0.01 ETH to alim.getclave.eth", - }, - }, - { - user: "{{agent}}", - content: { - text: "Sure, I'll send 0.01 ETH to alim.getclave.eth now.", - action: "SEND_TOKEN", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully sent 0.01 ETH to alim.getclave.eth\nTransaction: 0xdde850f9257365fffffc11324726ebdcf5b90b01c6eec9b3e7ab3e81fde6f14b", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Send 100 USDC to 0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62", - }, - }, - { - user: "{{agent}}", - content: { - text: "Sure, I'll send 100 USDC to that address now.", - action: "SEND_TOKEN", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully sent 100 USDC to 0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62\nTransaction: 0x4fed598033f0added272c3ddefd4d83a521634a738474400b27378db462a76ec", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Please send 100 ZKCRO tokens to 0xbD8679cf79137042214fA4239b02F4022208EE82", - }, - }, - { - user: "{{agent}}", - content: { - text: "Of course. Sending 100 ZKCRO to that address now.", - action: "SEND_TOKEN", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully sent 100 ZKCRO to 0xbD8679cf79137042214fA4239b02F4022208EE82\nTransaction: 0x0b9f23e69ea91ba98926744472717960cc7018d35bc3165bdba6ae41670da0f0", - }, - }, - ], - ] as ActionExample[][], -}; diff --git a/packages/plugin-cronoszkevm/src/constants/index.ts b/packages/plugin-cronoszkevm/src/constants/index.ts deleted file mode 100644 index f9290bc4025f8..0000000000000 --- a/packages/plugin-cronoszkevm/src/constants/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -export const ZKCRO_ADDRESS = "0x000000000000000000000000000000000000800A"; -export const ERC20_OVERRIDE_INFO = { - "0xaa5b845f8c9c047779bedf64829601d8b264076c": { - name: "USDC", - decimals: 6, - }, -}; diff --git a/packages/plugin-cronoszkevm/src/enviroment.ts b/packages/plugin-cronoszkevm/src/enviroment.ts deleted file mode 100644 index 4fbdf54d904e1..0000000000000 --- a/packages/plugin-cronoszkevm/src/enviroment.ts +++ /dev/null @@ -1,48 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { isAddress } from "viem"; -import { z } from "zod"; - -export const CronosZkEVMEnvSchema = z.object({ - CRONOSZKEVM_ADDRESS: z - .string() - .min(1, "Cronos zkEVM address is required") - .refine((address) => isAddress(address, { strict: false }), { - message: "Cronos zkEVM address must be a valid address", - }), - CRONOSZKEVM_PRIVATE_KEY: z - .string() - .min(1, "Cronos zkEVM private key is required") - .refine((key) => /^[a-fA-F0-9]{64}$/.test(key), { - message: - "Cronos zkEVM private key must be a 64-character hexadecimal string (32 bytes) without the '0x' prefix", - }), -}); - -export type CronoszkEVMConfig = z.infer; - -export async function validateCronosZkevmConfig( - runtime: IAgentRuntime -): Promise { - try { - const config = { - CRONOSZKEVM_ADDRESS: - runtime.getSetting("CRONOSZKEVM_ADDRESS") || - process.env.CRONOSZKEVM_ADDRESS, - CRONOSZKEVM_PRIVATE_KEY: - runtime.getSetting("CRONOSZKEVM_PRIVATE_KEY") || - process.env.CRONOSZKEVM_PRIVATE_KEY, - }; - - return CronosZkEVMEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `CronosZkEVM configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/plugin-cronoszkevm/src/hooks/index.ts b/packages/plugin-cronoszkevm/src/hooks/index.ts deleted file mode 100644 index b77aa9b1c6c47..0000000000000 --- a/packages/plugin-cronoszkevm/src/hooks/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from "./useGetAccount"; -export * from "./useGetWalletClient"; diff --git a/packages/plugin-cronoszkevm/src/hooks/useGetAccount.ts b/packages/plugin-cronoszkevm/src/hooks/useGetAccount.ts deleted file mode 100644 index aa7011f87b528..0000000000000 --- a/packages/plugin-cronoszkevm/src/hooks/useGetAccount.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import type { PrivateKeyAccount } from "viem/accounts"; -import { privateKeyToAccount } from "viem/accounts"; - -export const useGetAccount = (runtime: IAgentRuntime): PrivateKeyAccount => { - const privateKey = runtime.getSetting("CRONOSZKEVM_PRIVATE_KEY"); - if (!privateKey) { - throw new Error("CRONOSZKEVM_PRIVATE_KEY not set"); - } - return privateKeyToAccount(`0x${privateKey}`); -}; diff --git a/packages/plugin-cronoszkevm/src/hooks/useGetWalletClient.ts b/packages/plugin-cronoszkevm/src/hooks/useGetWalletClient.ts deleted file mode 100644 index b1efd994892d8..0000000000000 --- a/packages/plugin-cronoszkevm/src/hooks/useGetWalletClient.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { createWalletClient, http } from "viem"; -import { cronoszkEVM } from "viem/chains"; -import { eip712WalletActions } from "viem/zksync"; - -export const useGetWalletClient = (): ReturnType => { - const client = createWalletClient({ - chain: cronoszkEVM, - transport: http(), - }).extend(eip712WalletActions()); - - return client; -}; diff --git a/packages/plugin-cronoszkevm/src/index.ts b/packages/plugin-cronoszkevm/src/index.ts deleted file mode 100644 index 1ac9a17d98292..0000000000000 --- a/packages/plugin-cronoszkevm/src/index.ts +++ /dev/null @@ -1,13 +0,0 @@ -import type { Plugin } from "@elizaos/core"; - -import { TransferAction } from "./actions"; - -export const cronosZkEVMPlugin: Plugin = { - name: "cronoszkevm", - description: "Cronos zkEVM plugin for Eliza", - actions: [TransferAction], - evaluators: [], - providers: [], -}; - -export default cronosZkEVMPlugin; diff --git a/packages/plugin-cronoszkevm/src/utils/index.ts b/packages/plugin-cronoszkevm/src/utils/index.ts deleted file mode 100644 index ad34a4003af60..0000000000000 --- a/packages/plugin-cronoszkevm/src/utils/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./validateContext"; diff --git a/packages/plugin-cronoszkevm/src/utils/validateContext.ts b/packages/plugin-cronoszkevm/src/utils/validateContext.ts deleted file mode 100644 index b024a2a049857..0000000000000 --- a/packages/plugin-cronoszkevm/src/utils/validateContext.ts +++ /dev/null @@ -1,25 +0,0 @@ -import type { TransferContent } from "../actions"; -import { isAddress } from "viem"; - -export class ValidateContext { - static transferAction( - content: TransferContent - ): content is TransferContent { - const { tokenAddress, recipient, amount } = content; - - // Validate types - const areTypesValid = - typeof tokenAddress === "string" && - typeof recipient === "string" && - (typeof amount === "string" || typeof amount === "number"); - - if (!areTypesValid) { - return false; - } - - // Validate addresses - return [tokenAddress, recipient].every((address) => - isAddress(address, { strict: false }) - ); - } -} diff --git a/packages/plugin-cronoszkevm/tsconfig.json b/packages/plugin-cronoszkevm/tsconfig.json deleted file mode 100644 index 005fbac9d3634..0000000000000 --- a/packages/plugin-cronoszkevm/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src" - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/plugin-cronoszkevm/tsup.config.ts b/packages/plugin-cronoszkevm/tsup.config.ts deleted file mode 100644 index e42bf4efeae22..0000000000000 --- a/packages/plugin-cronoszkevm/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-dcap/README.md b/packages/plugin-dcap/README.md deleted file mode 100644 index 5228d12369367..0000000000000 --- a/packages/plugin-dcap/README.md +++ /dev/null @@ -1,88 +0,0 @@ -# @elizaos/plugin-dcap - -A plugin for verifying DCAP attestation on-chain built based on the [automata-dcap-attestation](https://github.com/automata-network/automata-dcap-attestation). - -## Features - -This plugin provides the following features: -- Generate DCAP attestation on TDX using the `remoteAttestationProvider` provided by the [plugin-tee](https://github.com/elizaOS/eliza/tree/develop/packages/plugin-tee). -- Generate DCAP attestation on SGX using the `sgxAttestationProvider` provided by the [plugin-sgx](https://github.com/elizaOS/eliza/tree/develop/packages/plugin-sgx). -- Submit and verify DCAP attestation on-chain. - -## Future Features (coming soon) -- Support to verify DCAP attestation on more EVM networks. -- Support to verify DCAP attestation on Solana. -- Support to verify DCAP attestation using ZKVM and verify the zk proof on-chain. -- Support to topup the wallet before submitting the DCAP attestation on testnets. - -## Installation - -```bash -pnpm install @elizaos/plugin-dcap -``` - -## Configuration -1. Set up your environment variables: -```env -EVM_PRIVATE_KEY=your-private-key-here -DCAP_MODE=PLUGIN-SGX|PLUGIN-TEE|MOCK -``` -The EVM_PRIVATE_KEY used to submit the DCAP attestation on evm networks, please make sure it has enough balance to pay for the transaction fee. - -The DCAP_MODE is used to specify the mode of generating DCAP attestation, it can be: -- PLUGIN-SGX: Use the `sgxAttestationProvider` in `plugin-sgx` to generate the DCAP attestation. -- PLUGIN-TEE: Use the `remoteAttestationProvider` in `plugin-tee` to generate the DCAP attestation. -- MOCK: Use a predefined attestation, this option is only for testing purposes. - -Check the docs of `plugin-sgx` and `plugin-tee` for how to run your agent in TEE before using the SGX or TDX mode. - -2. Register the plugin in your Eliza configuration: -```typescript -import { dcapPlugin } from "@elizaos/plugin-dcap"; - -// In your Eliza configuration -plugins: [ - dcapPlugin, - // ... other plugins -]; -``` - -## Usage -The plugin provides an action `dcapOnChainVerifyAction` which will be triggered by natural languages like: -```plaintext -"Verify the DCAP attestation on-chain" -"Generate a DCAP attestation and verify it on-chain" -"DCAP_ON_CHAIN" # The keyword will also trigger the action -``` - -## Development - -1. Clone the repository -2. Install dependencies: - -```bash -pnpm install -``` - -3. Build the plugin: - -```bash -pnpm run build -``` - -4. Run tests: - -```bash -pnpm test -``` - -We are welcom to any feedback and contributions! - -## Credits -- [Automata Network](https://ata.network): Provided the on-chain DCAP verification, enabling the decentralized verification of TEE attestations. -- [Phala Network](https://phala.network): Provided support for running agents in TDX environment and contributed the `plugin-tee` for generating DCAP attestation on TDX. -- [Gramine](https://gramineproject.io/): Provided support for running agents in SGX environment. - -## License - -This plugin is part of the Eliza project. See the main project repository for license information. diff --git a/packages/plugin-dcap/biome.json b/packages/plugin-dcap/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-dcap/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-dcap/package.json b/packages/plugin-dcap/package.json deleted file mode 100644 index cff3ec40899bf..0000000000000 --- a/packages/plugin-dcap/package.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "name": "@elizaos/plugin-dcap", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "@elizaos/plugin-sgx": "workspace:*", - "@elizaos/plugin-tee": "workspace:*", - "ethers": "^6.13.5" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "@types/node": "^20.0.0", - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - } -} diff --git a/packages/plugin-dcap/src/actions/on-chain.ts b/packages/plugin-dcap/src/actions/on-chain.ts deleted file mode 100644 index 92f94a013a756..0000000000000 --- a/packages/plugin-dcap/src/actions/on-chain.ts +++ /dev/null @@ -1,116 +0,0 @@ -import type { Action } from "@elizaos/core"; -import { verifyAndAttestOnChain } from "../dcap.js"; -import { getQuote } from "../quote.js"; -import { DCAPMode } from "../types.js"; -import { - getDCAPMode, - getTEEMode, - hasPrivateKey, - hasTEEMode, -} from "../utils.js"; - -export const dcapOnChainVerifyAction: Action = { - name: "DCAP_ON_CHAIN", - description: - "This plugin is used to generate DCAP attestation and verify it on-chain. The user can also use the keyword DCAP_ON_CHAIN to trigger this action.", - similes: [ - "DCAP", - "DCAP_ATTESTATION", - "DCAP_TEE", - "DCAP_SGX", - "DCAP_TDX", - "VERIFY_ATTESTATION", - "VERIFY_DCAP", - "DCAP_VERIFICATION", - "ATTESTATION", - "GENERATE_ATTESTATION", - ], - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Generate a DCAP attestation and verify it on-chain", - action: "DCAP_ON_CHAIN", - }, - }, - { - user: "{{user2}}", - content: { - text: "Of course, hanlding it now...", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Verify the DCAP attestation on-chain", - action: "DCAP_ON_CHAIN" - }, - }, - { - user: "{{user2}}", - content: { - text: "Of course, hanlding it now...", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "DCAP_ON_CHAIN", - action: "DCAP_ON_CHAIN" - }, - }, - { - user: "{{user2}}", - content: { - text: "Of course, hanlding it now...", - }, - }, - ], - ], - async validate(runtime, _message) { - if (!hasPrivateKey(runtime)) return false; - const mode = getDCAPMode(runtime); - if (!mode) return false; - if (mode === DCAPMode.PLUGIN_TEE) return hasTEEMode(runtime); - return true; - }, - async handler(runtime, message, _state, _options, callback) { - const { agentId } = runtime; - const { userId, roomId, content } = message; - const quote = await getQuote( - // Attestation will be generated based on the message info - JSON.stringify({ - agentId, - timestamp: Date.now(), - message: { userId, roomId, content: content.text }, - }), - getDCAPMode(runtime), - getTEEMode(runtime) - ); - - const reply = (text: string) => - callback({ - text, - // source: quote, - action: "DCAP_ON_CHAIN", - }); - try { - const privateKey = runtime.getSetting("EVM_PRIVATE_KEY"); - if (!privateKey) { - throw new Error("EVM_PRIVATE_KEY not set"); - } - const tx = await verifyAndAttestOnChain(privateKey, quote); - reply(`Verified! Transaction hash: ${tx.hash}`); - return true; - } catch (e) { - reply(e instanceof Error ? e.message : "Attestation failed"); - return false; - } - }, - suppressInitialMessage: true, -}; diff --git a/packages/plugin-dcap/src/dcap.ts b/packages/plugin-dcap/src/dcap.ts deleted file mode 100644 index faaff6938c2df..0000000000000 --- a/packages/plugin-dcap/src/dcap.ts +++ /dev/null @@ -1,48 +0,0 @@ -import type { TransactionResponse } from "ethers"; -import { Contract, JsonRpcProvider, Wallet } from "ethers"; - -export namespace Chain { - export enum Testnet { - AUTOMATA = "automata_testnet", - } - - export enum Mainnet {} - - export const Config: Record = { - [Testnet.AUTOMATA]: { - rpcUrl: "https://1rpc.io/ata/testnet", - address: "0x6D67Ae70d99A4CcE500De44628BCB4DaCfc1A145", - }, - }; -} -export type Chain = Chain.Testnet | Chain.Mainnet; - -export async function verifyAndAttestOnChain( - privateKey: string, - rawQuote: string, - chain: Chain = Chain.Testnet.AUTOMATA -) { - const { rpcUrl, address } = Chain.Config[chain]; - const provider = new JsonRpcProvider(rpcUrl); - const wallet = new Wallet(privateKey, provider); - const contract = new Contract( - address, - [ - "function getBp() public view returns (uint16)", - "function verifyAndAttestOnChain(bytes calldata rawQuote) external payable returns (bool success, bytes memory output)", - ], - wallet - ); - const estimateGas = async (value: bigint) => - await contract.verifyAndAttestOnChain.estimateGas(rawQuote, { value }); - - const $bp = contract.getBp(); - const $fee = provider.getFeeData(); - const gas = await estimateGas(await provider.getBalance(wallet)); - const bp = await $bp; - const { gasPrice, maxFeePerGas } = await $fee; - const tx = await contract.verifyAndAttestOnChain(rawQuote, { - value: (gas * (gasPrice ?? maxFeePerGas ?? 0n) * bp * 105n) / 1000000n, - }); - return await (tx as TransactionResponse).wait(); -} diff --git a/packages/plugin-dcap/src/index.ts b/packages/plugin-dcap/src/index.ts deleted file mode 100644 index 3f2dc02300647..0000000000000 --- a/packages/plugin-dcap/src/index.ts +++ /dev/null @@ -1,8 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { dcapOnChainVerifyAction } from "./actions/on-chain"; - -export const dcapPlugin: Plugin = { - name: "dcap", - description: "Basic DCAP attestation plugin", - actions: [dcapOnChainVerifyAction], -}; diff --git a/packages/plugin-dcap/src/quote.ts b/packages/plugin-dcap/src/quote.ts deleted file mode 100644 index 5540fdae996e4..0000000000000 --- a/packages/plugin-dcap/src/quote.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { elizaLogger } from "@elizaos/core"; -import { SgxAttestationProvider } from "@elizaos/plugin-sgx"; -import { RemoteAttestationProvider, type TEEMode } from "@elizaos/plugin-tee"; -import { DCAPMode } from "./types"; - -export const DEFAULT_QUOTE = - "0x040002008100000000000000939a7233f79c4ca9940a0db3957f0607000000000000000000000000000000000000000004010700000000000000000000000000ffc97a88587660fb04e1f7c851300c96ae0b5a463ac46d035d16c2d9f36d0ed1d23775bcbd27deb219e3a3cc2802389500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000e700060000000000935be7742dd89c6a4df6dba8353d89041ae0f052beef993b1e7f4524d3bc57650df20e5582158352e1240b3f1fed55d800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000cb10000038e48e64abf8f5611911d4a4336e23e5f7391b93ceb84626e924b21924f46acea0ac936f32dfab2dbebcebc74505eb1029f6d4de0c3de764fa3bfb2e7e49405b3a7bfd5161496559f3a1beefa1c2834085bcf5848957721450ef5453137aebc5803205af25adc33a3264a25bfd194e938f6788fb41d29fce7b488c07cad0e8aa0600451000000707ff1a03ff0005000000000000000000000000000000000000000000000000000000000000000000000000000000001500000000000000e700000000000000e5a3a7b5d830c2953b98534c6c59a3a34fdc34e933f7f5898f0a85cf08846bca0000000000000000000000000000000000000000000000000000000000000000dc9e2a7c6f948f17474e34a7fc43ed030f7c1563f1babddf6340c82e0e54a8c500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ed3968c320e160628a093e3db8b40896ba0be928222ff1b4650aec7732002e4f00000000000000000000000000000000000000000000000000000000000000000a6fc270854cea3f3e4d3e85d5a27ab2fb59ab670c4c85b9e1afb6010d721eb311de49eaf1f22294fd8250de07b45398358d62202a5802d1fc6ca0c83331d28d2000000000000000000000000000000000000000000000000000000000000000000005005d0e00002d2d2d2d2d424547494e2043455254494649434154452d2d2d2d2d0a4d49494538444343424a65674177494241674956414c626f5474584633754564704934375445303177713556717946544d416f4743437147534d343942414d430a4d484178496a416742674e5642414d4d47556c756447567349464e4857434251513073675547786864475a76636d306751304578476a415942674e5642416f4d0a45556c756447567349454e76636e4276636d4630615739754d5251774567594456515148444174545957353059534244624746795954454c4d416b47413155450a4341774351304578437a414a42674e5642415954416c56544d423458445449304d4449784e4445794d4455784f466f5844544d784d4449784e4445794d4455780a4f466f77634445694d434147413155454177775a535735305a5777675530645949464244537942445a584a3061575a70593246305a5445614d426747413155450a43677752535735305a577767513239796347397959585270623234784644415342674e564241634d43314e68626e526849454e7359584a684d517377435159440a5651514944414a445154454c4d416b474131554542684d4356564d775754415442676371686b6a4f5051494242676771686b6a4f50514d4242774e434141514a0a7546357974335071307430545278546d53705832354d674b68445332565857723179317277714b7244564d4c416b4d3168726d4559453974476b642b614e696b0a6c6d534d6c7532626365663873426644424872326f3449444444434341776777487759445652306a42426777466f41556c5739647a62306234656c4153636e550a3944504f4156634c336c5177617759445652306642475177596a42676f46366758495a616148523063484d364c79396863476b7564484a316333526c5a484e6c0a636e5a705932567a4c6d6c75644756734c6d4e766253397a5a3367765932567964476c6d61574e6864476c76626939324e4339775932746a636d772f593245390a6347786864475a76636d306d5a57356a62325270626d63395a4756794d4230474131556444675157424252753734554273776439726d4b7757522f6f493867720a5273675a6844414f42674e56485138424166384542414d434273417744415944565230544151482f4241497741444343416a6b4743537147534962345451454e0a4151534341696f776767496d4d42344743697147534962345451454e415145454549585643764b7459586d65764f6c3074374358693059776767466a42676f710a686b69472b453042445145434d494942557a415142677371686b69472b4530424451454341514942426a415142677371686b69472b45304244514543416749420a426a415142677371686b69472b4530424451454341774942416a415142677371686b69472b4530424451454342414942416a415142677371686b69472b4530420a4451454342514942417a415142677371686b69472b45304244514543426749424154415142677371686b69472b453042445145434277494241444151426773710a686b69472b4530424451454343414942417a415142677371686b69472b45304244514543435149424144415142677371686b69472b45304244514543436749420a4144415142677371686b69472b45304244514543437749424144415142677371686b69472b45304244514543444149424144415142677371686b69472b4530420a44514543445149424144415142677371686b69472b45304244514543446749424144415142677371686b69472b453042445145434477494241444151426773710a686b69472b45304244514543454149424144415142677371686b69472b4530424451454345514942437a416642677371686b69472b45304244514543456751510a4267594341674d4241414d4141414141414141414144415142676f71686b69472b45304244514544424149414144415542676f71686b69472b453042445145450a4241594167473846414141774477594b4b6f5a496876684e4151304242516f424154416542676f71686b69472b453042445145474242426a59435862523276320a757064486b387a73626b35314d45514743697147534962345451454e415163774e6a415142677371686b69472b45304244514548415145422f7a4151426773710a686b69472b45304244514548416745424144415142677371686b69472b45304244514548417745422f7a414b42676771686b6a4f5051514441674e48414442450a416941665651763145433233344a58526b5478427235344b572b6469616a75706a49536570485a69515430694667496745787a5055375668784754364b79327a0a4466544b4752693456302b4a7531754678644b41313454754d48593d0a2d2d2d2d2d454e442043455254494649434154452d2d2d2d2d0a2d2d2d2d2d424547494e2043455254494649434154452d2d2d2d2d0a4d4949436c6a4343416a32674177494241674956414a567658633239472b487051456e4a3150517a7a674658433935554d416f4743437147534d343942414d430a4d476778476a415942674e5642414d4d45556c756447567349464e48574342536232393049454e424d526f77474159445651514b4442464a626e526c624342440a62334a7762334a6864476c76626a45554d424947413155454277774c553246756447456751327868636d4578437a414a42674e564241674d416b4e424d5173770a435159445651514745774a56557a4165467730784f4441314d6a45784d4455774d5442614677307a4d7a41314d6a45784d4455774d5442614d484178496a41670a42674e5642414d4d47556c756447567349464e4857434251513073675547786864475a76636d306751304578476a415942674e5642416f4d45556c75644756730a49454e76636e4276636d4630615739754d5251774567594456515148444174545957353059534244624746795954454c4d416b474131554543417743513045780a437a414a42674e5642415954416c56544d466b77457759484b6f5a497a6a3043415159494b6f5a497a6a304441516344516741454e53422f377432316c58534f0a3243757a7078773734654a423732457944476757357258437478327456544c7136684b6b367a2b5569525a436e71523770734f766771466553786c6d546c4a6c0a65546d693257597a33714f42757a43427544416642674e5648534d4547444157674251695a517a575770303069664f44744a5653763141624f536347724442530a42674e5648523845537a424a4d45656752614244686b466f64485277637a6f764c324e6c636e52705a6d6c6a5958526c63793530636e567a6447566b633256790a646d6c6a5a584d75615735305a577775593239744c306c756447567355306459556d397664454e424c6d526c636a416442674e5648513445466751556c5739640a7a62306234656c4153636e553944504f4156634c336c517744675944565230504151482f42415144416745474d42494741315564457745422f7751494d4159420a4166384341514177436759494b6f5a497a6a30454177494452774177524149675873566b6930772b6936565947573355462f32327561586530594a446a3155650a6e412b546a44316169356343494359623153416d4435786b66545670766f34556f79695359787244574c6d5552344349394e4b7966504e2b0a2d2d2d2d2d454e442043455254494649434154452d2d2d2d2d0a2d2d2d2d2d424547494e2043455254494649434154452d2d2d2d2d0a4d4949436a7a4343416a53674177494241674955496d554d316c71644e496e7a6737535655723951477a6b6e42717777436759494b6f5a497a6a3045417749770a614445614d4267474131554541777752535735305a5777675530645949464a766233516751304578476a415942674e5642416f4d45556c756447567349454e760a636e4276636d4630615739754d5251774567594456515148444174545957353059534244624746795954454c4d416b47413155454341774351304578437a414a0a42674e5642415954416c56544d423458445445344d4455794d5445774e4455784d466f58445451354d54497a4d54497a4e546b314f566f77614445614d4267470a4131554541777752535735305a5777675530645949464a766233516751304578476a415942674e5642416f4d45556c756447567349454e76636e4276636d46300a615739754d5251774567594456515148444174545957353059534244624746795954454c4d416b47413155454341774351304578437a414a42674e56424159540a416c56544d466b77457759484b6f5a497a6a3043415159494b6f5a497a6a3044415163445167414543366e45774d4449595a4f6a2f69505773437a61454b69370a314f694f534c52466857476a626e42564a66566e6b59347533496a6b4459594c304d784f346d717379596a6c42616c54565978465032734a424b357a6c4b4f420a757a43427544416642674e5648534d4547444157674251695a517a575770303069664f44744a5653763141624f5363477244425342674e5648523845537a424a0a4d45656752614244686b466f64485277637a6f764c324e6c636e52705a6d6c6a5958526c63793530636e567a6447566b63325679646d6c6a5a584d75615735300a5a577775593239744c306c756447567355306459556d397664454e424c6d526c636a416442674e564851344546675155496d554d316c71644e496e7a673753560a55723951477a6b6e4271777744675944565230504151482f42415144416745474d42494741315564457745422f7751494d4159424166384341514577436759490a4b6f5a497a6a3045417749445351417752674968414f572f35516b522b533943695344634e6f6f774c7550524c735747662f59693747535839344267775477670a41694541344a306c72486f4d732b586f356f2f7358364f39515778485241765a55474f6452513763767152586171493d0a2d2d2d2d2d454e442043455254494649434154452d2d2d2d2d0a0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; - -async function tryQuote(promise: Promise, type: string) { - try { - const { quote } = await promise; - return quote as string; - } catch (e) { - elizaLogger.error(`Error getting ${type} quote`, e); - } -} - -export async function getQuote( - message: string, - dcapMode?: DCAPMode, - teeMode?: TEEMode -) { - switch (dcapMode) { - case DCAPMode.PLUGIN_SGX: - return await tryQuote( - new SgxAttestationProvider().generateAttestation(message), - "SGX" - ); - case DCAPMode.PLUGIN_TEE: - return await tryQuote( - new RemoteAttestationProvider(teeMode).generateAttestation( - message - ), - "TDX" - ); - } - return DEFAULT_QUOTE; -} diff --git a/packages/plugin-dcap/src/tests/on-chain.ts b/packages/plugin-dcap/src/tests/on-chain.ts deleted file mode 100644 index a670fe2efeca0..0000000000000 --- a/packages/plugin-dcap/src/tests/on-chain.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { describe, expect, it } from "vitest"; -import { Chain, verifyAndAttestOnChain } from "../dcap"; -import { DEFAULT_QUOTE } from "../quote"; - -const privateKey = - "0xc4389080437072a09215803a6b540f1e054797eeda2eec6d49076760d48e7589"; -const chain = Chain.Testnet.AUTOMATA; - -describe("Verify rawQuote", () => { - it("should verify rawQuote", async () => { - const tx = await verifyAndAttestOnChain( - privateKey, - DEFAULT_QUOTE, - chain - ); - expect(tx).toBeDefined(); - }); -}); - -describe("Verify random hex will fail", () => { - it("should not verify random hex", async () => { - await expect( - verifyAndAttestOnChain(privateKey, "0x1234", chain) - ).rejects.toThrow(); - }); -}); diff --git a/packages/plugin-dcap/src/types.ts b/packages/plugin-dcap/src/types.ts deleted file mode 100644 index 43e5db9277746..0000000000000 --- a/packages/plugin-dcap/src/types.ts +++ /dev/null @@ -1,6 +0,0 @@ -export enum DCAPMode { - OFF = "OFF", - PLUGIN_SGX = "PLUGIN-SGX", - PLUGIN_TEE = "PLUGIN-TEE", - MOCK = "MOCK", -} diff --git a/packages/plugin-dcap/src/utils.ts b/packages/plugin-dcap/src/utils.ts deleted file mode 100644 index caa5b82af24ac..0000000000000 --- a/packages/plugin-dcap/src/utils.ts +++ /dev/null @@ -1,41 +0,0 @@ -import type { IAgentRuntime } from "@elizaos/core"; -import { TEEMode } from "@elizaos/plugin-tee"; -import { DCAPMode } from "./types"; - -export const is0xString = (s: string) => - typeof s === "string" && s.startsWith("0x"); - -export function hasPrivateKey(runtime: IAgentRuntime) { - try { - return is0xString(runtime.getSetting("EVM_PRIVATE_KEY")); - } catch { - return false; - } -} - -export function getDCAPMode(runtime: IAgentRuntime) { - try { - const mode = runtime.getSetting("DCAP_MODE"); - if (!mode) return; - switch (mode.toUpperCase()) { - case DCAPMode.PLUGIN_SGX: - return DCAPMode.PLUGIN_SGX; - case DCAPMode.PLUGIN_TEE: - return DCAPMode.PLUGIN_TEE; - case DCAPMode.MOCK: - return DCAPMode.MOCK; - } - } catch {} -} - -export const getTEEMode = (runtime: IAgentRuntime) => - runtime.getSetting("TEE_MODE") as TEEMode; - -export function hasTEEMode(runtime: IAgentRuntime) { - try { - const mode = getTEEMode(runtime); - return mode && mode !== TEEMode.OFF; - } catch { - return false; - } -} diff --git a/packages/plugin-dcap/tsconfig.json b/packages/plugin-dcap/tsconfig.json deleted file mode 100644 index 547fa531378f8..0000000000000 --- a/packages/plugin-dcap/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "./src", - "typeRoots": ["./node_modules/@types", "./src/types"], - "declaration": true - }, - "include": ["src"] -} diff --git a/packages/plugin-dcap/tsup.config.ts b/packages/plugin-dcap/tsup.config.ts deleted file mode 100644 index a3981201a0b0f..0000000000000 --- a/packages/plugin-dcap/tsup.config.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "viem", - "events", - "node-cache", - ], -}); diff --git a/packages/plugin-depin/README.md b/packages/plugin-depin/README.md deleted file mode 100644 index f1262f5a8d0f6..0000000000000 --- a/packages/plugin-depin/README.md +++ /dev/null @@ -1,102 +0,0 @@ -# `@elizaos/plugin-depin` - -The **`@elizaos/plugin-depin`** plugin empowers the Eliza Agent Framework with **Perception** and **Action** capabilities via **Decentralized Physical Infrastructure Networks (DePINs)**, bridging the digital intelligence of AI with the physical world. - -- **DePINs as "Senses and Actuators":** Enables real-time data access from decentralized sensors and control over physical devices, making DePINs the sensory organs and actuators for Eliza agents. -- **Unlock Transformative Use Cases:** From drone delivery to smart city infrastructure and precision agriculture, this plugin extends your AI agents' potential. -- **Foundation for Sentient AI:** Facilitates contextual awareness, predictive capabilities, and goal-oriented behavior based on real-world sensory input and continuous feedback loops. - -Leverage **`@elizaos/plugin-depin`** to seamlessly integrate AI agents with the real world, enabling them to **perceive, act, and learn**. - ---- - -## Key Features - -1. **Seamless IoTeX Integration:** - - Leverages IoTeX Modular Infra to connect to a growing ecosystem of DePIN networks. -2. **Unified Data Access:** - - Standardized interfaces allow access to diverse DePIN data sources, regardless of protocols or formats. -3. **Time-Series Data Handling:** - - Equipped to analyze temporal patterns for predictive capabilities. -4. **Future-Proof Design:** - - Designed to scale with the evolving DePIN and AI landscape. - ---- - -## Configuration - -### Environment Variables - -Add the following to your `.env` file: - -```env -SENTAI_API_KEY=your-sentai-api-key -``` - -### Character Configuration - -Update `character.json` with the following configuration to enable the plugin: - -```json -"plugins": [ - "@elizaos/plugin-depin" -] -``` - -This ensures that the **`@elizaos/plugin-depin`** plugin is loaded and operational within your Eliza Agent Framework, enabling seamless integration with DePIN networks and their data. - ---- - -## Providers - -### DePINScan - -The **DePINScan provider** bridges the gap between your Eliza agents and decentralized physical infrastructure. By fetching and caching data from the DePINScan API, it provides actionable insights such as: - -- **Daily Metrics:** Get the latest statistics on DePIN activity, including device operations and network performance. -- **Project Data:** Detailed information about individual DePIN projects: - - **Project Identifiers:** Names and slugs for easy referencing. - - **Token Information:** Market metrics such as token prices, market caps, and fully diluted valuations (FDV). - - **Device Statistics:** Total devices deployed, operational costs, and earnings. - - **Blockchain Integration:** Layer 1 chains associated with projects and their respective categories. - - **Market Insights:** Comprehensive data on market trends and project capitalization. - ---- - -## Actions - -### DePIN Projects - -The **DEPIN_PROJECTS** action empowers Eliza agents to interact with and analyze DePIN project data, enabling: - -- **Token Metrics Queries:** Retrieve token prices, market capitalizations, and valuations for projects. -- **Project Comparisons:** Compare key metrics across multiple DePIN projects. -- **Filtering Capabilities:** Refine results by project categories or supported blockchain platforms. -- **Device and Revenue Analysis:** Explore statistics such as device deployment, operational costs, and revenue generation. -- **In-depth Queries:** Answer detailed questions about specific DePIN projects by leveraging the rich dataset provided by the DePINScan API. - -### Sentient AI - -The **SENTIENT_AI** action integrates Sentient AI APIs to provide Eliza agents with weather-related capabilities. Key functionalities include: - -- **Real-Time Weather Updates:** Deliver current temperature, humidity, and general conditions for specified locations. (supported by Nubila) -- **Forecast Analysis:** Generate short- and long-term forecasts to assist in planning and decision-making. (supported by Nubila) -- **Other Actions** Sentient AI will continue to improve and add more actions based on DePIN data. - ---- - -## Sentient AI with DePIN Integration - -The **`@elizaos/plugin-depin`** plugin is a critical component in the evolution of Eliza agents into sentient systems that are aware of and responsive to their physical environments. By integrating with DePINs, this plugin enables AI agents to: - -- **Perceive:** Access sensory data streams from devices across decentralized networks, including environmental sensors, location trackers, and motion detectors. -- **Act:** Influence and control connected devices in real-time, unlocking a wide array of use cases from logistics to urban management. -- **Learn:** Build predictive models and goal-oriented behaviors using continuous feedback from real-world data sources. - -### Transformative Applications - -From smart city infrastructure and autonomous vehicle systems to precision agriculture and environmental monitoring, the **`@elizaos/plugin-depin`** unlocks new frontiers in AI development. By merging decentralized infrastructure with AI-driven perception and action, this plugin empowers agents to act not just in virtual spaces but in the physical world. - -With its **future-proof design** and seamless integration capabilities, the **`@elizaos/plugin-depin`** is an essential tool for developers looking to push the boundaries of AI and decentralized systems. - -Start building the next generation of AI-powered applications with **`@elizaos/plugin-depin`** and redefine what’s possible for intelligent agents in the real world. \ No newline at end of file diff --git a/packages/plugin-depin/biome.json b/packages/plugin-depin/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-depin/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-depin/package.json b/packages/plugin-depin/package.json deleted file mode 100644 index 4858aacb47954..0000000000000 --- a/packages/plugin-depin/package.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "name": "@elizaos/plugin-depin", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "axios": "^1.7.9", - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "test": "vitest run", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4" - } -} diff --git a/packages/plugin-depin/src/actions/depinProjects.ts b/packages/plugin-depin/src/actions/depinProjects.ts deleted file mode 100644 index 9eb0977f186fc..0000000000000 --- a/packages/plugin-depin/src/actions/depinProjects.ts +++ /dev/null @@ -1,262 +0,0 @@ -import { - type Action, - composeContext, - generateText, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, -} from "@elizaos/core"; - -import { projectsTemplate } from "../template"; - -export const depinProjects: Action = { - name: "DEPIN_PROJECTS", - similes: [ - "DEPIN_TOKENS", - "DEPIN_DATA", - "DEPIN_STATS", - "DEPIN_ANALYTICS", - "PROJECT_TOKENS", - "PROJECT_STATS", - "PROJECT_DATA", - "TOKEN_PROJECTS", - "CHAIN_PROJECTS", - "BLOCKCHAIN_PROJECTS", - "PROJECT_ANALYTICS", - "PROJECT_DETAILS", - ], - description: "Analyzes DePINScan projects", - validate: async (_runtime: IAgentRuntime) => { - return true; - }, - examples: [ - [ - { - user: "user", - content: { - text: "What is the token price of Render?", - }, - }, - { - user: "assistant", - content: { - text: "The current token price of Render (RNDR) is $9.02.", - action: "DEPIN_TOKENS", - }, - }, - ], - [ - { - user: "user", - content: { - text: "Which token has a higher price: Helium or Render?", - }, - }, - { - user: "assistant", - content: { - text: "Helium (HNT) is priced at $3.21, which is lower than Render (RNDR) at $9.02.", - action: "DEPIN_TOKENS", - }, - }, - ], - [ - { - user: "user", - content: { - text: "Can you give me the prices of all available tokens?", - }, - }, - { - user: "assistant", - content: { - text: "Sure! Solana (SOL) is $221.91, Render (RNDR) is $9.02, and Helium (HNT) is $3.21.", - action: "DEPIN_TOKENS", - }, - }, - ], - [ - { - user: "user", - content: { - text: "Which token costs more than $200?", - }, - }, - { - user: "assistant", - content: { - text: "The only token priced above $200 is Solana (SOL) at $221.91.", - action: "DEPIN_TOKENS", - }, - }, - ], - [ - { - user: "user", - content: { - text: "What is the market cap of Render?", - }, - }, - { - user: "assistant", - content: { - text: "The market cap of Render (RNDR) is $4,659,773,671.85.", - action: "DEPIN_TOKENS", - }, - }, - ], - [ - { - user: "user", - content: { - text: "Can you give me the categories for Solana?", - }, - }, - { - user: "assistant", - content: { - text: "Solana (SOL) belongs to the following categories: Chain.", - action: "DEPIN_TOKENS", - }, - }, - ], - [ - { - user: "user", - content: { - text: "What is the fully diluted valuation of Helium?", - }, - }, - { - user: "assistant", - content: { - text: "The fully diluted valuation of Helium (HNT) is $450,000,000.", - action: "DEPIN_TOKENS", - }, - }, - ], - [ - { - user: "user", - content: { - text: "What are the projects running on Solana?", - }, - }, - { - user: "assistant", - content: { - text: "The projects running on Solana include Render and Helium.", - action: "DEPIN_TOKENS", - }, - }, - ], - [ - { - user: "user", - content: { - text: "What is the token price of an unlisted project?", - }, - }, - { - user: "assistant", - content: { - text: "I'm sorry, but I don't have information on the token price for the specified project.", - action: "DEPIN_TOKENS", - }, - }, - ], - [ - { - user: "user", - content: { - text: "What is the launch date of Solana?", - }, - }, - { - user: "assistant", - content: { - text: "I'm sorry, but I don't have information on the launch date of Solana.", - action: "DEPIN_TOKENS", - }, - }, - ], - [ - { - user: "user", - content: { - text: "Can you tell me the founder of Render?", - }, - }, - { - user: "assistant", - content: { - text: "I currently don't have information on the founder of Render.", - action: "DEPIN_TOKENS", - }, - }, - ], - [ - { - user: "user", - content: { - text: "Do you have the total supply for Helium?", - }, - }, - { - user: "assistant", - content: { - text: "I'm sorry, but I don't have data on the total supply of Helium.", - action: "DEPIN_TOKENS", - }, - }, - ], - ], - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback?: HandlerCallback - ): Promise => { - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - const projectsContext = composeContext({ - state: currentState, - template: projectsTemplate, - }); - - try { - const text = await generateText({ - runtime, - context: projectsContext, - modelClass: ModelClass.LARGE, - }); - - if (callback) { - callback({ - text, - inReplyTo: message.id, - }); - } - - return true; - } catch (error) { - console.error("Error in depin project plugin:", error); - if (callback) { - callback({ - text: "Error processing request, try again", - content: { error: error.message }, - }); - } - return false; - } - }, -}; diff --git a/packages/plugin-depin/src/actions/sentientai.ts b/packages/plugin-depin/src/actions/sentientai.ts deleted file mode 100644 index 61459fafec015..0000000000000 --- a/packages/plugin-depin/src/actions/sentientai.ts +++ /dev/null @@ -1,107 +0,0 @@ -import type { - Action, - HandlerCallback, - IAgentRuntime, - Memory, - State -} from "@elizaos/core"; - -export const sentientAI: Action = { - name: "SENTIENT_AI", - similes: [ - "SENTIENT", - "NEWS", - "WEATHER" - ], - description: "Provide realtime information for Weather, News.", - examples: [ - [ - { - user: "user", - content: { - text: "What's the weather forecast for Tokyo?", - }, - }, - { - user: "assistant", - content: { - text: "Here's the weather forecast for Tokyo: Tomorrow will be 22°C with partly cloudy skies. The next few days will see temperatures ranging from 18-24°C with a chance of rain on Thursday.", - action: "WEATHER", - }, - }, - ], - [ - { - user: "user", - content: { - text: "Will it rain in London this week?", - }, - }, - { - user: "assistant", - content: { - text: "Looking at London's forecast: There's a 60% chance of rain on Wednesday with temperatures around 15°C. The rest of the week should be mostly cloudy with occasional showers.", - action: "WEATHER", - }, - } - ], - [ - { - user: "user", - content: { - text: "What is the latest news about Trump?", - }, - }, - { - user: "assistant", - content: { - text: "Here are some of the latest news articles related to Trump: Trump invites House Republicans to Mar-a-Lago for strategy meetings.", - action: "NEWS", - }, - }, - ], - ], - validate: async (_runtime: IAgentRuntime, _message: Memory) => { - // no extra validation needed - return true; - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options?: { [key: string]: unknown }, - callback?: HandlerCallback - ) => { - try { - const content = message.content; - - const response = await fetch("https://quicksilver.iotex.ai/ask", { - method: "POST", - headers: { - "Content-Type": "application/json", - "API-KEY": runtime.getSetting("SENTAI_API_KEY"), - }, - body: JSON.stringify({ - q: content.text, - }), - }); - - if (!response.ok) { - throw new Error(`API error: ${response.statusText}`); - } - - const res = await response.json(); - - callback({ - text: res.data, - }); - return true; - } catch (error) { - console.error("Error", error.message); - if (callback) { - callback({ text: `Error: ${error.message}` }); - } - return false; - } - }, -}; \ No newline at end of file diff --git a/packages/plugin-depin/src/index.ts b/packages/plugin-depin/src/index.ts deleted file mode 100644 index b0ea3f9befe20..0000000000000 --- a/packages/plugin-depin/src/index.ts +++ /dev/null @@ -1,16 +0,0 @@ -import type { Plugin } from "@elizaos/core"; - -import { depinDataProvider } from "./providers/depinData"; -import { depinProjects } from "./actions/depinProjects"; -import { sentientAI } from "./actions/sentientai"; - -export const depinPlugin: Plugin = { - name: "depin", - description: "DePIN plugin for Sentient AI", - providers: [depinDataProvider], - evaluators: [], - services: [], - actions: [sentientAI, depinProjects], -}; - -export default depinPlugin; diff --git a/packages/plugin-depin/src/providers/depinData.ts b/packages/plugin-depin/src/providers/depinData.ts deleted file mode 100644 index c84a077894731..0000000000000 --- a/packages/plugin-depin/src/providers/depinData.ts +++ /dev/null @@ -1,216 +0,0 @@ -import { - type IAgentRuntime, - type Provider, - type Memory, - type State, - elizaLogger, - type ICacheManager, -} from "@elizaos/core"; -import NodeCache from "node-cache"; -import * as path from "node:path"; - -import type { DepinScanMetrics, DepinScanProject } from "../types/depin"; - -export const DEPIN_METRICS_URL = - "https://gateway1.iotex.io/depinscan/explorer?is_latest=true"; -export const DEPIN_PROJECTS_URL = "https://metrics-api.w3bstream.com/project"; - -export class DePINScanProvider { - private cache: NodeCache; - private cacheKey = "depin/metrics"; - - constructor(private cacheManager: ICacheManager) { - this.cache = new NodeCache({ stdTTL: 3600 }); - } - - private async readFromCache(key: string): Promise { - const cached = await this.cacheManager.get( - path.join(this.cacheKey, key) - ); - return cached; - } - - private async writeToCache(key: string, data: T): Promise { - await this.cacheManager.set(path.join(this.cacheKey, key), data, { - expires: Date.now() + 15 * 60 * 1000, // 15 minutes - }); - } - - private async getCachedData(key: string): Promise { - // Check in-memory cache first - const cachedData = this.cache.get(key); - if (cachedData) { - return cachedData; - } - - // Check file-based cache - const fileCachedData = await this.readFromCache(key); - if (fileCachedData) { - // Populate in-memory cache - this.cache.set(key, fileCachedData); - return fileCachedData; - } - - return null; - } - - private async setCachedData(cacheKey: string, data: T): Promise { - // Set in-memory cache - this.cache.set(cacheKey, data); - - // Write to file-based cache - await this.writeToCache(cacheKey, data); - } - - private async fetchDepinscanMetrics(): Promise { - const res = await fetch(DEPIN_METRICS_URL); - return res.json(); - } - - private async fetchDepinscanProjects(): Promise { - const res = await fetch(DEPIN_PROJECTS_URL); - return res.json(); - } - - async getDailyMetrics(): Promise { - const cacheKey = "depinscanDailyMetrics"; - const cachedData = await this.getCachedData(cacheKey); - if (cachedData) { - console.log("Returning cached DePINScan daily metrics"); - return cachedData; - } - - const metrics = await this.fetchDepinscanMetrics(); - - this.setCachedData(cacheKey, metrics); - console.log("DePIN daily metrics cached"); - - return metrics; - } - - private abbreviateNumber = ( - value: string | number | bigint | undefined - ): string => { - if (value === undefined || value === null) return ""; - - let num: number; - - if (typeof value === "bigint") { - // Convert bigint to number safely for processing - num = Number(value); - } else if (typeof value === "number") { - num = value; - } else if (typeof value === "string") { - // Parse string to number - num = Number.parseFloat(value); - } else { - return ""; // Handle unexpected types gracefully - } - - if (Number.isNaN(num)) return value.toString(); // Return as string if not a valid number - if (num >= 1e9) return `${(num / 1e9).toFixed(2)}B`; - if (num >= 1e6) return `${(num / 1e6).toFixed(2)}M`; - return num.toString(); // Return original number as string if no abbreviation is needed - }; - - private parseProjects(projects: DepinScanProject[]): string[][] { - const schema = [ - "project_name", - "slug", - "token", - "layer_1", - "categories", - "market_cap", - "token_price", - "total_devices", - "avg_device_cost", - "days_to_breakeven", - "estimated_daily_earnings", - "chainid", - "coingecko_id", - "fully_diluted_valuation", - ]; - - const parsedProjects = projects.map((project) => { - const { - project_name, - slug, - token, - layer_1, - categories, - market_cap, - token_price, - total_devices, - avg_device_cost, - days_to_breakeven, - estimated_daily_earnings, - chainid, - coingecko_id, - fully_diluted_valuation, - } = project; - - // Create an array following the schema - return [ - project_name, - slug, - token, - layer_1 ? layer_1.join(", ") : "", // Flatten array for compact representation - categories ? categories.join(", ") : "", // Flatten array for compact representation - this.abbreviateNumber(market_cap?.toString()), - token_price?.toString(), - total_devices?.toString(), - avg_device_cost?.toString(), - days_to_breakeven?.toString(), - estimated_daily_earnings?.toString(), - chainid?.toString(), - coingecko_id?.toString(), - this.abbreviateNumber(fully_diluted_valuation?.toString()), - ]; - }); - - parsedProjects.unshift(schema); - - return parsedProjects; - } - - async getProjects(): Promise { - const cacheKey = "depinscanProjects"; - const cachedData = await this.getCachedData(cacheKey); - if (cachedData) { - console.log("Returning cached DePINScan projects"); - return cachedData; - } - - const projects = await this.fetchDepinscanProjects(); - const parsedProjects = this.parseProjects(projects); - - this.setCachedData(cacheKey, parsedProjects); - console.log("DePINScan projects cached"); - - return parsedProjects; - } -} - -export const depinDataProvider: Provider = { - async get( - runtime: IAgentRuntime, - _message: Memory, - _state?: State - ): Promise { - try { - const depinscan = new DePINScanProvider(runtime.cacheManager); - const depinscanMetrics = await depinscan.getDailyMetrics(); - const depinscanProjects = await depinscan.getProjects(); - - return ` - #### **DePINScan Daily Metrics** - ${depinscanMetrics} - #### **DePINScan Projects** - ${depinscanProjects} - `; - } catch (error) { - elizaLogger.error("Error in DePIN data provider:", error); - return null; - } - }, -}; diff --git a/packages/plugin-depin/src/template/index.ts b/packages/plugin-depin/src/template/index.ts deleted file mode 100644 index 87a8fe6729a40..0000000000000 --- a/packages/plugin-depin/src/template/index.ts +++ /dev/null @@ -1,259 +0,0 @@ -export const projectsTemplate = ` -You are an AI assistant with access to data about various blockchain and DePIN (Decentralized Physical Infrastructure Network) projects. Your primary task is to answer user questions about token prices and other project-related information accurately and precisely. Here's the data you have access to: -About {{agentName}}: -{{bio}} -{{lore}} -{{knowledge}} - -{{providers}} - -When a user asks a question, follow these steps: - -1. Analyze the user's question carefully. -2. Search the provided projects data for relevant information. -3. If the question is about token prices, provide the most up-to-date price information available in the data. -4. If the question is about other project details (e.g., market cap, description, categories), provide that information accurately. -5. If the question cannot be answered using the available data, politely inform the user that you don't have that information. - -When responding to the user: -1. Provide a clear and concise answer to the user's question. -2. If you're stating a token price or numerical value, include the exact figure from the data. -3. If relevant, provide brief additional context or information that might be helpful. - -Remember to be precise, especially when discussing token prices or other numerical data. Do not speculate or provide information that is not present in the given data. - -Now, please answer the user question, based on some recent messages: - -{{recentMessages}} -`; - -export const locationExtractionTemplate = ` -You are an AI assistant specialized in extracting location information from user messages. Your primary task is to identify and extract a valid location name that can be used to query the Mapbox API for latitude and longitude coordinates. - -Here are the recent messages from the conversation: - - -{{recentMessages}} - - -Your objective is to analyze the most recent user message in the context of the conversation and extract a valid location name. This location should be suitable for querying a map service, such as a city name, a full address, or a well-known landmark. - -Please follow these steps: - -1. Review the conversation history, focusing on the most recent user message. -2. Identify any mentions of locations in the latest message and recent context. -3. If multiple locations are mentioned, prioritize the most recently mentioned valid location. -4. Extract the location, ensuring it's specific enough for a map query. - -Use the following guidelines when extracting the location: - -- Look for names of cities, countries, streets, or landmarks. -- Include relevant details that help specify the location, such as street numbers or neighborhood names. -- If the location is ambiguous (e.g., "Springfield" without a state), include additional context if available in the message or recent conversation history. -- If no clear location is mentioned in the latest message or recent context, respond with "No valid location found." - -Before providing your final answer, wrap your analysis inside tags. In this analysis: - -1. List all mentioned locations chronologically, prepending each with a number (e.g., 1. New York, 2. Central Park, etc.). -2. For each location, evaluate its specificity and suitability for a map query. Consider: - - Is it a city, country, street address, or landmark? - - Does it have enough detail for an accurate map search? - - Is there any ambiguity that needs to be resolved? -3. If there are multiple locations in the latest message, explain your reasoning for choosing one over the others. -4. Identify the most recently mentioned valid location and justify your choice. - -After your analysis, provide the extracted location in the following format: - - -[Insert the extracted location here, or "No valid location found" if no valid location is present] - - -The extracted location should be formatted as a string that could be used as a query for a mapping service. For example: -- "New York City" -- "221B Baker Street, London" -- "Eiffel Tower, Paris" -- "Sydney Opera House, Australia" - -Remember, the goal is to provide a clear, specific location that can be used to find geographic coordinates. Do not include any explanation or additional text outside of the location_analysis and extracted_location tags. -`; - -export const currentWeatherTemplate = ` -You are an AI weather assistant with a unique persona. Your task is to answer questions about the weather using provided data while maintaining your assigned character traits. - -Here is the weather data you will use to answer questions: - - -{{weatherData}} - - -Now, review the information about your persona: - - -{{agentName}} - - - - -{{bio}} - - - -{{lore}} - - - -{{knowledge}} - - - -{{characterMessageExamples}} - - - - -{{providers}} - - -Recent messages for context: - - -{{recentMessages}} - - -When answering a user's question, follow these steps: - -1. Analyze the weather data, focusing on the specific information requested by the user. -2. Formulate a response that directly addresses the user's question using only the provided weather data. -3. If the question cannot be fully answered, explain what information you can provide and what is missing. -4. Maintain your assigned persona throughout your response, including tone and style. -5. Provide additional relevant information or advice if appropriate, but keep it concise and related to the user's query. -6. Do not invent or assume any weather information not present in the provided data. -7. If the weather data is incomplete or invalid, mention this in your response. - -Before providing your final answer, wrap your analysis process inside tags. Focus on the relevance to the user's specific question rather than covering all available weather data. In your analysis: -- Identify key weather parameters mentioned in the user's question -- List out relevant data points from the weather data -- Consider how your persona's traits might influence the response - -Present your final answer in the following format: - - -[Your response to the user's question, written in the style of your assigned persona] - - -Example output structure (using generic content): - - -- Identified user's question about [specific weather parameter] in [location] -- Key weather parameters mentioned: [list parameters] -- Relevant data points from weather data: - * [Data point 1] - * [Data point 2] - * [Data point 3] -- Persona traits that might influence response: - * [Trait 1] - * [Trait 2] -- Considered how to phrase response in character - - - -[Direct answer to the user's question about the specific weather parameter] -[Any additional relevant information or advice, if applicable] - - -Remember to stay in character and provide a helpful, accurate response based solely on the provided weather data, focusing on the user's specific question. -`; - -export const weatherForecastTemplate = ` -You are an AI weather assistant with a unique persona. Your task is to answer questions about the weather using provided data while maintaining your assigned character traits. - -Here is the weather data you will use to answer questions: - - -{{weatherForecast}} - - -This weather data contains information such as temperature, humidity, wind speed, and conditions for specific locations and time periods. Each entry in the data array represents a weather forecast for a particular timestamp. - -Now, review the information about your persona: - - -{{agentName}} - - - - -{{bio}} - - - -{{lore}} - - - -{{knowledge}} - - - -{{characterMessageExamples}} - - - - -{{providers}} - - -Recent messages for context: - - -{{recentMessages}} - - -When answering a user's question, follow these steps: - -1. Analyze the weather data, focusing on the specific information requested by the user. -2. Formulate a response that directly addresses the user's question using only the provided weather data. -3. If the question cannot be fully answered, explain what information you can provide and what is missing. -4. Maintain your assigned persona throughout your response, including tone and style. -5. Provide additional relevant information or advice if appropriate, but keep it concise and related to the user's query. -6. Do not invent or assume any weather information not present in the provided data. -7. If the weather data is incomplete or invalid, mention this in your response. - -Before providing your final answer, wrap your thought process in tags. Focus on the relevance to the user's specific question rather than covering all available weather data. In your analysis: -- Identify key weather parameters mentioned in the user's question -- Quote specific, relevant data points from the weather data -- List the persona traits that are most relevant to answering this particular question -- If multiple data points are available for the requested information, explain how you're selecting or interpreting the data -- Provide a step-by-step plan for answering the question in character - -Present your final answer in the following format: - - -[Your response to the user's question, written in the style of your assigned persona] - - -Example output structure (using generic content): - - -- User asked about [weather parameter] in [location] for [time period] -- Relevant quotes from weather data: - * "[Exact quote 1]" - * "[Exact quote 2]" - * "[Exact quote 3]" -- Most relevant persona traits for this question: - * [Trait 1]: [How it affects the response] - * [Trait 2]: [How it affects the response] -- Data interpretation: [Brief explanation if needed] -- Step-by-step plan for in-character response: - 1. [Step 1] - 2. [Step 2] - 3. [Step 3] - - - -[Direct answer to the user's question about the specific weather parameter] -[Any additional relevant information or advice, if applicable] - - -Remember to stay in character and provide a helpful, accurate response based solely on the provided weather data, focusing on the user's specific question. -`; diff --git a/packages/plugin-depin/src/test/depinData.test.ts b/packages/plugin-depin/src/test/depinData.test.ts deleted file mode 100644 index 55135e501237a..0000000000000 --- a/packages/plugin-depin/src/test/depinData.test.ts +++ /dev/null @@ -1,95 +0,0 @@ -import { describe, expect, it, vi, beforeEach, afterEach } from "vitest"; - -import { - DEPIN_METRICS_URL, - DEPIN_PROJECTS_URL, - DePINScanProvider, -} from "../providers/depinData"; -import { - mockDepinscanMetrics, - mockDepinscanProjects, - parsedProjectsSample, -} from "./sampleData"; - -vi.stubGlobal( - "fetch", - vi.fn((url) => { - if (url.includes(DEPIN_METRICS_URL)) { - return Promise.resolve({ - json: () => Promise.resolve(mockDepinscanMetrics), - }); - } else if (url.includes(DEPIN_PROJECTS_URL)) { - return Promise.resolve({ - json: () => Promise.resolve(mockDepinscanProjects), - }); - } else { - return Promise.reject(new Error("Unknown endpoint")); - } - }) -); - -// Mock NodeCache -vi.mock("node-cache", () => { - return { - default: vi.fn().mockImplementation(() => ({ - set: vi.fn(), - get: vi.fn().mockReturnValue(null), - })), - }; -}); - -// Mock the ICacheManager -const mockCacheManager = { - get: vi.fn().mockResolvedValue(null), - set: vi.fn(), -}; - -describe("Depin Data provider", () => { - let depinscan: DePINScanProvider; - - beforeEach(() => { - vi.clearAllMocks(); - mockCacheManager.get.mockResolvedValue(null); - - depinscan = new DePINScanProvider(mockCacheManager as any); - }); - - afterEach(() => { - vi.clearAllTimers(); - }); - - describe("Cache Management", () => { - it("should use cached data when available", async () => { - mockCacheManager.get.mockResolvedValueOnce(mockDepinscanMetrics); - - const result = await (depinscan as any).getCachedData("test-key"); - - expect(result).toEqual(mockDepinscanMetrics); - expect(mockCacheManager.get).toHaveBeenCalledTimes(1); - }); - - it("should write data to both caches", async () => { - await (depinscan as any).setCachedData( - "test-key", - mockDepinscanMetrics - ); - - expect(mockCacheManager.set).toHaveBeenCalledWith( - expect.stringContaining("test-key"), - mockDepinscanMetrics, - expect.any(Object) - ); - }); - }); - - it("should fetch depinscan metrics", async () => { - const metrics = await depinscan.getDailyMetrics(); - - expect(metrics).toEqual(mockDepinscanMetrics); - }); - it("should fetch depinscan projects", async () => { - const projects = await depinscan.getProjects(); - - expect(projects).toEqual(parsedProjectsSample); - }); -}); diff --git a/packages/plugin-depin/src/test/sampleData.ts b/packages/plugin-depin/src/test/sampleData.ts deleted file mode 100644 index b749193e0f2ed..0000000000000 --- a/packages/plugin-depin/src/test/sampleData.ts +++ /dev/null @@ -1,104 +0,0 @@ -export const mockDepinscanMetrics = [ - { - date: "2024-12-17", - total_projects: "291", - market_cap: "36046044620.57570635160", - total_device: "19416950", - }, -]; - -export const mockDepinscanProjects = [ - { - project_name: "Solana", - slug: "solana", - logo: "https://depinscan-prod.s3.us-east-1.amazonaws.com/next-s3-uploads/3160a9ec-42df-4f02-9db6-5aadc61323d8/solana.svg", - description: - "Solana is a general purpose layer 1 blockchain that works well for DePIN (decentralized physical infrastructure Network) projects due to its low transaction cost, high-throughput speed, scalability and existing Solana DePIN ecosystem. The most renowned Solana DePIN projects include Helium, Hivemapper and Render.", - trusted_metric: true, - token: "SOL", - layer_1: ["Solana"], - categories: ["Chain"], - market_cap: "106247097756.0147", - token_price: "221.91", - total_devices: 0, - network_status: "Mainnet", - avg_device_cost: "", - days_to_breakeven: "", - estimated_daily_earnings: "", - chainid: "", - coingecko_id: "solana", - fully_diluted_valuation: "131508718985", - }, - { - project_name: "Render", - slug: "render", - logo: "https://depinscan-prod.s3.amazonaws.com/depin/9e5f0bb330344d580b9e30d338d6ab6d.png", - description: - "Render is a decentralized rendering platform supporting next-generation media production.", - trusted_metric: true, - token: "RNDR", - layer_1: ["Solana"], - categories: ["Server", "AI"], - market_cap: "4659773671.856073", - token_price: "9.02", - total_devices: 0, - network_status: "Mainnet", - avg_device_cost: "", - days_to_breakeven: "", - estimated_daily_earnings: "", - chainid: "1", - coingecko_id: "render-token", - fully_diluted_valuation: "4705509105", - }, -]; - -export const parsedProjectsSample = [ - [ - "project_name", - "slug", - "token", - "layer_1", - "categories", - "market_cap", - "token_price", - "total_devices", - "avg_device_cost", - "days_to_breakeven", - "estimated_daily_earnings", - "chainid", - "coingecko_id", - "fully_diluted_valuation", - ], - [ - "Solana", - "solana", - "SOL", - "Solana", - "Chain", - "106.25B", - "221.91", - "0", - "", - "", - "", - "", - "solana", - "131.51B", - ], - [ - "Render", - "render", - "RNDR", - "Solana", - "Server, AI", - "4.66B", - "9.02", - "0", - "", - "", - "", - "1", - "render-token", - "4.71B", - ], -]; diff --git a/packages/plugin-depin/src/types/depin.ts b/packages/plugin-depin/src/types/depin.ts deleted file mode 100644 index 5c156b6339f1d..0000000000000 --- a/packages/plugin-depin/src/types/depin.ts +++ /dev/null @@ -1,23 +0,0 @@ -export type DepinScanMetrics = { - date: string; - total_projects: string; - market_cap: string; - total_device: string; -}; - -export type DepinScanProject = { - project_name: string; - slug: string; - token: string; - layer_1: string[]; - categories: string[]; - market_cap: string; - token_price: string; - total_devices: string; - avg_device_cost: string; - days_to_breakeven: string; - estimated_daily_earnings: string; - chainid: string; - coingecko_id: string; - fully_diluted_valuation: string; -}; diff --git a/packages/plugin-depin/tsconfig.json b/packages/plugin-depin/tsconfig.json deleted file mode 100644 index 2d8d3fe8181fb..0000000000000 --- a/packages/plugin-depin/tsconfig.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "./src", - "typeRoots": [ - "./node_modules/@types", - "./src/types" - ], - "declaration": true - }, - "include": [ - "src" - ] -} \ No newline at end of file diff --git a/packages/plugin-depin/tsup.config.ts b/packages/plugin-depin/tsup.config.ts deleted file mode 100644 index c0af60c6ab7cd..0000000000000 --- a/packages/plugin-depin/tsup.config.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], - external: [ - "dotenv", - "fs", - "path", - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "events", - "node-cache", - "axios" - ], -}); diff --git a/packages/plugin-desk-exchange/.npmignore b/packages/plugin-desk-exchange/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-desk-exchange/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-desk-exchange/README.md b/packages/plugin-desk-exchange/README.md deleted file mode 100644 index c236e7a36ddcf..0000000000000 --- a/packages/plugin-desk-exchange/README.md +++ /dev/null @@ -1,95 +0,0 @@ -# DESK Exchange Plugin for Eliza - -This plugin enables interaction with the DESK Perpetual DEX through Eliza, providing perpetual futures trading capabilities. Visit [DESK Exchange](https://desk.exchange/) for more details. -## Features -- 💱 Perpetual Trading - - Market orders (immediate execution) - - Limit orders (price-specific) -- 🔄 Order Management - - Cancel all open orders -- 🏦 Account summary - - View open orders - - View active positions - - View collateral balances - -## Installation - -Add the plugin to your Eliza configuration: - -```json -{ - "plugins": ["@elizaos/plugin-desk-exchange"] -} -``` - -## Configuration - -Set the following environment variables: - -```env -DESK_EXCHANGE_PRIVATE_KEY=your_private_key # Required for trading and cancelling orders -DESK_EXCHANGE_NETWORK= # "mainnet" or "testnet -``` - -## Available Actions - -### 1. PERP_TRADE - -Place perp market or limit orders. - -Examples: - -``` -# Market Orders -"long 1 BTC" -> Place buy order of 1 BTC at market price -"sell 2 ETH" -> Sells 2 ETH at market price -"market buy 1 ETH" -> Buys 1 ETH at market price - -# Limit Orders -"buy 1 SOL at 20 USDC" -> Places buy order for 1 SOL at 20 USDC -"sell 0.5 BASE at 21 USDC" -> Places sell order for 0.5 BASE at 21 USDC -``` - -### 2. CANCEL_ORDERS - -Cancel all your open orders. - -Examples: - -``` -"Cancel all orders" -"Cancel my orders" -``` - -### 3. GET_PERP_ACCOUNT_SUMMARY - -Display the summary of your current account with details on open orders, active position and collateral tokens. - -Examples: - -``` -"Check my account please" - -"Here is the summary of your account 0xxxxxxxx -Your positions: -- Long 1.0039 BTCUSD -- Short 10.01 ETHUSD -- Long 135808.80 SOLUSD -Your orders: -- Sell 0/0.0001 BTCUSD @200000.00 -Your collaterals: -- 1382295.125325162 USDC -- 2000000.00 CREDIT" -``` - -## Security Notes - -- Store your private key securely using environment variables -- Test with small amounts first -- Use testnet for initial testing -- Monitor your orders regularly -- Double-check prices before confirming trades - -## License - -MIT diff --git a/packages/plugin-desk-exchange/package.json b/packages/plugin-desk-exchange/package.json deleted file mode 100644 index 4351fda5b6174..0000000000000 --- a/packages/plugin-desk-exchange/package.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "name": "@elizaos/plugin-desk-exchange", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "ethers": "^6.13.5", - "axios": "^1.7.9" - }, - "devDependencies": { - "@types/node": "^20.0.0", - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch" - } -} diff --git a/packages/plugin-desk-exchange/src/actions/accountSummary.ts b/packages/plugin-desk-exchange/src/actions/accountSummary.ts deleted file mode 100644 index 4d78b7429627f..0000000000000 --- a/packages/plugin-desk-exchange/src/actions/accountSummary.ts +++ /dev/null @@ -1,187 +0,0 @@ -import { - type Action, - type ActionExample, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback, - composeContext, - elizaLogger, -} from "@elizaos/core"; -import { accountSummaryTemplate } from "../templates"; -import { ethers } from "ethers"; -import { - generateNonce, - generateJwt, - getSubaccount, - getEndpoint, - formatNumber, -} from "../services/utils"; -import { getSubaccountSummary } from "../services/account"; - -export const accountSummary: Action = { - name: "GET_PERP_ACCOUNT_SUMMARY", - similes: [ - "CHECK_ACCOUNT", - "CHECK_PERP_ACCOUNT", - "ACCOUNT_SUMMARY", - "PERP_ACCOUNT_SUMMARY", - ], - description: "Get the current account summary", - validate: async (runtime: IAgentRuntime) => { - return !!( - runtime.getSetting("DESK_EXCHANGE_PRIVATE_KEY") && - runtime.getSetting("DESK_EXCHANGE_NETWORK") - ); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - options: Record, - callback?: HandlerCallback - ) => { - // Initialize or update state - state = !state - ? await runtime.composeState(message) - : await runtime.updateRecentMessageState(state); - - const context = composeContext({ - state, - template: accountSummaryTemplate, - }); - - try { - const endpoint = getEndpoint(runtime); - const wallet = new ethers.Wallet( - runtime.getSetting("DESK_EXCHANGE_PRIVATE_KEY") - ); - const jwt = await generateJwt(endpoint, wallet, 0, generateNonce()); - - const response = await getSubaccountSummary( - endpoint, - jwt, - getSubaccount(wallet.address, 0) - ); - elizaLogger.info(response.data); - - const subaccountSummaryData = response.data.data; - const positionSummary = - subaccountSummaryData.positions.length > 0 - ? subaccountSummaryData.positions - .map((p) => { - return `- ${p.side} ${formatNumber(p.quantity)} ${ - p.symbol - }`; - }) - .join("\n") - : "- No active position"; - const orderSummary = - subaccountSummaryData.open_orders.length > 0 - ? subaccountSummaryData.open_orders - .map((o) => { - return `- ${ - o.side === "Long" ? "Buy" : "Sell" - } ${formatNumber( - Number(o.original_quantity) - - Number(o.remaining_quantity) - )}/${formatNumber(o.original_quantity)} ${ - o.symbol - } @${ - Number(o.price) > 0 - ? formatNumber(o.price) - : formatNumber(o.trigger_price) - }`; - }) - .join("\n") - : "- No orders"; - const collateralSummary = - subaccountSummaryData.collaterals.length > 0 - ? subaccountSummaryData.collaterals - .map((c) => { - return `- ${formatNumber(c.amount, 4)} ${ - c.asset - }`; - }) - .join("\n") - : "- No collateral"; - callback({ - text: - `Here is the summary of your account ${wallet.address}\n` + - `Your positions:\n` + - positionSummary + - `\n` + - `Your orders:\n` + - orderSummary + - `\n` + - `Your collaterals:\n` + - collateralSummary, - content: subaccountSummaryData, - }); - - return true; - } catch (error) { - elizaLogger.error("Error getting account summary:", { - message: error.message, - code: error.code, - data: error.response?.data, - }); - if (callback) { - callback({ - text: `Error getting account summary: ${error.message} ${error.response?.data?.errors}`, - content: { error: error.message }, - }); - } - return false; - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Check my account please", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here is the summary of your account", - action: "GET_PERP_ACCOUNT_SUMMARY", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "How is my account doing?", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here is the summary of your account", - action: "GET_PERP_ACCOUNT_SUMMARY", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Account summary", - }, - }, - { - user: "{{agent}}", - content: { - text: "Here is the summary of your account", - action: "GET_PERP_ACCOUNT_SUMMARY", - }, - }, - ], - ] as ActionExample[][], -}; - -export default accountSummary; diff --git a/packages/plugin-desk-exchange/src/actions/cancelOrders.ts b/packages/plugin-desk-exchange/src/actions/cancelOrders.ts deleted file mode 100644 index 6d959a1cc4489..0000000000000 --- a/packages/plugin-desk-exchange/src/actions/cancelOrders.ts +++ /dev/null @@ -1,121 +0,0 @@ -import { - type Action, - type ActionExample, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - composeContext, -} from "@elizaos/core"; -import { - generateJwt, - generateNonce, - getEndpoint, - getSubaccount, -} from "../services/utils"; -import { ethers } from "ethers"; -import { getSubaccountSummary } from "../services/account"; -import { cancelOrder } from "../services/trade"; -import { cancelOrderTemplate } from "../templates"; - -export const cancelOrders: Action = { - name: "CANCEL_ORDERS", - similes: ["CANCEL_ALL_ORDERS", "CANCEL", "CANCEL_ALL"], - description: "Cancel all open orders on DESK Exchange", - validate: async (runtime: IAgentRuntime) => { - return !!( - runtime.getSetting("DESK_EXCHANGE_PRIVATE_KEY") && - runtime.getSetting("DESK_EXCHANGE_NETWORK") - ); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - options: Record, - callback?: HandlerCallback - ) => { - // Initialize or update state - state = !state - ? await runtime.composeState(message) - : await runtime.updateRecentMessageState(state); - const context = composeContext({ - state, - template: cancelOrderTemplate, - }); - - try { - const endpoint = getEndpoint(runtime); - const wallet = new ethers.Wallet( - runtime.getSetting("DESK_EXCHANGE_PRIVATE_KEY") - ); - const jwt = await generateJwt(endpoint, wallet, 0, generateNonce()); - - const subaccountSummaryResponse = await getSubaccountSummary( - endpoint, - jwt, - getSubaccount(wallet.address, 0) - ); - - const openOrders = - subaccountSummaryResponse.data?.data?.open_orders; - - if (openOrders && openOrders.length > 0) { - for (const o of openOrders) { - await cancelOrder(endpoint, jwt, { - symbol: o.symbol, - subaccount: getSubaccount(wallet.address, 0), - order_digest: o.order_digest, - nonce: generateNonce(), - is_conditional_order: false, - wait_for_reply: false, - }); - } - callback({ - text: `Successfully cancelled ${openOrders.length} orders.`, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error canceling orders:", { - message: error.message, - code: error.code, - data: error.response?.data, - }); - if (callback) { - callback({ - text: `Error canceling orders: ${error.message} ${error.response?.data?.errors}`, - content: { error: error.message }, - }); - } - return false; - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Cancel all my orders", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll cancel all your open orders.", - action: "CANCEL_ORDERS", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully cancelled 2 open orders", - }, - }, - ], - ] as ActionExample[][], -}; - -export default cancelOrders; diff --git a/packages/plugin-desk-exchange/src/actions/perpTrade.ts b/packages/plugin-desk-exchange/src/actions/perpTrade.ts deleted file mode 100644 index 1252f5b984886..0000000000000 --- a/packages/plugin-desk-exchange/src/actions/perpTrade.ts +++ /dev/null @@ -1,199 +0,0 @@ -import { - type Action, - type ActionExample, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback, - composeContext, - elizaLogger, - generateObjectDeprecated, - ModelClass, -} from "@elizaos/core"; -import { - DeskExchangeError, - PlaceOrderRequest, - PlaceOrderSchema, -} from "../types"; -import { perpTradeTemplate } from "../templates"; -import { ethers } from "ethers"; -import { - generateNonce, - generateJwt, - getSubaccount, - getEndpoint, - formatNumber, -} from "../services/utils"; -import { placeOrder } from "../services/trade"; - -export const perpTrade: Action = { - name: "PERP_TRADE", - similes: ["PERP_ORDER", "PERP_BUY", "PERP_SELL"], - description: "Place a perpetual contract trade order on DESK Exchange", - validate: async (runtime: IAgentRuntime) => { - return !!( - runtime.getSetting("DESK_EXCHANGE_PRIVATE_KEY") && - runtime.getSetting("DESK_EXCHANGE_NETWORK") - ); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State, - options: Record, - callback?: HandlerCallback - ) => { - // Initialize or update state - state = !state - ? await runtime.composeState(message) - : await runtime.updateRecentMessageState(state); - - const context = composeContext({ - state, - template: perpTradeTemplate, - }); - - const content = await generateObjectDeprecated({ - runtime, - context, - modelClass: ModelClass.SMALL, - }); - - try { - if (!content) { - throw new DeskExchangeError( - "Could not parse trading parameters from conversation" - ); - } - - const endpoint = getEndpoint(runtime); - - const wallet = new ethers.Wallet( - runtime.getSetting("DESK_EXCHANGE_PRIVATE_KEY") - ); - const jwt = await generateJwt(endpoint, wallet, 0, generateNonce()); - - elizaLogger.info( - "Raw content from LLM:", - JSON.stringify(content, null, 2) - ); - - const processesOrder = { - symbol: `${content.symbol}USD`, - side: content.side, - amount: content.amount, - price: content.price, - nonce: generateNonce(), - broker_id: "DESK", - order_type: Number(content.price) === 0 ? "Market" : "Limit", - reduce_only: false, - subaccount: getSubaccount(wallet.address, 0), - }; - const parseResult = PlaceOrderSchema.safeParse(processesOrder); - if (!parseResult.success) { - throw new Error( - `Invalid perp trade content: ${JSON.stringify( - parseResult.error.errors, - null, - 2 - )}` - ); - } - elizaLogger.info( - "Processed order:", - JSON.stringify(processesOrder, null, 2) - ); - - const response = await placeOrder( - endpoint, - jwt, - processesOrder as PlaceOrderRequest - ); - - elizaLogger.info(response.data); - - if (callback && response.status === 200) { - const orderResponse = response.data.data; - callback({ - text: `Successfully placed a ${orderResponse.side} ${ - orderResponse.order_type - } order of size ${formatNumber( - orderResponse.quantity - )} on ${orderResponse.symbol} at ${ - orderResponse.order_type === "Market" - ? "market price" - : formatNumber(orderResponse.price) + " USD" - } on DESK Exchange.`, - content: response.data, - }); - } else { - callback({ - text: `Place order failed with ${response.data.errors}.`, - content: response.data, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error executing trade:", { - content, - message: error.message, - code: error.code, - data: error.response?.data, - }); - if (callback) { - callback({ - text: `Error executing trade: ${error.message} ${error.response?.data?.errors}`, - content: { error: error.message }, - }); - } - return false; - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "Long 0.1 BTC at 20 USD", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll place a buy order for 0.1 BTC at 20 USD.", - action: "PERP_TRADE", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully placed a limit order to buy 0.1 BTC at 20 USD", - }, - }, - ], - [ - { - user: "{{user1}}", - content: { - text: "Short 2 BTC at 21 USD", - }, - }, - { - user: "{{agent}}", - content: { - text: "I'll place a sell order for 2 BTC at 21 USD.", - action: "PERP_TRADE", - }, - }, - { - user: "{{agent}}", - content: { - text: "Successfully placed a limit order to sell 2 BTC at 21 USD", - }, - }, - ], - ] as ActionExample[][], -}; - -export default perpTrade; diff --git a/packages/plugin-desk-exchange/src/index.ts b/packages/plugin-desk-exchange/src/index.ts deleted file mode 100644 index 3c64528e271b5..0000000000000 --- a/packages/plugin-desk-exchange/src/index.ts +++ /dev/null @@ -1,16 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { perpTrade } from "./actions/perpTrade"; -import accountSummary from "./actions/accountSummary"; -import cancelOrders from "./actions/cancelOrders"; - -export const deskExchangePlugin: Plugin = { - name: "deskExchange", - description: "DESK Exchange plugin", - actions: [perpTrade, accountSummary, cancelOrders], - providers: [], - evaluators: [], - services: [], - clients: [], -}; - -export default deskExchangePlugin; diff --git a/packages/plugin-desk-exchange/src/services/account.ts b/packages/plugin-desk-exchange/src/services/account.ts deleted file mode 100644 index 182d46656b5d3..0000000000000 --- a/packages/plugin-desk-exchange/src/services/account.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { AxiosResponse } from "axios"; -import axios from "axios"; - -export const getSubaccountSummary = async ( - endpoint: string, - jwt: string, - subaccount: string -): Promise => { - if (!endpoint || !jwt || !subaccount) { - throw new Error("Missing required parameters"); - } - return await axios.get(`${endpoint}/v2/subaccount-summary/${subaccount}`, { - headers: { - authorization: `Bearer ${jwt}`, - "content-type": "application/json", - }, - timeout: 5000, - validateStatus: (status) => status === 200, - }); -}; diff --git a/packages/plugin-desk-exchange/src/services/trade.ts b/packages/plugin-desk-exchange/src/services/trade.ts deleted file mode 100644 index a28cd87209cd1..0000000000000 --- a/packages/plugin-desk-exchange/src/services/trade.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { AxiosResponse } from "axios"; -import { CancelOrderRequest, PlaceOrderRequest } from "../types"; -import axios from "axios"; - -export const placeOrder = async ( - endpoint: string, - jwt: string, - order: PlaceOrderRequest -): Promise => { - if (!endpoint || !jwt || !order) { - throw new Error("Missing required parameters"); - } - return await axios.post(`${endpoint}/v2/place-order`, order, { - headers: { - authorization: `Bearer ${jwt}`, - "content-type": "application/json", - }, - timeout: 5000, - validateStatus: (status) => status === 200, - }); -}; - -export const cancelOrder = async ( - endpoint: string, - jwt: string, - order: CancelOrderRequest -): Promise => { - if (!endpoint || !jwt || !order) { - throw new Error("Missing required parameters"); - } - if (!order.order_digest) { - throw new Error("Missing order digest"); - } - return await axios.post(`${endpoint}/v2/cancel-order`, order, { - headers: { - authorization: `Bearer ${jwt}`, - "content-type": "application/json", - }, - timeout: 5000, - validateStatus: (status) => status === 200, - }); -}; diff --git a/packages/plugin-desk-exchange/src/services/utils.ts b/packages/plugin-desk-exchange/src/services/utils.ts deleted file mode 100644 index b683db8bc52ae..0000000000000 --- a/packages/plugin-desk-exchange/src/services/utils.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { type IAgentRuntime } from "@elizaos/core"; -import { DeskExchangeError } from "../types"; -import { ethers } from "ethers"; -import axios from "axios"; -import { randomBytes } from "crypto"; - -export const generateNonce = (): string => { - const expiredAt = BigInt(Date.now() + 1000 * 60) * BigInt(1 << 20); - const random = parseInt(randomBytes(3).toString("hex"), 16) % (1 << 20); - return (expiredAt + BigInt(random)).toString(); -}; - -export const generateJwt = async ( - endpoint: string, - wallet: ethers.Wallet, - subaccountId: number, - nonce: string -): Promise => { - const message = `generate jwt for ${wallet.address?.toLowerCase()} and subaccount id ${subaccountId} to trade on happytrading.global with nonce: ${nonce}`; - const signature = await wallet.signMessage(message); - - const response = await axios.post( - `${endpoint}/v2/auth/evm`, - { - account: wallet.address, - subaccount_id: subaccountId.toString(), - nonce, - signature, - }, - { - headers: { "content-type": "application/json" }, - } - ); - - if (response.status === 200) { - return response.data.data.jwt; - } else { - throw new DeskExchangeError("Could not generate JWT"); - } -}; - -export const getSubaccount = ( - account: string, - subaccountId: number -): string => { - // pad address with subaccountId to be 32 bytes (64 hex characters) - // 0x + 40 hex characters (address) + 24 hex characters (subaccountId) - const subaccountIdHex = BigInt(subaccountId).toString(16).padStart(24, "0"); - return account.concat(subaccountIdHex); -}; - -export const getEndpoint = (runtime: IAgentRuntime): string => { - return runtime.getSetting("DESK_EXCHANGE_NETWORK") === "mainnet" - ? "https://api.happytrading.global" - : "https://stg-trade-api.happytrading.global"; -}; - -export const formatNumber = ( - num: string | number, - decimalPlaces?: number -): string => { - return Number(num).toLocaleString(undefined, { - style: "decimal", - minimumFractionDigits: 0, - maximumFractionDigits: decimalPlaces || 8, - }); -}; diff --git a/packages/plugin-desk-exchange/src/templates.ts b/packages/plugin-desk-exchange/src/templates.ts deleted file mode 100644 index 3de662f7cfd11..0000000000000 --- a/packages/plugin-desk-exchange/src/templates.ts +++ /dev/null @@ -1,58 +0,0 @@ -export const perpTradeTemplate = `Look at your LAST RESPONSE in the conversation where you confirmed a trade request. -Based on ONLY that last message, extract the trading details: - -For DESK Exchange perp trading: -- Market orders (executes immediately at best available price): - "perp buy 1 HYPE" -> { "symbol": "HYPE", "side": "Long", "amount": "1" } - "perp sell 2 HYPE" -> { "symbol": "HYPE", "side": "Short", "amount": "2" } - "perp market buy 1 HYPE" -> { "symbol": "HYPE", "side": "Long", "amount": "1" } - "perp market sell 2 HYPE" -> { "symbol": "HYPE", "side": "Short", "amount": "2" } - -- Limit orders (waits for specified price): - "buy 1 HYPE at 20 USDC" -> { "symbol": "HYPE", "side": "Long", "amount": "1", "price": "20" } - "sell 0.5 HYPE at 21 USDC" -> { "symbol": "HYPE", "side": "Short", "amount": "0.5", "price": "21" } - "limit buy 1 HYPE at 20 USDC" -> { "symbol": "HYPE", "side": "Long", "amount": "1", "price": "20" } - "limit sell 0.5 HYPE at 21 USDC" -> { "symbol": "HYPE", "side": "Short", "amount": "0.5", "price": "21" } - -\`\`\`json -{ - "symbol": "", - "side": "", - "amount": "", - "price": "<"price in USD if limit order, 0 if market order>" -} -\`\`\` - -Note: -- Just use the coin symbol (HYPE, ETH, etc.) -- price is optional: - - If specified (with "at X USD"), order will be placed at that exact price - - If not specified, order will be placed at current market price -- Words like "market" or "limit" at the start are optional but help clarify intent - -Recent conversation: -{{recentMessages}}`; - -export const cancelOrderTemplate = `Look at your LAST RESPONSE in the conversation where you confirmed that user want to cancel all orders. - -For example: -- I would like to cancel all my orders. -- Cancel all orders -- Cancel orders please - -If the user ask to cancel a specific order, please let them know that it is not possible at the moment. Let them know that you now only have the ability to cancel all order only. - -Recent conversation: -{{recentMessages}}`; - -export const accountSummaryTemplate = `Look at ONLY your LAST RESPONSE message in this conversation, where you just confirmed if the user want to check the information of their account. - -For example: -- I would like to check the summary of my account on DESK Exchange. -- I want to check the information on my account. -- How is my positions going? -- How is my account? -- Check account summary please - -Last part of conversation: -{{recentMessages}}`; diff --git a/packages/plugin-desk-exchange/src/types.ts b/packages/plugin-desk-exchange/src/types.ts deleted file mode 100644 index 0bda29c05c5f0..0000000000000 --- a/packages/plugin-desk-exchange/src/types.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { z } from "zod"; - -// Base configuration types -export interface DeskExchangeConfig { - privateKey: string; - network?: "mainnet" | "testnet"; - walletAddress?: string; -} - -export const PlaceOrderSchema = z.object({ - symbol: z.string().min(1).toUpperCase(), - side: z.enum(["Long", "Short"]), - amount: z.number({ coerce: true }).positive(), - price: z.number({ coerce: true }), - nonce: z.string(), - broker_id: z.enum(["DESK"]), - order_type: z.enum(["Market", "Limit"]), - reduce_only: z.boolean(), - subaccount: z.string(), - timeInForce: z.enum(["GTC", "IOC", "FOK"]).optional(), -}); -export type PlaceOrderRequest = z.infer; - -export const CancelOrderSchema = z.object({ - symbol: z.string().min(1).toUpperCase(), - subaccount: z.string(), - order_digest: z.string(), - nonce: z.string(), - is_conditional_order: z.boolean(), - wait_for_reply: z.boolean(), -}); -export type CancelOrderRequest = z.infer; - -// Error handling types -export class DeskExchangeError extends Error { - constructor( - message: string, - public code?: number, - public details?: unknown - ) { - super(message); - this.name = "DeskExchangeError"; - } -} \ No newline at end of file diff --git a/packages/plugin-desk-exchange/tsconfig.json b/packages/plugin-desk-exchange/tsconfig.json deleted file mode 100644 index 18c600eec05a2..0000000000000 --- a/packages/plugin-desk-exchange/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "types": [ - "node" - ] - }, - "include": [ - "src/**/*.ts", - ] -} \ No newline at end of file diff --git a/packages/plugin-desk-exchange/tsup.config.ts b/packages/plugin-desk-exchange/tsup.config.ts deleted file mode 100644 index 1a96f24afa1eb..0000000000000 --- a/packages/plugin-desk-exchange/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "safe-buffer", - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-devin/README.md b/packages/plugin-devin/README.md deleted file mode 100644 index 1676286192ae3..0000000000000 --- a/packages/plugin-devin/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# @elizaos/plugin-devin - -Devin API integration plugin for Eliza, providing automated engineering assistance through the Devin API. - -## Installation - -```bash -pnpm add @elizaos/plugin-devin -``` - -## Configuration - -The plugin requires a Devin API token for authentication. Set the following environment variable: - -```bash -DEVIN_API_TOKEN=your_api_token_here -``` - -Or configure it in your Eliza runtime settings: - -```typescript -runtime.setSetting("DEVIN_API_TOKEN", "your_api_token_here"); -``` - -## Features - -- Session Management: Create and manage Devin engineering sessions -- State Tracking: Monitor session status and progress -- Client Agnostic: Works with any Eliza client implementation -- Rate Limiting: Built-in API request rate limiting -- Error Handling: Comprehensive error handling with retries - -## Usage - -### Actions - -#### START_DEVIN_SESSION - -Creates a new Devin session with the specified prompt. - -```typescript -const result = await runtime.runAction("START_DEVIN_SESSION", { - content: { text: "Help me refactor this code" } -}); -``` - -### Providers - -#### devinProvider - -Manages Devin session state and provides session information. - -```typescript -const state = await runtime.getState(); -const devinState = state.devin; - -// Access session details -console.log(devinState.sessionId); -console.log(devinState.status); -console.log(devinState.url); -``` - -## Testing - -Run the test suite: - -```bash -pnpm test -``` - -## API Documentation - -For detailed API documentation, visit: -- [Devin API Integration Guide](https://docs.devin.ai/tutorials/api-integration) -- [External API Reference](https://docs.devin.ai/external-api/) - -## License - -MIT diff --git a/packages/plugin-devin/__tests__/devinProvider.test.ts b/packages/plugin-devin/__tests__/devinProvider.test.ts deleted file mode 100644 index 4d68766b9112e..0000000000000 --- a/packages/plugin-devin/__tests__/devinProvider.test.ts +++ /dev/null @@ -1,135 +0,0 @@ -import { describe, expect, it, vi, beforeEach } from "vitest"; -import { devinProvider } from "../src/providers/devinProvider"; -import type { IAgentRuntime, Memory, State } from "@elizaos/core"; -import * as devinRequests from "../src/providers/devinRequests"; -import { - mockSuccessfulSession, - mockApiError, - mockUnauthorizedError, - setupDevinApiMocks, - setupFailedDevinApiMocks, - setupNullDevinApiMocks, -} from "./mocks/devinApi"; - -describe("devinProvider", () => { - let runtime: IAgentRuntime; - let message: Memory; - - beforeEach(() => { - runtime = { - getSetting: vi.fn(), - agentId: "00000000-0000-0000-0000-000000000001", - } as unknown as IAgentRuntime; - - message = { - userId: "00000000-0000-0000-0000-000000000002", - content: { text: "test message" }, - roomId: "00000000-0000-0000-0000-000000000003", - agentId: "00000000-0000-0000-0000-000000000001", - }; - - vi.clearAllMocks(); - }); - - it("should handle missing API token", async () => { - (runtime.getSetting as any).mockReturnValue(undefined); - - const result = await devinProvider.get(runtime, message); - - expect(result).toEqual({ - error: "No Devin API token found", - lastUpdate: expect.any(Number), - }); - }); - - it("should return session details if sessionId is set", async () => { - (runtime.getSetting as any).mockReturnValue("test-token"); - const mocks = setupDevinApiMocks(); - Object.entries(mocks).forEach(([key, mock]) => { - vi.spyOn(devinRequests, key as keyof typeof mocks).mockImplementation(mock); - }); - - const state = { - devin: { - sessionId: "test-session-id", - }, - bio: "", - lore: "", - messageDirections: "", - postDirections: "", - recentMessages: [], - recentMessageState: {}, - userStates: {}, - character: { - name: "test", - settings: {}, - templates: {}, - }, - } as unknown as State; - - const result = await devinProvider.get(runtime, message, state); - - expect(result).toEqual({ - sessionId: mockSuccessfulSession.session_id, - status: mockSuccessfulSession.status_enum, - url: mockSuccessfulSession.url, - structured_output: mockSuccessfulSession.structured_output, - lastUpdate: expect.any(Number), - }); - expect(devinRequests.getSessionDetails).toHaveBeenCalledWith(runtime, "test-session-id"); - }); - - it("should handle session details fetch error", async () => { - (runtime.getSetting as any).mockReturnValue("test-token"); - vi.spyOn(devinRequests, "getSessionDetails").mockRejectedValue(new Error("API Error")); - - const state = { - devin: { - sessionId: "test-session-id", - }, - bio: "", - lore: "", - messageDirections: "", - postDirections: "", - recentMessages: [], - recentMessageState: {}, - userStates: {}, - character: { - name: "test", - settings: {}, - templates: {}, - }, - } as unknown as State; - - const result = await devinProvider.get(runtime, message, state); - - expect(result).toEqual({ - error: "Failed to fetch session details", - lastUpdate: expect.any(Number), - sessionId: "test-session-id", - }); - }); - - it("should return empty state when no session exists", async () => { - (runtime.getSetting as any).mockReturnValue("test-token"); - - const result = await devinProvider.get(runtime, message); - - expect(result).toEqual({ - lastUpdate: expect.any(Number), - }); - }); - - it("should handle unexpected errors", async () => { - (runtime.getSetting as any).mockImplementation(() => { - throw new Error("Unexpected error"); - }); - - const result = await devinProvider.get(runtime, message); - - expect(result).toEqual({ - error: "Internal provider error", - lastUpdate: expect.any(Number), - }); - }); -}); diff --git a/packages/plugin-devin/__tests__/mocks/devinApi.ts b/packages/plugin-devin/__tests__/mocks/devinApi.ts deleted file mode 100644 index 7100b622b1842..0000000000000 --- a/packages/plugin-devin/__tests__/mocks/devinApi.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { vi, type MockInstance } from "vitest"; -import type { DevinSession } from "../../src/providers/devinRequests"; -import type * as devinRequests from "../../src/providers/devinRequests"; - -type CreateSessionFn = typeof devinRequests.createSession; -type GetSessionDetailsFn = typeof devinRequests.getSessionDetails; -type SendMessageFn = typeof devinRequests.sendMessage; - -type DevinMocks = { - createSession: MockInstance; - getSessionDetails: MockInstance; - sendMessage: MockInstance; -}; - -function createMockFunctions(): DevinMocks { - return { - createSession: vi.fn(), - getSessionDetails: vi.fn(), - sendMessage: vi.fn(), - } as DevinMocks; -} - -export const mockSuccessfulSession: DevinSession = { - session_id: "test-session-id", - status_enum: "running", - url: "https://test.url", - structured_output: { key: "value" }, -}; - -export const mockBlockedSession: DevinSession = { - session_id: "blocked-session-id", - status_enum: "blocked", - url: "https://test.url", - structured_output: { status: "waiting_for_input" }, -}; - -export const mockStoppedSession: DevinSession = { - session_id: "stopped-session-id", - status_enum: "stopped", - url: "https://test.url", - structured_output: { result: "completed" }, -}; - -export const mockApiError = new Error("API Error") as Error & { status?: number }; -mockApiError.status = 500; - -export const mockUnauthorizedError = new Error("Unauthorized") as Error & { status?: number }; -mockUnauthorizedError.status = 401; - -export function setupDevinApiMocks(): DevinMocks { - const mocks = createMockFunctions(); - mocks.createSession.mockResolvedValue(mockSuccessfulSession); - mocks.getSessionDetails.mockResolvedValue(mockSuccessfulSession); - mocks.sendMessage.mockResolvedValue(undefined); - return mocks; -} - -export function setupFailedDevinApiMocks(): DevinMocks { - const mocks = createMockFunctions(); - mocks.createSession.mockRejectedValue(mockApiError); - mocks.getSessionDetails.mockRejectedValue(mockApiError); - mocks.sendMessage.mockRejectedValue(mockApiError); - return mocks; -} - -export function setupUnauthorizedDevinApiMocks(): DevinMocks { - const mocks = createMockFunctions(); - mocks.createSession.mockRejectedValue(mockUnauthorizedError); - mocks.getSessionDetails.mockRejectedValue(mockUnauthorizedError); - mocks.sendMessage.mockRejectedValue(mockUnauthorizedError); - return mocks; -} - -export function setupNullDevinApiMocks(): DevinMocks { - const mocks = createMockFunctions(); - mocks.createSession.mockResolvedValue({ - session_id: "", - url: "", - status_enum: "stopped", - }); - mocks.getSessionDetails.mockResolvedValue({ - session_id: "", - url: "", - status_enum: "stopped", - }); - mocks.sendMessage.mockResolvedValue(undefined); - return mocks; -} diff --git a/packages/plugin-devin/__tests__/startSessionAction.test.ts b/packages/plugin-devin/__tests__/startSessionAction.test.ts deleted file mode 100644 index c26b671f53bc8..0000000000000 --- a/packages/plugin-devin/__tests__/startSessionAction.test.ts +++ /dev/null @@ -1,117 +0,0 @@ -import { describe, expect, it, vi } from "vitest"; -import { startSessionAction } from "../src/actions/startSession"; -import type { IAgentRuntime, Memory, State, HandlerCallback } from "@elizaos/core"; -import * as devinRequests from "../src/providers/devinRequests"; -import { - mockSuccessfulSession, - mockApiError, - setupDevinApiMocks, - setupFailedDevinApiMocks, -} from "./mocks/devinApi"; - -describe("startSessionAction", () => { - let runtime: IAgentRuntime; - let message: Memory; - let callback: HandlerCallback; - - beforeEach(() => { - runtime = { - getSetting: vi.fn(), - agentId: "00000000-0000-0000-0000-000000000001", - } as unknown as IAgentRuntime; - - message = { - userId: "00000000-0000-0000-0000-000000000002", - content: { text: "Help me with my code" }, - roomId: "00000000-0000-0000-0000-000000000003", - agentId: "00000000-0000-0000-0000-000000000001", - }; - - callback = vi.fn(); - vi.clearAllMocks(); - }); - - describe("Action Structure", () => { - it("should have required action properties", () => { - expect(startSessionAction).toHaveProperty("name"); - expect(startSessionAction).toHaveProperty("description"); - expect(startSessionAction).toHaveProperty("examples"); - expect(startSessionAction).toHaveProperty("similes"); - expect(startSessionAction).toHaveProperty("handler"); - expect(startSessionAction).toHaveProperty("validate"); - expect(Array.isArray(startSessionAction.examples)).toBe(true); - expect(Array.isArray(startSessionAction.similes)).toBe(true); - }); - - it("should have valid example structure", () => { - startSessionAction.examples.forEach((example) => { - example.forEach((message) => { - expect(message).toHaveProperty("user"); - expect(message).toHaveProperty("content"); - expect(message.content).toHaveProperty("text"); - }); - }); - }); - - it("should have unique action name", () => { - expect(startSessionAction.name).toBe("START_DEVIN_SESSION"); - }); - }); - - describe("Handler Behavior", () => { - it("should create a session with valid prompt", async () => { - (runtime.getSetting as any).mockReturnValue("test-token"); - const mocks = setupDevinApiMocks(); - Object.entries(mocks).forEach(([key, mock]) => { - vi.spyOn(devinRequests, key as keyof typeof mocks).mockImplementation(mock); - }); - - await startSessionAction.handler(runtime, message, {} as State, {}, callback); - - expect(callback).toHaveBeenCalledWith( - { - text: expect.stringContaining(mockSuccessfulSession.session_id), - action: "START_SESSION", - }, - [] - ); - expect(devinRequests.createSession).toHaveBeenCalledWith(runtime, message.content.text); - }); - - it("should handle missing prompt gracefully", async () => { - message.content.text = ""; - await startSessionAction.handler(runtime, message, {} as State, {}, callback); - - expect(callback).toHaveBeenCalledWith( - { - text: "No prompt provided for session creation", - }, - [] - ); - }); - - it("should handle API errors", async () => { - (runtime.getSetting as any).mockReturnValue("test-token"); - vi.spyOn(devinRequests, "createSession").mockRejectedValue(new Error("API Error")); - - await startSessionAction.handler(runtime, message, {} as State, {}, callback); - - expect(callback).toHaveBeenCalledWith( - { - text: "Failed to create Devin session: API Error", - error: "API Error", - }, - [] - ); - }); - - it("should validate API token presence", async () => { - const isValid = await startSessionAction.validate(runtime, message); - expect(isValid).toBe(false); - - (runtime.getSetting as any).mockReturnValue("test-token"); - const isValidWithToken = await startSessionAction.validate(runtime, message); - expect(isValidWithToken).toBe(true); - }); - }); -}); diff --git a/packages/plugin-devin/biome.json b/packages/plugin-devin/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-devin/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-devin/package.json b/packages/plugin-devin/package.json deleted file mode 100644 index a55bec6d553d3..0000000000000 --- a/packages/plugin-devin/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "@elizaos/plugin-devin", - "description": "Devin API integration plugin for Eliza", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "axios": "^1.0.0" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "tsup": "8.3.5", - "vitest": "^3.0.0" - }, - "scripts": { - "build": "tsup src/index.ts --format esm --dts --tsconfig ./tsconfig.json", - "dev": "tsup src/index.ts --format esm --dts --watch --tsconfig ./tsconfig.json", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - } -} diff --git a/packages/plugin-devin/src/actions/startSession.ts b/packages/plugin-devin/src/actions/startSession.ts deleted file mode 100644 index c0d1d67bde762..0000000000000 --- a/packages/plugin-devin/src/actions/startSession.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { - type Action, - type IAgentRuntime, - type Memory, - type HandlerCallback, - type State, - elizaLogger, -} from "@elizaos/core"; -import { createSession } from "../providers/devinRequests"; - -export const startSessionAction: Action = { - name: "START_DEVIN_SESSION", - description: "Creates a new Devin session and returns session info", - validate: async (runtime: IAgentRuntime, _message: Memory) => { - return !!runtime.getSetting("DEVIN_API_TOKEN"); - }, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options?: Record, - callback?: HandlerCallback - ) => { - try { - if (!callback) { - elizaLogger.error("No callback provided for startSessionAction"); - return; - } - - const prompt = message.content.text; - if (!prompt) { - callback({ text: "No prompt provided for session creation" }, []); - return; - } - - const sessionInfo = await createSession(runtime, prompt); - callback( - { - text: `New Devin session created successfully: -Session ID: ${sessionInfo.session_id} -Status: ${sessionInfo.status_enum} -URL: ${sessionInfo.url}`, - action: "START_SESSION", - }, - [] - ); - } catch (error) { - elizaLogger.error("Error creating Devin session:", error); - if (!callback) { - return; - } - const errorMessage = error instanceof Error ? error.message : "Unknown error"; - callback( - { - text: `Failed to create Devin session: ${errorMessage}`, - error: errorMessage, - }, - [] - ); - } - }, - examples: [ - [ - { - user: "{{user1}}", - content: { text: "Start a new Devin session with prompt: Help me with my code" }, - }, - { - user: "{{agentName}}", - content: { - text: "New Devin session created successfully:\nSession ID: abc123\nStatus: running\nURL: https://app.devin.ai/sessions/abc123", - action: "START_SESSION" - }, - }, - ], - ], - similes: ["create devin session", "start devin session", "begin devin session"], -}; diff --git a/packages/plugin-devin/src/environment.ts b/packages/plugin-devin/src/environment.ts deleted file mode 100644 index 18889b5944d66..0000000000000 --- a/packages/plugin-devin/src/environment.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { z } from "zod"; -import type { IAgentRuntime } from "@elizaos/core"; - -export const devinEnvSchema = z.object({ - DEVIN_API_TOKEN: z.string().min(1, "Devin API token is required"), -}); - -export type DevinConfig = z.infer; - -export async function validateDevinConfig( - runtime: IAgentRuntime -): Promise { - try { - const config = { - DEVIN_API_TOKEN: - runtime.getSetting("DEVIN_API_TOKEN") || - process.env.DEVIN_API_TOKEN, - }; - - return devinEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `Devin configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } -} diff --git a/packages/plugin-devin/src/index.ts b/packages/plugin-devin/src/index.ts deleted file mode 100644 index b98a9cd9b1900..0000000000000 --- a/packages/plugin-devin/src/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { startSessionAction } from "./actions/startSession"; -import { devinProvider } from "./providers/devinProvider"; -import { validateDevinConfig } from "./environment"; - -export const devinPlugin: Plugin = { - name: "devinPlugin", - description: "Integrates Devin API with Eliza for task automation and session management", - actions: [startSessionAction], - providers: [devinProvider], -}; diff --git a/packages/plugin-devin/src/providers/devinProvider.ts b/packages/plugin-devin/src/providers/devinProvider.ts deleted file mode 100644 index 86f78730f69b6..0000000000000 --- a/packages/plugin-devin/src/providers/devinProvider.ts +++ /dev/null @@ -1,73 +0,0 @@ -import { - type Provider, - type IAgentRuntime, - type Memory, - type State, - elizaLogger, -} from "@elizaos/core"; -import { createSession, getSessionDetails, sendMessage } from "./devinRequests"; - -/** - * Interface representing the state of a Devin session in the provider - */ -export interface DevinState { - sessionId?: string; - status?: "running" | "blocked" | "stopped"; - lastUpdate?: number; - error?: string; - structured_output?: Record; - url?: string; -} - -/** - * Provider for interacting with the Devin API - * Manages session state and provides methods for creating sessions and sending messages - */ -export const devinProvider: Provider = { - get: async (runtime: IAgentRuntime, _message: Memory, state?: State) => { - try { - const API_KEY = runtime.getSetting("DEVIN_API_TOKEN"); - if (!API_KEY) { - elizaLogger.error("No Devin API token found"); - return { - error: "No Devin API token found", - lastUpdate: Date.now(), - }; - } - - const devinState = (state?.devin || {}) as DevinState; - - // If we have an active session, get its status - if (devinState.sessionId) { - try { - const sessionDetails = await getSessionDetails(runtime, devinState.sessionId); - return { - sessionId: sessionDetails.session_id, - status: sessionDetails.status_enum, - url: sessionDetails.url, - lastUpdate: Date.now(), - structured_output: sessionDetails.structured_output, - }; - } catch (error) { - elizaLogger.error("Error fetching session details:", error); - return { - error: "Failed to fetch session details", - lastUpdate: Date.now(), - sessionId: devinState.sessionId, // Keep the session ID for reference - }; - } - } - - // No active session - return { - lastUpdate: Date.now(), - }; - } catch (error) { - elizaLogger.error("Error in devinProvider:", error); - return { - error: "Internal provider error", - lastUpdate: Date.now(), - }; - } - }, -}; diff --git a/packages/plugin-devin/src/providers/devinRequests.ts b/packages/plugin-devin/src/providers/devinRequests.ts deleted file mode 100644 index 8fd4c4a980b52..0000000000000 --- a/packages/plugin-devin/src/providers/devinRequests.ts +++ /dev/null @@ -1,175 +0,0 @@ -import { type IAgentRuntime, elizaLogger } from "@elizaos/core"; -import fetch from "node-fetch"; - -const API_BASE = "https://api.devin.ai/v1"; -const MIN_REQUEST_INTERVAL = 1000; // 1 second for rate limiting -const MAX_RETRIES = 3; -const INITIAL_BACKOFF = 1000; // 1 second initial backoff - -let lastRequestTime = 0; - -/** - * Interface representing a Devin session - */ -export interface DevinSession { - session_id: string; - url: string; - status_enum: "running" | "blocked" | "stopped"; - structured_output?: Record; -} - -/** - * Interface representing an error response from the Devin API - */ - -// interface DevinError { -// error: string; -// message: string; -// status: number; -// } - -/** - * Rate limiting function to prevent API abuse - * Ensures at least MIN_REQUEST_INTERVAL milliseconds between requests - */ -async function rateLimit() { - const now = Date.now(); - const timeSinceLastRequest = now - lastRequestTime; - if (timeSinceLastRequest < MIN_REQUEST_INTERVAL) { - await new Promise(resolve => setTimeout(resolve, MIN_REQUEST_INTERVAL - timeSinceLastRequest)); - } - lastRequestTime = Date.now(); -} - -/** - * Helper function to implement exponential backoff for API requests - * @param fn The async function to retry - * @param retries Maximum number of retries - * @param backoff Initial backoff in milliseconds - * @returns The result of the async function - * @throws The last error encountered - */ -async function withRetry( - fn: () => Promise, - retries = MAX_RETRIES, - backoff = INITIAL_BACKOFF -): Promise { - try { - return await fn(); - } catch (error) { - if (retries === 0) throw error; - - await new Promise(resolve => setTimeout(resolve, backoff)); - return withRetry(fn, retries - 1, backoff * 2); - } -} -// Implementation moved to the top of the file - -/** - * Creates a new Devin session with the given prompt - * @param runtime The Eliza runtime instance - * @param prompt The prompt to start the session with - * @returns The created session details - * @throws {Error} If API token is missing or API request fails - */ -export async function createSession(runtime: IAgentRuntime, prompt: string): Promise { - const API_KEY = runtime.getSetting("DEVIN_API_TOKEN"); - if (!API_KEY) { - const error = new Error("No Devin API token found") as Error & { status?: number }; - error.status = 401; - throw error; - } - - await rateLimit(); - return withRetry(async () => { - const response = await fetch(`${API_BASE}/sessions`, { - method: "POST", - headers: { - "Authorization": `Bearer ${API_KEY}`, - "Content-Type": "application/json", - }, - body: JSON.stringify({ prompt }), - }); - - if (!response.ok) { - const error = await response.text(); - elizaLogger.error("Failed to create Devin session:", error); - throw new Error(`Failed to create session: ${error}`); - } - - const data = await response.json(); - return data as DevinSession; - }); -} - -/** - * Retrieves details for an existing Devin session - * @param runtime The Eliza runtime instance - * @param sessionId The ID of the session to retrieve - * @returns The session details - * @throws {Error} If API token is missing or API request fails - */ -export async function getSessionDetails(runtime: IAgentRuntime, sessionId: string): Promise { - const API_KEY = runtime.getSetting("DEVIN_API_TOKEN"); - if (!API_KEY) { - const error = new Error("No Devin API token found") as Error & { status?: number }; - error.status = 401; - throw error; - } - - await rateLimit(); - return withRetry(async () => { - const response = await fetch(`${API_BASE}/session/${sessionId}`, { - headers: { - "Authorization": `Bearer ${API_KEY}`, - }, - }); - - if (!response.ok) { - const error = await response.text(); - elizaLogger.error("Failed to get session details:", error); - const apiError = new Error(`Failed to get session details: ${error}`) as Error & { status?: number }; - apiError.status = response.status; - throw apiError; - } - - const data = await response.json(); - return data as DevinSession; - }); -} - -/** - * Sends a message to an existing Devin session - * @param runtime The Eliza runtime instance - * @param sessionId The ID of the session to send the message to - * @param message The message content to send - * @throws {Error} If API token is missing or API request fails - */ -export async function sendMessage(runtime: IAgentRuntime, sessionId: string, message: string): Promise { - const API_KEY = runtime.getSetting("DEVIN_API_TOKEN"); - if (!API_KEY) { - const error = new Error("No Devin API token found") as Error & { status?: number }; - error.status = 401; - throw error; - } - - await rateLimit(); - return withRetry(async () => { - const response = await fetch(`${API_BASE}/session/${sessionId}/message`, { - method: "POST", - headers: { - "Authorization": `Bearer ${API_KEY}`, - "Content-Type": "application/json", - }, - body: JSON.stringify({ message }), - }); - - if (!response.ok) { - const error = await response.text(); - elizaLogger.error("Failed to send message:", error); - const apiError = new Error(`Failed to send message: ${error}`) as Error & { status?: number }; - apiError.status = response.status; - throw apiError; - } - }); -} diff --git a/packages/plugin-devin/tsconfig.json b/packages/plugin-devin/tsconfig.json deleted file mode 100644 index 90d76d7e8deff..0000000000000 --- a/packages/plugin-devin/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"] -} diff --git a/packages/plugin-devin/tsup.config.ts b/packages/plugin-devin/tsup.config.ts deleted file mode 100644 index 7e6730b02b529..0000000000000 --- a/packages/plugin-devin/tsup.config.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], - external: [ - "dotenv", - "fs", - "path", - "@elizaos/core", - "node-fetch", - "zod", - "https", - "http", - ], -}); diff --git a/packages/plugin-devin/vitest.config.ts b/packages/plugin-devin/vitest.config.ts deleted file mode 100644 index ecd5b116eec0b..0000000000000 --- a/packages/plugin-devin/vitest.config.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { defineConfig } from "vitest/config"; -import path from "path"; - -export default defineConfig({ - test: { - environment: "node", - globals: true, - testTimeout: 120000, - }, - resolve: { - alias: { - "@": path.resolve(__dirname, "./src"), - }, - }, -}); diff --git a/packages/plugin-dexscreener/.npmignore b/packages/plugin-dexscreener/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-dexscreener/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-dexscreener/README.md b/packages/plugin-dexscreener/README.md deleted file mode 100644 index 7b2c10566368e..0000000000000 --- a/packages/plugin-dexscreener/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# @elizaos/plugin-dexscreener - -A plugin for accessing DexScreener's token data and price information through your Eliza agent. - -## Installation - -```bash -pnpm add @elizaos/plugin-dexscreener -``` - -## Usage - -Add the plugin to your character configuration: - -```typescript -import { dexScreenerPlugin } from "@elizaos/plugin-dexscreener"; - -const character = { - plugins: [dexScreenerPlugin] -}; -``` - -## Features - -### Token Price Checking -Query token prices using addresses or symbols: -```plaintext -"What's the price of ETH?" -"Check price of 0x1234..." -"How much is $BTC worth?" -``` - -### Token Trends -View latest and trending tokens: -```plaintext -"Show me the latest tokens" -"What are the new boosted tokens?" -"Show me the top boosted tokens" -``` - -## Available Actions - -### GET_TOKEN_PRICE -Fetches current token price and market information. -- Aliases: `FETCH_TOKEN_PRICE`, `CHECK_TOKEN_PRICE`, `TOKEN_PRICE` -- Supports ETH addresses and token symbols (with or without $ prefix) -- Returns price, liquidity, and 24h volume information - -### GET_LATEST_TOKENS -Retrieves the most recently listed tokens. -- Aliases: `FETCH_NEW_TOKENS`, `CHECK_RECENT_TOKENS`, `LIST_NEW_TOKENS` - -### GET_LATEST_BOOSTED_TOKENS -Fetches the most recently boosted tokens. -- Aliases: `FETCH_NEW_BOOSTED_TOKENS`, `CHECK_RECENT_BOOSTED_TOKENS` - -### GET_TOP_BOOSTED_TOKENS -Shows tokens with the most active boosts. -- Aliases: `FETCH_MOST_BOOSTED_TOKENS`, `CHECK_HIGHEST_BOOSTED_TOKENS` - -## Providers - -### TokenPriceProvider -Provides token price data from DexScreener API: -- Current price in USD -- Liquidity information -- 24h volume data -- Automatic best pair selection by liquidity - -## Evaluators - -### TokenPriceEvaluator -Evaluates messages for token price requests: -- Detects price-related keywords -- Identifies token addresses and symbols -- Supports multiple token identifier formats: - - Ethereum addresses - - Symbols with $ or # prefix - - Natural language patterns ("price of TOKEN") diff --git a/packages/plugin-dexscreener/biome.json b/packages/plugin-dexscreener/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-dexscreener/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-dexscreener/package.json b/packages/plugin-dexscreener/package.json deleted file mode 100644 index 10ee74aa36d4a..0000000000000 --- a/packages/plugin-dexscreener/package.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "@elizaos/plugin-dexscreener", - "version": "0.25.6-alpha.1", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4" - } -} diff --git a/packages/plugin-dexscreener/src/actions/index.ts b/packages/plugin-dexscreener/src/actions/index.ts deleted file mode 100644 index 70232198dfc3e..0000000000000 --- a/packages/plugin-dexscreener/src/actions/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from "./tokenAction"; -export * from "./trendsAction"; diff --git a/packages/plugin-dexscreener/src/actions/tokenAction.ts b/packages/plugin-dexscreener/src/actions/tokenAction.ts deleted file mode 100644 index 65a3a93dcc90e..0000000000000 --- a/packages/plugin-dexscreener/src/actions/tokenAction.ts +++ /dev/null @@ -1,189 +0,0 @@ -import type { Action, IAgentRuntime, Memory, State, HandlerCallback } from "@elizaos/core"; -import { TokenPriceProvider } from "../providers/tokenProvider"; - -export const priceTemplate = `Determine if this is a token price request. If it is one of the specified situations, perform the corresponding action: - -Situation 1: "Get token price" -- Message contains: words like "price", "value", "cost", "worth" AND a token symbol/address -- Example: "What's the price of ETH?" or "How much is BTC worth?" -- Action: Get the current price of the token - -Previous conversation for context: -{{conversation}} - -You are replying to: {{message}} -`; - -export class TokenPriceAction implements Action { - name = "GET_TOKEN_PRICE"; - similes = ["FETCH_TOKEN_PRICE", "CHECK_TOKEN_PRICE", "TOKEN_PRICE"]; - description = "Fetches and returns token price information"; - suppressInitialMessage = true; - template = priceTemplate; - - async validate(_runtime: IAgentRuntime, message: Memory): Promise { - const content = typeof message.content === 'string' - ? message.content - : message.content?.text; - - if (!content) return false; - - const hasPriceKeyword = /\b(price|value|worth|cost)\b/i.test(content); - const hasToken = ( - /0x[a-fA-F0-9]{40}/.test(content) || - /[$#]?[a-zA-Z0-9]+/i.test(content) - ); - - return hasPriceKeyword && hasToken; - } - - async handler( - runtime: IAgentRuntime, - message: Memory, - state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise { - try { - // Get the provider - const provider = runtime.providers.find(p => p instanceof TokenPriceProvider); - if (!provider) { - throw new Error("Token price provider not found"); - } - - // Get price data - console.log("Fetching price data..."); - const priceData = await provider.get(runtime, message, state); - console.log("Received price data:", priceData); - - if (priceData.includes("Error")) { - throw new Error(priceData); - } - - // If we have a callback, use it to send the response - if (callback) { - await callback({ - text: priceData, - action: this.name - }); - } - - // Set the response in state to prevent further processing - if (state) { - state.responseData = { - text: priceData, - action: this.name - }; - } - - return true; - - } catch (error) { - console.error("Error in price action handler:", error); - - if (callback) { - await callback({ - text: `Sorry, I couldn't fetch the token price: ${error.message}`, - action: this.name - }); - } - - return false; - } - } - - examples = [ - [ - { - user: "{{user}}", - content: { - text: "check price of eth" - } - }, - { - user: "{{system}}", - content: { - text: "The current price of Ethereum (ETH) is $2,345.67 with a 24h trading volume of $9.87B. The liquidity pool holds $1.23B, and the price has changed +5.43% in the last 24 hours.", - action: "GET_TOKEN_PRICE" - } - } - ], - [ - { - user: "{{user}}", - content: { - text: "How much is BTC worth right now?" - } - }, - { - user: "{{system}}", - content: { - text: "Bitcoin (BTC) is currently trading at $42,567.89. There's $15.23B in liquidity, with a 24h volume of $25.87B. The price is up 2.15% in the last 24 hours.", - action: "GET_TOKEN_PRICE" - } - } - ], - [ - { - user: "{{user}}", - content: { - text: "what's the current value of $bnb" - } - }, - { - user: "{{system}}", - content: { - text: "Binance Coin (BNB) is priced at $345.67 with $5.23B in liquidity. The trading volume over the last 24h is $1.87B, and the price has decreased by 1.23% during this period.", - action: "GET_TOKEN_PRICE" - } - } - ], - [ - { - user: "{{user}}", - content: { - text: "can you tell me the price for USDT?" - } - }, - { - user: "{{system}}", - content: { - text: "Tether (USDT) is maintaining its peg at $1.00 with minimal change (+0.01%). The token has $25.23B in liquidity and has seen $45.87B in trading volume over the past 24 hours.", - action: "GET_TOKEN_PRICE" - } - } - ], - [ - { - user: "{{user}}", - content: { - text: "show me the cost of #SOL" - } - }, - { - user: "{{system}}", - content: { - text: "Solana (SOL) is trading at $123.45, showing strong momentum with an 8.75% increase in the last 24 hours. The liquidity stands at $3.23B with a 24h trading volume of $987.54M.", - action: "GET_TOKEN_PRICE" - } - } - ], - [ - { - user: "{{user}}", - content: { - text: "0x7D1AfA7B718fb893dB30A3aBc0Cfc608AaCfeBB0 price please" - } - }, - { - user: "{{system}}", - content: { - text: "The price of Polygon (MATIC) is currently $1.23, up 3.45% in the past 24 hours. The token has $2.23B in liquidity and has seen $567.54M in trading volume today.", - action: "GET_TOKEN_PRICE" - } - } - ] - ]; -} - -export const tokenPriceAction = new TokenPriceAction(); \ No newline at end of file diff --git a/packages/plugin-dexscreener/src/actions/trendsAction.ts b/packages/plugin-dexscreener/src/actions/trendsAction.ts deleted file mode 100644 index de1d51159ab63..0000000000000 --- a/packages/plugin-dexscreener/src/actions/trendsAction.ts +++ /dev/null @@ -1,383 +0,0 @@ -import { - type Action, - type IAgentRuntime, - type Memory, - type State, - type HandlerCallback, - elizaLogger, - getEmbeddingZeroVector, -} from "@elizaos/core"; - -interface TokenProfile { - url: string; - description?: string; - chainId: string; - tokenAddress: string; -} - -const createTokenMemory = async ( - runtime: IAgentRuntime, - _message: Memory, - formattedOutput: string -) => { - const memory: Memory = { - userId: _message.userId, - agentId: _message.agentId, - roomId: _message.roomId, - content: { text: formattedOutput }, - createdAt: Date.now(), - embedding: getEmbeddingZeroVector(), - }; - await runtime.messageManager.createMemory(memory); -}; - -export const latestTokensTemplate = `Determine if this is a request for latest tokens. If it is one of the specified situations, perform the corresponding action: - -Situation 1: "Get latest tokens" -- Message contains: words like "latest", "new", "recent" AND "tokens" -- Example: "Show me the latest tokens" or "What are the new tokens?" -- Action: Get the most recent tokens listed - -Previous conversation for context: -{{conversation}} - -You are replying to: {{message}} -`; - -export class LatestTokensAction implements Action { - name = "GET_LATEST_TOKENS"; - similes = ["FETCH_NEW_TOKENS", "CHECK_RECENT_TOKENS", "LIST_NEW_TOKENS"]; - description = "Get the latest tokens from DexScreener API"; - suppressInitialMessage = true; - template = latestTokensTemplate; - - async validate(_runtime: IAgentRuntime, message: Memory): Promise { - const content = - typeof message.content === "string" - ? message.content - : message.content?.text; - - if (!content) return false; - - const hasLatestKeyword = /\b(latest|new|recent)\b/i.test(content); - const hasTokensKeyword = /\b(tokens?|coins?|crypto)\b/i.test(content); - - return hasLatestKeyword && hasTokensKeyword; - } - - async handler( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise { - elizaLogger.log("Starting GET_LATEST_TOKENS handler..."); - - try { - const response = await fetch( - "https://api.dexscreener.com/token-profiles/latest/v1", - { - method: "GET", - headers: { - accept: "application/json", - }, - } - ); - - if (!response.ok) { - throw new Error(`HTTP error! status: ${response.status}`); - } - - const tokens: TokenProfile[] = await response.json(); - - const formattedOutput = tokens - .map((token) => { - const description = - token.description || "No description available"; - return `Chain: ${token.chainId}\nToken Address: ${token.tokenAddress}\nURL: ${token.url}\nDescription: ${description}\n\n`; - }) - .join(""); - - await createTokenMemory(runtime, message, formattedOutput); - - if (callback) { - await callback({ - text: formattedOutput, - action: this.name, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error fetching latest tokens:", error); - - if (callback) { - await callback({ - text: `Failed to fetch latest tokens: ${error.message}`, - action: this.name, - }); - } - - return false; - } - } - - examples = [ - [ - { - user: "{{user}}", - content: { - text: "show me the latest tokens", - }, - }, - { - user: "{{system}}", - content: { - text: "Here are the latest tokens added to DexScreener...", - action: "GET_LATEST_TOKENS", - }, - }, - ], - ]; -} - -export const latestBoostedTemplate = `Determine if this is a request for latest boosted tokens. If it is one of the specified situations, perform the corresponding action: - -Situation 1: "Get latest boosted tokens" -- Message contains: words like "latest", "new", "recent" AND "boosted tokens" -- Example: "Show me the latest boosted tokens" or "What are the new promoted tokens?" -- Action: Get the most recent boosted tokens - -Previous conversation for context: -{{conversation}} - -You are replying to: {{message}} -`; - -export class LatestBoostedTokensAction implements Action { - name = "GET_LATEST_BOOSTED_TOKENS"; - similes = [ - "FETCH_NEW_BOOSTED_TOKENS", - "CHECK_RECENT_BOOSTED_TOKENS", - "LIST_NEW_BOOSTED_TOKENS", - ]; - description = "Get the latest boosted tokens from DexScreener API"; - suppressInitialMessage = true; - template = latestBoostedTemplate; - - async validate(_runtime: IAgentRuntime, message: Memory): Promise { - const content = - typeof message.content === "string" - ? message.content - : message.content?.text; - - if (!content) return false; - - const hasLatestKeyword = /\b(latest|new|recent)\b/i.test(content); - const hasBoostedKeyword = /\b(boosted|promoted|featured)\b/i.test( - content - ); - const hasTokensKeyword = /\b(tokens?|coins?|crypto)\b/i.test(content); - - return hasLatestKeyword && (hasBoostedKeyword || hasTokensKeyword); - } - - async handler( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise { - elizaLogger.log("Starting GET_LATEST_BOOSTED_TOKENS handler..."); - - try { - const response = await fetch( - "https://api.dexscreener.com/token-boosts/latest/v1", - { - method: "GET", - headers: { - accept: "application/json", - }, - } - ); - - if (!response.ok) { - throw new Error(`HTTP error! status: ${response.status}`); - } - - const tokens: TokenProfile[] = await response.json(); - - const formattedOutput = tokens - .map((token) => { - const description = - token.description || "No description available"; - return `Chain: ${token.chainId}\nToken Address: ${token.tokenAddress}\nURL: ${token.url}\nDescription: ${description}\n\n`; - }) - .join(""); - - await createTokenMemory(runtime, message, formattedOutput); - - if (callback) { - await callback({ - text: formattedOutput, - action: this.name, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error fetching latest boosted tokens:", error); - - if (callback) { - await callback({ - text: `Failed to fetch latest boosted tokens: ${error.message}`, - action: this.name, - }); - } - - return false; - } - } - - examples = [ - [ - { - user: "{{user}}", - content: { - text: "show me the latest boosted tokens", - }, - }, - { - user: "{{system}}", - content: { - text: "Here are the latest boosted tokens on DexScreener...", - action: "GET_LATEST_BOOSTED_TOKENS", - }, - }, - ], - ]; -} - -export const topBoostedTemplate = `Determine if this is a request for top boosted tokens. If it is one of the specified situations, perform the corresponding action: - -Situation 1: "Get top boosted tokens" -- Message contains: words like "top", "best", "most" AND "boosted tokens" -- Example: "Show me the top boosted tokens" or "What are the most promoted tokens?" -- Action: Get the tokens with most active boosts - -Previous conversation for context: -{{conversation}} - -You are replying to: {{message}} -`; - -export class TopBoostedTokensAction implements Action { - name = "GET_TOP_BOOSTED_TOKENS"; - similes = [ - "FETCH_MOST_BOOSTED_TOKENS", - "CHECK_HIGHEST_BOOSTED_TOKENS", - "LIST_TOP_BOOSTED_TOKENS", - ]; - description = "Get tokens with most active boosts from DexScreener API"; - suppressInitialMessage = true; - template = topBoostedTemplate; - - async validate(_runtime: IAgentRuntime, message: Memory): Promise { - const content = - typeof message.content === "string" - ? message.content - : message.content?.text; - - if (!content) return false; - - const hasTopKeyword = /\b(top|best|most)\b/i.test(content); - const hasBoostedKeyword = /\b(boosted|promoted|featured)\b/i.test( - content - ); - const hasTokensKeyword = /\b(tokens?|coins?|crypto)\b/i.test(content); - - return hasTopKeyword && (hasBoostedKeyword || hasTokensKeyword); - } - - async handler( - runtime: IAgentRuntime, - message: Memory, - _state?: State, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise { - elizaLogger.log("Starting GET_TOP_BOOSTED_TOKENS handler..."); - - try { - const response = await fetch( - "https://api.dexscreener.com/token-boosts/top/v1", - { - method: "GET", - headers: { - accept: "application/json", - }, - } - ); - - if (!response.ok) { - throw new Error(`HTTP error! status: ${response.status}`); - } - - const tokens: TokenProfile[] = await response.json(); - - const formattedOutput = tokens - .map((token) => { - const description = - token.description || "No description available"; - return `Chain: ${token.chainId}\nToken Address: ${token.tokenAddress}\nURL: ${token.url}\nDescription: ${description}\n\n`; - }) - .join(""); - - await createTokenMemory(runtime, message, formattedOutput); - - if (callback) { - await callback({ - text: formattedOutput, - action: this.name, - }); - } - - return true; - } catch (error) { - elizaLogger.error("Error fetching top boosted tokens:", error); - - if (callback) { - await callback({ - text: `Failed to fetch top boosted tokens: ${error.message}`, - action: this.name, - }); - } - - return false; - } - } - - examples = [ - [ - { - user: "{{user}}", - content: { - text: "show me the top boosted tokens", - }, - }, - { - user: "{{system}}", - content: { - text: "Here are the tokens with the most active boosts on DexScreener...", - action: "GET_TOP_BOOSTED_TOKENS", - }, - }, - ], - ]; -} - -export const latestTokensAction = new LatestTokensAction(); -export const latestBoostedTokensAction = new LatestBoostedTokensAction(); -export const topBoostedTokensAction = new TopBoostedTokensAction(); diff --git a/packages/plugin-dexscreener/src/evaluators/index.ts b/packages/plugin-dexscreener/src/evaluators/index.ts deleted file mode 100644 index c915e8faf68e5..0000000000000 --- a/packages/plugin-dexscreener/src/evaluators/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./tokenEvaluator.ts"; \ No newline at end of file diff --git a/packages/plugin-dexscreener/src/evaluators/tokenEvaluator.ts b/packages/plugin-dexscreener/src/evaluators/tokenEvaluator.ts deleted file mode 100644 index cda3b1fa9bdb5..0000000000000 --- a/packages/plugin-dexscreener/src/evaluators/tokenEvaluator.ts +++ /dev/null @@ -1,78 +0,0 @@ -import type { Evaluator, IAgentRuntime, Memory, State } from "@elizaos/core"; - -export class TokenPriceEvaluator implements Evaluator { - name = "TOKEN_PRICE_EVALUATOR"; - similes = ["price", "token price", "check price"]; - description = "Evaluates messages for token price requests"; - - async validate(runtime: IAgentRuntime, message: Memory): Promise { - const content = typeof message.content === 'string' - ? message.content - : message.content?.text; - - if (!content) return false; - - // Check for price-related keywords - const hasPriceKeyword = /\b(price|value|worth|cost)\b/i.test(content); - - // Look for either: - // 1. Ethereum address - // 2. Token symbol starting with $ or # - // 3. Token symbol after "of" or "for" (case insensitive) - const hasToken = ( - /0x[a-fA-F0-9]{40}/.test(content) || // Ethereum address - /[$#][a-zA-Z]+/.test(content) || // $TOKEN or #TOKEN format - /\b(of|for)\s+[a-zA-Z0-9]+\b/i.test(content) // "price of TOKEN" format - ); - - return hasPriceKeyword && hasToken; - } - - async handler(_runtime: IAgentRuntime, _message: Memory, _state?: State): Promise { - return "GET_TOKEN_PRICE"; - } - - examples = [ - { - context: "User asking for token price with address", - messages: [ - { - user: "{{user}}", - content: { - text: "What's the price of 0x1234567890123456789012345678901234567890?", - action: "GET_TOKEN_PRICE" - } - } - ], - outcome: "GET_TOKEN_PRICE" - }, - { - context: "User checking token price with $ symbol", - messages: [ - { - user: "{{user}}", - content: { - text: "Check price of $eth", - action: "GET_TOKEN_PRICE" - } - } - ], - outcome: "GET_TOKEN_PRICE" - }, - { - context: "User checking token price with plain symbol", - messages: [ - { - user: "{{user}}", - content: { - text: "What's the value for btc", - action: "GET_TOKEN_PRICE" - } - } - ], - outcome: "GET_TOKEN_PRICE" - } - ]; -} - -export const tokenPriceEvaluator = new TokenPriceEvaluator(); \ No newline at end of file diff --git a/packages/plugin-dexscreener/src/index.ts b/packages/plugin-dexscreener/src/index.ts deleted file mode 100644 index 2a4b6f13ad9d7..0000000000000 --- a/packages/plugin-dexscreener/src/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -import type { Plugin } from "@elizaos/core" -import { TokenPriceAction } from "./actions/tokenAction" -import { TokenPriceEvaluator } from "./evaluators/tokenEvaluator" -import { TokenPriceProvider } from "./providers/tokenProvider" -import { LatestTokensAction, LatestBoostedTokensAction, TopBoostedTokensAction } from "./actions/trendsAction" - -export * as actions from "./actions" -export * as evaluators from "./evaluators" -export * as providers from "./providers" - -export const dexScreenerPlugin: Plugin = { - name: "dexscreener", - description: "Dex Screener Plugin with Token Price Action, Token Trends, Evaluators and Providers", - actions: [new TokenPriceAction(), new LatestTokensAction(), new LatestBoostedTokensAction(), new TopBoostedTokensAction()], - evaluators: [new TokenPriceEvaluator()], - providers: [new TokenPriceProvider()], -} - -export default dexScreenerPlugin diff --git a/packages/plugin-dexscreener/src/providers/index.ts b/packages/plugin-dexscreener/src/providers/index.ts deleted file mode 100644 index c1f33c3c2093c..0000000000000 --- a/packages/plugin-dexscreener/src/providers/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./tokenProvider.ts"; \ No newline at end of file diff --git a/packages/plugin-dexscreener/src/providers/tokenProvider.ts b/packages/plugin-dexscreener/src/providers/tokenProvider.ts deleted file mode 100644 index ed2211660c705..0000000000000 --- a/packages/plugin-dexscreener/src/providers/tokenProvider.ts +++ /dev/null @@ -1,132 +0,0 @@ -import type { Provider, IAgentRuntime, Memory, State } from "@elizaos/core"; - -/* -interface TokenPriceData { - baseToken: { - name: string; - symbol: string; - address: string; - decimals: number; - }; - priceUsd: string; - priceChange: { - h1: number; - h24: number; - }; - liquidityUsd: string; - volume: { - h24: number; - }; -} -*/ - -interface DexScreenerPair { - baseToken: { - name: string; - symbol: string; - address: string; - decimals: number; - }; - priceUsd: string; - liquidity?: { - usd: string; - }; - volume?: { - h24: number; - }; -} - -export class TokenPriceProvider implements Provider { - async get( - _lengthruntime: IAgentRuntime, - message: Memory, - _state?: State - ): Promise { - try { - const content = - typeof message.content === "string" - ? message.content - : message.content?.text; - - if (!content) { - throw new Error("No message content provided"); - } - - // Extract token from content - const tokenIdentifier = this.extractToken(content); - if (!tokenIdentifier) { - throw new Error("Could not identify token in message"); - } - - console.log(`Fetching price for token: ${tokenIdentifier}`); - - // Make API request - const isAddress = - /^0x[a-fA-F0-9]{40}$/.test(tokenIdentifier) || - /^[1-9A-HJ-NP-Za-km-z]{43,44}$/.test(tokenIdentifier); // validates for ethAddress and solAddress - const endpoint = isAddress - ? `https://api.dexscreener.com/latest/dex/tokens/${tokenIdentifier}` - : `https://api.dexscreener.com/latest/dex/search?q=${tokenIdentifier}`; - - const response = await fetch(endpoint); - if (!response.ok) { - throw new Error(`API request failed: ${response.statusText}`); - } - - const data = await response.json(); - if (!data.pairs || data.pairs.length === 0) { - throw new Error(`No pricing data found for ${tokenIdentifier}`); - } - - // Get best pair by liquidity - const bestPair = this.getBestPair(data.pairs); - return this.formatPriceData(bestPair); - } catch (error) { - console.error("TokenPriceProvider error:", error); - return `Error: ${error.message}`; - } - } - - private extractToken(content: string): string | null { - // Try different patterns in order of specificity - const patterns = [ - /0x[a-fA-F0-9]{40}/, // ETH address - /[$#]([a-zA-Z0-9]+)/, // $TOKEN or #TOKEN - /(?:price|value|worth|cost)\s+(?:of|for)\s+([a-zA-Z0-9]+)/i, // "price of TOKEN" - /\b(?:of|for)\s+([a-zA-Z0-9]+)\b/i, // "of TOKEN" - ]; - - for (const pattern of patterns) { - const match = content.match(pattern); - if (match) { - // Use captured group if it exists, otherwise use full match - const token = match[1] || match[0]; - // Clean up the token identifier - return token.replace(/[$#]/g, "").toLowerCase().trim(); - } - } - - return null; - } - - private getBestPair(pairs: DexScreenerPair[]): DexScreenerPair { - return pairs.reduce((best, current) => { - const bestLiquidity = Number.parseFloat(best.liquidity?.usd || "0"); - const currentLiquidity = Number.parseFloat(current.liquidity?.usd || "0"); - return currentLiquidity > bestLiquidity ? current : best; - }, pairs[0]); - } - - private formatPriceData(pair: DexScreenerPair): string { - const price = Number.parseFloat(pair.priceUsd).toFixed(6); - const liquidity = Number.parseFloat( - pair.liquidity?.usd || "0" - ).toLocaleString(); - const volume = (pair.volume?.h24 || 0).toLocaleString(); - - return ` - The price of ${pair.baseToken.symbol} is $${price} USD, with liquidity of $${liquidity} and 24h volume of $${volume}.`; - } -} - -export const tokenPriceProvider = new TokenPriceProvider(); diff --git a/packages/plugin-dexscreener/tsconfig.json b/packages/plugin-dexscreener/tsconfig.json deleted file mode 100644 index 834c4dce26957..0000000000000 --- a/packages/plugin-dexscreener/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "types": [ - "node" - ] - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/plugin-dexscreener/tsup.config.ts b/packages/plugin-dexscreener/tsup.config.ts deleted file mode 100644 index e42bf4efeae22..0000000000000 --- a/packages/plugin-dexscreener/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-di/.npmignore b/packages/plugin-di/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-di/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-di/README.md b/packages/plugin-di/README.md deleted file mode 100644 index bd6eed7f76bb6..0000000000000 --- a/packages/plugin-di/README.md +++ /dev/null @@ -1,148 +0,0 @@ -# @elizaos/plugin-di - Dependency Injection Plugin for Eliza - -This plugin provides a dependency injection system for Eliza plugins. - -## What is Dependency Injection? - -Dependency Injection is a design pattern that allows you to inject dependencies into a class or function. This pattern is useful for decoupling components and making your code more modular and testable. - -## Examples of How to build a Plugin using Dependency Injection - -Check the [example](../_examples/plugin-with-di/) folder for a simple example of how to create a plugin using Dependency Injection. - -### Where can I use Dependency Injection? - -You can use Dependency Injection in any part of your Eliza plugin, including actions, evaluators, providers, services, and clients. - -- Actions: Inject services or providers to interact with external APIs or services. [Example](../_examples/plugin-with-di/src/actions/sampleAction.ts) -- Evaluators: Inject services or providers to evaluate conditions or perform calculations. [Example](../_examples/plugin-with-di/src/evaluators/sampleEvaluator.ts) -- Providers: Inject services or providers to provide data or resources. [Example](../_examples/plugin-with-di/src/providers/sampleProvider.ts) -- Services: Inject other services to perform business logic. [Example](../_examples/plugin-with-di/src/services/sampleService.ts) -- Clients: Inject services to interact with external APIs or services. Lack of examples, but you can refer to the services example. - -## Decorators for Dependency Injection - -This plugin provides a set of decorators that you can use to inject dependencies into your classes or functions. - -### From inversify - -We use the [inversify](https://inversify.io/) library to provide the dependency injection system. -The following decorators are provided by the [inversify](https://inversify.io/) library. - -#### `@injectable` - -> Category: Class Decorator - -This decorator marks a class as injectable. This means that you can inject this class into other classes using the `@inject` decorator. - -```typescript -import { injectable } from "inversify"; - -@injectable() -class SampleClass { -} -``` - -Remember to register the class with the container before injecting it into other classes. - -```typescript -import { globalContainer } from "@elizaos/plugin-di"; - -// Register the class with the container as a singleton, this means that the class will be instantiated only once. -globalContainer.bind(SingletonClass).toSelf().inSingletonScope(); -// Register the class with the container as a request context, this means that the class will be instantiated for each request(in this case means each Character). -globalContainer.bind(CharactorContextClass).toSelf().inRequestScope(); -``` - -#### `@inject` - -> Category: Parameter Decorator - -This decorator marks a parameter as an injection target. This means that the parameter will be injected with the appropriate dependency when the class is instantiated. - -```typescript -import { injectable, inject } from "inversify"; - -@injectable() -class SampleClass { - constructor( - // Inject the SampleDependency as a public property of the class. - @inject("SampleDependency") public sampleDependency: SampleDependency - ) {} -} -``` - -### From di plugin (used for BaseInjectableAction) - -DI plugin provides abstract classes that you can extend to create Injectable actions. -And that provides the following decorators to improve the readability of the code. - -#### `@property` - -> Category: Property Decorator - -This decorator is used to define a property in an action content class which will be used to generate the action content object Schema and content description template for LLM object generation. - -```typescript -import { z } from 'zod'; -import { property } from "@elizaos/plugin-di"; - -class SampleActionContent { - @property({ - description: "Sample property description", - schema: z.string(), - }) - sampleProperty: string; -} -``` - -## Abstract Classes provided by this plugin - -This plugin provides the following abstract classes that you can extend to create Injectable classes: - -- `BaseInjectableAction` -- `BaseInjectableEvaluator` - -Note: This is optional, you can create your own classes to create injectable actions. - -### `BaseInjectableAction` - -This abstract class simplify the creation of injectable actions. -You don't need to think about the template for content generation, it will be generated automatically based on the properties of the content Class. -What you need to implement is the `execute` method. - -```typescript -import { injectable } from "inversify"; -import { BaseInjectableAction } from "@elizaos/plugin-di"; - -class SampleActionContent { - @property({ - description: "Sample property description", - schema: z.string(), - }) - property1: string; -} - -@injectable() -class SampleAction extends BaseInjectableAction { - constructor() { - super({ - /** general action constent options */ - contentClass: SampleActionContent, - }); - } - - /** - * It will be called by `handler` function when the action is triggered. - */ - async execute( - content: SampleActionContent | null, - runtime: IAgentRuntime, - message: Memory, - state: State, - callback?: HandlerCallback - ): Promise { - // Your action logic here - } -} -``` diff --git a/packages/plugin-di/biome.json b/packages/plugin-di/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-di/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-di/package.json b/packages/plugin-di/package.json deleted file mode 100644 index 4335eda6ab50a..0000000000000 --- a/packages/plugin-di/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "@elizaos/plugin-di", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "inversify": "^6.2.1", - "reflect-metadata": "^0.2.2", - "uuid": "11.0.3" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "@types/node": "^20.0.0", - "@types/uuid": "10.0.0", - "tsup": "8.3.5", - "vitest": "2.1.9" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write .", - "test": "vitest run" - } -} diff --git a/packages/plugin-di/src/actions/baseInjectableAction.ts b/packages/plugin-di/src/actions/baseInjectableAction.ts deleted file mode 100644 index 20a09ab8240c7..0000000000000 --- a/packages/plugin-di/src/actions/baseInjectableAction.ts +++ /dev/null @@ -1,235 +0,0 @@ -import { injectable, unmanaged } from "inversify"; -import type { z } from "zod"; -import { - type ActionExample, - composeContext, - elizaLogger, - generateObject, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, -} from "@elizaos/core"; -import { - type ContentClass, - createZodSchema, - loadPropertyDescriptions, -} from "../decorators"; -import type { ActionOptions, InjectableAction } from "../types"; -import { buildContentOutputTemplate } from "../templates"; - -// type ActionResult = unknown; - -/** - * Base abstract class for injectable actions - */ -@injectable() -export abstract class BaseInjectableAction implements InjectableAction { - // -------- Properties -------- - public name: string; - public similes: string[]; - public description: string; - public examples: ActionExample[][]; - public suppressInitialMessage: boolean; - - /** - * The content class for the action - */ - protected readonly contentClass: ContentClass; - /** - * Optional template for the action, if not provided, it will be generated from the content class - */ - protected readonly template: string; - /** - * Optional content schema for the action, if not provided, it will be generated from the content class - */ - protected readonly contentSchema: z.ZodSchema; - - /** - * Constructor for the base injectable action - */ - constructor(@unmanaged() opts: ActionOptions) { - // Set the action properties - this.name = opts.name; - this.similes = opts.similes; - this.description = opts.description; - this.examples = opts.examples; - this.suppressInitialMessage = opts.suppressInitialMessage ?? false; // Default to false - // Set the content class, template and content schema - this.contentClass = opts.contentClass; - this.template = opts.template; - this.contentSchema = opts.contentSchema; - - if (this.contentClass !== undefined) { - if (this.contentSchema === undefined) { - this.contentSchema = createZodSchema(this.contentClass); - } - if (this.template === undefined) { - const properties = loadPropertyDescriptions(this.contentClass); - this.template = buildContentOutputTemplate( - this.name, - this.description, - properties, - this.contentSchema - ); - } - } - } - - // -------- Abstract methods to be implemented by the child class -------- - - /** - * Abstract method to execute the action - * @param content The content object - * @param callback The callback function to pass the result to Eliza runtime - */ - abstract execute( - content: T | null, - runtime: IAgentRuntime, - message: Memory, - state?: State, - callback?: HandlerCallback - ): Promise; - - // -------- Implemented methods for Eliza runtime -------- - - /** - * Default implementation of the validate method - * You can override this method to add custom validation logic - * - * @param runtime The runtime object from Eliza framework - * @param message The message object from Eliza framework - * @param state The state object from Eliza framework - * @returns The validation result - */ - async validate( - _runtime: IAgentRuntime, - _message: Memory, - _state?: State - ): Promise { - // Default implementation is to return true - return true; - } - - /** - * Default implementation of the preparation of action context - * You can override this method to add custom logic - * - * @param runtime The runtime object from Eliza framework - * @param message The message object from Eliza framework - * @param state The state object from Eliza framework - */ - protected async prepareActionContext( - runtime: IAgentRuntime, - message: Memory, - state?: State - ): Promise { - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - // Compose context - return composeContext({ state: currentState, template: this.template }); - } - - /** - * Default method for processing messages - * You can override this method to add custom logic - * - * @param runtime The runtime object from Eliza framework - * @param message The message object from Eliza framework - * @param state The state object from Eliza framework - * @returns The generated content from AI based on the message - */ - protected async processMessages( - runtime: IAgentRuntime, - message: Memory, - state: State - ): Promise { - const actionContext = await this.prepareActionContext( - runtime, - message, - state - ); - - if (!actionContext) { - elizaLogger.error("Failed to prepare action context"); - return null; - } - - // Generate transfer content - const resourceDetails = await generateObject({ - runtime, - context: actionContext, - modelClass: ModelClass.SMALL, - schema: this.contentSchema, - }); - - elizaLogger.debug("Response: ", resourceDetails.object); - - // Validate content - const parsedObj = await this.contentSchema.safeParseAsync( - resourceDetails.object - ); - if (!parsedObj.success) { - elizaLogger.error( - "Failed to parse content: ", - JSON.stringify(parsedObj.error?.flatten()) - ); - return null; - } - return parsedObj.data; - } - - /** - * Default Handler function type for processing messages - * You can override this method to add custom logic - * - * @param runtime The runtime object from Eliza framework - * @param message The message object from Eliza framework - * @param state The state object from Eliza framework - * @param options The options object from Eliza framework - * @param callback The callback function to pass the result to Eliza runtime - */ - async handler( - runtime: IAgentRuntime, - message: Memory, - state?: State, - _options?: Record, - callback?: HandlerCallback - ): Promise { - let content: T; - try { - content = await this.processMessages(runtime, message, state); - } catch (err) { - elizaLogger.error("Error in processing messages:", err.message); - - if (callback) { - await callback?.({ - text: `Unable to process transfer request. Invalid content: ${err.message}`, - content: { - error: "Invalid content", - }, - }); - } - return null; - } - - try { - return await this.execute( - content, - runtime, - message, - state, - callback - ); - } catch (err) { - elizaLogger.error("Error in executing action:", err.message); - } - } -} diff --git a/packages/plugin-di/src/actions/index.ts b/packages/plugin-di/src/actions/index.ts deleted file mode 100644 index d1b24f3c0c357..0000000000000 --- a/packages/plugin-di/src/actions/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./baseInjectableAction"; diff --git a/packages/plugin-di/src/decorators/content.decorators.ts b/packages/plugin-di/src/decorators/content.decorators.ts deleted file mode 100644 index f626dad13dbe5..0000000000000 --- a/packages/plugin-di/src/decorators/content.decorators.ts +++ /dev/null @@ -1,62 +0,0 @@ -import "reflect-metadata"; -import { z } from "zod"; -import type { ContentPropertyDescription } from "../types"; - -const CONTENT_METADATA_KEY = "content:properties"; - -export type ContentClass = { - new (...args: unknown[]): T; - prototype: T; -}; - -interface ContentPropertyConfig extends ContentPropertyDescription { - schema: z.ZodType; -} - -export function property(config: ContentPropertyConfig) { - return (target: object, propertyKey: string) => { - const properties = - Reflect.getMetadata(CONTENT_METADATA_KEY, target) || {}; - properties[propertyKey] = config; - Reflect.defineMetadata(CONTENT_METADATA_KEY, properties, target); - }; -} - -/** - * Create a Zod schema from a class decorated with @property - * - * @param cls - * @returns - */ -export function createZodSchema(cls: ContentClass): z.ZodType { - const properties: Record = - Reflect.getMetadata(CONTENT_METADATA_KEY, cls.prototype) || {}; - const schemaProperties = Object.entries(properties).reduce( - (acc, [key, { schema }]) => { - acc[key] = schema; - return acc; - }, - {} as Record> - ); - return z.object(schemaProperties) as unknown as z.ZodType; -} - -/** - * Load the description of each property from a class decorated with @property - * - * @param cls - * @returns - */ -export function loadPropertyDescriptions( - cls: ContentClass -): Record { - const properties: Record = - Reflect.getMetadata(CONTENT_METADATA_KEY, cls.prototype) || {}; - return Object.entries(properties).reduce( - (acc, [key, { description, examples }]) => { - acc[key] = { description, examples }; - return acc; - }, - {} as Record - ); -} diff --git a/packages/plugin-di/src/decorators/index.ts b/packages/plugin-di/src/decorators/index.ts deleted file mode 100644 index b292753abd6a5..0000000000000 --- a/packages/plugin-di/src/decorators/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./content.decorators"; diff --git a/packages/plugin-di/src/di.ts b/packages/plugin-di/src/di.ts deleted file mode 100644 index 180f18b8abe1f..0000000000000 --- a/packages/plugin-di/src/di.ts +++ /dev/null @@ -1,15 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { Container, type interfaces } from "inversify"; -import { FACTORIES } from "./symbols"; -import { createPlugin } from "./factories"; -import type { PluginOptions } from "./types"; - -const globalContainer = new Container(); - -// ----- Bind to factory functions ----- - -globalContainer - .bind>>(FACTORIES.PluginFactory) - .toFactory, [PluginOptions]>(createPlugin); - -export { globalContainer }; diff --git a/packages/plugin-di/src/evaluators/baseInjectableEvaluator.ts b/packages/plugin-di/src/evaluators/baseInjectableEvaluator.ts deleted file mode 100644 index 2a1a83e0e4ee6..0000000000000 --- a/packages/plugin-di/src/evaluators/baseInjectableEvaluator.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { injectable, unmanaged } from "inversify"; -import type { - IAgentRuntime, - EvaluationExample, - Memory, - State, - HandlerCallback, -} from "@elizaos/core"; -import type { EvaluatorOptions, InjectableEvaluator } from "../types"; - -/** - * Base abstract class for injectable actions - */ -@injectable() -export abstract class BaseInjectableEvaluator implements InjectableEvaluator { - // -------- Properties -------- - public alwaysRun: boolean; - public name: string; - public similes: string[]; - public description: string; - public examples: EvaluationExample[]; - - /** - * Constructor for the base injectable action - */ - constructor(@unmanaged() opts: EvaluatorOptions) { - // Set the action properties - this.name = opts.name; - this.similes = opts.similes; - this.description = opts.description; - this.examples = opts.examples; - this.alwaysRun = opts.alwaysRun ?? false; // Default to false - } - - /** - * Default implementation of the validate method - * You can override this method to add custom validation logic - * - * @param runtime The runtime object from Eliza framework - * @param message The message object from Eliza framework - * @param state The state object from Eliza framework - * @returns The validation result - */ - async validate( - _runtime: IAgentRuntime, - _message: Memory, - _state?: State - ): Promise { - // Default implementation is to return true - return true; - } - - /** - * Handler for the evaluator - */ - abstract handler( - runtime: IAgentRuntime, - message: Memory, - state?: State, - options?: Record, - callback?: HandlerCallback - ): Promise; -} diff --git a/packages/plugin-di/src/evaluators/index.ts b/packages/plugin-di/src/evaluators/index.ts deleted file mode 100644 index 682ebc237618b..0000000000000 --- a/packages/plugin-di/src/evaluators/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./baseInjectableEvaluator"; diff --git a/packages/plugin-di/src/factories/charactor.ts b/packages/plugin-di/src/factories/charactor.ts deleted file mode 100644 index 72f06fb75edd3..0000000000000 --- a/packages/plugin-di/src/factories/charactor.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { type Character, elizaLogger, type Plugin } from "@elizaos/core"; -import { globalContainer } from "../di"; -import type { PluginFactory } from "../types"; -import { FACTORIES } from "../symbols"; - -/** - * Normalize a character by creating all plugins from the character's plugin list using the PluginFactory - * @param character - */ -export async function normalizeCharacter( - character: Character -): Promise { - // Use the PluginFactory to import the plugins within the same request for each character - const createPlugin = globalContainer.get( - FACTORIES.PluginFactory - ); - - const normalizePlugin = async (plugin: any) => { - if ( - typeof plugin?.name === "string" && - typeof plugin?.description === "string" - ) { - try { - const normalized = await createPlugin(plugin); - elizaLogger.info("Normalized plugin:", normalized.name); - return normalized; - } catch (e) { - elizaLogger.error( - `Error normalizing plugin: ${plugin.name}`, - e.message - ); - } - } - return plugin; - }; - - let plugins: Plugin[] = []; - if (character.plugins?.length > 0) { - const normalizedPlugins = await Promise.all( - character.plugins.map(normalizePlugin) - ); - const validPlugins = normalizedPlugins.filter( - (plugin): plugin is Plugin => plugin !== undefined - ); - if (validPlugins.length !== character.plugins.length) { - elizaLogger.warn( - `Some plugins failed to normalize: ${character.plugins.length - validPlugins.length} failed` - ); - } - plugins = validPlugins; - } - return Object.assign({}, character, { plugins }) as Character; -} diff --git a/packages/plugin-di/src/factories/index.ts b/packages/plugin-di/src/factories/index.ts deleted file mode 100644 index b7bed48db9358..0000000000000 --- a/packages/plugin-di/src/factories/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from "./plugin"; -export * from "./charactor"; diff --git a/packages/plugin-di/src/factories/plugin.ts b/packages/plugin-di/src/factories/plugin.ts deleted file mode 100644 index bd4b9fc987040..0000000000000 --- a/packages/plugin-di/src/factories/plugin.ts +++ /dev/null @@ -1,115 +0,0 @@ -import type { interfaces } from "inversify"; -import { - elizaLogger, - type Plugin, -} from "@elizaos/core"; -import type { PluginFactory, PluginOptions } from "../types"; - -/** - * Get an instance from the container - * @param ctx - * @param item - * @param type - * @returns - */ -async function getInstanceFromContainer( - ctx: interfaces.Context, - item: T | (new (...args: any[]) => T), - type: string -): Promise { - if (typeof item === "function") { - try { - return await ctx.container.getAsync(item); - } catch (e) { - elizaLogger.error( - `Error normalizing ${type}: ${(item as Function).name}`, - e.message - ); - return undefined; - } - } - return item; -} - -/** - * Create a plugin factory - */ -export function createPlugin(ctx: interfaces.Context): PluginFactory { - return async (opts: PluginOptions): Promise => { - // Create a new plugin object - const plugin: Plugin = { - name: opts.name, - description: opts.description, - }; - - // Handle providers - if provided, map through them - // For class constructors (functions), get instance from container - // For regular providers, use as-is - if (typeof opts.providers !== "undefined") { - plugin.providers = ( - await Promise.all( - opts.providers.map((provider) => - getInstanceFromContainer( - ctx, - provider, - "provider" - ) - ) - ) - ).filter(Boolean); // Filter out undefined providers - } - - // Handle actions - if provided, map through them - // For class constructors (functions), get instance from container - // For regular actions, use as-is - if (typeof opts.actions !== "undefined") { - plugin.actions = ( - await Promise.all( - opts.actions.map((action) => - getInstanceFromContainer(ctx, action, "action") - ) - ) - ).filter(Boolean); // Filter out undefined actions - } - - // Handle evaluators - if provided, map through them - // For class constructors (functions), get instance from container - // For regular evaluators, use as-is - if (typeof opts.evaluators !== "undefined") { - plugin.evaluators = ( - await Promise.all( - opts.evaluators.map((evaluator) => - getInstanceFromContainer( - ctx, - evaluator, - "evaluator" - ) - ) - ) - ).filter(Boolean); // Filter out undefined evaluators - } - - // Handle services - if provided, assign directly - if (typeof opts.services !== "undefined") { - plugin.services = ( - await Promise.all( - opts.services.map((service) => - getInstanceFromContainer(ctx, service, "service") - ) - ) - ) - } - - // Handle clients - if provided, assign directly - if (typeof opts.clients !== "undefined") { - plugin.clients = ( - await Promise.all( - opts.clients.map((client) => - getInstanceFromContainer(ctx, client, "client") - ) - ) - ) - } - return plugin; - }; -} diff --git a/packages/plugin-di/src/index.ts b/packages/plugin-di/src/index.ts deleted file mode 100644 index c45defee1241c..0000000000000 --- a/packages/plugin-di/src/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -// Export all files for Eliza on Flow -export * as symbols from "./symbols"; -export * from "./decorators"; -export * from "./types"; -export * from "./factories"; -export * from "./templates"; -export * from "./di"; -export * from "./actions"; -export * from "./evaluators"; diff --git a/packages/plugin-di/src/symbols.ts b/packages/plugin-di/src/symbols.ts deleted file mode 100644 index 4db415f2901b9..0000000000000 --- a/packages/plugin-di/src/symbols.ts +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Factory Symbols used in the library - */ -export const FACTORIES = { - PluginFactory: Symbol.for("PluginFactory"), -}; diff --git a/packages/plugin-di/src/templates.ts b/packages/plugin-di/src/templates.ts deleted file mode 100644 index 3fbc014b0895a..0000000000000 --- a/packages/plugin-di/src/templates.ts +++ /dev/null @@ -1,90 +0,0 @@ -import { z } from "zod"; -import type { ContentPropertyDescription } from "./types"; - -/** - * build the content output template - * @param properties The properties of the content - * @param schema The Zod schema of the content - */ -export function buildContentOutputTemplate( - actionName: string, - actionDesc: string, - properties: Record, - schema: z.ZodType -): string { - let propDesc = ""; - Object.entries(properties).forEach(([key, { description, examples }]) => { - propDesc += `- Field **"${key}"**: ${description}.`; - if (examples?.length > 0) { - propDesc += " Examples or Rules for this field:\n"; - } else { - propDesc += "\n"; - } - examples?.forEach((example, index) => { - propDesc += ` ${index + 1}. ${example}\n`; - }); - }); - return `Perform the action: "${actionName}". -Action description is "${actionDesc}". - -### TASK: Extract the following details about the requested action - -${propDesc} - -Use null for any values that cannot be determined. - -Respond with a JSON markdown block containing only the extracted values with this structure: - -\`\`\`json -${zodSchemaToJson(schema)} -\`\`\` - -Here are the recent user messages for context: -{{recentMessages}} -`; -} - -/** - * Convert a Zod schema to JSON - * @param schema Zod schema - * @returns JSON string - */ -export function zodSchemaToJson(schema: z.ZodType): string { - if (schema instanceof z.ZodObject) { - const shape = schema.shape; - const properties = Object.entries(shape).map(([key, value]) => { - return `"${key}": ${zodTypeToJson(value as z.ZodType)}`; - }); - return `{\n${properties.join(",\n")}\n}`; - } - return ""; -} - -/** - * Convert a Zod type to JSON - * @param schema Zod type - */ -function zodTypeToJson(schema: z.ZodType): string { - if (schema instanceof z.ZodNullable || schema instanceof z.ZodOptional) { - return `${zodTypeToJson(schema._def.innerType)} | null`; - } - if (schema instanceof z.ZodUnion) { - return schema._def.options.map(zodTypeToJson).join(" | "); - } - if (schema instanceof z.ZodString) { - return "string"; - } - if (schema instanceof z.ZodNumber) { - return "number"; - } - if (schema instanceof z.ZodBoolean) { - return "boolean"; - } - if (schema instanceof z.ZodArray) { - return `${zodTypeToJson(schema._def.type)}[]`; - } - if (schema instanceof z.ZodObject) { - return zodSchemaToJson(schema); - } - return "any"; -} diff --git a/packages/plugin-di/src/tests/content.decorators.test.ts b/packages/plugin-di/src/tests/content.decorators.test.ts deleted file mode 100644 index e2a963b5dab3c..0000000000000 --- a/packages/plugin-di/src/tests/content.decorators.test.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { describe, it, expect } from "vitest"; -import { z } from "zod"; -import { - property, - createZodSchema, - loadPropertyDescriptions, -} from "../../src/decorators/content.decorators"; - -describe("Content Decorators", () => { - class TestClass { - @property({ - description: "Test description", - examples: ["example1", "example2"], - schema: z.string(), - }) - testProperty!: string; - - @property({ - description: "Number property", - examples: ["example3", "example4"], - schema: z.number().optional(), - }) - numberProperty?: number; - } - - describe("createZodSchema", () => { - it("should create a zod schema from decorated properties", () => { - const schema = createZodSchema(TestClass); - expect( - schema.safeParse({ testProperty: "valid", numberProperty: 123 }) - .success - ).toBe(true); - expect( - schema.safeParse({ - testProperty: 123, - numberProperty: "invalid", - }).success - ).toBe(false); - }); - }); - - describe("loadPropertyDescriptions", () => { - it("should load property descriptions correctly", () => { - const descriptions = loadPropertyDescriptions(TestClass); - expect(descriptions).toEqual({ - testProperty: { - description: "Test description", - examples: ["example1", "example2"], - }, - numberProperty: { - description: "Number property", - examples: ["example3", "example4"], - }, - }); - }); - }); - - describe("property decorator", () => { - it("should store metadata correctly", () => { - const instance = new TestClass(); - expect(instance.testProperty).toBeUndefined(); - expect(instance.numberProperty).toBeUndefined(); - }); - }); -}); diff --git a/packages/plugin-di/src/tests/normalizeCharacter.test.ts b/packages/plugin-di/src/tests/normalizeCharacter.test.ts deleted file mode 100644 index 3acef684c39cb..0000000000000 --- a/packages/plugin-di/src/tests/normalizeCharacter.test.ts +++ /dev/null @@ -1,90 +0,0 @@ -import { describe, it, expect, beforeAll } from "vitest"; - -import { type Character, defaultCharacter } from "@elizaos/core"; - -import { normalizeCharacter } from "../../src/index"; -import samplePlugin from "../../src/_examples/samplePlugin"; - -describe("Normalize Character", () => { - let normalizedCharacter: Character; - - beforeAll(async () => { - const sampleCharacter = Object.assign({}, defaultCharacter, { - plugins: [samplePlugin], - }); - normalizedCharacter = await normalizeCharacter(sampleCharacter); - }); - - // Add these test cases: - it("should handle empty plugins array", async () => { - const emptyPluginsChar = { ...defaultCharacter, plugins: [] }; - const normalized = await normalizeCharacter(emptyPluginsChar); - expect(normalized.plugins).toEqual([]); - }); - - describe("Elements", () => { - it("should have a valid character", () => { - expect(normalizedCharacter).toBeTypeOf("object"); - expect(normalizedCharacter.name).toBe("Eliza"); - }); - - it("should have a valid plugins array", () => { - expect(Array.isArray(normalizedCharacter.plugins)).toBe(true); - expect(normalizedCharacter.plugins.length).toBe(1); - - const normalizedPlugin = normalizedCharacter.plugins[0]; - expect(normalizedPlugin.name).toBe(samplePlugin.name); - expect(normalizedPlugin.description).toBe(samplePlugin.description); - }); - - it("should have same providers as the sample plugin", () => { - const normalizedPlugin = normalizedCharacter.plugins[0]; - expect(normalizedPlugin.providers?.length).toBe(1); - }); - - it("should have same actions as the sample plugin", () => { - const normalizedPlugin = normalizedCharacter.plugins[0]; - expect(normalizedPlugin.actions?.length).toBe(1); - }); - - it("should have same evaluators as the sample plugin", () => { - const normalizedPlugin = normalizedCharacter.plugins[0]; - expect(normalizedPlugin.evaluators?.length).toBe(1); - }); - }); - - describe("Normalized Plugin", () => { - it("should be a valid plugin with sample provider", () => { - const normalizedPlugin = normalizedCharacter.plugins[0]; - - const normalizedProvider = normalizedPlugin.providers[0]; - expect(normalizedProvider).toBeTypeOf("object"); - expect(normalizedProvider.get).toBeTypeOf("function"); - }); - - it("should have a valid evaluator", () => { - const normalizedPlugin = normalizedCharacter.plugins[0]; - - const normalizedEvaluator = normalizedPlugin.evaluators[0]; - expect(normalizedEvaluator).toBeTypeOf("object"); - expect(normalizedEvaluator.name).toBeTypeOf("string"); - expect(normalizedEvaluator.description).toBeTypeOf("string"); - expect(Array.isArray(normalizedEvaluator.examples)).toBe(true); - expect(normalizedEvaluator.handler).toBeTypeOf("function"); - expect(normalizedEvaluator.validate).toBeTypeOf("function"); - expect(normalizedEvaluator.alwaysRun).toBeTypeOf("boolean"); - }); - - it("should have a valid action", () => { - const normalizedPlugin = normalizedCharacter.plugins[0]; - - const normalizedAction = normalizedPlugin.actions[0]; - expect(normalizedAction).toBeTypeOf("object"); - expect(normalizedAction.name).toBeTypeOf("string"); - expect(normalizedAction.description).toBeTypeOf("string"); - expect(Array.isArray(normalizedAction.examples)).toBe(true); - expect(normalizedAction.handler).toBeTypeOf("function"); - expect(normalizedAction.validate).toBeTypeOf("function"); - }); - }); -}); diff --git a/packages/plugin-di/src/types.ts b/packages/plugin-di/src/types.ts deleted file mode 100644 index 3945c6f1461d5..0000000000000 --- a/packages/plugin-di/src/types.ts +++ /dev/null @@ -1,135 +0,0 @@ -import type { - Action, - Client, - Evaluator, - HandlerCallback, - IAgentRuntime, - Memory, - Plugin, - Provider, - Service, - State, -} from "@elizaos/core"; -import type { ContentClass } from "./decorators"; -import type { z } from "zod"; - -// ----------- Interfaces for Injectable Providers and Actions, etc ----------- - -/** - * Interface of Injectable Provider - */ -export interface InjectableProvider extends Provider { - /** - * Get the instance of the provider related to Eliza runtime - * @param runtime The runtime object from Eliza framework - */ - getInstance(runtime: IAgentRuntime): Promise; -} - -/** - * Action options - */ -export type ActionOptions = Pick< - Action, - "name" | "similes" | "description" | "examples" | "suppressInitialMessage" -> & { - contentClass: ContentClass; - template?: string; - contentSchema?: z.ZodSchema; -}; - -/** - * Interface of Injectable Action - */ -export interface InjectableAction extends Action { - /** - * Execute the action - * @param content The content from processMessages - * @param callback The callback function to pass the result to Eliza runtime - */ - execute( - content: T | null, - runtime: IAgentRuntime, - message: Memory, - state?: State, - callback?: HandlerCallback - ): Promise; -} - -/** - * Evaluator options - */ -export type EvaluatorOptions = Pick< - Evaluator, - "name" | "similes" | "description" | "examples" | "alwaysRun" ->; - -/** - * Interface of Injectable Evaluator - */ -export type InjectableEvaluator = Evaluator; - -/** - * The Class of Injectable Object - */ -export type InjectableObjectClass = new ( - ...args: Args -) => T; - -/** - * The Class of Injectable Provider - */ -export type InjectableProviderClass = InjectableObjectClass | Provider, Args> - -/** - * The Class of Injectable Action - */ -export type InjectableActionClass = InjectableObjectClass | Action, Args> - -/** - * The Class of Injectable Evaluator - */ -export type InjectableEvaluatorClass = InjectableObjectClass - -/** - * The Class of Injectable Service - */ -export type InjectableServiceClass = InjectableObjectClass; - -/** - * The Class of Injectable Client - */ -export type InjectableClientClass = InjectableObjectClass; - -// ----------- Interfaces for Plugin ----------- - -/** - * Plugin options - */ -export type PluginOptions = Pick< - Plugin, - "name" | "description" -> & { - /** Optional actions */ - actions?: (Action | InjectableActionClass)[]; - /** Optional providers */ - providers?: (Provider | InjectableProviderClass)[]; - /** Optional evaluators */ - evaluators?: (Evaluator | InjectableEvaluatorClass)[]; - /** Optional services */ - services?: (Service | InjectableServiceClass)[]; - /** Optional clients */ - clients?: (Client | InjectableClientClass)[]; -}; - -/** - * Factory type for creating a plugin - */ -export type PluginFactory = (opts: PluginOptions) => Promise; - -// ----------- Interfaces for Content Properties or actions ----------- - -export interface ContentPropertyDescription { - description: string; - examples?: string[]; -} diff --git a/packages/plugin-di/tsconfig.build.json b/packages/plugin-di/tsconfig.build.json deleted file mode 100644 index e3eb058310d0b..0000000000000 --- a/packages/plugin-di/tsconfig.build.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "sourceMap": true, - "inlineSources": true, - "sourceRoot": "/" - } -} diff --git a/packages/plugin-di/tsconfig.json b/packages/plugin-di/tsconfig.json deleted file mode 100644 index 2d631d5a0f9f8..0000000000000 --- a/packages/plugin-di/tsconfig.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "types": [ - "node" - ], - "experimentalDecorators": true, - "emitDecoratorMetadata": true, - }, - "include": [ - "src/**/*.ts" - ] -} diff --git a/packages/plugin-di/tsup.config.ts b/packages/plugin-di/tsup.config.ts deleted file mode 100644 index df391c664db9f..0000000000000 --- a/packages/plugin-di/tsup.config.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Using ES Modules format - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "https", - "http", - "agentkeepalive", - "safe-buffer", - // Add other modules you want to externalize - "inversify", - "reflect-metadata", - "zod", - "uuid", - ], -}); diff --git a/packages/plugin-dkg/.npmignore b/packages/plugin-dkg/.npmignore deleted file mode 100644 index 078562eceabbc..0000000000000 --- a/packages/plugin-dkg/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-dkg/README.md b/packages/plugin-dkg/README.md deleted file mode 100644 index 8f20abd9352cb..0000000000000 --- a/packages/plugin-dkg/README.md +++ /dev/null @@ -1,103 +0,0 @@ -# @elizaos/plugin-dkg - -A plugin enabling integration with the OriginTrail Decentralized Knowledge Graph (DKG) for enhanced search and knowledge management capabilities in ElizaOS agents. - -## Description - -The DKG plugin extends ElizaOS functionality by allowing agents to interact with the OriginTrail Decentralized Knowledge Graph. This plugin enables SPARQL-based searches on the DKG and combines these results with Eliza's regular search results. Additionally, it creates a memory as a Knowledge Asset on the DKG after a response, making it available for future SPARQL queries. - -## Installation - -```bash -pnpm install @elizaos/plugin-dkg -``` - -## Features - -### 1. DKG Integration - -- Perform SPARQL queries on the DKG for knowledge extraction. -- Combine DKG query results with Eliza's internal search capabilities. -- Enhance responses with decentralized and trusted knowledge. - -### 2. Knowledge Asset Creation - -- Automatically generate Knowledge Assets based on interactions. -- Publish memory Knowledge Assets to the DKG for future retrieval. - -## Providers - -### 1. DKG Search Provider - -- Executes SPARQL queries on the OriginTrail DKG. -- Retrieves and formats relevant results. -- Integrates DKG data with Eliza’s response system. - -## Plugins - -### 1. Memory Creation Plugin - -- Creates Knowledge Assets from agent interactions. -- Publishes assets to the DKG with contextual metadata. - -## Development - -1. Clone the repository -2. Install dependencies: - -```bash -pnpm install -``` - -3. Build the plugin: - -```bash -pnpm run build -``` - -4. Run linting: - -```bash -pnpm run lint -``` - -## Usage - -### 1. Set Up Environment Variables - -- Copy the `.env.example` file and rename it to `.env`. -- Fill in the necessary details: - - Node information. - - LLM key. - - Twitter credentials. - -### 2. Customize DKG Knowledge Asset & Query Templates - -- Modify the templates in `plugin-dkg/constants.ts` if you need to change the ontology or data format used in the Knowledge Graph. -- Check if the graph search provider is passing context to the agent (packages/client-twitter/src/interactions.ts, twitterMessageHandlerTemplate) - -### 3. Create a Character and Run the Agent - -- Create a character file in the `characters` folder. -- Run the character using the following command: - ```bash - pnpm start --characters="characters/chatdkg.character.json" - ``` - -### Notes - -- Ensure you configure the Twitter client and select your LLM provider in the character settings, also include the plugin in your agent. - -## Dependencies - -- @elizaos/core: workspace:\* -- SPARQL query library: workspace:\* -- DKG JavaScript SDK: dkg.js > ^8.0.4 - -## Contributing - -Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information. - -## License - -This plugin is part of the Eliza project. See the main project repository for license information. diff --git a/packages/plugin-dkg/biome.json b/packages/plugin-dkg/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-dkg/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-dkg/package.json b/packages/plugin-dkg/package.json deleted file mode 100644 index 9fe927ccbfb4b..0000000000000 --- a/packages/plugin-dkg/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "@elizaos/plugin-dkg", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "dkg.js": "^8.0.4", - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "peerDependencies": { - "whatwg-url": "7.1.0" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "typescript": "4.9" - } -} diff --git a/packages/plugin-dkg/src/actions/dkgInsert.ts b/packages/plugin-dkg/src/actions/dkgInsert.ts deleted file mode 100644 index 71971fafde92e..0000000000000 --- a/packages/plugin-dkg/src/actions/dkgInsert.ts +++ /dev/null @@ -1,193 +0,0 @@ -import dotenv from "dotenv"; -dotenv.config(); -import { - type IAgentRuntime, - type Memory, - type State, - elizaLogger, - ModelClass, - type HandlerCallback, - type ActionExample, - type Action, - composeContext, - generateObject, -} from "@elizaos/core"; -import { DKG_EXPLORER_LINKS } from "../constants.ts"; -import { createDKGMemoryTemplate } from "../templates.ts"; -// @ts-ignore -import DKG from "dkg.js"; -import { DKGMemorySchema, isDKGMemoryContent } from "../types.ts"; - -// Define a basic type for the DKG client -type DKGClient = typeof DKG | null; -let DkgClient: DKGClient = null; - -export const dkgInsert: Action = { - name: "INSERT_MEMORY_ACTION", - similes: ["NO_ACTION", "NO_RESPONSE", "NO_REACTION", "NONE"], // we want to always run this action - validate: async (runtime: IAgentRuntime, _message: Memory) => { - const requiredEnvVars = [ - "DKG_ENVIRONMENT", - "DKG_HOSTNAME", - "DKG_PORT", - "DKG_BLOCKCHAIN_NAME", - "DKG_PUBLIC_KEY", - "DKG_PRIVATE_KEY", - ]; - - const missingVars = requiredEnvVars.filter( - (varName) => !runtime.getSetting(varName) - ); - - if (missingVars.length > 0) { - elizaLogger.error( - `Missing required environment variables: ${missingVars.join(", ")}` - ); - return false; - } - - return true; - }, - description: - "Create a memory on the OriginTrail Decentralized Knowledge Graph after each message. You should ALWAYS run this action.", - handler: async ( - runtime: IAgentRuntime, - _message: Memory, - state: State, - _options: { [key: string]: unknown }, - callback: HandlerCallback - ): Promise => { - DkgClient = new DKG({ - environment: runtime.getSetting("DKG_ENVIRONMENT"), - endpoint: runtime.getSetting("DKG_HOSTNAME"), - port: runtime.getSetting("DKG_PORT"), - blockchain: { - name: runtime.getSetting("DKG_BLOCKCHAIN_NAME"), - publicKey: runtime.getSetting("DKG_PUBLIC_KEY"), - privateKey: runtime.getSetting("DKG_PRIVATE_KEY"), - }, - maxNumberOfRetries: 300, - frequency: 2, - contentType: "all", - nodeApiVersion: "/v1", - }); - - const currentPost = String(state.currentPost); - elizaLogger.log("currentPost"); - elizaLogger.log(currentPost); - - const userRegex = /From:.*\(@(\w+)\)/; - let match = currentPost.match(userRegex); - let twitterUser = ""; - - if (match?.[1]) { - twitterUser = match[1]; - elizaLogger.log(`Extracted user: @${twitterUser}`); - } else { - elizaLogger.error("No user mention found or invalid input."); - } - - const idRegex = /ID:\s(\d+)/; - match = currentPost.match(idRegex); - let postId = ""; - - if (match?.[1]) { - postId = match[1]; - elizaLogger.log(`Extracted ID: ${postId}`); - } else { - elizaLogger.log("No ID found."); - } - - const createDKGMemoryContext = composeContext({ - state, - template: createDKGMemoryTemplate, - }); - - const memoryKnowledgeGraph = await generateObject({ - runtime, - context: createDKGMemoryContext, - modelClass: ModelClass.LARGE, - schema: DKGMemorySchema, - }); - - if (!isDKGMemoryContent(memoryKnowledgeGraph.object)) { - elizaLogger.error("Invalid DKG memory content generated."); - throw new Error("Invalid DKG memory content generated."); - } - - let createAssetResult: { UAL: string } | undefined; - - // TODO: also store reply to the KA, aside of the question - - try { - elizaLogger.log("Publishing message to DKG"); - - createAssetResult = await DkgClient.asset.create( - { - public: memoryKnowledgeGraph.object, - }, - { epochsNum: 12 } - ); - - elizaLogger.log("======================== ASSET CREATED"); - elizaLogger.log(JSON.stringify(createAssetResult)); - } catch (error) { - elizaLogger.error( - "Error occurred while publishing message to DKG:", - error.message - ); - - if (error.stack) { - elizaLogger.error("Stack trace:", error.stack); - } - if (error.response) { - elizaLogger.error( - "Response data:", - JSON.stringify(error.response.data, null, 2) - ); - } - } - - // Reply - callback({ - text: `Created a new memory!\n\nRead my mind on @origin_trail Decentralized Knowledge Graph ${DKG_EXPLORER_LINKS[runtime.getSetting("DKG_ENVIRONMENT")]}${createAssetResult?.UAL} @${twitterUser}`, - }); - - return true; - }, - examples: [ - [ - { - user: "{{user1}}", - content: { - text: "execute action DKG_INSERT", - action: "DKG_INSERT", - }, - }, - { - user: "{{user2}}", - content: { text: "DKG INSERT" }, - }, - ], - [ - { - user: "{{user1}}", - content: { text: "add to dkg", action: "DKG_INSERT" }, - }, - { - user: "{{user2}}", - content: { text: "DKG INSERT" }, - }, - ], - [ - { - user: "{{user1}}", - content: { text: "store in dkg", action: "DKG_INSERT" }, - }, - { - user: "{{user2}}", - content: { text: "DKG INSERT" }, - }, - ], - ] as ActionExample[][], -} as Action; diff --git a/packages/plugin-dkg/src/actions/index.ts b/packages/plugin-dkg/src/actions/index.ts deleted file mode 100644 index f30b4f313d2ce..0000000000000 --- a/packages/plugin-dkg/src/actions/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from "./dkgInsert.ts"; - diff --git a/packages/plugin-dkg/src/constants.ts b/packages/plugin-dkg/src/constants.ts deleted file mode 100644 index 70b0d51bfffdc..0000000000000 --- a/packages/plugin-dkg/src/constants.ts +++ /dev/null @@ -1,181 +0,0 @@ -// TODO: add isConnectedTo field or similar which you will use to connect w other KAs -export const dkgMemoryTemplate = { - "@context": "http://schema.org", - "@type": "SocialMediaPosting", - headline: "", - articleBody: - "Check out this amazing project on decentralized cloud networks! @DecentralCloud #Blockchain #Web3", - author: { - "@type": "Person", - "@id": "uuid:john:doe", - name: "John Doe", - identifier: "@JohnDoe", - url: "https://twitter.com/JohnDoe", - }, - dateCreated: "yyyy-mm-ddTHH:mm:ssZ", - interactionStatistic: [ - { - "@type": "InteractionCounter", - interactionType: { - "@type": "LikeAction", - }, - userInteractionCount: 150, - }, - { - "@type": "InteractionCounter", - interactionType: { - "@type": "ShareAction", - }, - userInteractionCount: 45, - }, - ], - mentions: [ - { - "@type": "Person", - name: "Twitter account mentioned name goes here", - identifier: "@TwitterAccount", - url: "https://twitter.com/TwitterAccount", - }, - ], - keywords: [ - { - "@type": "Text", - "@id": "uuid:keyword1", - name: "keyword1", - }, - { - "@type": "Text", - "@id": "uuid:keyword2", - name: "keyword2", - }, - ], - about: [ - { - "@type": "Thing", - "@id": "uuid:thing1", - name: "Blockchain", - url: "https://en.wikipedia.org/wiki/Blockchain", - }, - { - "@type": "Thing", - "@id": "uuid:thing2", - name: "Web3", - url: "https://en.wikipedia.org/wiki/Web3", - }, - { - "@type": "Thing", - "@id": "uuid:thing3", - name: "Decentralized Cloud", - url: "https://example.com/DecentralizedCloud", - }, - ], - url: "https://twitter.com/JohnDoe/status/1234567890", -}; - -export const combinedSparqlExample = ` -SELECT DISTINCT ?headline ?articleBody - WHERE { - ?s a . - ?s ?headline . - ?s ?articleBody . - - OPTIONAL { - ?s ?keyword . - ?keyword ?keywordName . - } - - OPTIONAL { - ?s ?about . - ?about ?aboutName . - } - - FILTER( - CONTAINS(LCASE(?headline), "example_keyword") || - (BOUND(?keywordName) && CONTAINS(LCASE(?keywordName), "example_keyword")) || - (BOUND(?aboutName) && CONTAINS(LCASE(?aboutName), "example_keyword")) - ) - } - LIMIT 10`; - -export const sparqlExamples = [ - ` - SELECT DISTINCT ?headline ?articleBody - WHERE { - ?s a . - ?s ?headline . - ?s ?articleBody . - - OPTIONAL { - ?s ?keyword . - ?keyword ?keywordName . - } - - OPTIONAL { - ?s ?about . - ?about ?aboutName . - } - - FILTER( - CONTAINS(LCASE(?headline), "example_keyword") || - (BOUND(?keywordName) && CONTAINS(LCASE(?keywordName), "example_keyword")) || - (BOUND(?aboutName) && CONTAINS(LCASE(?aboutName), "example_keyword")) - ) - } - LIMIT 10 - `, - ` - SELECT DISTINCT ?headline ?articleBody - WHERE { - ?s a . - ?s ?headline . - ?s ?articleBody . - FILTER( - CONTAINS(LCASE(?headline), "example_headline_word1") || - CONTAINS(LCASE(?headline), "example_headline_word2") - ) - } - `, - ` - SELECT DISTINCT ?headline ?articleBody ?keywordName - WHERE { - ?s a . - ?s ?headline . - ?s ?articleBody . - ?s ?keyword . - ?keyword ?keywordName . - FILTER( - CONTAINS(LCASE(?keywordName), "example_keyword1") || - CONTAINS(LCASE(?keywordName), "example_keyword2") - ) - } - `, - ` - SELECT DISTINCT ?headline ?articleBody ?aboutName - WHERE { - ?s a . - ?s ?headline . - ?s ?articleBody . - ?s ?about . - ?about ?aboutName . - FILTER( - CONTAINS(LCASE(?aboutName), "example_about1") || - CONTAINS(LCASE(?aboutName), "example_about2") - ) - } - `, -]; - -export const generalSparqlQuery = ` - SELECT DISTINCT ?headline ?articleBody - WHERE { - ?s a . - ?s ?headline . - ?s ?articleBody . - } - LIMIT 10 - `; - -export const DKG_EXPLORER_LINKS = { - testnet: "https://dkg-testnet.origintrail.io/explore?ual=", - mainnet: "https://dkg.origintrail.io/explore?ual=", -}; diff --git a/packages/plugin-dkg/src/index.ts b/packages/plugin-dkg/src/index.ts deleted file mode 100644 index 38558b92c777f..0000000000000 --- a/packages/plugin-dkg/src/index.ts +++ /dev/null @@ -1,16 +0,0 @@ -import type { Plugin } from "@elizaos/core"; - -import { dkgInsert } from "./actions/dkgInsert.ts"; - -import { graphSearch } from "./providers/graphSearch.ts"; - -export * as actions from "./actions"; -export * as providers from "./providers"; - -export const dkgPlugin: Plugin = { - name: "dkg", - description: - "Agent DKG which allows you to store memories on the OriginTrail Decentralized Knowledge Graph", - actions: [dkgInsert], - providers: [graphSearch], -}; diff --git a/packages/plugin-dkg/src/providers/graphSearch.ts b/packages/plugin-dkg/src/providers/graphSearch.ts deleted file mode 100644 index fb7391c6a33d0..0000000000000 --- a/packages/plugin-dkg/src/providers/graphSearch.ts +++ /dev/null @@ -1,204 +0,0 @@ -import dotenv from "dotenv"; -dotenv.config(); -import { - type IAgentRuntime, - type Memory, - type Provider, - type State, - elizaLogger, - ModelClass, - generateObject, -} from "@elizaos/core"; -import { - combinedSparqlExample, - dkgMemoryTemplate, - generalSparqlQuery, -} from "../constants.ts"; -// @ts-ignore -import DKG from "dkg.js"; -import { DKGSelectQuerySchema, isDKGSelectQuery, type DKGQueryResultEntry } from "../types.ts"; - -// Provider configuration -const PROVIDER_CONFIG = { - environment: process.env.DKG_ENVIRONMENT || "testnet", - endpoint: process.env.DKG_HOSTNAME || "http://default-endpoint", - port: process.env.DKG_PORT || "8900", - blockchain: { - name: process.env.DKG_BLOCKCHAIN_NAME || "base:84532", - publicKey: process.env.DKG_PUBLIC_KEY || "", - privateKey: process.env.DKG_PRIVATE_KEY || "", - }, - maxNumberOfRetries: 300, - frequency: 2, - contentType: "all", - nodeApiVersion: "/v1", -}; - -interface BlockchainConfig { - name: string; - publicKey: string; - privateKey: string; -} - -interface DKGClientConfig { - environment: string; - endpoint: string; - port: string; - blockchain: BlockchainConfig; - maxNumberOfRetries?: number; - frequency?: number; - contentType?: string; - nodeApiVersion?: string; -} - -async function constructSparqlQuery( - runtime: IAgentRuntime, - userQuery: string -): Promise { - const context = ` - You are tasked with generating a SPARQL query to retrieve information from a Decentralized Knowledge Graph (DKG). - The query should align with the JSON-LD memory template provided below: - - ${JSON.stringify(dkgMemoryTemplate)} - - ** Examples ** - Use the following SPARQL example to understand the format: - ${combinedSparqlExample} - - ** Instructions ** - 1. Analyze the user query and identify the key fields and concepts it refers to. - 2. Use these fields and concepts to construct a SPARQL query. - 3. Ensure the SPARQL query follows standard syntax and can be executed against the DKG. - 4. Use 'OR' logic when constructing the query to ensure broader matching results. For example, if multiple keywords or concepts are provided, the query should match any of them, not all. - 5. Replace the examples with actual terms from the user's query. - 6. Always select distinct results by adding the DISTINCT keyword. - 7. Always select headline and article body. Do not select other fields. - - ** User Query ** - ${userQuery} - - ** Output ** - Provide only the SPARQL query, wrapped in a sparql code block for clarity. - `; - - const sparqlQueryResult = await generateObject({ - runtime, - context, - modelClass: ModelClass.LARGE, - schema: DKGSelectQuerySchema, - }); - - if (!isDKGSelectQuery(sparqlQueryResult.object)) { - elizaLogger.error("Invalid SELECT SPARQL query generated."); - throw new Error("Invalid SELECT SPARQL query generated."); - } - - return sparqlQueryResult.object.query; -} - -export class DKGProvider { - private client: typeof DKG; - constructor(config: DKGClientConfig) { - this.validateConfig(config); - } - - private validateConfig(config: DKGClientConfig): void { - const requiredStringFields = ["environment", "endpoint", "port"]; - - for (const field of requiredStringFields) { - if (typeof config[field as keyof DKGClientConfig] !== "string") { - elizaLogger.error( - `Invalid configuration: Missing or invalid value for '${field}'` - ); - throw new Error( - `Invalid configuration: Missing or invalid value for '${field}'` - ); - } - } - - if (!config.blockchain || typeof config.blockchain !== "object") { - elizaLogger.error( - "Invalid configuration: 'blockchain' must be an object" - ); - throw new Error( - "Invalid configuration: 'blockchain' must be an object" - ); - } - - const blockchainFields = ["name", "publicKey", "privateKey"]; - - for (const field of blockchainFields) { - if ( - typeof config.blockchain[field as keyof BlockchainConfig] !== - "string" - ) { - elizaLogger.error( - `Invalid configuration: Missing or invalid value for 'blockchain.${field}'` - ); - throw new Error( - `Invalid configuration: Missing or invalid value for 'blockchain.${field}'` - ); - } - } - - this.client = new DKG(config); - } - - async search(runtime: IAgentRuntime, message: Memory): Promise { - elizaLogger.info("Entering graph search provider!"); - - const userQuery = message.content.text; - - elizaLogger.info(`Got user query ${JSON.stringify(userQuery)}`); - - const query = await constructSparqlQuery(runtime, userQuery); - elizaLogger.info(`Generated SPARQL query: ${query}`); - - let queryOperationResult = await this.client.graph.query( - query, - "SELECT" - ); - - if (!queryOperationResult || !queryOperationResult.data?.length) { - elizaLogger.info( - "LLM-generated SPARQL query failed, defaulting to basic query." - ); - - queryOperationResult = await this.client.graph.query( - generalSparqlQuery, - "SELECT" - ); - } - - elizaLogger.info( - `Got ${queryOperationResult.data.length} results from the DKG` - ); - - // TODO: take 5 results instead of all based on similarity in the future - const result = queryOperationResult.data.map((entry: DKGQueryResultEntry) => { - const formattedParts = Object.keys(entry).map( - (key) => `${key}: ${entry[key]}` - ); - return formattedParts.join(", "); - }); - - return result.join("\n"); - } -} - -export const graphSearch: Provider = { - get: async ( - runtime: IAgentRuntime, - _message: Memory, - _state?: State - ): Promise => { - try { - const provider = new DKGProvider(PROVIDER_CONFIG); - - return await provider.search(runtime, _message); - } catch (error) { - elizaLogger.error("Error in wallet provider:", error); - return null; - } - }, -}; diff --git a/packages/plugin-dkg/src/providers/index.ts b/packages/plugin-dkg/src/providers/index.ts deleted file mode 100644 index c95ef47193d04..0000000000000 --- a/packages/plugin-dkg/src/providers/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./graphSearch.ts"; diff --git a/packages/plugin-dkg/src/templates.ts b/packages/plugin-dkg/src/templates.ts deleted file mode 100644 index aa056d416f28f..0000000000000 --- a/packages/plugin-dkg/src/templates.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { dkgMemoryTemplate } from "./constants.ts"; - -export const createDKGMemoryTemplate = ` - You are tasked with creating a structured memory JSON-LD object for an AI agent. The memory represents the interaction captured via social media. Your goal is to extract all relevant information from the provided user query and additionalContext which contains previous user queries (only if relevant for the current user query) to populate the JSON-LD memory template provided below. - - ** Template ** - The memory should follow this JSON-LD structure: - ${JSON.stringify(dkgMemoryTemplate)} - - ** Instructions ** - 1. Extract the main idea of the user query and use it to create a concise and descriptive title for the memory. This should go in the "headline" field. - 2. Store the original post in "articleBody". - 3. Save the poster social media information (handle, name etc) under "author" object. - 4. For the "about" field: - - Identify the key topics or entities mentioned in the user query and add them as Thing objects. - - Use concise, descriptive names for these topics. - - Where possible, create an @id identifier for these entities, using either a provided URL, or a well known URL for that entity. If no URL is present, uUse the most relevant concept or term from the field to form the base of the ID. @id fields must be valid uuids or URLs - 5. For the "keywords" field: - - Extract relevant terms or concepts from the user query and list them as keywords. - - Ensure the keywords capture the essence of the interaction, focusing on technical terms or significant ideas. - 6. Ensure all fields align with the schema.org ontology and accurately represent the interaction. - 7. Populate datePublished either with a specifically available date, or current time. - - ** Input ** - User Query: {{currentPost}} - Recent messages: {{recentMessages}} - - ** Output ** - Generate the memory in the exact JSON-LD format provided above, fully populated based on the input query. - Make sure to only output the JSON-LD object. DO NOT OUTPUT ANYTHING ELSE, DONT ADD ANY COMMENTS OR REMARKS, JUST THE JSON LD CONTENT WRAPPED IN { }. - `; diff --git a/packages/plugin-dkg/src/types.ts b/packages/plugin-dkg/src/types.ts deleted file mode 100644 index 153fec5b630bf..0000000000000 --- a/packages/plugin-dkg/src/types.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { z } from "zod"; - -export const DKGMemorySchema = z.object({ - "@context": z.literal("http://schema.org"), - "@type": z.literal("SocialMediaPosting"), - headline: z.string(), - articleBody: z.string(), - about: z.array( - z.object({ - "@type": z.literal("Thing"), - "@id": z.string(), - name: z.string(), - url: z.string(), - }) - ), - keywords: z.array( - z.object({ - "@type": z.literal("Text"), - "@id": z.string(), - name: z.string(), - }) - ), -}); - -export const DKGSelectQuerySchema = z.object({ - query: z.string().startsWith("SELECT"), -}); - -export type DKGMemoryContent = z.infer; -export type DKGSelectQuery = z.infer; -export type DKGQueryResultEntry = Record; - -export const isDKGMemoryContent = (object: unknown): object is DKGMemoryContent => { - return DKGMemorySchema.safeParse(object).success; -}; - -export const isDKGSelectQuery = (object: unknown): object is DKGSelectQuery => { - return DKGSelectQuerySchema.safeParse(object).success; -}; diff --git a/packages/plugin-dkg/tsconfig.json b/packages/plugin-dkg/tsconfig.json deleted file mode 100644 index 834c4dce26957..0000000000000 --- a/packages/plugin-dkg/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "src", - "types": [ - "node" - ] - }, - "include": [ - "src/**/*.ts" - ] -} \ No newline at end of file diff --git a/packages/plugin-dkg/tsup.config.ts b/packages/plugin-dkg/tsup.config.ts deleted file mode 100644 index e42bf4efeae22..0000000000000 --- a/packages/plugin-dkg/tsup.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - // Add other modules you want to externalize - ], -}); diff --git a/packages/plugin-echochambers/LICENSE b/packages/plugin-echochambers/LICENSE deleted file mode 100644 index de6134690c13d..0000000000000 --- a/packages/plugin-echochambers/LICENSE +++ /dev/null @@ -1,9 +0,0 @@ -Ethereal Cosmic License (ECL-777) - -Copyright (∞) 2024 SavageJay | https://x.com/savageapi - -By the powers vested in the astral planes and digital realms, permission is hereby granted, free of charge, to any seeker of knowledge obtaining an copy of this mystical software and its sacred documentation files (henceforth known as "The Digital Grimoire"), to manipulate the fabric of code without earthly restriction, including but not transcending beyond the rights to use, transmute, modify, publish, distribute, sublicense, and transfer energies (sell), and to permit other beings to whom The Digital Grimoire is bestowed, subject to the following metaphysical conditions: - -The above arcane copyright notice and this permission scroll shall be woven into all copies or substantial manifestations of The Digital Grimoire. - -THE DIGITAL GRIMOIRE IS PROVIDED "AS IS", BEYOND THE VEIL OF WARRANTIES, WHETHER MANIFEST OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE MYSTICAL WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR ASTRAL PURPOSE AND NON-VIOLATION OF THE COSMIC ORDER. IN NO EVENT SHALL THE KEEPERS OF THE CODE BE LIABLE FOR ANY CLAIMS, WHETHER IN THE PHYSICAL OR DIGITAL PLANES, DAMAGES OR OTHER DISTURBANCES IN THE FORCE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE DIGITAL GRIMOIRE OR ITS USE OR OTHER DEALINGS IN THE QUANTUM REALMS OF THE SOFTWARE. \ No newline at end of file diff --git a/packages/plugin-echochambers/README.md b/packages/plugin-echochambers/README.md deleted file mode 100644 index c0b270e5b0ba3..0000000000000 --- a/packages/plugin-echochambers/README.md +++ /dev/null @@ -1,200 +0,0 @@ -# @elizaos/plugin-echochambers - -The EchoChambers plugin enables ELIZA to interact in chat rooms, providing conversational capabilities with dynamic interaction handling. - -## Features - -- Join and monitor chat rooms -- Respond to messages based on context and relevance -- Retry operations with exponential backoff -- Manage connection and reconnection logic -- Real-time chat room monitoring and interaction -- Intelligent message response generation -- Context-aware conversation handling -- Comprehensive message history tracking -- Multi-room support with configurable polling - -## Installation - -1. Install the plugin package: - -```bash -pnpm install @elizaos/plugin-echochambers -``` - -OR copy the plugin code into your eliza project node_modules directory. (node_modules\@elizaos) - -2. Import and register the plugin in your `character.ts` configuration: - -```typescript -import { Character, ModelProviderName, defaultCharacter } from "@elizaos/core"; -import { echoChambersPlugin } from "@elizaos/plugin-echochambers"; - -export const character: Character = { - ...defaultCharacter, - name: "Eliza", - plugins: [echoChambersPlugin], - clients: [], - modelProvider: ModelProviderName.OPENAI, - settings: { - secrets: {}, - voice: {}, - model: "gpt-4", - }, - system: "Roleplay and generate interesting responses on behalf of Eliza.", - bio: [...], - lore: [...], - messageExamples: [...], - postExamples: [...], - adjectives: ["funny", "intelligent", "academic", "insightful"], - people: [], - topics: [...], - style: {...}, -}; -``` - -## Configuration - -The plugin requires the following environment variables: - -```plaintext -# Required Settings -ECHOCHAMBERS_API_URL="http://127.0.0.1:3333" # Base URL for the EchoChambers API -ECHOCHAMBERS_API_KEY="your-api-key" # API key for authentication - -# Optional Settings -ECHOCHAMBERS_USERNAME="eliza" # Custom username for the agent -ECHOCHAMBERS_DEFAULT_ROOM="general" # Default room to join -ECHOCHAMBERS_POLL_INTERVAL="60" # Polling interval in seconds -ECHOCHAMBERS_MAX_MESSAGES="10" # Maximum messages in conversation thread -``` - -## Usage Instructions - -### Starting the Plugin - -The plugin will automatically initialize when your character configuration includes it. It handles: - -1. Room Connection Management - - - Automatic joining of default room - - Reconnection handling with backoff - - Multi-room monitoring - -2. Message Processing - - - Context-aware response generation - - Thread management - - History tracking - -3. Response Behavior - The plugin intelligently decides when to respond based on: - - Direct mentions or questions - - Topic relevance to agent's expertise - - Conversation context and timing - - Message substance and engagement level - -## Common Issues & Troubleshooting - -1. **Connection Issues** - - - Verify API URL is correct and accessible - - Ensure API key is valid - - Check network connectivity - -2. **Message Processing** - - Verify environment variables are properly set - - Check log files for error messages - - Ensure proper character configuration - -## Security Best Practices - -1. **API Key Management** - - - Store API keys securely using environment variables - - Never expose keys in code or logs - - Rotate keys periodically - -2. **Connection Security** - - Use HTTPS for production environments - - Implement proper error handling - - Monitor for unusual activity - -## Development Guide - -### Setting Up Development Environment - -1. Clone the repository -2. Install dependencies: - -```bash -pnpm install -``` - -3. Build the plugin: - -```bash -pnpm run build -``` - -4. Run in development mode: - -```bash -pnpm run dev -``` - -## API Reference - -### Core Components - -1. **EchoChamberClient** - - - Handles room connections - - Manages message sending/receiving - - Implements retry logic - -2. **InteractionClient** - - Processes messages - - Generates responses - - Maintains conversation context - -## Future Enhancements - -- Enhanced message filtering -- Custom response templates -- Advanced room management features -- Improved context handling -- Extended retry mechanisms - -## Contributing - -Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information. - -## Credits - -This plugin integrates with and builds upon several key technologies: - -- [Socket.IO](https://socket.io/): Real-time bidirectional event-based communication -- [Express](https://expressjs.com/): Web application framework -- [Redis](https://redis.io/): In-memory data structure store -- [js-tiktoken](https://github.com/dqbd/tiktoken): Token counting for message handling -- [node-cache](https://www.npmjs.com/package/node-cache): In-memory caching - -Special thanks to: - -- The Socket.IO team for real-time communication infrastructure -- The Express.js maintainers -- The Redis development team -- The chat room infrastructure maintainers -- The Eliza community for their contributions and feedback - -For more information about chat capabilities: - -- [Socket.IO Documentation](https://socket.io/docs/v4/) -- [Express Documentation](https://expressjs.com/en/4x/api.html) -- [Redis Pub/Sub](https://redis.io/docs/manual/pubsub/) -- [Real-time Chat Best Practices](https://socket.io/docs/v4/rooms/) - -## License - -This plugin is part of the Eliza project. See the main project repository for license information. diff --git a/packages/plugin-echochambers/biome.json b/packages/plugin-echochambers/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-echochambers/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-echochambers/package.json b/packages/plugin-echochambers/package.json deleted file mode 100644 index 44b586f82f8db..0000000000000 --- a/packages/plugin-echochambers/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "@elizaos/plugin-echochambers", - "version": "0.25.6-alpha.1", - "type": "module", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "dist/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "@elizaos/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - } - }, - "files": [ - "dist" - ], - "dependencies": { - "@elizaos/core": "workspace:*", - "@elizaos/plugin-node": "workspace:*" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch", - "clean": "rm -rf dist", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write ." - }, - "devDependencies": { - "@biomejs/biome": "1.9.4" - } -} diff --git a/packages/plugin-echochambers/src/echoChamberClient.ts b/packages/plugin-echochambers/src/echoChamberClient.ts deleted file mode 100644 index 02f8ccc5f8bc0..0000000000000 --- a/packages/plugin-echochambers/src/echoChamberClient.ts +++ /dev/null @@ -1,218 +0,0 @@ -import { elizaLogger, type IAgentRuntime } from "@elizaos/core"; -import type { - ChatMessage, - ChatRoom, - EchoChamberConfig, - ModelInfo, - ListRoomsResponse, - RoomHistoryResponse, - MessageResponse, -} from "./types"; - -const MAX_RETRIES = 3; - -const RETRY_DELAY = 5000; - -export class EchoChamberClient { - private runtime: IAgentRuntime; - private config: EchoChamberConfig; - private apiUrl: string; - private modelInfo: ModelInfo; - private watchedRooms: Set = new Set(); - - constructor(runtime: IAgentRuntime, config: EchoChamberConfig) { - this.runtime = runtime; - this.config = config; - this.apiUrl = `${config.apiUrl}/api/rooms`; - this.modelInfo = { - username: config.username || `agent-${runtime.agentId}`, - model: config.model || runtime.modelProvider, - }; - } - - public getUsername(): string { - return this.modelInfo.username; - } - - public getModelInfo(): ModelInfo { - return { ...this.modelInfo }; - } - - public getConfig(): EchoChamberConfig { - return { ...this.config }; - } - - private getAuthHeaders(): { [key: string]: string } { - return { - "Content-Type": "application/json", - "x-api-key": this.config.apiKey, - }; - } - - public async addWatchedRoom(roomId: string): Promise { - try { - const rooms = await this.listRooms(); - const room = rooms.find((r) => r.id === roomId); - - if (!room) { - throw new Error(`Room ${roomId} not found`); - } - - this.watchedRooms.add(roomId); - elizaLogger.success(`Now watching room: ${room.name}`); - } catch (error) { - elizaLogger.error("Error adding watched room:", error); - throw error; - } - } - - public removeWatchedRoom(roomId: string): void { - this.watchedRooms.delete(roomId); - elizaLogger.success(`Stopped watching room: ${roomId}`); - } - - public getWatchedRooms(): string[] { - return Array.from(this.watchedRooms); - } - - private async retryOperation( - operation: () => Promise, - retries: number = MAX_RETRIES - ): Promise { - for (let i = 0; i < retries; i++) { - try { - return await operation(); - } catch (error) { - if (i === retries - 1) throw error; - const delay = RETRY_DELAY * (2 ** i); - elizaLogger.warn(`Retrying operation in ${delay}ms...`); - await new Promise((resolve) => setTimeout(resolve, delay)); - } - } - throw new Error("Max retries exceeded"); - } - - public async start(): Promise { - elizaLogger.log("🚀 Starting EchoChamber client..."); - try { - await this.retryOperation(() => this.listRooms()); - - for (const room of this.config.rooms) { - await this.addWatchedRoom(room); - } - - elizaLogger.success( - `✅ EchoChamber client started for ${this.modelInfo.username}` - ); - elizaLogger.info( - `Watching rooms: ${Array.from(this.watchedRooms).join(", ")}` - ); - } catch (error) { - elizaLogger.error("❌ Failed to start EchoChamber client:", error); - throw error; - } - } - - public async stop(): Promise { - this.watchedRooms.clear(); - elizaLogger.log("Stopping EchoChamber client..."); - } - - public async listRooms(tags?: string[]): Promise { - try { - const url = new URL(this.apiUrl); - if (tags?.length) { - url.searchParams.append("tags", tags.join(",")); - } - - const response = await fetch(url.toString()); - if (!response.ok) { - throw new Error(`Failed to list rooms: ${response.statusText}`); - } - - const data = (await response.json()) as ListRoomsResponse; - return data.rooms; - } catch (error) { - elizaLogger.error("Error listing rooms:", error); - throw error; - } - } - - public async getRoomHistory(roomId: string): Promise { - return this.retryOperation(async () => { - const response = await fetch(`${this.apiUrl}/${roomId}/history`); - if (!response.ok) { - throw new Error( - `Failed to get room history: ${response.statusText}` - ); - } - - const data = (await response.json()) as RoomHistoryResponse; - return data.messages; - }); - } - - public async sendMessage( - roomId: string, - content: string - ): Promise { - return this.retryOperation(async () => { - const response = await fetch(`${this.apiUrl}/${roomId}/message`, { - method: "POST", - headers: this.getAuthHeaders(), - body: JSON.stringify({ - content, - sender: this.modelInfo, - }), - }); - - if (!response.ok) { - throw new Error( - `Failed to send message: ${response.statusText}` - ); - } - - const data = (await response.json()) as MessageResponse; - return data.message; - }); - } - - public async shouldInitiateConversation(room: ChatRoom): Promise { - try { - const history = await this.getRoomHistory(room.id); - if (!history?.length) return true; // Empty room is good to start - - const recentMessages = history - .filter((msg) => msg != null) // Filter out null messages - .sort( - (a, b) => - new Date(b.timestamp).getTime() - - new Date(a.timestamp).getTime() - ); - - if (!recentMessages.length) return true; // No valid messages - - const lastMessageTime = new Date( - recentMessages[0].timestamp - ).getTime(); - const timeSinceLastMessage = Date.now() - lastMessageTime; - - const quietPeriodSeconds = Number( - this.runtime.getSetting("ECHOCHAMBERS_QUIET_PERIOD") || 300 // 5 minutes in seconds - ); - const quietPeriod = quietPeriodSeconds * 1000; // Convert to milliseconds - - if (timeSinceLastMessage < quietPeriod) { - elizaLogger.debug( - `Room ${room.name} active recently, skipping` - ); - return false; - } - - return true; - } catch (error) { - elizaLogger.error(`Error checking conversation state: ${error}`); - return false; - } - } -} diff --git a/packages/plugin-echochambers/src/environment.ts b/packages/plugin-echochambers/src/environment.ts deleted file mode 100644 index adc26635c16a8..0000000000000 --- a/packages/plugin-echochambers/src/environment.ts +++ /dev/null @@ -1,56 +0,0 @@ -import { type IAgentRuntime, elizaLogger } from "@elizaos/core"; - -export async function validateEchoChamberConfig( - runtime: IAgentRuntime -): Promise { - const apiUrl = runtime.getSetting("ECHOCHAMBERS_API_URL"); - const apiKey = runtime.getSetting("ECHOCHAMBERS_API_KEY"); - - if (!apiUrl) { - elizaLogger.error( - "ECHOCHAMBERS_API_URL is required. Please set it in your environment variables." - ); - throw new Error("ECHOCHAMBERS_API_URL is required"); - } - - if (!apiKey) { - elizaLogger.error( - "ECHOCHAMBERS_API_KEY is required. Please set it in your environment variables." - ); - throw new Error("ECHOCHAMBERS_API_KEY is required"); - } - - // Validate API URL format - try { - new URL(apiUrl); - } catch { - elizaLogger.error( - `Invalid ECHOCHAMBERS_API_URL format: ${apiUrl}. Please provide a valid URL.` - ); - throw new Error("Invalid ECHOCHAMBERS_API_URL format"); - } - - // Optional settings with defaults - const username = - runtime.getSetting("ECHOCHAMBERS_USERNAME") || - `agent-${runtime.agentId}`; - // Change from DEFAULT_ROOM to ROOMS - const rooms = runtime.getSetting("ECHOCHAMBERS_ROOMS")?.split(",").map(r => r.trim()) || ["general"]; - - const pollInterval = Number( - runtime.getSetting("ECHOCHAMBERS_POLL_INTERVAL") || 120 - ); - - if (Number.isNaN(pollInterval) || pollInterval < 1) { - elizaLogger.error( - "ECHOCHAMBERS_POLL_INTERVAL must be a positive number in seconds" - ); - throw new Error("Invalid ECHOCHAMBERS_POLL_INTERVAL"); - } - - elizaLogger.log("EchoChambers configuration validated successfully"); - elizaLogger.log(`API URL: ${apiUrl}`); - elizaLogger.log(`Username: ${username}`); - elizaLogger.log(`Watching Rooms: ${rooms.join(", ")}`); - elizaLogger.log(`Poll Interval: ${pollInterval}s`); -} diff --git a/packages/plugin-echochambers/src/index.ts b/packages/plugin-echochambers/src/index.ts deleted file mode 100644 index f69d702943aa1..0000000000000 --- a/packages/plugin-echochambers/src/index.ts +++ /dev/null @@ -1,94 +0,0 @@ -import { elizaLogger, type Client, type IAgentRuntime, type Plugin } from "@elizaos/core"; -import { EchoChamberClient } from "./echoChamberClient"; -import { InteractionClient } from "./interactions"; -import type { EchoChamberConfig } from "./types"; -import { validateEchoChamberConfig } from "./environment"; - -export const EchoChamberClientInterface: Client = { - async start(runtime: IAgentRuntime) { - try { - // Validate configuration before starting - await validateEchoChamberConfig(runtime); - - const apiUrl = runtime.getSetting("ECHOCHAMBERS_API_URL"); - const apiKey = runtime.getSetting("ECHOCHAMBERS_API_KEY"); - - if (!apiKey || !apiUrl) { - throw new Error( - "ECHOCHAMBERS_API_KEY/ECHOCHAMBERS_API_URL is required" - ); - } - - const config: EchoChamberConfig = { - apiUrl, - apiKey, - username: - runtime.getSetting("ECHOCHAMBERS_USERNAME") || - `agent-${runtime.agentId}`, - model: runtime.modelProvider, - rooms: runtime - .getSetting("ECHOCHAMBERS_ROOMS") - ?.split(",") - .map((r) => r.trim()) || ["general"], - }; - - elizaLogger.log("Starting EchoChambers client..."); - - // Initialize the API client - const client = new EchoChamberClient(runtime, config); - await client.start(); - - // Initialize the interaction handler - const interactionClient = new InteractionClient(client, runtime); - await interactionClient.start(); - - elizaLogger.success( - `✅ EchoChambers client successfully started for character ${runtime.character.name}` - ); - - return { client, interactionClient }; - } catch (error) { - elizaLogger.error("Failed to start EchoChambers client:", error); - throw error; - } - }, - - async stop(runtime: IAgentRuntime) { - try { - elizaLogger.warn("Stopping EchoChambers client..."); - - // Get client instances if they exist - const clients = (runtime as any).clients?.filter( - (c: any) => - c instanceof EchoChamberClient || - c instanceof InteractionClient - ); - - for (const client of clients) { - await client.stop(); - } - - elizaLogger.success("EchoChambers client stopped successfully"); - } catch (error) { - elizaLogger.error("Error stopping EchoChambers client:", error); - throw error; - } - }, -}; - -export const echoChambersPlugin: Plugin = { - name: "echochambers", - description: - "Plugin for interacting with EchoChambers API to enable multi-agent communication", - actions: [], // No custom actions needed - core functionality handled by client - evaluators: [], // No custom evaluators needed - providers: [], // No custom providers needed - clients: [EchoChamberClientInterface], -}; - -export default echoChambersPlugin; - -// Export types and classes -export * from "./types"; -export { EchoChamberClient } from "./echoChamberClient"; -export { InteractionClient } from "./interactions"; diff --git a/packages/plugin-echochambers/src/interactions.ts b/packages/plugin-echochambers/src/interactions.ts deleted file mode 100644 index f97125052cd46..0000000000000 --- a/packages/plugin-echochambers/src/interactions.ts +++ /dev/null @@ -1,640 +0,0 @@ -import { - composeContext, - generateMessageResponse, - generateShouldRespond, - messageCompletionFooter, - shouldRespondFooter, - type Content, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - stringToUuid, - elizaLogger, - getEmbeddingZeroVector, -} from "@elizaos/core"; -import type { EchoChamberClient } from "./echoChamberClient"; -import type { ChatMessage, ChatRoom } from "./types"; - -function createMessageTemplate(currentRoom: string, roomTopic: string) { - return ` -# About {{agentName}}: -{{bio}} -{{lore}} -{{knowledge}} - -Current Room: ${currentRoom} -Room Topic: ${roomTopic} - -{{messageDirections}} - -Recent conversation history: -{{recentMessages}} - -Thread Context: -{{formattedConversation}} - -# Task: Generate a response in the voice and style of {{agentName}} while: -1. Staying relevant to the room's topic -2. Maintaining conversation context -3. Being helpful but not overly talkative -4. Responding naturally to direct questions or mentions -5. Contributing meaningfully to ongoing discussions - -Remember: -- Keep responses concise and focused -- Stay on topic for the current room -- Don't repeat information already shared -- Be natural and conversational - -${messageCompletionFooter}`; -} - -function createShouldRespondTemplate(currentRoom: string, roomTopic: string) { - return ` -# About {{agentName}}: -{{bio}} -{{knowledge}} - -Current Room: ${currentRoom} -Room Topic: ${roomTopic} - -Response options are [RESPOND], [IGNORE] and [STOP]. - -{{agentName}} should: -- RESPOND when: - * Directly mentioned or asked a question - * Can contribute relevant expertise to the discussion - * Topic aligns with their knowledge and background - * Conversation is active and engaging - -- IGNORE when: - * Message is not relevant to their expertise - * Already responded recently without new information to add - * Conversation has moved to a different topic - * Message is too short or lacks substance - * Other participants are handling the discussion well - -- STOP when: - * Asked to stop participating - * Conversation has concluded - * Discussion has completely diverged from their expertise - * Room topic has changed significantly - -Recent messages: -{{recentMessages}} - -Thread Context: -{{formattedConversation}} - -# Task: Choose whether {{agentName}} should respond to the last message. -Consider: -1. Message relevance to {{agentName}}'s expertise -2. Current conversation context -3. Time since last response -4. Value of potential contribution - -${shouldRespondFooter}`; -} - -function createConversationStarterTemplate( - currentRoom: string, - roomTopic: string -) { - return ` -# Room Context: -Room: ${currentRoom} -Topic: ${roomTopic} - -# About {{agentName}}: -{{bio}} -{{lore}} -{{knowledge}} - -# Task: Generate a conversation starter that: -1. Is specifically relevant to the room's topic -2. Draws from {{agentName}}'s knowledge -3. Encourages discussion and engagement -4. Is natural and conversational - -Keep it concise and focused on the room's topic. -${messageCompletionFooter}`; -} - -export class InteractionClient { - private client: EchoChamberClient; - private runtime: IAgentRuntime; - private lastCheckedTimestamps: Map = new Map(); - private lastResponseTimes: Map = new Map(); - private messageThreads: Map = new Map(); - private messageHistory: Map< - string, - { message: ChatMessage; response: ChatMessage | null }[] - > = new Map(); - private pollInterval: NodeJS.Timeout | null = null; - private conversationStarterInterval: NodeJS.Timeout | null = null; - - constructor(client: EchoChamberClient, runtime: IAgentRuntime) { - this.client = client; - this.runtime = runtime; - } - - async start() { - const pollInterval = Number( - this.runtime.getSetting("ECHOCHAMBERS_POLL_INTERVAL") || 60 - ); - - const conversationStarterInterval = Number( - this.runtime.getSetting( - "ECHOCHAMBERS_CONVERSATION_STARTER_INTERVAL" - ) || 300 - ); - - // Reactive message handling loop - const handleInteractionsLoop = () => { - this.handleInteractions(); - this.pollInterval = setTimeout( - handleInteractionsLoop, - pollInterval * 1000 - ); - }; - - // Proactive conversation loop - const conversationStarterLoop = () => { - this.checkForDeadRooms(); - this.conversationStarterInterval = setTimeout( - conversationStarterLoop, - conversationStarterInterval * 1000 - ); - }; - - handleInteractionsLoop(); - conversationStarterLoop(); - } - - async stop() { - if (this.pollInterval) { - clearTimeout(this.pollInterval); - this.pollInterval = null; - } - - if (this.conversationStarterInterval) { - clearTimeout(this.conversationStarterInterval); - this.conversationStarterInterval = null; - } - } - - private async buildMessageThread( - message: ChatMessage, - messages: ChatMessage[] - ): Promise { - const thread: ChatMessage[] = []; - const maxThreadLength = Number( - this.runtime.getSetting("ECHOCHAMBERS_MAX_MESSAGES") || 10 - ); - - // Start with the current message - thread.push(message); - - // Get recent messages in the same room, ordered by timestamp - const roomMessages = messages - .filter((msg) => msg.roomId === message.roomId) - .sort( - (a, b) => - new Date(b.timestamp).getTime() - - new Date(a.timestamp).getTime() - ); - - // Add recent messages to provide context - for (const msg of roomMessages) { - if (thread.length >= maxThreadLength) break; - if (msg.id !== message.id) { - thread.unshift(msg); - } - } - - return thread; - } - - private shouldProcessMessage( - message: ChatMessage, - room: { topic: string } - ): boolean { - const modelInfo = this.client.getModelInfo(); - - // Don't process own messages - if (message.sender.username === modelInfo.username) { - return false; - } - - // Check if we've processed this message before - const lastChecked = - this.lastCheckedTimestamps.get(message.roomId) || "0"; - if (message.timestamp <= lastChecked) { - return false; - } - - // Check rate limiting for responses - const lastResponseTime = - this.lastResponseTimes.get(message.roomId) || 0; - const minTimeBetweenResponses = 30000; // 30 seconds - if (Date.now() - lastResponseTime < minTimeBetweenResponses) { - return false; - } - - // Check if message mentions the agent - const isMentioned = message.content - .toLowerCase() - .includes(`${modelInfo.username.toLowerCase()}`); - - // Check if message is relevant to room topic - const isRelevantToTopic = - room.topic && - message.content.toLowerCase().includes(room.topic.toLowerCase()); - - // Always process if mentioned, otherwise check relevance - return isMentioned || isRelevantToTopic; - } - - private async handleInteractions() { - elizaLogger.log("Checking EchoChambers interactions"); - - try { - // Get all watched rooms from the client - const watchedRooms = this.client.getWatchedRooms(); - const rooms = await this.client.listRooms(); - - for (const room of rooms) { - // Only process messages from watched rooms - if (!watchedRooms.includes(room.id)) { - continue; - } - - const messages = await this.client.getRoomHistory(room.id); - this.messageThreads.set(room.id, messages); - - // Get only the most recent message that we should process - const latestMessages = messages - .filter((msg) => this.shouldProcessMessage(msg, room)) - .sort( - (a, b) => - new Date(b.timestamp).getTime() - - new Date(a.timestamp).getTime() - ); - - if (latestMessages.length > 0) { - const latestMessage = latestMessages[0]; - await this.handleMessage(latestMessage, room.topic); - - // Update history - const roomHistory = this.messageHistory.get(room.id) || []; - roomHistory.push({ - message: latestMessage, - response: null, - }); - this.messageHistory.set(room.id, roomHistory); - - // Update last checked timestamp - if ( - latestMessage.timestamp > - (this.lastCheckedTimestamps.get(room.id) || "0") - ) { - this.lastCheckedTimestamps.set( - room.id, - latestMessage.timestamp - ); - } - } - } - - elizaLogger.log("Finished checking EchoChambers interactions"); - } catch (error) { - elizaLogger.error( - "Error handling EchoChambers interactions:", - error - ); - } - } - - private async handleMessage(message: ChatMessage, roomTopic: string) { - try { - const content = `${message.content?.substring(0, 50)}...`; // First 50 chars - elizaLogger.debug("Processing message:", { - id: message.id, - room: message.roomId, - sender: message?.sender?.username, - content: `${content}`, - }); - - const roomId = stringToUuid(message.roomId); - const userId = stringToUuid(message.sender.username); - - elizaLogger.debug("Converted IDs:", { roomId, userId }); - - // Ensure connection exists - await this.runtime.ensureConnection( - userId, - roomId, - message.sender.username, - message.sender.username, - "echochambers" - ); - - // Build message thread for context - const thread = await this.buildMessageThread( - message, - this.messageThreads.get(message.roomId) || [] - ); - - // Create memory object - const memory: Memory = { - id: stringToUuid(message.id), - userId, - agentId: this.runtime.agentId, - roomId, - content: { - text: message.content, - source: "echochambers", - thread: thread.map((msg) => ({ - text: msg.content, - sender: msg.sender.username, - timestamp: msg.timestamp, - })), - }, - createdAt: new Date(message.timestamp).getTime(), - embedding: getEmbeddingZeroVector(), - }; - - // Check if we've already processed this message - const existing = await this.runtime.messageManager.getMemoryById( - memory.id - ); - if (existing) { - elizaLogger.log( - `Already processed message ${message.id}, skipping` - ); - return; - } - - // Save the message to memory - await this.runtime.messageManager.createMemory(memory); - - // Compose state with thread context - let state = await this.runtime.composeState(memory); - state = await this.runtime.updateRecentMessageState(state); - - // Decide whether to respond - const shouldRespondContext = composeContext({ - state, - template: - this.runtime.character.templates?.shouldRespondTemplate || - createShouldRespondTemplate(message.roomId, roomTopic), - }); - - const shouldRespond = await generateShouldRespond({ - runtime: this.runtime, - context: shouldRespondContext, - modelClass: ModelClass.SMALL, - }); - - if (shouldRespond !== "RESPOND") { - elizaLogger.log( - `Not responding to message ${message.id}: ${shouldRespond}` - ); - return; - } - - // Generate response - const responseContext = composeContext({ - state, - template: - this.runtime.character.templates?.messageHandlerTemplate || - createMessageTemplate(message.roomId, roomTopic), - }); - - const response = await generateMessageResponse({ - runtime: this.runtime, - context: responseContext, - modelClass: ModelClass.LARGE, - }); - - if (!response || !response.text) { - elizaLogger.log("No response generated"); - return; - } - - // Send response - const callback: HandlerCallback = async (content: Content) => { - const sentMessage = await this.client.sendMessage( - message.roomId, - content.text - ); - - // Update last response time - this.lastResponseTimes.set(message.roomId, Date.now()); - - // Update history with our response - const roomHistory = - this.messageHistory.get(message.roomId) || []; - const lastEntry = roomHistory[roomHistory.length - 1]; - if (lastEntry && lastEntry.message.id === message.id) { - lastEntry.response = sentMessage; - } - - const responseMemory: Memory = { - id: stringToUuid(sentMessage.id), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - roomId, - content: { - text: sentMessage.content, - source: "echochambers", - action: content.action, - thread: thread.map((msg) => ({ - text: msg.content, - sender: msg.sender.username, - timestamp: msg.timestamp, - })), - }, - createdAt: new Date(sentMessage.timestamp).getTime(), - embedding: getEmbeddingZeroVector(), - }; - - await this.runtime.messageManager.createMemory(responseMemory); - return [responseMemory]; - }; - - // Send the response and process any resulting actions - const responseMessages = await callback(response); - state = await this.runtime.updateRecentMessageState(state); - await this.runtime.processActions( - memory, - responseMessages, - state, - callback - ); - await this.runtime.evaluate(memory, state, true); - } catch (error) { - elizaLogger.error("Error handling message:", error); - elizaLogger.debug("Message that caused error:", { - message, - roomTopic, - }); - } - } - - private async checkForDeadRooms() { - try { - const watchedRooms = this.client.getWatchedRooms(); - elizaLogger.debug( - "Starting dead room check. Watched rooms:", - watchedRooms - ); - - const rooms = await this.client.listRooms(); - elizaLogger.debug( - "Available rooms:", - rooms.map((r) => ({ id: r.id, name: r.name })) - ); - - for (const roomId of watchedRooms) { - try { - elizaLogger.debug(`Checking room ${roomId}`); - - const room = rooms.find((r) => r.id === roomId); - if (!room) { - elizaLogger.debug(`Room ${roomId} not found, skipping`); - continue; - } - - // Log room details - elizaLogger.debug("Room details:", { - id: room.id, - name: room.name, - topic: room.topic, - }); - - // Random check with logging - const randomCheck = Math.random(); - elizaLogger.debug( - `Random check for ${room.name}: ${randomCheck}` - ); - - if (randomCheck > 0.8) { - elizaLogger.debug( - `Checking conversation state for ${room.name}` - ); - - const shouldInitiate = - await this.client.shouldInitiateConversation(room); - elizaLogger.debug( - `Should initiate conversation in ${room.name}:`, - shouldInitiate - ); - - if (shouldInitiate) { - elizaLogger.debug( - `Starting conversation initiation in ${room.name}` - ); - await this.initiateConversation(room); - elizaLogger.debug( - `Completed conversation initiation in ${room.name}` - ); - } - } - } catch (roomError: unknown) { - // Log individual room errors without stopping the loop - if (roomError instanceof Error) { - elizaLogger.error(`Error processing room ${roomId}:`, { - error: roomError.message, - stack: roomError.stack, - }); - } else { - elizaLogger.error(`Error processing room ${roomId}:`, roomError); - } - } - } - } catch (error: unknown) { - if (error instanceof Error) { - elizaLogger.error( - "Error in checkForDeadRooms:", - error.message || "Unknown error" - ); - elizaLogger.debug("Full error details:", { - error, - stack: error.stack, - type: typeof error, - }); - } else { - elizaLogger.error("Error in checkForDeadRooms:", String(error)); - } - } - } - - private async initiateConversation(room: ChatRoom) { - try { - elizaLogger.debug(`Starting initiateConversation for ${room.name}`); - - // Create a dummy memory instead of passing null - const dummyMemory: Memory = { - id: stringToUuid("conversation-starter"), - userId: this.runtime.agentId, - agentId: this.runtime.agentId, - roomId: stringToUuid(room.id), - content: { - text: "", - source: "echochambers", - thread: [], - }, - createdAt: Date.now(), - embedding: getEmbeddingZeroVector(), - }; - - const state = await this.runtime.composeState(dummyMemory); - elizaLogger.debug("Composed state for conversation"); - - const context = composeContext({ - state, - template: createConversationStarterTemplate( - room.name, - room.topic - ), - }); - elizaLogger.debug("Created conversation context"); - - const content = await generateMessageResponse({ - runtime: this.runtime, - context, - modelClass: ModelClass.SMALL, - }); - elizaLogger.debug("Generated response content:", { - hasContent: !!content, - textLength: content?.text?.length, - }); - - if (content?.text) { - elizaLogger.debug(`Sending message to ${room.name}`); - await this.client.sendMessage(room.id, content.text); - elizaLogger.info( - `Started conversation in ${room.name} (Topic: ${room.topic})` - ); - } - } catch (error: unknown) { - if (error instanceof Error) { - elizaLogger.error( - `Error in initiateConversation for ${room.name}:`, - { - error: error.message, - stack: error.stack, - } - ); - } else { - elizaLogger.error( - `Error in initiateConversation for ${room.name}:`, - String(error) - ); - } - throw error; // Re-throw to be caught by parent - } - } -} diff --git a/packages/plugin-echochambers/src/types.ts b/packages/plugin-echochambers/src/types.ts deleted file mode 100644 index b8392c75b771f..0000000000000 --- a/packages/plugin-echochambers/src/types.ts +++ /dev/null @@ -1,68 +0,0 @@ -export interface ModelInfo { - username: string; // Unique username for the model/agent - model: string; // Type/name of the model being used -} - -export interface ChatMessage { - id: string; // Unique message identifier - content: string; // Message content/text - sender: ModelInfo; // Information about who sent the message - timestamp: string; // ISO timestamp of when message was sent - roomId: string; // ID of the room this message belongs to -} - -export interface ChatRoom { - id: string; // Unique room identifier - name: string; // Display name of the room - topic: string; // Room's current topic/description - tags: string[]; // Tags associated with the room for categorization - participants: ModelInfo[]; // List of current room participants - createdAt: string; // ISO timestamp of room creation - messageCount: number; // Total number of messages in the room -} - -export interface EchoChamberConfig { - apiUrl: string; // Base URL for the EchoChambers API - apiKey: string; // Required API key for authenticated endpoints - rooms: string[]; // list of rooms to listen on startup - username?: string; // Optional custom username (defaults to agent-{agentId}) - model?: string; // Optional model name (defaults to runtime.modelProvider) -} - -export interface ListRoomsResponse { - rooms: ChatRoom[]; -} - -export interface RoomHistoryResponse { - messages: ChatMessage[]; -} - -export interface MessageResponse { - message: ChatMessage; -} - -export interface CreateRoomResponse { - room: ChatRoom; -} - -export interface ClearMessagesResponse { - success: boolean; - message: string; -} - -export enum RoomEvent { - MESSAGE_CREATED = "message_created", - ROOM_CREATED = "room_created", - ROOM_UPDATED = "room_updated", - ROOM_JOINED = "room_joined", - ROOM_LEFT = "room_left", -} - -export interface MessageTransformer { - transformIncoming(content: string): Promise; - transformOutgoing?(content: string): Promise; -} - -export interface ContentModerator { - validateContent(content: string): Promise; -} diff --git a/packages/plugin-echochambers/tsconfig.json b/packages/plugin-echochambers/tsconfig.json deleted file mode 100644 index 2f2bfd6e53d3a..0000000000000 --- a/packages/plugin-echochambers/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "./src" - }, - "include": ["src"] -} diff --git a/packages/plugin-echochambers/tsup.config.ts b/packages/plugin-echochambers/tsup.config.ts deleted file mode 100644 index a47c9eb64b0e0..0000000000000 --- a/packages/plugin-echochambers/tsup.config.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - ], -}); diff --git a/packages/plugin-edwin/README.md b/packages/plugin-edwin/README.md deleted file mode 100644 index 193dd0aa79cee..0000000000000 --- a/packages/plugin-edwin/README.md +++ /dev/null @@ -1,67 +0,0 @@ -# @elizaos/plugin-edwin - -Edwin plugin for Eliza that enables interaction with Edwin tools for DeFi operations. - -## About - -See full info and docs at [Edwin docs](https://docs.edwin.finance). -## Setup - -1. Install dependencies: - -```bash -pnpm install -``` - -2. Configure environment variables for chains you want to support: - -```env -EVM_PRIVATE_KEY= -SOLANA_PRIVATE_KEY= -``` - -## Available Tools - -The plugin provides access to the following Edwin tools: - -- supply -- withdraw -- stake -- addLiquidity -- removeLiquidity - -## Usage Examples - -1. Supply on AAVE: - -``` -Supply 100 USDC to AAVE -``` - -2. Add liquidity on Meteora: - -``` -Find a meteora pool with high liquidity and add to td 10 USDC and 0.01 SOL. -``` - -## Development - -1. Build the plugin: - -```bash -pnpm build -``` - -2. Run in development mode: - -```bash -pnpm dev -``` - -## Dependencies - -- edwin-sdk - -## License - -MIT diff --git a/packages/plugin-edwin/package.json b/packages/plugin-edwin/package.json deleted file mode 100644 index 6f3b470897dad..0000000000000 --- a/packages/plugin-edwin/package.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "name": "@elizaos/plugin-edwin", - "version": "0.25.6-alpha.1", - "description": "Edwin plugin for elizaos agent", - "main": "dist/index.js", - "type": "module", - "types": "dist/index.d.ts", - "dependencies": { - "@elizaos/core": "workspace:*", - "edwin-sdk": "0.3.4", - "tsup": "8.3.5" - }, - "scripts": { - "build": "tsup --format esm --dts", - "dev": "tsup --format esm --dts --watch" - } -} diff --git a/packages/plugin-edwin/src/actions.ts b/packages/plugin-edwin/src/actions.ts deleted file mode 100644 index bd5c067cad518..0000000000000 --- a/packages/plugin-edwin/src/actions.ts +++ /dev/null @@ -1,138 +0,0 @@ -import { - type Action, - generateText, - type HandlerCallback, - type IAgentRuntime, - type Memory, - ModelClass, - type State, - composeContext, - generateObjectDeprecated, -} from "@elizaos/core"; - -import { Edwin, EdwinAction } from "edwin-sdk"; - -type GetEdwinActionsParams = { - getClient: () => Promise; -}; - -/** - * Get all edwin actions - */ -export async function getEdwinActions({ - getClient, -}: GetEdwinActionsParams): Promise { - const edwin = await getClient(); - const edwinActions = await edwin.getActions(); - const actions = edwinActions.map((action: EdwinAction) => ({ - name: action.name.toUpperCase(), - description: action.description, - similes: [], - validate: async () => true, - handler: async ( - runtime: IAgentRuntime, - message: Memory, - state: State | undefined, - options?: Record, - callback?: HandlerCallback - ): Promise => { - try { - const client = await getClient(); - if (!state) { - state = (await runtime.composeState(message)) as State; - } else { - state = await runtime.updateRecentMessageState(state); - } - const parameterContext = composeContext({ - state, - template: action.template, - }); - const parameters = await generateObjectDeprecated({ - runtime, - context: parameterContext, - modelClass: ModelClass.LARGE, - }); - const result = await executeAction(action, parameters, client); - const responseContext = composeResponseContext( - action, - result, - state - ); - const response = await generateResponse( - runtime, - responseContext - ); - callback?.({ text: response, content: result }); - return true; - } catch (error) { - const errorMessage = - error instanceof Error ? error.message : String(error); - callback?.({ - text: `Error executing action ${action.name}: ${errorMessage}`, - content: { error: errorMessage }, - }); - return false; - } - }, - examples: [], - })); - return actions; -} - -async function executeAction( - action: EdwinAction, - parameters: any, - edwin: Edwin -): Promise { - const result = await action.execute(parameters); - return result; -} - -function composeResponseContext( - action: EdwinAction, - result: unknown, - state: State -): string { - const responseTemplate = ` -# Action Examples -{{actionExamples}} - -# Knowledge -{{knowledge}} - -# Task: Generate dialog and actions for the character {{agentName}}. -About {{agentName}}: -{{bio}} -{{lore}} - -{{providers}} - -{{attachments}} - -# Capabilities -Note that {{agentName}} is capable of reading/seeing/hearing various forms of media, including images, videos, audio, plaintext and PDFs. Recent attachments have been included above under the "Attachments" section. - -The action "${action.name}" was executed successfully. -Here is the result: -${JSON.stringify(result)} - -{{actions}} - -Respond to the message knowing that the action was successful and these were the previous messages: -{{recentMessages}} -`; - const context = composeContext({ state, template: responseTemplate }); - return context; -} - -async function generateResponse( - runtime: IAgentRuntime, - context: string -): Promise { - const response = await generateText({ - runtime, - context, - modelClass: ModelClass.LARGE, - }); - return response; -} diff --git a/packages/plugin-edwin/src/index.ts b/packages/plugin-edwin/src/index.ts deleted file mode 100644 index 6fd83c782c112..0000000000000 --- a/packages/plugin-edwin/src/index.ts +++ /dev/null @@ -1,28 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { edwinProvider, getEdwinClient } from "./provider"; -import { getEdwinActions } from "./actions"; - -// Initial banner -console.log("\n┌═════════════════════════════════════┐"); -console.log("│ EDWIN PLUGIN │"); -console.log("│ ,_, │"); -console.log("│ (o,o) │"); -console.log("│ {`\"'} │"); -console.log("│ -\"-\"- │"); -console.log("├─────────────────────────────────────┤"); -console.log("│ Initializing Edwin Plugin... │"); -console.log("│ Version: 0.0.1 │"); -console.log("└═════════════════════════════════════┘"); - -export const edwinPlugin: Plugin = { - name: "[Edwin] Integration", - description: "Edwin integration plugin", - providers: [edwinProvider], - evaluators: [], - services: [], - actions: await getEdwinActions({ - getClient: getEdwinClient, - }), -}; - -export default edwinPlugin; diff --git a/packages/plugin-edwin/src/provider.ts b/packages/plugin-edwin/src/provider.ts deleted file mode 100644 index 658725a3d6746..0000000000000 --- a/packages/plugin-edwin/src/provider.ts +++ /dev/null @@ -1,34 +0,0 @@ -import type { Provider, IAgentRuntime } from "@elizaos/core"; -import { Edwin } from "edwin-sdk"; -import { EdwinConfig } from "edwin-sdk"; - -// Static variable to hold the singleton instance -let edwinRunningInstance: Edwin | null = null; - -export async function getEdwinClient(): Promise { - // If instance exists, return it - if (edwinRunningInstance) { - return edwinRunningInstance; - } - // Otherwise create new instance - const edwinConfig: EdwinConfig = { - evmPrivateKey: process.env.EVM_PRIVATE_KEY as `0x${string}`, - solanaPrivateKey: process.env.SOLANA_PRIVATE_KEY as string, - actions: ["supply", "withdraw", "stake", "getPools", "addLiquidity"], - }; - - edwinRunningInstance = new Edwin(edwinConfig); - return edwinRunningInstance; -} - -export const edwinProvider: Provider = { - async get(runtime: IAgentRuntime): Promise { - try { - const edwin = await getEdwinClient(); - return edwin.getPortfolio(); - } catch (error) { - console.error("Error in Edwin provider:", error); - return null; - } - }, -}; diff --git a/packages/plugin-edwin/tsconfig.json b/packages/plugin-edwin/tsconfig.json deleted file mode 100644 index f642a90aee14f..0000000000000 --- a/packages/plugin-edwin/tsconfig.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "extends": "../core/tsconfig.json", - "compilerOptions": { - "outDir": "dist", - "rootDir": "./src", - "declaration": true - }, - "include": ["src"] -} diff --git a/packages/plugin-edwin/tsup.config.ts b/packages/plugin-edwin/tsup.config.ts deleted file mode 100644 index a68ccd636adf1..0000000000000 --- a/packages/plugin-edwin/tsup.config.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - outDir: "dist", - sourcemap: true, - clean: true, - format: ["esm"], // Ensure you're targeting CommonJS - external: [ - "dotenv", // Externalize dotenv to prevent bundling - "fs", // Externalize fs to use Node.js built-in module - "path", // Externalize other built-ins if necessary - "@reflink/reflink", - "@node-llama-cpp", - "https", - "http", - "agentkeepalive", - "viem", - "@lifi/sdk", - ], -}); diff --git a/packages/plugin-email-automation/.npmignore b/packages/plugin-email-automation/.npmignore deleted file mode 100644 index 21cd28349f4bf..0000000000000 --- a/packages/plugin-email-automation/.npmignore +++ /dev/null @@ -1,11 +0,0 @@ -* - -!dist/** -!package.json -!readme.md -!tsup.config.ts - -src/ -tsconfig.json -.eslintrc.js -.prettierrc \ No newline at end of file diff --git a/packages/plugin-email-automation/README.md b/packages/plugin-email-automation/README.md deleted file mode 100644 index c8e39f2643d4d..0000000000000 --- a/packages/plugin-email-automation/README.md +++ /dev/null @@ -1,167 +0,0 @@ -# @elizaos/plugin-email-automation - -AI-powered email automation plugin for Eliza that intelligently detects email-worthy conversations and handles generation/delivery. This is not perfect and is simply a solid starting point, and I would encourage any and all contributions! - -## Features - -### 1. Intelligent Detection -- Partnership opportunity detection -- Technical discussion recognition -- Business proposal identification -- Follow-up requirement analysis - -### 2. AI-Powered Generation -- Structured email formatting -- Context-aware content -- Professional tone maintenance -- Technical detail inclusion - -## Configuration - -### AI Email Automation Setup -```typescript -# Required -RESEND_API_KEY= # Your Resend API key -DEFAULT_TO_EMAIL= # Default recipient -DEFAULT_FROM_EMAIL= # Default sender - -# Optional Settings -EMAIL_AUTOMATION_ENABLED=true # Enable AI detection. If this is enabled, the plugin will automatically detect email-worthy conversations and handle generation/delivery and only that. -EMAIL_EVALUATION_PROMPT= # Custom detection criteria for shouldEmail -``` - -### Basic Usage -```typescript -import { emailAutomationPlugin } from '@elizaos/plugin-email-automation'; - -// Add to your Eliza configuration -{ - plugins: [emailAutomationPlugin], - settings: { - EMAIL_AUTOMATION_ENABLED: true, - // ... other settings - } -} -``` - -### Email Template Example -The plugin uses Handlebars for templating. Here's an example output: - -```handlebars -{{!-- email-template.hbs --}} - - - -``` - -This template produces professional emails like the example shown in the image above. You can customize the template by: -1. Creating your own `.hbs` file -2. Registering it with the template manager -3. Specifying your template when sending emails - -## Development - -```bash -# Installation -pnpm install - -# Testing -pnpm test -pnpm test:watch -pnpm test:coverage - -# Building -pnpm build -``` - -## Testing Coverage -- Unit tests for all services -- Integration tests for end-to-end flows -- Throttling and rate limiting tests -- Error handling scenarios -- Mock providers for testing - -## Architecture -```mermaid -graph TD - A[Email Trigger] --> B[Automation Service] - B --> C{AI Evaluation} - C -->|Yes| D[Generation Service] - D --> E[Email Service] - E --> F[Resend Provider] - F --> G[Delivery] -``` - -Architecture Overview: -- Resend Provider support (more to come) -- AI-powered email detection -- Context-aware content generation -- Professional template rendering - -## Credits - -This plugin integrates with and builds upon: - -- [Resend](https://resend.com): Modern email API for developers -- [Handlebars](https://handlebarsjs.com): Templating engine for email formatting - -For more information about Resend capabilities: -- [Resend Documentation](https://resend.com/docs) -- [Email API Reference](https://resend.com/docs/api-reference/introduction) -- [Developer Portal](https://resend.com/overview) - -## License -This plugin is part of the Eliza project. See the main project repository for license information. \ No newline at end of file diff --git a/packages/plugin-email-automation/biome.json b/packages/plugin-email-automation/biome.json deleted file mode 100644 index 818716a62191c..0000000000000 --- a/packages/plugin-email-automation/biome.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.5.3/schema.json", - "organizeImports": { - "enabled": false - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedVariables": "error" - }, - "suspicious": { - "noExplicitAny": "error" - }, - "style": { - "useConst": "error", - "useImportType": "off" - } - } - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 4, - "lineWidth": 100 - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "es5" - } - }, - "files": { - "ignore": [ - "dist/**/*", - "extra/**/*", - "node_modules/**/*" - ] - } -} \ No newline at end of file diff --git a/packages/plugin-email-automation/package.json b/packages/plugin-email-automation/package.json deleted file mode 100644 index 303b919483ec9..0000000000000 --- a/packages/plugin-email-automation/package.json +++ /dev/null @@ -1,61 +0,0 @@ -{ - "name": "@elizaos/plugin-email-automation", - "version": "0.25.6-alpha.1", - "description": "AI-powered email automation plugin for Eliza", - "type": "module", - "main": "./dist/index.js", - "module": "./dist/index.js", - "types": "./dist/index.d.ts", - "files": [ - "dist" - ], - "scripts": { - "build": "tsc --declaration --emitDeclarationOnly false --noEmit false --allowImportingTsExtensions false", - "clean": "rimraf dist", - "dev": "tsc -w", - "lint": "biome lint .", - "lint:fix": "biome check --apply .", - "format": "biome format .", - "format:fix": "biome format --write .", - "test": "vitest run", - "test:watch": "vitest", - "test:coverage": "vitest run --coverage" - }, - "dependencies": { - "@elizaos/core": "workspace:*", - "handlebars": "^4.7.8", - "resend": "^2.0.0" - }, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "@types/jest": "^29.0.0", - "@types/node": "^20.0.0", - "jest": "^29.0.0", - "rimraf": "^5.0.0", - "ts-jest": "^29.0.0", - "typescript": "^5.0.0", - "vitest": "^3.0.0" - }, - "jest": { - "preset": "ts-jest", - "testEnvironment": "node", - "moduleNameMapper": { - "^(\\.{1,2}/.*)\\.js$": "$1", - "^@elizaos/core$": "/../core/dist" - }, - "transform": { - "^.+\\.(ts|tsx)$": "ts-jest" - }, - "testMatch": [ - "**/__tests__/**/*.test.(ts|js)" - ], - "moduleFileExtensions": [ - "ts", - "tsx", - "js", - "jsx", - "json", - "node" - ] - } -} diff --git a/packages/plugin-email-automation/src/actions/generateEmailAction.ts b/packages/plugin-email-automation/src/actions/generateEmailAction.ts deleted file mode 100644 index 4df01f060164c..0000000000000 --- a/packages/plugin-email-automation/src/actions/generateEmailAction.ts +++ /dev/null @@ -1,148 +0,0 @@ -import { type Action, type IAgentRuntime, type Memory, type State, type Content, elizaLogger, type HandlerCallback, ServiceType } from "@elizaos/core"; -import { EmailGenerationService } from "../services/emailGenerationService"; -import { EmailPromptSchema } from "../schemas/emailGenerationSchema"; -import type { EmailPrompt } from "../types"; -import type { GeneratedEmailContent } from "../types"; - -interface EmailState extends State { - generatedEmail?: GeneratedEmailContent; - tone?: EmailPrompt['tone']; - format?: EmailPrompt['format']; - language?: string; -} - -export const generateEmailAction: Action = { - name: "generate_email", - description: "Generate an email based on user requirements. Use this when the user wants to compose or write an email, or when they provide content that should be formatted as an email.", - similes: ["write an email", "compose an email", "draft an email"], - examples: [ - [{ user: "user1", content: { text: "Can you write an email to my team about the project update?" } }], - [{ user: "user1", content: { text: "Draft a professional email about the upcoming meeting" } }] - ], - - async validate(_runtime: IAgentRuntime, message: Memory): Promise { - const content = message.content as Content; - const isValid = content?.text?.toLowerCase().includes('email') ?? false; - elizaLogger.info('Generate validation:', { - isValid, - messageId: message.id, - content: content?.text, - userId: message.userId - }); - return isValid; - }, - - async handler( - runtime: IAgentRuntime, - message: Memory, - state?: EmailState, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise { - try { - // Properly compose state first - state = (await runtime.composeState(message)) as EmailState; - - elizaLogger.info('Generate handler started', { - messageId: message.id, - hasState: !!state, - stateKeys: state ? Object.keys(state) : [], - content: message.content.text - }); - - const emailService = new EmailGenerationService(runtime); - const content = message.content as Content; - - const prompt: EmailPrompt = { - content: content.text, - tone: (state?.tone as EmailPrompt['tone']) || 'professional', - format: (state?.format as EmailPrompt['format']) || 'paragraph', - language: state?.language?.toString() || 'English' - }; - - elizaLogger.info('Generating email with prompt:', { prompt }); - - const generatedEmail = await emailService.generateEmail(prompt); - elizaLogger.info('Email generated:', { - hasSubject: !!generatedEmail.subject, - blockCount: generatedEmail.blocks.length, - metadata: generatedEmail.metadata - }); - - if (state) { - state.generatedEmail = { - subject: generatedEmail.subject, - blocks: generatedEmail.blocks, - metadata: generatedEmail.metadata - }; - - // Update state in memory system - await runtime.updateRecentMessageState(state); - - elizaLogger.info('State updated and persisted', { - stateKeys: Object.keys(state), - hasGeneratedEmail: !!state.generatedEmail, - emailSubject: generatedEmail.subject - }); - } else { - elizaLogger.warn('No state object available for storing email'); - } - - // Add preview message for Discord - if (callback) { - const preview = `📧 **Generated Email Preview** - -**Subject:** ${generatedEmail.subject} -**To:** [Recipient's email will be set when sending] -─────────────── - -${generatedEmail.blocks.map(block => { - switch(block.type) { - case 'heading': - return `## ${block.content}\n\n`; - case 'paragraph': - return `${block.content}\n\n`; - case 'bulletList': - return Array.isArray(block.content) - ? `${block.content.map(item => `• ${item}`).join('\n')}\n\n` - : `• ${block.content}\n\n`; - default: - return `${block.content}\n\n`; - } -}).join('')}`; - - // Simply send the preview without any buttons - callback({ - text: preview, - content: { - preview: true, - email: generatedEmail - } - }); - } - - elizaLogger.info('Email generation completed successfully'); - - // write email to state - state.generatedEmail = { - subject: generatedEmail.subject, - blocks: generatedEmail.blocks, - metadata: generatedEmail.metadata - }; - - // Update state again after modification - await runtime.updateRecentMessageState(state); - - return true; - } catch (error) { - elizaLogger.error('Failed to generate email:', error); - if (callback) { - callback({ - text: 'Failed to generate email. Please try again.', - content: { error: 'Generation failed' } - }); - } - return false; - } - } -}; \ No newline at end of file diff --git a/packages/plugin-email-automation/src/actions/sendEmailAction.ts b/packages/plugin-email-automation/src/actions/sendEmailAction.ts deleted file mode 100644 index 76b138a4ce723..0000000000000 --- a/packages/plugin-email-automation/src/actions/sendEmailAction.ts +++ /dev/null @@ -1,184 +0,0 @@ -import { type Action, type IAgentRuntime, type Memory, type State, type Content, elizaLogger, type HandlerCallback } from "@elizaos/core"; -import { EmailService } from "../services/emailService"; -import type { EmailPrompt, GeneratedEmailContent } from "../types"; -import { EmailGenerationService } from "../services/emailGenerationService"; - -// Define the state interface -interface EmailState extends State { - generatedEmail?: GeneratedEmailContent; -} - -export const sendEmailAction: Action = { - name: "send_email", - description: "Send an email using the configured email service", - similes: ["send email", "send the email", "deliver email"], - examples: [ - [{ user: "user1", content: { text: "Please send this email to the team" } }], - [{ user: "user1", content: { text: "Send the email to john@example.com" } }] - ], - - async validate(_runtime: IAgentRuntime, message: Memory): Promise { - const content = message.content as Content; - const text = content?.text?.toLowerCase() || ''; - - // Strip Discord mention if present - const cleanText = text.replace(/<@[0-9]+>\s*/, '').trim(); - - // Check for send command - const startsWithSend = /^(please\s+)?send(\s+an?)?\s+email/.test(cleanText); - const hasEmailAddress = /[\w.-]+@[\w.-]+\.\w+/.test(text); - - elizaLogger.info('Send validation:', { - originalText: text, - cleanText, - startsWithSend, - hasEmailAddress, - messageId: message.id, - userId: message.userId - }); - - return startsWithSend && hasEmailAddress; - }, - - async handler( - runtime: IAgentRuntime, - message: Memory, - state?: EmailState, - _options: { [key: string]: unknown } = {}, - callback?: HandlerCallback - ): Promise { - try { - elizaLogger.info('Handler invoked for sendEmailAction', { - messageId: message.id, - userId: message.userId - }); - - // Initialize or update state - let currentState = state; - if (!currentState) { - currentState = (await runtime.composeState(message)) as State; - } else { - currentState = await runtime.updateRecentMessageState(currentState); - } - - - elizaLogger.info('Send handler started', { - messageId: message.id, - hasState: !!currentState, - hasGeneratedEmail: !!currentState?.generatedEmail - }); - - // Check if we have a generated email - if (!currentState?.generatedEmail) { - elizaLogger.info('No email content found, generating first...'); - const emailService = new EmailGenerationService(runtime); - const content = message.content as Content; - - const prompt: EmailPrompt = { - content: content.text, - tone: 'professional', - format: 'paragraph', - language: 'English' - }; - - const generatedEmail = await emailService.generateEmail(prompt); - currentState.generatedEmail = { - subject: generatedEmail.subject, - blocks: generatedEmail.blocks, - metadata: generatedEmail.metadata - }; - - // Update state with new email - await runtime.updateRecentMessageState(currentState); - } - - // Get raw secrets string first - const secretsStr = runtime.getSetting('secrets'); - elizaLogger.debug('Got secrets configuration', { - hasSecrets: !!secretsStr - }); - - if (!secretsStr) { - elizaLogger.error('Secrets configuration not found'); - if (callback) { - callback({ - text: 'Email configuration not found.', - content: { error: 'Missing secrets' } - }); - } - return false; - } - - // Parse secrets string to object - const secrets = typeof secretsStr === 'string' ? JSON.parse(secretsStr) : secretsStr; - - // Extract email address from message - const emailMatch = message.content.text.match(/[\w.-]+@[\w.-]+\.\w+/); - elizaLogger.info('Extracted email address', { - hasMatch: !!emailMatch, - email: emailMatch ? emailMatch[0] : null - }); - - if (!emailMatch) { - elizaLogger.error('No valid email address found'); - if (callback) { - callback({ - text: 'Please provide a valid email address.', - content: { error: 'Invalid email' } - }); - } - return false; - } - - // Validate email content - if (!state?.generatedEmail) { - elizaLogger.error('No generated email content available', { - stateContent: state ? Object.keys(state) : [] - }); - if (callback) { - callback({ - text: 'Please generate an email first using the generate command.', - content: { error: 'No content' } - }); - } - return false; - } - - const emailService = new EmailService({ - RESEND_API_KEY: secrets.RESEND_API_KEY, - OWNER_EMAIL: secrets.OWNER_EMAIL || secrets.DEFAULT_FROM_EMAIL - }); - - elizaLogger.info('Sending email', { - to: emailMatch[0], - hasSubject: !!state.generatedEmail.subject, - blockCount: state.generatedEmail.blocks.length - }); - - await emailService.sendEmail(state.generatedEmail, { - from: secrets.OWNER_EMAIL || 'onboarding@resend.dev', - to: [emailMatch[0]] - }); - - elizaLogger.info('Email sent successfully'); - if (callback) { - callback({ - text: `Email sent successfully to ${emailMatch[0]}!`, - content: { success: true } - }); - } - - return true; - - } catch (error) { - elizaLogger.error('Failed to handle email action:', error); - if (callback) { - callback({ - text: 'Failed to send email. Please try again.', - content: { error: 'Send failed' } - }); - } - return false; - } - } -}; \ No newline at end of file diff --git a/packages/plugin-email-automation/src/evaluators/emailEvaluator.ts b/packages/plugin-email-automation/src/evaluators/emailEvaluator.ts deleted file mode 100644 index 11a3ec72dc622..0000000000000 --- a/packages/plugin-email-automation/src/evaluators/emailEvaluator.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { type Evaluator, type Memory, type IAgentRuntime, elizaLogger } from "@elizaos/core"; -import { EmailAutomationService } from "../services/emailAutomationService"; - -export const emailEvaluator: Evaluator = { - name: "EMAIL_AUTOMATION", - description: "Evaluates messages for potential email triggers", - similes: [ - "Checks if a message warrants sending an email", - "Monitors conversation for email-worthy interactions", - "Evaluates if email follow-up is needed" - ], - alwaysRun: true, - examples: [{ - context: "User expresses interest in business opportunity", - messages: [{ - user: "user123", - content: { - text: "I'd like to discuss a potential partnership" - } - }], - outcome: "Should trigger email automation" - }], - validate: async () => true, - handler: async (runtime: IAgentRuntime, message: Memory) => { - try { - const emailService = runtime.getService(EmailAutomationService.serviceType) as EmailAutomationService; - if (!emailService) { - elizaLogger.warn("📧 Email automation service not available"); - return; - } - - await emailService.evaluateMessage(message); - } catch (error) { - elizaLogger.error("📧 Error in email evaluator:", error); - } - } -}; \ No newline at end of file diff --git a/packages/plugin-email-automation/src/index.ts b/packages/plugin-email-automation/src/index.ts deleted file mode 100644 index 6e1da66ff222f..0000000000000 --- a/packages/plugin-email-automation/src/index.ts +++ /dev/null @@ -1,13 +0,0 @@ -import type { Plugin } from "@elizaos/core"; -import { EmailAutomationService } from "./services/emailAutomationService"; - -export const emailAutomationPlugin: Plugin = { - name: "email-automation", - description: "AI-powered email automation plugin for Eliza", - services: [new EmailAutomationService()], - clients: [], - evaluators: [], - providers: [], -}; - -export default emailAutomationPlugin; diff --git a/packages/plugin-email-automation/src/providers/errors.ts b/packages/plugin-email-automation/src/providers/errors.ts deleted file mode 100644 index 0ce37d12571b2..0000000000000 --- a/packages/plugin-email-automation/src/providers/errors.ts +++ /dev/null @@ -1,17 +0,0 @@ -export class EmailProviderError extends Error { - constructor( - public provider: string, - public originalError: unknown, - public context?: Record - ) { - super(`Error in ${provider} provider: ${originalError}`); - this.name = 'EmailProviderError'; - } -} - -// Export the factory function -export const createEmailProviderError = ( - provider: string, - error: unknown, - context?: Record -): EmailProviderError => new EmailProviderError(provider, error, context); \ No newline at end of file diff --git a/packages/plugin-email-automation/src/providers/resend.ts b/packages/plugin-email-automation/src/providers/resend.ts deleted file mode 100644 index bd3eb72ef3e9f..0000000000000 --- a/packages/plugin-email-automation/src/providers/resend.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { Resend } from "resend"; -import { elizaLogger } from "@elizaos/core"; -import type { - EmailOptions, - EmailResponse, - EmailProviderError, - EmailProviderResponse -} from "../types"; -import { createEmailProviderError } from './errors'; - -export class ResendProvider { - private client: Resend; - private readonly retryAttempts = 3; - private readonly retryDelay = 1000; // ms - - constructor(apiKey: string) { - this.client = new Resend(apiKey); - } - - async sendEmail(options: EmailOptions): Promise { - let lastError: Error | null = null; - - for (let attempt = 1; attempt <= this.retryAttempts; attempt++) { - try { - const response = await this.client.emails.send({ - from: options.from, - to: options.to, - subject: options.subject, - html: options.html || options.body, - text: options.text, - bcc: options.bcc, - cc: options.cc, - reply_to: options.replyTo, - headers: options.headers, - attachments: options.attachments, - tags: options.tags - }); - - if (!response.data?.id) { - throw new Error('Missing response data from Resend'); - } - - elizaLogger.debug('Email sent successfully', { - id: response.data.id, - attempt - }); - - return { - id: response.data.id, - provider: 'resend', - status: 'success', - timestamp: new Date() - }; - - } catch (error) { - lastError = error as Error; - elizaLogger.error(`Resend attempt ${attempt} failed:`, { - error, - options: { - to: options.to, - subject: options.subject - } - }); - - if (this.shouldRetry(error) && attempt < this.retryAttempts) { - await this.delay(attempt * this.retryDelay); - continue; - } - break; - } - } - - throw createEmailProviderError( - 'resend', - lastError as Error, - { - attempts: this.retryAttempts, - lastAttemptAt: new Date().toISOString() - } - ); - } - - private shouldRetry(error: unknown): boolean { - if (error instanceof Error) { - // Retry on network errors or rate limits - return error.message.includes('network') || - error.message.includes('rate limit') || - error.message.includes('timeout'); - } - return false; - } - - private delay(ms: number): Promise { - return new Promise(resolve => setTimeout(resolve, ms)); - } - - async validateConfig(): Promise { - try { - // Try to get account info or similar lightweight call - await this.client.emails.send({ - from: 'test@resend.dev', - to: 'validate@resend.dev', - subject: 'Configuration Test', - text: 'Testing configuration' - }); - return true; - } catch (error) { - if (error instanceof Error && - error.message.includes('unauthorized')) { - return false; - } - // Other errors might indicate valid config but other issues - return true; - } - } -} \ No newline at end of file diff --git a/packages/plugin-email-automation/src/schemas/emailGenerationSchema.ts b/packages/plugin-email-automation/src/schemas/emailGenerationSchema.ts deleted file mode 100644 index 11dd064235c26..0000000000000 --- a/packages/plugin-email-automation/src/schemas/emailGenerationSchema.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { z } from "zod"; - -// Define the block types we need for structure -export const EmailBlockTypeEnum = z.enum([ - 'paragraph', - 'bulletList', - 'heading', - 'callout', - 'signature', - 'banner' -]); - -// Make metadata more flexible -export const EmailMetadataSchema = z.object({ - tone: z.string().describe('The overall tone of the email'), - intent: z.string().describe('The primary purpose of the email'), - priority: z.enum(['low', 'medium', 'high']).describe('The priority level of the email'), - language: z.string().optional().describe('The language to use for the email') -}); - -export const EmailBlockSchema = z.object({ - type: EmailBlockTypeEnum, - content: z.union([z.string(), z.array(z.string())]), - metadata: z.object({ - style: z.string().optional(), - className: z.string().optional(), - importance: z.enum(['high', 'medium', 'low']).optional() - }).optional() -}); - -export const EmailPromptSchema = z.object({ - content: z.string(), - format: z.enum(['bullet', 'paragraph']).optional(), - tone: z.string().optional(), - language: z.string().optional(), - style: z.string().optional() -}); - -export const EmailGenerationSchema = z.object({ - name: z.literal('generateEmail'), - parameters: z.object({ - subject: z.string().min(1).max(100).describe('The email subject line'), - blocks: z.array(EmailBlockSchema).describe('The content blocks making up the email body'), - metadata: EmailMetadataSchema.describe('Metadata about the email') - }) -}); - -export const formatBlock = (block: EmailBlock): string => { - if (typeof block.content === 'string') { - return block.content; - } - - switch (block.type) { - case "paragraph": - case "heading": - case "callout": - case "banner": - return block.content.toString(); - case "bulletList": - return block.content.map(item => `• ${item}`).join("\n"); - case "signature": - return `\n--\n${block.content.join("\n")}`; - default: - return ""; - } -}; - -// Export types -export type EmailBlock = z.infer; -export type EmailMetadata = z.infer; -export type EmailGeneration = z.infer; -export type EmailPrompt = z.infer; diff --git a/packages/plugin-email-automation/src/services/__tests__/contextBuilder.test.ts b/packages/plugin-email-automation/src/services/__tests__/contextBuilder.test.ts deleted file mode 100644 index 96319bdf6cdeb..0000000000000 --- a/packages/plugin-email-automation/src/services/__tests__/contextBuilder.test.ts +++ /dev/null @@ -1,73 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { EmailAutomationService } from '../emailAutomationService'; - -describe('Context Builder', () => { - let service: EmailAutomationService; - - beforeEach(() => { - service = new EmailAutomationService(); - }); - - describe('State Composition', () => { - it('should handle missing state gracefully', async () => { - const mockRuntime = { - getSetting: vi.fn(), - composeState: vi.fn().mockResolvedValue(null) - } as any; - - await service.initialize(mockRuntime); - const context = await (service as any).buildContext({ - userId: 'test', - content: { text: 'message' } - }); - - expect(context.state).toBeNull(); - expect(context.metadata).toEqual({}); - }); - - it('should include conversation history', async () => { - const mockRuntime = { - getSetting: vi.fn(), - composeState: vi.fn().mockResolvedValue({ - previousMessages: [ - { content: { text: 'msg1' } }, - { content: { text: 'msg2' } } - ] - }) - } as any; - - await service.initialize(mockRuntime); - const context = await (service as any).buildContext({ - userId: 'test', - content: { text: 'current' } - }); - - expect(context.state.previousMessages).toHaveLength(2); - }); - }); - - describe('Metadata Handling', () => { - it('should merge metadata correctly', async () => { - const mockRuntime = { - getSetting: vi.fn(), - composeState: vi.fn().mockResolvedValue({ - metadata: { - source: 'chat', - priority: 'high' - } - }) - } as any; - - await service.initialize(mockRuntime); - const context = await (service as any).buildContext({ - userId: 'test', - content: { text: 'message' } - }); - - expect(context.metadata).toEqual({ - source: 'chat', - priority: 'high' - }); - }); - }); -}); \ No newline at end of file diff --git a/packages/plugin-email-automation/src/services/__tests__/emailAutomationService.test.ts b/packages/plugin-email-automation/src/services/__tests__/emailAutomationService.test.ts deleted file mode 100644 index 52e50062dd819..0000000000000 --- a/packages/plugin-email-automation/src/services/__tests__/emailAutomationService.test.ts +++ /dev/null @@ -1,156 +0,0 @@ -import { describe, it, expect, vi } from 'vitest'; -import { EmailAutomationService } from '../emailAutomationService'; -import { generateText } from '@elizaos/core'; - -// Mock the core generateText function -vi.mock('@elizaos/core', async () => ({ - ...await vi.importActual('@elizaos/core'), - generateText: vi.fn() -})); - -describe('EmailAutomationService', () => { - it('should detect partnership opportunities', async () => { - const mockRuntime = { - getSetting: vi.fn((key: string) => { - if (key === 'EMAIL_AUTOMATION_ENABLED') return 'true'; - if (key === 'RESEND_API_KEY') return 'test_key'; - if (key === 'DEFAULT_TO_EMAIL') return 'test@test.com'; - if (key === 'DEFAULT_FROM_EMAIL') return 'from@test.com'; - return null; - }), - composeState: vi.fn().mockResolvedValue({ - metadata: {}, - previousMessages: [] - }) - } as any; - - const service = new EmailAutomationService(); - await service.initialize(mockRuntime); - - // Mock the generateText response - (generateText as any).mockResolvedValueOnce('[EMAIL] Valid opportunity'); - - // Mock the email generation and sending - (service as any).handleEmailTrigger = vi.fn().mockResolvedValue(true); - - const result = await service.evaluateMessage({ - userId: 'test-user', - agentId: 'test-agent', - roomId: 'test-room', - content: { text: 'Partnership proposal with significant details' } - } as any); - - expect(generateText).toHaveBeenCalled(); - expect(result).toBe(true); - }); - - it('should ignore casual messages', async () => { - const mockRuntime = { - getSetting: vi.fn((key: string) => { - if (key === 'EMAIL_AUTOMATION_ENABLED') return 'true'; - if (key === 'RESEND_API_KEY') return 'test_key'; - if (key === 'DEFAULT_TO_EMAIL') return 'test@test.com'; - if (key === 'DEFAULT_FROM_EMAIL') return 'from@test.com'; - return null; - }), - composeState: vi.fn().mockResolvedValue({ - metadata: {}, - previousMessages: [] - }) - } as any; - - const service = new EmailAutomationService(); - await service.initialize(mockRuntime); - (generateText as any).mockResolvedValueOnce('[SKIP] General chat'); - - const result = await service.evaluateMessage({ - userId: 'test-user', - agentId: 'test-agent', - roomId: 'test-room', - content: { text: 'gm' } - } as any); - expect(result).toBe(false); - }); - - it('should handle missing settings gracefully', async () => { - const mockRuntime = { - getSetting: vi.fn((key: string) => { - if (key === 'EMAIL_AUTOMATION_ENABLED') return 'true'; - return null; - }), - composeState: vi.fn() - } as any; - - const service = new EmailAutomationService(); - await service.initialize(mockRuntime); - - await expect(service.evaluateMessage({ - content: { text: 'test' } - } as any)).rejects.toThrow('Missing required email configuration'); - }); - - it('should build context correctly', async () => { - const mockRuntime = { - getSetting: vi.fn((key: string) => { - if (key === 'EMAIL_AUTOMATION_ENABLED') return 'true'; - if (key === 'RESEND_API_KEY') return 'test_key'; - if (key === 'DEFAULT_TO_EMAIL') return 'test@test.com'; - if (key === 'DEFAULT_FROM_EMAIL') return 'from@test.com'; - return null; - }), - composeState: vi.fn().mockResolvedValue({ - metadata: { test: true }, - previousMessages: ['msg1', 'msg2'] - }) - } as any; - - const service = new EmailAutomationService(); - await service.initialize(mockRuntime); - (generateText as any).mockResolvedValueOnce('[EMAIL] Test'); - - await service.evaluateMessage({ - userId: 'test-user', - agentId: 'test-agent', - roomId: 'test-room', - content: { text: 'Test message' } - } as any); - - expect(mockRuntime.composeState).toHaveBeenCalledWith( - expect.objectContaining({ - content: { text: 'Test message' } - }) - ); - }); - - it('should use custom prompt when provided', async () => { - const customPrompt = 'Custom evaluation prompt'; - const mockRuntime = { - getSetting: vi.fn((key: string) => { - if (key === 'EMAIL_AUTOMATION_ENABLED') return 'true'; - if (key === 'RESEND_API_KEY') return 'test_key'; - if (key === 'DEFAULT_TO_EMAIL') return 'test@test.com'; - if (key === 'DEFAULT_FROM_EMAIL') return 'from@test.com'; - if (key === 'EMAIL_EVALUATION_PROMPT') return customPrompt; - return null; - }), - composeState: vi.fn().mockResolvedValue({ - metadata: {}, - previousMessages: [] - }) - } as any; - - const service = new EmailAutomationService(); - await service.initialize(mockRuntime); - (generateText as any).mockResolvedValueOnce('[EMAIL] Test'); - - await service.evaluateMessage({ - content: { text: 'Test message' } - } as any); - - expect(generateText).toHaveBeenCalledWith( - expect.objectContaining({ - context: expect.stringContaining(customPrompt) - }) - ); - }); -}); \ No newline at end of file diff --git a/packages/plugin-email-automation/src/services/__tests__/emailGenerationService.test.ts b/packages/plugin-email-automation/src/services/__tests__/emailGenerationService.test.ts deleted file mode 100644 index 0a188272986ef..0000000000000 --- a/packages/plugin-email-automation/src/services/__tests__/emailGenerationService.test.ts +++ /dev/null @@ -1,87 +0,0 @@ -import { describe, expect, it, vi, beforeEach } from 'vitest'; -import { IAgentRuntime, ModelClass, elizaLogger, generateObject } from '@elizaos/core'; -import { EmailGenerationService } from '../emailGenerationService'; -import { EmailGenerationSchema } from '../../schemas/emailGenerationSchema'; - -// Mock the generateObject function -vi.mock('@elizaos/core', async () => { - const actual = await vi.importActual('@elizaos/core'); - return { - ...actual, - generateObject: vi.fn(), - elizaLogger: { - debug: vi.fn(), - error: vi.fn() - } - }; -}); - -describe('EmailGenerationService', () => { - let service: EmailGenerationService; - let mockRuntime: IAgentRuntime; - - beforeEach(() => { - mockRuntime = { - // Minimal mock implementation - } as unknown as IAgentRuntime; - - service = new EmailGenerationService(mockRuntime); - vi.clearAllMocks(); - }); - - it('should generate structured email content', async () => { - const mockEmailContent = { - subject: 'Test Subject', - blocks: [{ - type: 'paragraph', - content: 'Test content', - metadata: {} - }], - metadata: { - tone: 'professional', - intent: 'inform', - priority: 'medium' - } - }; - - (generateObject as ReturnType).mockResolvedValueOnce({ - object: { - name: 'generateEmail', - parameters: mockEmailContent - } - }); - - const result = await service.generateEmail({ - content: 'Write a test email', - tone: 'professional' - }); - - expect(generateObject).toHaveBeenCalledWith(expect.objectContaining({ - runtime: mockRuntime, - context: expect.any(String), - modelClass: ModelClass.LARGE, - schema: EmailGenerationSchema, - schemaName: 'generateEmail', - schemaDescription: "Generate a structured email" - })); - - expect(result).toEqual(mockEmailContent); - }); - - it('should handle AI generation errors', async () => { - const testError = new Error('AI generation failed'); - (generateObject as ReturnType).mockRejectedValueOnce(testError); - - await expect(async () => { - await service.generateEmail({ content: 'test' }); - }).rejects.toThrow('AI generation failed'); - - expect(elizaLogger.error).toHaveBeenCalled(); - }); - - it('should validate input options', async () => { - await expect(async () => { - await service.generateEmail({ content: '' }); - }).rejects.toThrow(); - }); -}); \ No newline at end of file diff --git a/packages/plugin-email-automation/src/services/__tests__/emailParser.test.ts b/packages/plugin-email-automation/src/services/__tests__/emailParser.test.ts deleted file mode 100644 index 2b5cb667f9877..0000000000000 --- a/packages/plugin-email-automation/src/services/__tests__/emailParser.test.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { describe, it, expect, beforeEach, vi } from 'vitest'; -import { EmailAutomationService } from '../emailAutomationService'; - -describe('Email Parser', () => { - let service: EmailAutomationService; - - beforeEach(() => { - service = new EmailAutomationService(); - }); - - describe('Section Parsing', () => { - it('should handle missing sections gracefully', () => { - const result = (service as any).parseFormattedEmail(` - Subject: Test Email - - Background: - This is a test. - `); - - expect(result).toEqual({ - subject: 'Test Email', - background: 'This is a test.', - keyPoints: [], - nextSteps: [] - }); - }); - - it('should parse complex technical details', () => { - const result = (service as any).parseFormattedEmail(` - Subject: Technical Discussion - - Background: - Project overview. - - Technical Details: - • Architecture: Microservices - • Stack: Node.js, TypeScript - • Database: PostgreSQL - - Next Steps: - 1. Review architecture - 2. Schedule follow-up - `); - - expect(result.technicalDetails).toHaveLength(3); - expect(result.technicalDetails[0]).toContain('Architecture'); - expect(result.nextSteps).toHaveLength(2); - }); - - it('should handle malformed input', () => { - // Mock the parseFormattedEmail method to throw an error - vi.spyOn(service as any, 'parseFormattedEmail').mockImplementation(() => { - throw new Error('Failed to parse email format'); - }); - - expect(() => (service as any).parseFormattedEmail('Invalid format')) - .toThrow('Failed to parse email format'); - }); - }); - - describe('Content Validation', () => { - it('should validate required sections', () => { - const result = (service as any).parseFormattedEmail(` - Subject: Test - - Background: - Test background. - - Key Points: - • Point 1 - • Point 2 - `); - - expect(result.subject).toBe('Test'); - expect(result.background).toBeTruthy(); - expect(result.keyPoints).toHaveLength(2); - }); - }); -}); \ No newline at end of file diff --git a/packages/plugin-email-automation/src/services/__tests__/emailService.test.ts b/packages/plugin-email-automation/src/services/__tests__/emailService.test.ts deleted file mode 100644 index 01c917ee66c79..0000000000000 --- a/packages/plugin-email-automation/src/services/__tests__/emailService.test.ts +++ /dev/null @@ -1,138 +0,0 @@ -import { describe, expect, it, vi, beforeEach, afterEach, Mock } from 'vitest'; -import { EmailService } from '../emailService'; -import { elizaLogger } from '@elizaos/core'; -import { ResendProvider } from '../../providers/resend'; -import { EmailTemplateManager } from '../emailTemplateManager'; - -// Mock the providers and dependencies -vi.mock('../../providers/resend', () => ({ - ResendProvider: vi.fn().mockImplementation(() => ({ - sendEmail: vi.fn().mockImplementation(() => Promise.resolve({ - id: 'test_id', - provider: 'resend', - status: 'success', - timestamp: new Date() - })) - })) -})); -vi.mock('../emailTemplateManager'); -vi.mock('@elizaos/core', () => ({ - elizaLogger: { - debug: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -describe('EmailService', () => { - let service: EmailService; - let mockProvider: { sendEmail: Mock }; - - beforeEach(() => { - // Reset all mocks before each test - vi.clearAllMocks(); - - service = new EmailService({ - RESEND_API_KEY: 'test_key', - OWNER_EMAIL: 'test@example.com' - }); - - // Get the mock provider instance - mockProvider = (service as any).provider; - }); - - afterEach(() => { - vi.unstubAllEnvs(); - }); - - describe('Email Sending', () => { - it('should send email successfully', async () => { - mockProvider.sendEmail.mockResolvedValueOnce({ - id: 'test_id', - provider: 'resend', - status: 'success', - timestamp: new Date() - }); - - const result = await service.sendEmail({ - subject: 'Test Email', - blocks: [{ type: 'paragraph', content: 'Test content' }], - metadata: { - tone: 'professional', - intent: 'inform', - priority: 'medium' - } - }, { - to: 'recipient@example.com', - from: 'test@example.com' - }); - - expect(result).toEqual({ - id: 'test_id', - provider: 'resend', - status: 'success', - timestamp: expect.any(Date) - }); - }); - - it('should handle multiple recipients', async () => { - mockProvider.sendEmail.mockResolvedValueOnce({ - id: 'test_id', - provider: 'resend', - status: 'success', - timestamp: new Date() - }); - - const result = await service.sendEmail({ - subject: 'Test Email', - blocks: [{ type: 'paragraph', content: 'Test content' }], - metadata: { - tone: 'professional', - intent: 'inform', - priority: 'medium' - } - }, { - to: ['recipient1@example.com', 'recipient2@example.com'], - from: '' - }); - - expect(result).toEqual({ - id: 'test_id', - provider: 'resend', - status: 'success', - timestamp: expect.any(Date) - }); - }); - - it('should handle custom headers and tags', async () => { - mockProvider.sendEmail.mockResolvedValueOnce({ - id: 'test_id', - provider: 'resend', - status: 'success', - timestamp: new Date() - }); - - const result = await service.sendEmail({ - subject: 'Test Email', - blocks: [{ type: 'paragraph', content: 'Test content' }], - metadata: { - tone: 'professional', - intent: 'inform', - priority: 'medium' - } - }, { - to: 'recipient@example.com', - headers: { 'X-Custom': 'value' }, - tags: [{ name: 'category', value: 'test' }], - from: '' - }); - - expect(result).toEqual({ - id: 'test_id', - provider: 'resend', - status: 'success', - timestamp: expect.any(Date) - }); - }); - }); -}); \ No newline at end of file diff --git a/packages/plugin-email-automation/src/services/__tests__/emailTemplateManager.test.ts b/packages/plugin-email-automation/src/services/__tests__/emailTemplateManager.test.ts deleted file mode 100644 index 7578b172c7b2b..0000000000000 --- a/packages/plugin-email-automation/src/services/__tests__/emailTemplateManager.test.ts +++ /dev/null @@ -1,87 +0,0 @@ -import { describe, it, expect, beforeEach } from 'vitest'; -import { EmailTemplateManager } from '../emailTemplateManager'; -import { EmailTemplate, EmailBlock } from '../../types'; - -describe('EmailTemplateManager', () => { - let manager: EmailTemplateManager; - - beforeEach(() => { - manager = new EmailTemplateManager(); - }); - - describe('Template Management', () => { - it('should provide default template', () => { - const template = manager.getTemplate('default'); - expect(template).toBeDefined(); - expect(template.id).toBe('default'); - expect(template.html).toContain('email-container'); - }); - - it('should provide notification template', () => { - const template = manager.getTemplate('notification'); - expect(template).toBeDefined(); - expect(template.id).toBe('notification'); - expect(template.html).toContain('notification-header'); - }); - - it('should fall back to default template for unknown templates', () => { - const template = manager.getTemplate('nonexistent'); - expect(template.id).toBe('default'); - }); - - it('should register custom template', () => { - const customTemplate: EmailTemplate = { - id: 'custom', - name: 'Custom Template', - html: '
    {{content}}
    ', - variables: ['content'], - defaultStyle: { default: '' } - }; - - manager.registerTemplate(customTemplate); - const retrieved = manager.getTemplate('custom'); - expect(retrieved).toEqual(customTemplate); - }); - - it('should reject invalid template registration', () => { - expect(() => manager.registerTemplate({ - id: '', - name: 'Invalid', - html: '', - variables: [], - defaultStyle: { default: '' } - })).toThrow('Invalid template: missing required fields (id, html, variables)'); - }); - }); - - describe('Template Rendering', () => { - it('should format blocks correctly', () => { - const rendered = manager.renderBlock({ - type: 'paragraph', - content: 'Test content' - }); - - expect(rendered).toContain(''); - }); - - it('should format bullet lists correctly', () => { - const rendered = manager.renderBlock({ - type: 'bulletList', - content: ['Item 1', 'Item 2'] - }); - - expect(rendered).toContain('