Skip to content

Commit

Permalink
Merge branch 'main' into feat/af-get-cid-local
Browse files (browse the repository at this point in the history)
  • Loading branch information
Xm0onh authored Jan 10, 2025
2 parents bbf9cc2 + 93972b9 commit 3ac15ae
Show file tree
Hide file tree
Showing 9 changed files with 131 additions and 55 deletions.
16 changes: 0 additions & 16 deletions auto-agents-framework/.env.sample
Original file line number Diff line number Diff line change
Expand Up @@ -2,23 +2,10 @@
TWITTER_USERNAME=<twitter_username>
TWITTER_PASSWORD=<twitter_password>

# Twitter data fetch and post configuration
NUM_TIMELINE_TWEETS=10
NUM_FOLLOWING_RECENT_TWEETS=10
NUM_RANDOM_FOLLOWERS=5
MAX_MENTIONS=20
MAX_THREAD_LENGTH=20
MAX_MY_RECENT_TWEETS=10
MAX_MY_RECENT_REPLIES=10
POST_TWEETS=false
RESPONSE_INTERVAL_MINUTES=26
POST_INTERVAL_MINUTES=30

# LLM Configuration
OPENAI_API_KEY=<openai_api_key>
ANTHROPIC_API_KEY=<anthropic_api_key>
LLAMA_API_URL=<llama_api_url>
# Config the models and sizes in src/config/llm.ts

# AutoDrive Configuration
AUTO_DRIVE_API_KEY=<auto_drive_api_key>
Expand All @@ -36,8 +23,5 @@ SERPAPI_API_KEY=<serpapi_api_key>
# Environment
NODE_ENV=<node_env>

# Retry Limit
RETRY_LIMIT=<retry_limit>

# Agent Version
AGENT_VERSION=<agent_version>
5 changes: 4 additions & 1 deletion auto-agents-framework/.gitignore
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
characters/
!characters/character.example.ts
*.yaml
!config/config.example.yaml
.cookies/
dsn-kol-schemas.json
memories/
memories/

66 changes: 60 additions & 6 deletions auto-agents-framework/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,19 +14,73 @@ Auto-Agents-Framework is an experimental framework for building AI agents that c
## Getting Started

1. Install dependencies:
`yarn install
`
`yarn install`

2. Copy the environment file and configure your credentials:
`cp .env.sample .env`

3. Configure your `.env` file with required credentials:
``env

```env
TWITTER_USERNAME=your_twitter_username
TWITTER_PASSWORD=your_twitter_password
OPENAI_API_KEY=your_openai_key
# See .env.sample for other configuration options
``
See .env.sample for other configuration options
```

4. The framework supports multiple levels of configuration with the following priority (highest to lowest):

1. Environment variables (`.env` file)
2. YAML configuration (`config/config.yaml`)
3. Default values in code

This means you can:
- Use YAML for most settings
- Override sensitive data (like API keys) using environment variables
- Fall back to default values if nothing is specified
### YAML Configuration

1. Copy the example configuration file:
```bash
cp config/config.example.yaml config/config.yaml
```

2. Customize the settings in `config/config.yaml`:
```yaml
twitter:
NUM_TIMELINE_TWEETS: 10
NUM_FOLLOWING_RECENT_TWEETS: 10
NUM_RANDOM_FOLLOWERS: 5
MAX_MENTIONS: 20
MAX_THREAD_LENGTH: 20
MAX_MY_RECENT_TWEETS: 10
MAX_MY_RECENT_REPLIES: 10
RESPONSE_INTERVAL_MS: 3600000 # 1 hour
POST_INTERVAL_MS: 5400000 # 1.5 hours
POST_TWEETS: false

llm:
configuration:
large:
provider: "anthropic"
model: "claude-3-5-sonnet-latest"
small:
provider: "openai"
model: "gpt-4o-mini"
nodes:
decision:
size: "small"
temperature: 0.2
analyze:
size: "large"
temperature: 0.5
generation:
size: "large"
temperature: 0.8
response:
size: "small"
temperature: 0.8
```
## Character System
Expand Down Expand Up @@ -156,4 +210,4 @@ Monitor the agent's activity in the console and configured log files.

## License

MIT
MIT
34 changes: 34 additions & 0 deletions auto-agents-framework/config/config.example.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
# Example agent configuration. Copy to config/config.yaml and adjust.
# Per the README, environment variables (.env) override these values,
# and values here override the in-code defaults.
twitter:
  NUM_TIMELINE_TWEETS: 10
  NUM_FOLLOWING_RECENT_TWEETS: 10
  NUM_RANDOM_FOLLOWERS: 5
  MAX_MENTIONS: 20
  MAX_THREAD_LENGTH: 20
  MAX_MY_RECENT_TWEETS: 10
  MAX_MY_RECENT_REPLIES: 10
  RESPONSE_INTERVAL_MS: 3600000 # 1 hour
  POST_INTERVAL_MS: 5400000 # 1.5 hours
  # Posting is off by default; presumably enables actual tweeting — confirm in src/config.
  POST_TWEETS: false

llm:
  # Model selection for each size class referenced by the nodes below.
  configuration:
    large:
      provider: "anthropic"
      model: "claude-3-5-sonnet-latest"
    small:
      provider: "openai"
      model: "gpt-4o-mini"

  # Per-node model size and sampling temperature.
  nodes:
    decision:
      size: "small"
      temperature: 0.2
    analyze:
      size: "large"
      temperature: 0.5
    generation:
      size: "large"
      temperature: 0.8
    response:
      size: "small"
      temperature: 0.8
47 changes: 19 additions & 28 deletions auto-agents-framework/src/config/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,10 @@ import { configSchema } from './schema.js';
import path from 'path';
import { fileURLToPath } from 'url';
import { mkdir } from 'fs/promises';
import { llmConfig } from './llm.js';
import { llmDefaultConfig } from './llm.js';
import { twitterDefaultConfig } from './twitter.js';
import yaml from 'yaml';
import { readFileSync } from 'fs';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
Expand All @@ -31,6 +34,17 @@ function formatZodError(error: z.ZodError) {

export const agentVersion = process.env.AGENT_VERSION || '1.0.0';

const yamlConfig = (() => {
try {
const configPath = path.join(workspaceRoot, 'config', 'config.yaml');
const fileContents = readFileSync(configPath, 'utf8');
return yaml.parse(fileContents);
} catch (error) {
console.info('No YAML config found, falling back to environment variables');
return {};
}
})();

export const config = (() => {
try {
const username = process.env.TWITTER_USERNAME || '';
Expand All @@ -41,34 +55,12 @@ export const config = (() => {
USERNAME: username,
PASSWORD: process.env.TWITTER_PASSWORD || '',
COOKIES_PATH: cookiesPath,
NUM_TIMELINE_TWEETS: Number(process.env.NUM_TIMELINE_TWEETS) || 10,
NUM_FOLLOWING_RECENT_TWEETS: Number(process.env.NUM_FOLLOWING_RECENT_TWEETS) || 10,
NUM_RANDOM_FOLLOWERS: Number(process.env.NUM_RANDOM_FOLLOWERS) || 5,
MAX_MENTIONS: Number(process.env.MAX_MENTIONS) || 5,
MAX_THREAD_LENGTH: Number(process.env.MAX_THREAD_LENGTH) || 20,
MAX_MY_RECENT_TWEETS: Number(process.env.MAX_MY_RECENT_TWEETS) || 10,
MAX_MY_RECENT_REPLIES: Number(process.env.MAX_MY_RECENT_REPLIES) || 10,
POST_TWEETS: process.env.POST_TWEETS === 'true',
RESPONSE_INTERVAL_MS: (Number(process.env.RESPONSE_INTERVAL_MINUTES) || 60) * 60 * 1000,
POST_INTERVAL_MS: (Number(process.env.POST_INTERVAL_MINUTES) || 90) * 60 * 1000,
...twitterDefaultConfig,
...(yamlConfig.twitter || {}),
},
llmConfig: {
configuration: {
large: {
provider: llmConfig.configuration.large.provider,
model: llmConfig.configuration.large.model,
},
small: {
provider: llmConfig.configuration.small.provider,
model: llmConfig.configuration.small.model,
},
},
nodes: {
decision: llmConfig.nodes.decision,
analyze: llmConfig.nodes.analyze,
generation: llmConfig.nodes.generation,
response: llmConfig.nodes.response,
},
...llmDefaultConfig,
...(yamlConfig.llm || {}),
OPENAI_API_KEY: process.env.OPENAI_API_KEY || '',
ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY || '',
LLAMA_API_URL: process.env.LLAMA_API_URL || '',
Expand All @@ -85,7 +77,6 @@ export const config = (() => {
},
SERPAPI_API_KEY: process.env.SERPAPI_API_KEY || '',
NODE_ENV: process.env.NODE_ENV || 'development',
RETRY_LIMIT: Number(process.env.RETRY_LIMIT) || 2,
};

return configSchema.parse(rawConfig);
Expand Down
2 changes: 1 addition & 1 deletion auto-agents-framework/src/config/llm.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { LLMNodeConfiguration, LLMSize, LLMProvider, llmModels } from '../services/llm/types.js';

export const llmConfig = {
export const llmDefaultConfig = {
configuration: {
large: {
provider: LLMProvider.ANTHROPIC,
Expand Down
1 change: 0 additions & 1 deletion auto-agents-framework/src/config/schema.ts
Original file line number Diff line number Diff line change
Expand Up @@ -135,5 +135,4 @@ export const configSchema = z.object({
blockchainConfig: blockchainConfigSchema,
SERPAPI_API_KEY: SERPAPI_API_KEY,
NODE_ENV: z.enum(['development', 'production', 'test']),
RETRY_LIMIT: z.number().int().nonnegative(),
});
12 changes: 12 additions & 0 deletions auto-agents-framework/src/config/twitter.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
// Milliseconds per minute, used to express the interval defaults readably.
const MINUTE_MS = 60 * 1000;

/**
 * Built-in defaults for the Twitter-related settings.
 *
 * This is the lowest-priority configuration layer: values supplied via
 * config/config.yaml or environment variables take precedence (the config
 * loader spreads these defaults first and the YAML values over them).
 */
export const twitterDefaultConfig = {
  NUM_TIMELINE_TWEETS: 10,
  NUM_FOLLOWING_RECENT_TWEETS: 10,
  NUM_RANDOM_FOLLOWERS: 5,
  MAX_MENTIONS: 20,
  MAX_THREAD_LENGTH: 20,
  MAX_MY_RECENT_TWEETS: 10,
  MAX_MY_RECENT_REPLIES: 10,
  // Intervals are stored in milliseconds.
  RESPONSE_INTERVAL_MS: 60 * MINUTE_MS, // 1 hour
  POST_INTERVAL_MS: 90 * MINUTE_MS, // 1.5 hours
  // Posting disabled by default as a safety measure.
  POST_TWEETS: false,
};
3 changes: 1 addition & 2 deletions auto-agents-framework/src/services/llm/factory.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,11 @@ import { ChatOpenAI } from '@langchain/openai';
import { ChatAnthropic } from '@langchain/anthropic';
import { ChatOllama } from '@langchain/ollama';
import { LLMProvider, LLMConfiguration, LLMNodeConfiguration } from './types.js';
import { llmConfig } from '../../config/llm.js';
import { config as appConfig } from '../../config/index.js';

export class LLMFactory {
static createModel(node: LLMNodeConfiguration) {
const cfg = llmConfig.configuration[node.size];
const cfg = appConfig.llmConfig.configuration[node.size];
return this.createModelFromConfig(cfg, node.temperature);
}

Expand Down

0 comments on commit 3ac15ae

Please sign in to comment.