-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathagent.py
105 lines (85 loc) · 3.29 KB
/
agent.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
# Standard library
import asyncio
import json
import os
import sys
from datetime import datetime
from enum import Enum
from typing import Dict, List

# Third-party
import nltk
from dotenv import load_dotenv
from rich.console import Console

# Local
from agent.core import Agent, AgentConfig
from formatters.pretty_output import PrettyFormatter
from tools.base import BaseTool
from utils.logger import AgentLogger
# Load environment variables from a local .env file (e.g. SERPER_API_KEY)
load_dotenv()
class EnhancedJSONEncoder(json.JSONEncoder):
    """JSON encoder that also handles ``datetime`` and ``Enum`` values.

    Datetimes are serialized as ISO-8601 strings and Enum members as
    their underlying ``value``; everything else falls back to the
    standard ``json.JSONEncoder`` behaviour (which raises TypeError
    for unsupported types).
    """

    def default(self, obj):
        # ISO-8601 text for timestamps, raw value for enum members.
        if isinstance(obj, datetime):
            return obj.isoformat()
        if isinstance(obj, Enum):
            return obj.value
        # Defer to the base class for anything else.
        return super().default(obj)
def initialize_nltk():
    """Prepare NLTK resources needed by the agent.

    Currently a placeholder — the original comment referenced
    "existing NLTK initialization code" that lives elsewhere.
    """
    # Intentionally a no-op for now.
    return None
async def process_tasks(agent: Agent, tasks: List[str]) -> List[Dict]:
    """Run every task through *agent* concurrently.

    Returns one result dict per task, in the same order as *tasks*.
    """
    pending = (agent.process_task(task) for task in tasks)
    return await asyncio.gather(*pending)
def main(task_file_path: str, output_file_path: str):
    """Run the web-research agent over every task in *task_file_path*.

    Reads one task per non-blank line, processes all tasks concurrently,
    pretty-prints each result to the console, and writes the full JSON
    results to *output_file_path*.

    Exits with status 1 when SERPER_API_KEY is missing from the
    environment.
    """
    # Fail fast if the required API key is not configured.
    # BUG FIX: `sys` was previously imported only under the
    # `if __name__ == "__main__"` guard, so calling main() from another
    # module with the key missing raised NameError instead of exiting;
    # `sys` is now imported at module level.
    if not os.getenv("SERPER_API_KEY"):
        print("\nEnvironment Variable Check:")
        print(f"SERPER_API_KEY: {'✓ Set' if os.getenv('SERPER_API_KEY') else '✗ Missing'}")
        print("\nPlease ensure your .env file contains SERPER_API_KEY")
        sys.exit(1)

    # Ensure required NLTK data is available (quiet).
    initialize_nltk()

    # Tool imports are function-local so importing this module stays
    # cheap and tool dependencies load only when the agent actually runs.
    from tools.google_search import AdaptiveSearchTool
    from tools.web_scraper import WebScraperTool
    from tools.code_tools import CodeGeneratorTool, CodeAnalysisTool
    from tools.dataset_tool import DatasetTool
    from tools.content_tools import ContentGeneratorTool

    # Tool registry keyed by the name the agent uses to invoke each tool.
    tools = {
        "google_search": AdaptiveSearchTool(),
        "web_scraper": WebScraperTool(),
        "code_analysis": CodeAnalysisTool(),
        "code_generator": CodeGeneratorTool(),
        "dataset": DatasetTool(),
        "content_generator": ContentGeneratorTool(),
    }

    # Agent behaviour knobs; see AgentConfig for each field's semantics.
    config = AgentConfig(
        max_steps=10,
        min_confidence=0.7,
        timeout=300,
        learning_enabled=True,
        memory_path="agent_memory.db",
        parallel_execution=True,
        planning_enabled=True,
        pattern_learning_enabled=True,
    )
    agent = Agent(tools=tools, config=config)

    # One task per non-blank line (iterate the file directly instead of
    # materializing readlines()).
    with open(task_file_path, 'r', encoding='utf-8') as f:
        tasks = [line.strip() for line in f if line.strip()]

    # Run all tasks concurrently on a fresh event loop.
    results = asyncio.run(process_tasks(agent, tasks))

    # Pretty-print each task/result pair, separated by a rule.
    console = Console()
    formatter = PrettyFormatter()
    console.print("\n[bold]🤖 Web Research Agent Results[/bold]\n")
    for task, result in zip(tasks, results):
        formatter.format_task_result(task, result)
        console.print("\n" + "-" * 80 + "\n")

    # Persist the raw results; EnhancedJSONEncoder handles datetime/Enum.
    with open(output_file_path, 'w', encoding='utf-8') as f:
        json.dump(results, f, indent=2, cls=EnhancedJSONEncoder)

    console.print(f"\n[dim]Full results saved to: {output_file_path}[/dim]")
if __name__ == "__main__":
    import sys

    # Expect exactly two CLI arguments: the task file and the output path.
    args = sys.argv[1:]
    if len(args) != 2:
        print("Usage: python agent.py <task_file_path> <output_file_path>")
        sys.exit(1)
    main(*args)