add frontend

This commit is contained in:
Marvin Gabler 2025-10-21 19:51:29 +02:00
parent 2b37cddd1d
commit b6275e731d
3 changed files with 114 additions and 7 deletions

View File

@@ -0,0 +1,65 @@
# Bug Fix: ChromaDB Collection Name Collision
## Issue Description
When running multiple analyses through the API (either concurrently or sequentially), the system would fail with:
```
chromadb.errors.InternalError: Collection [bull_memory] already exists
```
### Root Cause
The `TradingAgentsGraph` class was creating ChromaDB memory collections with **hardcoded names**:
- `bull_memory`
- `bear_memory`
- `trader_memory`
- `invest_judge_memory`
- `risk_manager_memory`
When multiple analyses ran (even for different tickers), they all tried to create collections with the same names, causing ChromaDB to reject duplicate collection creation.
**Location of the bug:**
- `tradingagents/graph/trading_graph.py` lines 90-94
- `tradingagents/agents/utils/memory.py` line 14
## Solution Implemented
### Changes Made
1. **Modified `TradingAgentsGraph.__init__`** (`tradingagents/graph/trading_graph.py`):
- Added optional `analysis_id` parameter
- Collection names now include the analysis ID as a suffix: `bull_memory_{analysis_id}`
- When `analysis_id` is None, collections use original names (backward compatibility)
2. **Modified `state_manager.py`** (`api/state_manager.py`):
- Pass the unique `analysis_id` when creating `TradingAgentsGraph`
- Added cleanup in `finally` block to delete collections after analysis completes
3. **Added cleanup method** (`tradingagents/graph/trading_graph.py`):
- New `cleanup_memories()` method to delete ChromaDB collections
- Called after each analysis (success or failure) to prevent memory leaks
- Prevents accumulation of old collections in the database
### Backward Compatibility
The fix is **fully backward compatible**:
- CLI usage (`cli/main.py`) - continues to work without `analysis_id`
- Standalone usage (`main.py`) - continues to work without `analysis_id`
- API usage - now provides unique `analysis_id` for isolation
## Testing Recommendations
1. **Test concurrent analyses**: Run multiple analyses simultaneously for the same or different tickers
2. **Test sequential analyses**: Run multiple analyses one after another for the same ticker
3. **Test failure scenarios**: Ensure collections are cleaned up even when analysis fails
4. **Test CLI**: Verify CLI still works without regression
## Benefits
✅ Multiple analyses can now run concurrently without conflicts
✅ Same ticker can be analyzed multiple times without errors
✅ Memory collections are properly cleaned up after each analysis
✅ No breaking changes to existing code
✅ Prevents ChromaDB from accumulating stale collections

View File

@@ -258,11 +258,12 @@ class AnalysisExecutor:
self._update_status(analysis_id, status="running", progress=0)
logger.info(f"Analysis {analysis_id}: Initializing trading graph...")
# Initialize the graph
# Initialize the graph with unique analysis_id for memory isolation
graph = TradingAgentsGraph(
selected_analysts=selected_analysts,
config=config,
debug=False,
analysis_id=analysis_id,
)
# Create initial state
@@ -380,6 +381,14 @@ class AnalysisExecutor:
analysis_id, status="failed", error_message=error_msg
)
self._store_log(analysis_id, "System", f"Error: {error_msg}\n\nTraceback:\n{error_trace}")
finally:
# Clean up ChromaDB collections to prevent memory leaks
try:
if 'graph' in locals():
graph.cleanup_memories()
logger.info(f"Analysis {analysis_id}: Cleaned up memory collections")
except Exception as cleanup_error:
logger.warning(f"Analysis {analysis_id}: Failed to cleanup memories: {cleanup_error}")
def _get_agent_order(self, selected_analysts: List[str]) -> List[str]:
"""Get the order of agents for progress tracking."""

View File

@@ -53,6 +53,7 @@ class TradingAgentsGraph:
selected_analysts=["market", "social", "news", "fundamentals"],
debug=False,
config: Dict[str, Any] = None,
analysis_id: Optional[str] = None,
):
"""Initialize the trading agents graph and components.
@@ -60,9 +61,11 @@
selected_analysts: List of analyst types to include
debug: Whether to run in debug mode
config: Configuration dictionary. If None, uses default config
analysis_id: Optional unique identifier for this analysis (makes memory collections unique)
"""
self.debug = debug
self.config = config or DEFAULT_CONFIG
self.analysis_id = analysis_id
# Update the interface's config
set_config(self.config)
@@ -86,12 +89,14 @@
else:
raise ValueError(f"Unsupported LLM provider: {self.config['llm_provider']}")
# Initialize memories
self.bull_memory = FinancialSituationMemory("bull_memory", self.config)
self.bear_memory = FinancialSituationMemory("bear_memory", self.config)
self.trader_memory = FinancialSituationMemory("trader_memory", self.config)
self.invest_judge_memory = FinancialSituationMemory("invest_judge_memory", self.config)
self.risk_manager_memory = FinancialSituationMemory("risk_manager_memory", self.config)
# Initialize memories with unique names per analysis
# This prevents "Collection already exists" errors when running multiple analyses
memory_suffix = f"_{analysis_id}" if analysis_id else ""
self.bull_memory = FinancialSituationMemory(f"bull_memory{memory_suffix}", self.config)
self.bear_memory = FinancialSituationMemory(f"bear_memory{memory_suffix}", self.config)
self.trader_memory = FinancialSituationMemory(f"trader_memory{memory_suffix}", self.config)
self.invest_judge_memory = FinancialSituationMemory(f"invest_judge_memory{memory_suffix}", self.config)
self.risk_manager_memory = FinancialSituationMemory(f"risk_manager_memory{memory_suffix}", self.config)
# Create tool nodes
self.tool_nodes = self._create_tool_nodes()
@@ -240,3 +245,31 @@
def process_signal(self, full_signal):
"""Process a signal to extract the core decision."""
return self.signal_processor.process_signal(full_signal)
def cleanup_memories(self):
"""Clean up ChromaDB collections for this analysis to prevent memory leaks."""
if not self.analysis_id:
return # Only cleanup if we have a specific analysis_id
try:
memory_suffix = f"_{self.analysis_id}"
collections_to_delete = [
f"bull_memory{memory_suffix}",
f"bear_memory{memory_suffix}",
f"trader_memory{memory_suffix}",
f"invest_judge_memory{memory_suffix}",
f"risk_manager_memory{memory_suffix}",
]
# Get the chroma client from one of the memories
chroma_client = self.bull_memory.chroma_client
for collection_name in collections_to_delete:
try:
chroma_client.delete_collection(name=collection_name)
except Exception as e:
# Ignore errors if collection doesn't exist
pass
except Exception as e:
# Don't fail the analysis if cleanup fails
print(f"Warning: Failed to cleanup memory collections: {e}")