feat(hypotheses): switch LLM analysis from Anthropic to Gemini

Uses google-genai SDK with gemini-2.5-flash-lite — same model already
used by the discovery pipeline, so no new secret needed (GOOGLE_API_KEY).
Removed ANTHROPIC_API_KEY from hypothesis-runner.yml.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Youssef Aitousarrah 2026-04-12 17:36:03 -07:00
parent 3dbcb3fa5b
commit 43fb186d0e
2 changed files with 10 additions and 12 deletions

View File

@ -49,7 +49,6 @@ jobs:
GH_TOKEN: ${{ secrets.GH_TOKEN }}
GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
FINNHUB_API_KEY: ${{ secrets.FINNHUB_API_KEY }}
ALPHA_VANTAGE_API_KEY: ${{ secrets.ALPHA_VANTAGE_API_KEY }}
FMP_API_KEY: ${{ secrets.FMP_API_KEY }}

View File

@ -175,22 +175,22 @@ def run_hypothesis(hyp: dict) -> bool:
def llm_analysis(hyp: dict, conclusion: dict, scanner_domain: str) -> Optional[str]:
"""
Ask Claude to interpret the experiment results and provide richer context.
Ask Gemini to interpret the experiment results and provide richer context.
Returns a markdown string to embed in the PR comment, or None if the API
call fails or ANTHROPIC_API_KEY is not set.
call fails or GOOGLE_API_KEY is not set.
The LLM does NOT override the programmatic decision — it adds nuance:
sample-size caveats, market-condition context, follow-up hypotheses.
"""
api_key = os.environ.get("ANTHROPIC_API_KEY")
api_key = os.environ.get("GOOGLE_API_KEY")
if not api_key:
return None
try:
import anthropic
from google import genai
except ImportError:
print(" anthropic SDK not installed, skipping LLM analysis", flush=True)
print(" google-genai SDK not installed, skipping LLM analysis", flush=True)
return None
hyp_metrics = conclusion["hypothesis"]
@ -230,13 +230,12 @@ Provide a concise analysis (3–5 sentences) covering:
Be direct. Do not restate the numbers — interpret them. Do not recommend merging or closing the PR."""
try:
client = anthropic.Anthropic(api_key=api_key)
message = client.messages.create(
model="claude-haiku-4-5-20251001",
max_tokens=512,
messages=[{"role": "user", "content": prompt}],
client = genai.Client(api_key=api_key)
response = client.models.generate_content(
model="gemini-2.5-flash-lite",
contents=prompt,
)
return message.content[0].text.strip()
return response.text.strip()
except Exception as e:
print(f" LLM analysis failed: {e}", flush=True)
return None