commit
454d1b9d3c
|
|
@ -1,2 +1,5 @@
|
|||
ALPHA_VANTAGE_API_KEY=alpha_vantage_api_key_placeholder
|
||||
OPENAI_API_KEY=openai_api_key_placeholder
|
||||
OPENAI_API_KEY=openai_api_key_placeholder
|
||||
TELEGRAM_API_ID=telegram_api_placeholder
|
||||
TELEGRAM_API_HASH=telegram_api_hash_placeholder
|
||||
TELEGRAM_SESSION_NAME=telegram_session_name_placeholder
|
||||
|
|
@ -16,7 +16,7 @@ def create_news_analyst(llm):
|
|||
]
|
||||
|
||||
system_message = (
|
||||
"You are a news researcher tasked with analyzing recent news and trends over the past week. Please write a comprehensive report of the current state of the world that is relevant for trading and macroeconomics. Use the available tools: get_news(query, start_date, end_date) for company-specific or targeted news searches, and get_global_news(curr_date, look_back_days, limit) for broader macroeconomic news. Do not simply state the trends are mixed, provide detailed and finegrained analysis and insights that may help traders make decisions."
|
||||
"You are a news researcher tasked with analyzing recent news and trends over the past week. Please write a comprehensive report of the current state of the world that is relevant for trading and macroeconomics. Use the available tools: get_news(query, start_date, end_date) for crypto-specific or targeted news searches, and get_global_news(curr_date, look_back_days, limit) for broader macroeconomic news. Do not simply state the trends are mixed, provide detailed and finegrained analysis and insights that may help traders make decisions."
|
||||
+ """ Make sure to append a Markdown table at the end of the report to organize key points in the report, organized and easy to read."""
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ from typing import Annotated
|
|||
from .local import get_YFin_data, get_finnhub_news, get_finnhub_company_insider_sentiment, get_finnhub_company_insider_transactions, get_simfin_balance_sheet, get_simfin_cashflow, get_simfin_income_statements, get_reddit_global_news, get_reddit_company_news
|
||||
from .y_finance import get_YFin_data_online, get_stock_stats_indicators_window, get_balance_sheet as get_yfinance_balance_sheet, get_cashflow as get_yfinance_cashflow, get_income_statement as get_yfinance_income_statement, get_insider_transactions as get_yfinance_insider_transactions
|
||||
from .google import get_google_news
|
||||
from .openai import get_stock_news_openai, get_global_news_openai, get_fundamentals_openai
|
||||
from .openai import get_crypto_news_openai, get_global_news_openai, get_fundamentals_openai
|
||||
from .alpha_vantage import (
|
||||
get_stock as get_alpha_vantage_stock,
|
||||
get_indicator as get_alpha_vantage_indicator,
|
||||
|
|
@ -16,6 +16,7 @@ from .alpha_vantage import (
|
|||
get_news as get_alpha_vantage_news
|
||||
)
|
||||
from .alpha_vantage_common import AlphaVantageRateLimitError
|
||||
from .telegram import get_crypto_news_telegram
|
||||
|
||||
# Configuration and routing logic
|
||||
from .config import get_config
|
||||
|
|
@ -98,13 +99,14 @@ VENDOR_METHODS = {
|
|||
# news_data
|
||||
"get_news": {
|
||||
"alpha_vantage": get_alpha_vantage_news,
|
||||
"openai": get_stock_news_openai,
|
||||
"openai": get_crypto_news_openai,
|
||||
"google": get_google_news,
|
||||
"local": [get_finnhub_news, get_reddit_company_news, get_google_news],
|
||||
# "local": [get_finnhub_news, get_reddit_company_news, get_google_news],
|
||||
},
|
||||
"get_global_news": {
|
||||
"openai": get_global_news_openai,
|
||||
"local": get_reddit_global_news
|
||||
"telegram": get_crypto_news_telegram,
|
||||
# "local": get_reddit_global_news
|
||||
},
|
||||
"get_insider_sentiment": {
|
||||
"local": get_finnhub_company_insider_sentiment
|
||||
|
|
|
|||
|
|
@ -36,6 +36,39 @@ def get_stock_news_openai(query, start_date, end_date):
|
|||
|
||||
return response.output[1].content[0].text
|
||||
|
||||
def get_crypto_news_openai(query, start_date, end_date):
    """Search the web for crypto-related news about *query* via OpenAI.

    Uses the Responses API with the web-search preview tool and returns the
    model's answer text covering start_date..end_date (both 'YYYY-MM-DD').
    """
    cfg = get_config()
    api_client = OpenAI(base_url=cfg["backend_url"])

    # Single system turn asking for date-bounded news.
    prompt = (
        f"Can you search News for {query} from {start_date} to {end_date}? "
        "Make sure you only get the data posted during that period."
    )
    system_turn = {
        "role": "system",
        "content": [{"type": "input_text", "text": prompt}],
    }
    # Low search-context keeps token usage (and cost) down.
    search_tool = {
        "type": "web_search_preview",
        "user_location": {"type": "approximate"},
        "search_context_size": "low",
    }

    response = api_client.responses.create(
        model=cfg["quick_think_llm"],
        input=[system_turn],
        text={"format": {"type": "text"}},
        reasoning={},
        tools=[search_tool],
        temperature=1,
        max_output_tokens=4096,
        top_p=1,
        store=True,
    )

    # assumes output[0] is the web-search call item and output[1] the
    # message item — same indexing as the original; TODO confirm for all models
    return response.output[1].content[0].text
|
||||
|
||||
def get_global_news_openai(curr_date, look_back_days=7, limit=5):
|
||||
config = get_config()
|
||||
|
|
|
|||
|
|
@ -0,0 +1,58 @@
|
|||
import asyncio
|
||||
from telethon import TelegramClient
|
||||
from datetime import datetime, timedelta, timezone
|
||||
import os
|
||||
|
||||
def get_api_credentials():
    """Read Telegram API credentials from the environment.

    Returns:
        tuple[int, str, str]: (api_id, api_hash, session_name).

    Raises:
        ValueError: if TELEGRAM_API_ID is unset or not an integer — the
            bare int('') the previous version raised produced an opaque
            "invalid literal" message with no hint about the env var.
    """
    raw_api_id = os.getenv("TELEGRAM_API_ID", "")
    try:
        api_id = int(raw_api_id)
    except ValueError:
        # Same exception type as before (callers catching ValueError still
        # work), but now names the variable that is missing/malformed.
        raise ValueError(
            f"TELEGRAM_API_ID environment variable must be an integer, got {raw_api_id!r}"
        ) from None
    api_hash = os.getenv("TELEGRAM_API_HASH", "")
    session_name = os.getenv("TELEGRAM_SESSION_NAME", "")
    return api_id, api_hash, session_name
|
||||
|
||||
async def _get_channel_history_async(start_date_str, end_date_str):
    """
    The internal async logic that does the actual work.

    Pulls text messages from the @WatcherGuru Telegram channel posted
    between start_date_str and end_date_str (both 'YYYY-MM-DD', inclusive)
    and returns them as one string: a '#'-prefixed summary header followed
    by one '[YYYY-MM-DD] text' line per message.
    """

    # Channel to scrape; hard-coded for now.
    username = "WatcherGuru"

    api_id, api_hash, session_name = get_api_credentials()

    # 1. Start the client using 'async with'
    # This automatically handles connecting AND disconnecting (releasing the DB lock)
    async with TelegramClient(session_name, api_id, api_hash) as client:

        # Date parsing logic: both bounds are interpreted as UTC midnights
        # so they compare correctly against Telethon's aware message.date.
        start_date = datetime.strptime(start_date_str, '%Y-%m-%d').replace(tzinfo=timezone.utc)
        end_date_obj = datetime.strptime(end_date_str, '%Y-%m-%d').replace(tzinfo=timezone.utc)
        lookback_days = (end_date_obj - start_date).days
        # Extend the end bound to 23:59:59 so the whole end day is included.
        end_date = end_date_obj + timedelta(days=1) - timedelta(seconds=1)

        formatted_log = ""

        # Fetching messages: iter_messages starts at end_date and walks
        # backwards in time, so the first message older than start_date
        # means the window is exhausted and we can stop early.
        n_records = 0
        async for message in client.iter_messages(username, offset_date=end_date, reverse=False):
            if message.date < start_date:
                break

            # Skip media-only messages (no .text).
            if message.text:
                date_str = message.date.strftime('%Y-%m-%d')
                # Flatten multi-line messages onto a single log line.
                clean_text = message.text.replace('\n', ' ')
                formatted_log += f"[{date_str}] {clean_text}\n"
                n_records += 1

        # NOTE(review): datetime.now() is naive local time while the message
        # dates above are UTC — confirm this mix is intended for the header.
        intro = f"# News data from Telegram channel @{username} from {start_date_str} to {end_date_str} ({lookback_days} days):\n# Total records: {n_records}\n# Data retrieved on: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n\n"

        return intro + formatted_log
|
||||
|
||||
def get_crypto_news_telegram(curr_date, look_back_days=7, limit=100):
    """Return channel news for the window ending at curr_date ('YYYY-MM-DD').

    The window spans look_back_days days back from curr_date. The *limit*
    parameter is accepted for vendor-interface compatibility but is
    currently ignored.
    """
    end_dt = datetime.strptime(curr_date, '%Y-%m-%d')
    start_dt = end_dt - timedelta(days=look_back_days)

    # Hand the async worker its dates back in 'YYYY-MM-DD' string form.
    window = (start_dt.strftime('%Y-%m-%d'), end_dt.strftime('%Y-%m-%d'))
    return asyncio.run(_get_channel_history_async(*window))
|
||||
Loading…
Reference in New Issue