Use 'quick_think_llm' instead of the hard-coded 'gpt-4.1-mini' as the model in the responses call
This commit is contained in:
parent
6321331d3a
commit
28eda0e11f
|
|
@ -707,7 +707,7 @@ def get_stock_news_openai(ticker, curr_date):
|
|||
client = OpenAI(base_url=config["backend_url"])
|
||||
|
||||
response = client.responses.create(
|
||||
model=config["shallow_thinker"],
|
||||
model=config["quick_think_llm"],
|
||||
input=[
|
||||
{
|
||||
"role": "system",
|
||||
|
|
@ -742,7 +742,7 @@ def get_global_news_openai(curr_date):
|
|||
client = OpenAI(base_url=config["backend_url"])
|
||||
|
||||
response = client.responses.create(
|
||||
model=config["shallow_thinker"],
|
||||
model=config["quick_think_llm"],
|
||||
input=[
|
||||
{
|
||||
"role": "system",
|
||||
|
|
@ -777,7 +777,7 @@ def get_fundamentals_openai(ticker, curr_date):
|
|||
client = OpenAI(base_url=config["backend_url"])
|
||||
|
||||
response = client.responses.create(
|
||||
model=config["shallow_thinker"],
|
||||
model=config["quick_think_llm"],
|
||||
input=[
|
||||
{
|
||||
"role": "system",
|
||||
|
|
|
|||
Loading…
Reference in New Issue