[add] log update

This commit is contained in:
kimheesu 2025-07-07 16:53:04 +09:00
parent eb5a95c74b
commit 036b0f64ad
7 changed files with 280 additions and 1 deletion

3
backend/.gitignore vendored
View File

@ -1,2 +1,3 @@
.env
wallet/
wallet/
alembic.ini

81
backend/alembic/env.py Normal file
View File

@ -0,0 +1,81 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
import sys
import os
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
# Add your model's MetaData object here for 'autogenerate' support.
# The backend package root is added to sys.path first so the project
# model modules below can be imported when alembic runs from anywhere.
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from sqlmodel import SQLModel
# Importing the model modules registers their tables on SQLModel.metadata,
# which is what autogenerate diffs against the live database.
from analysis.infra.db_models.analysis import Analysis
from member.infra.db_models.member import Member
target_metadata = SQLModel.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations without a live DB connection ('offline' mode).

    Only the ``sqlalchemy.url`` config value is needed; instead of
    executing against an Engine, the migration SQL is emitted to the
    script output, so no DBAPI has to be installed.
    """
    db_url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=db_url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations against a live database ('online' mode).

    Builds an Engine from the ``[alembic]`` config section and binds a
    real connection to the migration context before running.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with engine.connect() as conn:
        context.configure(
            connection=conn,
            target_metadata=target_metadata,
        )
        with context.begin_transaction():
            context.run_migrations()
# Entry point: alembic decides offline vs. online mode from how it was invoked
# (e.g. `alembic upgrade head --sql` runs offline).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,106 @@
"""Change TEXT columns to LONGTEXT for analysis reports
Revision ID: 001
Revises:
Create Date: 2025-07-07 16:30:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.mysql import LONGTEXT
# revision identifiers, used by Alembic.
# First revision of this migration chain (no down_revision).
revision: str = '001'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Change TEXT columns to LONGTEXT for better capacity.

    All free-text report columns on the ``analyses`` table receive the
    same TEXT -> LONGTEXT widening, so the column list is driven by a
    single tuple instead of eight copy-pasted alter_column calls.
    """
    report_columns = (
        'market_report',
        'sentiment_report',
        'news_report',
        'fundamentals_report',
        'trader_investment_plan',
        'final_trade_decision',
        'final_report',
        'error_message',
    )
    for column in report_columns:
        op.alter_column('analyses', column,
                        existing_type=sa.TEXT(),
                        type_=LONGTEXT(),
                        existing_nullable=True)
def downgrade() -> None:
    """Revert LONGTEXT columns back to TEXT.

    Exact inverse of upgrade(): the same set of ``analyses`` report
    columns is narrowed LONGTEXT -> TEXT, driven by one tuple instead
    of eight duplicated alter_column calls.
    """
    report_columns = (
        'market_report',
        'sentiment_report',
        'news_report',
        'fundamentals_report',
        'trader_investment_plan',
        'final_trade_decision',
        'final_report',
        'error_message',
    )
    for column in report_columns:
        op.alter_column('analyses', column,
                        existing_type=LONGTEXT(),
                        type_=sa.TEXT(),
                        existing_nullable=True)

View File

@ -241,38 +241,63 @@ class AnalysisService:
async def _process_analysis_chunk(self, analysis_id: str, chunk: dict):
    """Persist intermediate analysis results carried in *chunk*.

    Each tracked field that is present and non-empty in the chunk is
    collected into an update dict and saved through the analysis
    repository in one commit. Present-but-empty fields are only logged.

    Args:
        analysis_id: Primary key of the analysis row being updated.
        chunk: Partial result dict emitted by the analysis pipeline.
    """
    logger.info(f"🔍 청크 키 확인: {list(chunk.keys())}")
    # Fields copied verbatim from the chunk into the analysis record.
    # Order matters only for log-output stability.
    tracked_fields = (
        "market_report",        # individual analyst reports
        "sentiment_report",
        "news_report",
        "fundamentals_report",
        "investment_debate_state",  # team decision-process states
        "trader_investment_plan",
        "risk_debate_state",
    )
    updates = {}
    for field in tracked_fields:
        if field not in chunk:
            continue
        value = chunk[field]
        if value:
            logger.info(f"✅ {field} 업데이트")
            updates[field] = value
        else:
            # Key present but falsy — worth logging, not worth saving.
            logger.info(f"⚠️ {field} 존재하지만 값이 비어있음: {repr(value)}")
    if updates:
        logger.info(f"💾 업데이트할 필드들: {list(updates.keys())}")
        # Build an AnalysisVO carrying the id so the repo knows which row.
        updates["id"] = analysis_id
        updates_vo = AnalysisVO(**updates)
        self.analysis_repo.update(updates_vo)
        self.session.commit()
    else:
        logger.info("❌ 업데이트할 데이터가 없음")
def _generate_final_report(self, final_state: dict) -> str:
"""최종 통합 보고서를 생성하는 메서드"""

View File

@ -65,6 +65,7 @@ six==1.17.0
sniffio==1.3.1
SQLAlchemy==2.0.41
sqlmodel==0.0.24
alembic==1.14.0
starlette==0.46.2
typer==0.16.0
typing-inspection==0.4.1

39
backend/run_migration.py Normal file
View File

@ -0,0 +1,39 @@
#!/usr/bin/env python3
"""
데이터베이스 마이그레이션 실행 스크립트
"""
import subprocess
import sys
import os
def run_migration():
    """Run `alembic upgrade head` for the backend database.

    Returns:
        bool: True when the migration completed successfully,
        False on a non-zero alembic exit code or any exception.
    """
    try:
        backend_dir = os.path.dirname(os.path.abspath(__file__))
        print("🔄 데이터베이스 마이그레이션을 실행합니다...")
        # Run alembic from the backend directory (where alembic.ini lives)
        # via cwd= instead of os.chdir, so the process-wide working
        # directory is not mutated as a side effect.
        result = subprocess.run(
            [sys.executable, "-m", "alembic", "upgrade", "head"],
            capture_output=True,
            text=True,
            cwd=backend_dir,
        )
        if result.returncode == 0:
            print("✅ 마이그레이션이 성공적으로 완료되었습니다!")
            print(result.stdout)
            return True
        print("❌ 마이그레이션 실행 중 오류가 발생했습니다:")
        print(result.stderr)
        return False
    except Exception as e:
        print(f"❌ 마이그레이션 실행 중 예외가 발생했습니다: {e}")
        return False
if __name__ == "__main__":
success = run_migration()
sys.exit(0 if success else 1)