- **Bootstrap IoC-based architecture with modular services.**

- **Implement `MlxModelService` for local LLM backend.**
- **Introduce `DatabaseService` for MySQL integration.**
- **Add `HistoryService` to manage conversation context.**
- **Set up CLI interface via `CliUiService`.**
- **Establish EventBus for token streaming.**
- **Include conversation repository for data persistence.**
- **Add environment-based configuration management.**
- **Draft IoC architectural plan.**
This commit is contained in:
sal
2026-04-25 01:14:37 +09:00
parent 3b087116c0
commit cd41e9e33e
26 changed files with 693 additions and 4 deletions
View File
View File
+35
View File
@@ -0,0 +1,35 @@
from services.model.base import AbstractModelService
from services.chat.history_service import HistoryService
from services.events.event_bus import EventBus
class ChatService:
    """Orchestrates one conversation turn: history, generation, and events."""

    # Event names published on the bus while a reply is being streamed.
    EVENT_TOKEN = "stream.token"
    EVENT_END = "stream.end"

    def __init__(
        self,
        model: AbstractModelService,
        history: HistoryService,
        event_bus: EventBus,
        max_tokens: int,
    ):
        self._model = model
        self._history = history
        self._event_bus = event_bus
        self._max_tokens = max_tokens

    def respond(self, user_input: str) -> str:
        """Record the user turn, stream the model's reply, and return its full text.

        Each generated token is published as EVENT_TOKEN; EVENT_END marks the
        end of the stream. The assistant reply is appended to the history.
        """
        self._history.add("user", user_input)
        prompt = self._model.build_prompt(self._history.get())
        pieces: list[str] = []
        for token in self._model.stream(prompt, self._max_tokens):
            self._event_bus.publish(self.EVENT_TOKEN, token)
            pieces.append(token)
        self._event_bus.publish(self.EVENT_END)
        reply = "".join(pieces)
        self._history.add("assistant", reply)
        return reply
+18
View File
@@ -0,0 +1,18 @@
from services.model.base import AbstractModelService
class CompactService:
    """Summarizes older conversation turns by asking the LLM for a digest."""

    def __init__(self, model: AbstractModelService, max_tokens: int = 512):
        self._model = model
        self._max_tokens = max_tokens

    def summarize(self, turns: list[dict]) -> str:
        """Return a concise Korean summary of *turns* produced by the model."""
        transcript = "\n".join(f"{t['role']}: {t['content']}" for t in turns)
        messages = [
            {"role": "system", "content": "당신은 대화 요약 전문가입니다."},
            {
                "role": "user",
                "content": f"다음 대화의 핵심 내용을 한국어로 간결하게 요약해주세요:\n\n{transcript}",
            },
        ]
        prompt = self._model.build_prompt(messages)
        chunks = self._model.stream(prompt, self._max_tokens)
        return "".join(chunks)
+81
View File
@@ -0,0 +1,81 @@
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from services.db.conversation_repository import ConversationRepository
from services.chat.compact_service import CompactService
class HistoryService:
    """Manages conversation history: system prompt, rolling summary, recent turns."""

    def __init__(
        self,
        system_prompt: str,
        max_turns: int,
        compact_threshold: int,
        repository: ConversationRepository | None = None,
        compact_service: CompactService | None = None,
    ):
        self._system_prompt = system_prompt
        self._max_turns = max_turns
        self._compact_threshold = compact_threshold
        self._repository = repository
        self._compact_service = compact_service
        # Rolling state: optional summary of older turns plus the recent turns.
        self._summary: str | None = None
        self._turns: list[dict] = []
        self._conversation_id: int | None = None
        if repository:
            self._load_or_create()

    # -- DB bootstrap ---------------------------------------------------
    def _load_or_create(self) -> None:
        """Resume the most recent conversation, or create a fresh one."""
        existing = self._repository.get_latest_conversation_id()
        if not existing:
            self._conversation_id = self._repository.create_conversation()
            return
        summary_id, summary = self._repository.get_latest_summary(existing)
        self._summary = summary
        self._turns = self._repository.load_turns_after(
            existing, summary_id, self._compact_threshold * 2
        )
        self._conversation_id = existing

    # -- public API -----------------------------------------------------
    def add(self, role: str, content: str) -> None:
        """Append one turn, persist it if possible, and compact after assistant replies."""
        self._turns.append({"role": role, "content": content})
        if self._repository and self._conversation_id:
            self._repository.save_message(self._conversation_id, role, content)
        if role == "assistant":
            self._maybe_compact()

    def get(self) -> list[dict]:
        """Return the system prompt (plus summary, if any) followed by recent turns."""
        messages = [{"role": "system", "content": self._system_prompt}]
        if self._summary:
            messages.append(
                {"role": "system", "content": f"[이전 대화 요약]\n{self._summary}"}
            )
        return messages + self._turns

    def reset(self, new_conversation_id: int) -> None:
        """Drop all in-memory state and switch to *new_conversation_id*."""
        self._summary = None
        self._turns = []
        self._conversation_id = new_conversation_id

    # -- internals ------------------------------------------------------
    def _maybe_compact(self) -> None:
        """Summarize the older half of the turns once the threshold is exceeded."""
        if not self._compact_service or len(self._turns) <= self._compact_threshold:
            return
        half = len(self._turns) // 2
        older, self._turns = self._turns[:half], self._turns[half:]
        print("\n[대화 내용을 압축하는 중...]\n", flush=True)
        self._summary = self._compact_service.summarize(older)
        if self._repository and self._conversation_id:
            self._repository.save_summary(self._conversation_id, self._summary)
View File
+64
View File
@@ -0,0 +1,64 @@
from __future__ import annotations
from services.db.mysql_service import DatabaseService
class ConversationRepository:
    """Repository encapsulating access to the td_conversations / td_messages tables."""

    # Both turns and summaries share the same insert statement.
    _INSERT_MESSAGE = (
        "INSERT INTO td_messages (conversation_id, role, content) VALUES (%s, %s, %s)"
    )

    def __init__(self, db: DatabaseService):
        self._db = db

    def create_conversation(self) -> int:
        """Insert a new conversation row and return its generated id."""
        return self._db.execute_write("INSERT INTO td_conversations () VALUES ()")

    def get_latest_conversation_id(self) -> int | None:
        """Return the id of the most recently created conversation, or None."""
        rows = self._db.execute(
            "SELECT id FROM td_conversations ORDER BY created_at DESC LIMIT 1"
        )
        if not rows:
            return None
        return rows[0]["id"]

    def save_message(self, conversation_id: int, role: str, content: str) -> None:
        """Persist one user/assistant turn."""
        self._db.execute_write(self._INSERT_MESSAGE, (conversation_id, role, content))

    def save_summary(self, conversation_id: int, summary: str) -> None:
        """Persist a summary as a message row with role 'summary'."""
        self._db.execute_write(self._INSERT_MESSAGE, (conversation_id, "summary", summary))

    def get_latest_summary(self, conversation_id: int) -> tuple[int | None, str | None]:
        """Return (id, content) of the newest summary message, or (None, None)."""
        rows = self._db.execute(
            """SELECT id, content FROM td_messages
               WHERE conversation_id = %s AND role = 'summary'
               ORDER BY created_at DESC LIMIT 1""",
            (conversation_id,),
        )
        if not rows:
            return None, None
        newest = rows[0]
        return newest["id"], newest["content"]

    def load_turns_after(
        self, conversation_id: int, after_id: int | None, limit: int
    ) -> list[dict]:
        """Return up to *limit* user/assistant turns after *after_id*, oldest first."""
        if after_id is None:
            rows = self._db.execute(
                """SELECT role, content FROM td_messages
                   WHERE conversation_id = %s AND role IN ('user', 'assistant')
                   ORDER BY created_at DESC LIMIT %s""",
                (conversation_id, limit),
            )
        else:
            rows = self._db.execute(
                """SELECT role, content FROM td_messages
                   WHERE conversation_id = %s AND id > %s AND role IN ('user', 'assistant')
                   ORDER BY created_at DESC LIMIT %s""",
                (conversation_id, after_id, limit),
            )
        # Queries fetch newest-first; callers expect chronological order.
        return rows[::-1]
+63
View File
@@ -0,0 +1,63 @@
from __future__ import annotations
from typing import Any
class DatabaseService:
    """Encapsulates a MySQL connection; degrades to no-ops when unconfigured.

    When no user is configured, or the connection attempt fails, every
    operation silently does nothing so the application can run without a
    database (graceful skip).
    """

    def __init__(self, host: str, port: int, db: str, user: str, password: str):
        # `db` / `passwd` are the (legacy) pymysql keyword names.
        self._config = dict(host=host, port=port, db=db, user=user, passwd=password)
        self._conn = None

    def connect(self) -> None:
        """Open the connection. Skips silently when no user is configured."""
        if not self._config["user"]:
            return
        try:
            import pymysql  # local import: the DB dependency is optional

            self._conn = pymysql.connect(**self._config)
        except Exception as e:  # broad by design — DB is an optional feature
            print(f"[DB] 연결 실패 (선택적 기능): {e}")

    def execute(self, sql: str, params: tuple = ()) -> list[dict[str, Any]]:
        """Run a query and return rows as dicts. Empty list when not connected."""
        if self._conn is None:
            return []
        cursor = self._conn.cursor()
        try:
            cursor.execute(sql, params)
            columns = [d[0] for d in cursor.description or []]
            return [dict(zip(columns, row)) for row in cursor.fetchall()]
        finally:
            # Fix: cursors were previously never closed (resource leak).
            cursor.close()

    def execute_write(self, sql: str, params: tuple = ()) -> int:
        """Run INSERT/UPDATE/DELETE, commit, and return lastrowid (0 if not connected)."""
        if self._conn is None:
            return 0
        cursor = self._conn.cursor()
        try:
            cursor.execute(sql, params)
            self._conn.commit()
            return cursor.lastrowid
        finally:
            cursor.close()

    def init_schema(self) -> None:
        """Create the td_conversations / td_messages tables if they do not exist."""
        if self._conn is None:
            return
        cursor = self._conn.cursor()
        try:
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS td_conversations (
                    id INT AUTO_INCREMENT PRIMARY KEY,
                    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
                )
            """)
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS td_messages (
                    id INT AUTO_INCREMENT PRIMARY KEY,
                    conversation_id INT NOT NULL,
                    role VARCHAR(20) NOT NULL,
                    content TEXT NOT NULL,
                    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (conversation_id) REFERENCES td_conversations(id)
                )
            """)
            self._conn.commit()
        finally:
            cursor.close()

    def close(self) -> None:
        """Close and forget the connection (safe to call repeatedly)."""
        if self._conn:
            self._conn.close()
            self._conn = None
View File
+19
View File
@@ -0,0 +1,19 @@
from collections import defaultdict
from typing import Callable
class EventBus:
    """Observer-pattern event bus mapping event names to subscriber callables."""

    def __init__(self):
        # event name -> handlers, kept in subscription order
        self._handlers: dict[str, list[Callable]] = {}

    def subscribe(self, event: str, handler: Callable) -> None:
        """Register *handler* to be invoked whenever *event* is published."""
        self._handlers.setdefault(event, []).append(handler)

    def unsubscribe(self, event: str, handler: Callable) -> None:
        """Remove *handler*; raises ValueError if it was never subscribed."""
        self._handlers.setdefault(event, []).remove(handler)

    def publish(self, event: str, *args, **kwargs) -> None:
        """Call every handler registered for *event* with the given arguments."""
        for notify in self._handlers.get(event, ()):
            notify(*args, **kwargs)
+15
View File
@@ -0,0 +1,15 @@
import sys
class StreamTokenHandler:
    """Writes each streamed token to stdout immediately, without a newline."""

    def __call__(self, token: str) -> None:
        sys.stdout.write(token)
        sys.stdout.flush()
class StreamEndHandler:
    """Emits a blank line (two newlines) when the token stream ends."""

    def __call__(self) -> None:
        sys.stdout.write("\n\n")
View File
+18
View File
@@ -0,0 +1,18 @@
from abc import ABC, abstractmethod
from typing import Iterator
class AbstractModelService(ABC):
    """Strategy interface for LLM backends.

    Concrete implementations provide model loading, prompt construction,
    and token streaming.
    """

    @abstractmethod
    def load(self) -> None:
        """Load the model into memory."""

    @abstractmethod
    def stream(self, prompt: str, max_tokens: int) -> Iterator[str]:
        """Yield response tokens for *prompt*, generating up to *max_tokens*."""

    @abstractmethod
    def build_prompt(self, history: list[dict]) -> str:
        """Convert the conversation history into the model's input format."""
+29
View File
@@ -0,0 +1,29 @@
from typing import Iterator
from services.model.base import AbstractModelService
class MlxModelService(AbstractModelService):
    """LLM Strategy implementation backed by the MLX local runtime."""

    def __init__(self, model_id: str):
        self._model_id = model_id
        # Populated by load(); None until then.
        self._model = None
        self._tokenizer = None

    def load(self) -> None:
        """Load the model and tokenizer into memory (downloads on first use)."""
        from mlx_lm import load

        print(f"모델 로딩 중: {self._model_id}")
        self._model, self._tokenizer = load(self._model_id)

    def build_prompt(self, history: list[dict]) -> str:
        """Render the chat history through the tokenizer's chat template."""
        rendered = self._tokenizer.apply_chat_template(
            history,
            tokenize=False,
            add_generation_prompt=True,
        )
        return rendered

    def stream(self, prompt: str, max_tokens: int) -> Iterator[str]:
        """Yield generated text chunks for *prompt*."""
        from mlx_lm import stream_generate

        generated = stream_generate(
            self._model, self._tokenizer, prompt=prompt, max_tokens=max_tokens
        )
        for piece in generated:
            yield piece.text
View File
+23
View File
@@ -0,0 +1,23 @@
class CliUiService:
    """CLI input/output service for the chatbot."""

    # Recognized commands (compared case-insensitively).
    _EXIT_COMMANDS = ("종료", "quit", "exit")
    _RESET_COMMANDS = ("reset", "clear", "초기화")

    def show_banner(self, model_id: str) -> None:
        """Print the startup banner with usage instructions."""
        separator = "=" * 50
        print(f"모델 로딩 중: {model_id}")
        print("(첫 실행 시 HuggingFace에서 자동 다운로드됩니다. 약 4.5GB)\n")
        print(separator)
        print("육아 & 금융 상담 챗봇 시작!")
        print("종료: '종료' / 'quit' / 'exit' 입력")
        print("초기화: 'reset' 또는 'clear' 입력")
        print(separator + "\n")

    def prompt_user(self) -> str:
        """Read one line of user input, stripped of surrounding whitespace."""
        return input("나: ").strip()

    def show_assistant_prefix(self) -> None:
        """Print the assistant label without a trailing newline."""
        print("\n도우미: ", end="", flush=True)

    def is_exit_command(self, text: str) -> bool:
        """True when *text* is one of the exit keywords."""
        return text.lower() in self._EXIT_COMMANDS

    def is_reset_command(self, text: str) -> bool:
        """True when *text* is one of the reset keywords."""
        return text.lower() in self._RESET_COMMANDS