- **Bootstrap IoC-based architecture with modular services.**
- **Implement `MlxModelService` for local LLM backend.**
- **Introduce `DatabaseService` for MySQL integration.**
- **Add `HistoryService` to manage conversation context.**
- **Set up CLI interface via `CliUiService`.**
- **Establish EventBus for token streaming.**
- **Include conversation repository for data persistence.**
- **Add environment-based configuration management.**
- **Draft IoC architectural plan.**
This commit is contained in:
@@ -0,0 +1,35 @@
|
||||
from services.model.base import AbstractModelService
|
||||
from services.chat.history_service import HistoryService
|
||||
from services.events.event_bus import EventBus
|
||||
|
||||
|
||||
class ChatService:
    """Conversation orchestration service.

    Streams model output token-by-token over the event bus while keeping
    the conversation history up to date.
    """

    # Event names published on the bus during a streamed reply.
    EVENT_TOKEN = "stream.token"
    EVENT_END = "stream.end"

    def __init__(
        self,
        model: AbstractModelService,
        history: HistoryService,
        event_bus: EventBus,
        max_tokens: int,
    ):
        # Collaborators are injected (IoC); this service holds no state
        # beyond these references.
        self._model = model
        self._history = history
        self._event_bus = event_bus
        self._max_tokens = max_tokens

    def respond(self, user_input: str) -> str:
        """Record *user_input*, stream a model reply, and return the full text.

        Each generated chunk is published as EVENT_TOKEN; EVENT_END is
        published once the stream is exhausted. The assistant reply is
        appended to history before returning.
        """
        self._history.add("user", user_input)
        prompt = self._model.build_prompt(self._history.get())

        # Accumulate chunks in a list and join once — avoids quadratic
        # string concatenation while producing the identical result.
        pieces: list[str] = []
        for chunk in self._model.stream(prompt, self._max_tokens):
            self._event_bus.publish(self.EVENT_TOKEN, chunk)
            pieces.append(chunk)
        reply = "".join(pieces)

        self._event_bus.publish(self.EVENT_END)
        self._history.add("assistant", reply)
        return reply
|
||||
@@ -0,0 +1,18 @@
|
||||
from services.model.base import AbstractModelService
|
||||
|
||||
|
||||
class CompactService:
    """Summarizes older conversation turns using the LLM."""

    def __init__(self, model: AbstractModelService, max_tokens: int = 512):
        self._model = model
        self._max_tokens = max_tokens

    def summarize(self, turns: list[dict]) -> str:
        """Collapse *turns* into a single summary string.

        Builds a "role: content" transcript, asks the model for a concise
        Korean summary, and returns the joined streamed output.
        """
        transcript = "\n".join(f"{t['role']}: {t['content']}" for t in turns)

        system_msg = {"role": "system", "content": "당신은 대화 요약 전문가입니다."}
        user_msg = {
            "role": "user",
            "content": f"다음 대화의 핵심 내용을 한국어로 간결하게 요약해주세요:\n\n{transcript}",
        }

        prompt = self._model.build_prompt([system_msg, user_msg])
        tokens = self._model.stream(prompt, self._max_tokens)
        return "".join(tokens)
|
||||
@@ -0,0 +1,81 @@
|
||||
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from services.chat.compact_service import CompactService
    from services.db.conversation_repository import ConversationRepository


class HistoryService:
    """Manages conversation history: system prompt, rolling summary, and recent turns."""

    def __init__(
        self,
        system_prompt: str,
        max_turns: int,
        compact_threshold: int,
        repository: ConversationRepository | None = None,
        compact_service: CompactService | None = None,
    ):
        self._system_prompt = system_prompt
        # NOTE(review): _max_turns is stored but not consulted anywhere in
        # this class — confirm intended use.
        self._max_turns = max_turns
        self._compact_threshold = compact_threshold
        self._repository = repository
        self._compact_service = compact_service
        self._summary: str | None = None
        self._turns: list[dict] = []
        self._conversation_id: int | None = None

        # With a repository wired in, resume the latest conversation
        # (or open a fresh one) immediately.
        if repository:
            self._load_or_create()

    # ── DB initialization ────────────────────────────────────────

    def _load_or_create(self) -> None:
        """Resume the most recent conversation, or create a new one."""
        repo = self._repository
        conv_id = repo.get_latest_conversation_id()
        if not conv_id:
            # Nothing persisted yet — start a brand-new conversation.
            self._conversation_id = repo.create_conversation()
            return

        summary_id, summary = repo.get_latest_summary(conv_id)
        # Reload up to twice the compaction threshold worth of turns
        # recorded after the latest summary.
        self._turns = repo.load_turns_after(
            conv_id, summary_id, self._compact_threshold * 2
        )
        self._summary = summary
        self._conversation_id = conv_id

    # ── Public interface ─────────────────────────────────────────

    def add(self, role: str, content: str) -> None:
        """Append one turn; persist it and consider compaction after assistant replies."""
        self._turns.append({"role": role, "content": content})

        if self._repository and self._conversation_id:
            self._repository.save_message(self._conversation_id, role, content)

        # Compaction is only triggered once a full user/assistant exchange
        # has completed.
        if role == "assistant":
            self._maybe_compact()

    def get(self) -> list[dict]:
        """Return prompt messages: system prompt, optional summary, then turns."""
        messages: list[dict] = [{"role": "system", "content": self._system_prompt}]
        if self._summary:
            messages.append(
                {"role": "system", "content": f"[이전 대화 요약]\n{self._summary}"}
            )
        return messages + self._turns

    def reset(self, new_conversation_id: int) -> None:
        """Drop all in-memory state and switch to *new_conversation_id*."""
        self._summary = None
        self._turns = []
        self._conversation_id = new_conversation_id

    # ── Internal ─────────────────────────────────────────────────

    def _maybe_compact(self) -> None:
        """Summarize the older half of the turns once the threshold is exceeded."""
        if not self._compact_service:
            return
        if len(self._turns) <= self._compact_threshold:
            return

        # Keep the newer half verbatim; summarize the older half.
        half = len(self._turns) // 2
        older, self._turns = self._turns[:half], self._turns[half:]

        print("\n[대화 내용을 압축하는 중...]\n", flush=True)
        self._summary = self._compact_service.summarize(older)

        if self._repository and self._conversation_id:
            self._repository.save_summary(self._conversation_id, self._summary)
|
||||
Reference in New Issue
Block a user