cd41e9e33e
- **Implement `MlxModelService` for local LLM backend.**
- **Introduce `DatabaseService` for MySQL integration.**
- **Add `HistoryService` to manage conversation context.**
- **Set up CLI interface via `CliUiService`.**
- **Establish EventBus for token streaming.**
- **Include conversation repository for data persistence.**
- **Add environment-based configuration management.**
- **Draft IoC architectural plan.**
82 lines
3.2 KiB
Python
82 lines
3.2 KiB
Python
from __future__ import annotations
|
|
from typing import TYPE_CHECKING
|
|
|
|
if TYPE_CHECKING:
|
|
from services.db.conversation_repository import ConversationRepository
|
|
from services.chat.compact_service import CompactService
|
|
|
|
|
|
class HistoryService:
    """Manages the conversation history handed to the model.

    Holds the system prompt, the rolling list of turns, and an optional
    summary produced by compacting older turns. Persistence (via
    *repository*) and summarization (via *compact_service*) are optional
    collaborators injected at construction time.
    """

    def __init__(
        self,
        system_prompt: str,
        max_turns: int,
        compact_threshold: int,
        repository: ConversationRepository | None = None,
        compact_service: CompactService | None = None,
    ):
        self._system_prompt = system_prompt
        self._max_turns = max_turns
        self._compact_threshold = compact_threshold
        self._repository = repository
        self._compact_service = compact_service

        # Runtime state: compacted summary, live turns, active conversation row.
        self._summary: str | None = None
        self._turns: list[dict] = []
        self._conversation_id: int | None = None

        # With a repository present, resume the latest conversation (or open one).
        if repository:
            self._load_or_create()

    # ── DB initialization ────────────────────────────────────────

    def _load_or_create(self) -> None:
        """Resume the most recent conversation, or create a fresh one."""
        latest = self._repository.get_latest_conversation_id()
        if not latest:
            # Nothing on record yet — open a brand-new conversation row.
            self._conversation_id = self._repository.create_conversation()
            return

        # Restore the last summary plus the turns recorded after it.
        summary_id, summary = self._repository.get_latest_summary(latest)
        self._summary = summary
        self._turns = self._repository.load_turns_after(
            latest, summary_id, self._compact_threshold * 2
        )
        self._conversation_id = latest

    # ── Public interface ─────────────────────────────────────────

    def add(self, role: str, content: str) -> None:
        """Append one turn, mirror it to the repository, and maybe compact.

        Compaction is only attempted after assistant turns, i.e. at the
        end of a complete user/assistant exchange.
        """
        self._turns.append({"role": role, "content": content})
        if self._repository and self._conversation_id:
            self._repository.save_message(self._conversation_id, role, content)
        if role == "assistant":
            self._maybe_compact()

    def get(self) -> list[dict]:
        """Return the full message list: system prompt, optional summary, turns."""
        history: list[dict] = [{"role": "system", "content": self._system_prompt}]
        if self._summary:
            history.append(
                {"role": "system", "content": f"[이전 대화 요약]\n{self._summary}"}
            )
        history += self._turns
        return history

    def reset(self, new_conversation_id: int) -> None:
        """Drop all in-memory history and switch to a new conversation id."""
        self._summary = None
        self._turns = []
        self._conversation_id = new_conversation_id

    # ── Internal ─────────────────────────────────────────────────

    def _maybe_compact(self) -> None:
        """Summarize the older half of the turns once past the threshold."""
        if not self._compact_service:
            return
        if len(self._turns) <= self._compact_threshold:
            return

        # Keep the newer half live; hand the older half to the summarizer.
        split = len(self._turns) // 2
        stale, self._turns = self._turns[:split], self._turns[split:]

        print("\n[대화 내용을 압축하는 중...]\n", flush=True)
        self._summary = self._compact_service.summarize(stale)

        if self._repository and self._conversation_id:
            self._repository.save_summary(self._conversation_id, self._summary)