- **Bootstrap IoC-based architecture with modular services.**
- **Implement `MlxModelService` for local LLM backend.**
- **Introduce `DatabaseService` for MySQL integration.**
- **Add `HistoryService` to manage conversation context.**
- **Set up CLI interface via `CliUiService`.**
- **Establish EventBus for token streaming.**
- **Include conversation repository for data persistence.**
- **Add environment-based configuration management.**
- **Draft IoC architectural plan.**
This commit is contained in:
@@ -0,0 +1,18 @@
|
||||
from services.model.base import AbstractModelService
|
||||
|
||||
|
||||
class CompactService:
    """Condense older conversation turns into a short summary via the LLM.

    The injected model service renders the chat-style prompt and streams
    the completion; the streamed chunks are concatenated into the summary.
    """

    def __init__(self, model: AbstractModelService, max_tokens: int = 512):
        # Backend used both to build the prompt and to stream tokens.
        self._model = model
        # Upper bound on the number of tokens generated for the summary.
        self._max_tokens = max_tokens

    def summarize(self, turns: list[dict]) -> str:
        """Return a concise Korean summary of *turns*.

        Each turn is a dict expected to carry 'role' and 'content' keys;
        the turns are flattened into a plain-text transcript and sent to
        the model with a summarization instruction.
        """
        lines = [f"{turn['role']}: {turn['content']}" for turn in turns]
        transcript = "\n".join(lines)
        messages = [
            {"role": "system", "content": "당신은 대화 요약 전문가입니다."},
            {"role": "user", "content": f"다음 대화의 핵심 내용을 한국어로 간결하게 요약해주세요:\n\n{transcript}"},
        ]
        rendered = self._model.build_prompt(messages)
        chunks = self._model.stream(rendered, self._max_tokens)
        return "".join(chunks)
|
||||
Reference in New Issue
Block a user