cd41e9e33e
- **Implement `MlxModelService` for local LLM backend.** - **Introduce `DatabaseService` for MySQL integration.** - **Add `HistoryService` to manage conversation context.** - **Set up CLI interface via `CliUiService`.** - **Establish EventBus for token streaming.** - **Include conversation repository for data persistence.** - **Add environment-based configuration management.** - **Draft IoC architectural plan.**
36 lines
1.0 KiB
Python
36 lines
1.0 KiB
Python
from services.model.base import AbstractModelService
|
|
from services.chat.history_service import HistoryService
|
|
from services.events.event_bus import EventBus
|
|
|
|
|
|
class ChatService:
    """Conversation orchestration service.

    Coordinates one dialogue turn: records the user's message, asks the
    model to build a prompt from the accumulated history, streams the
    model's tokens over the event bus, and stores the finished reply
    back into history.
    """

    # Event names published on the bus while a response is streaming.
    EVENT_TOKEN = "stream.token"  # payload: one generated token (str)
    EVENT_END = "stream.end"      # no payload; marks end of stream

    def __init__(
        self,
        model: AbstractModelService,
        history: HistoryService,
        event_bus: EventBus,
        max_tokens: int,
    ):
        """Wire up collaborators.

        Args:
            model: Backend that builds prompts and streams completions.
            history: Conversation-context store (role/content turns).
            event_bus: Pub/sub channel for token streaming events.
            max_tokens: Generation cap passed to the model per turn.
        """
        self._model = model
        self._history = history
        self._event_bus = event_bus
        self._max_tokens = max_tokens

    def respond(self, user_input: str) -> str:
        """Generate and return the assistant's reply to *user_input*.

        Side effects: appends the user turn and the assistant turn to
        history, publishes EVENT_TOKEN once per streamed token, and
        publishes EVENT_END when the stream finishes.
        """
        self._history.add("user", user_input)
        prompt = self._model.build_prompt(self._history.get())

        # Collect tokens in a list and join once at the end: repeated
        # `str +=` in a loop is quadratic in the worst case for long
        # responses, while join is a single linear pass.
        tokens: list[str] = []
        for token in self._model.stream(prompt, self._max_tokens):
            self._event_bus.publish(self.EVENT_TOKEN, token)
            tokens.append(token)
        response_text = "".join(tokens)

        self._event_bus.publish(self.EVENT_END)
        self._history.add("assistant", response_text)
        return response_text