llm-chat/tests/test_cli.py

from io import StringIO
from unittest.mock import MagicMock
from pytest import MonkeyPatch
from rich.console import Console
from typer.testing import CliRunner
import llm_chat
from llm_chat.chat import ChatProtocol
from llm_chat.cli import app
from llm_chat.settings import OpenAISettings

runner = CliRunner()


class ChatFake:
    """Fake chat class for testing."""

    received_messages: list[str] = []

    def send_message(self, message: str) -> str:
        """Echo the received message."""
        self.received_messages.append(message)
        return message


def test_chat(monkeypatch: MonkeyPatch) -> None:
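    # Arrange: a fake chat backend and an in-memory console that captures output.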
    chat_fake = ChatFake()
    output = StringIO()
    console = Console(file=output)

    def mock_get_chat(_: OpenAISettings) -> ChatProtocol:
        return chat_fake

    def mock_get_console() -> Console:
        return console
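
    # Simulate a user typing "Hello" and then quitting with "/q".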
    mock_read_user_input = MagicMock(side_effect=["Hello", "/q"])
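
    # Patch the names looked up in llm_chat.cli so the CLI uses the fakes above.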
    monkeypatch.setattr(llm_chat.cli, "get_chat", mock_get_chat)
    monkeypatch.setattr(llm_chat.cli, "get_console", mock_get_console)
    monkeypatch.setattr(llm_chat.cli, "read_user_input", mock_read_user_input)
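
    # Act and assert: the CLI exits cleanly and only "Hello" reached the chat.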
    result = runner.invoke(app)

    assert result.exit_code == 0
    assert chat_fake.received_messages == ["Hello"]