from unittest.mock import patch

import pytest

from llm_chat.chat import Chat
from llm_chat.settings import Model, OpenAISettings


def _fake_completion(completion_tokens: int, prompt_tokens: int) -> dict:
    """Build a minimal OpenAI-style chat completion payload for mocking."""
    return {
        "choices": [{"message": {"content": "Hello!"}}],
        "usage": {
            "completion_tokens": completion_tokens,
            "prompt_tokens": prompt_tokens,
        },
    }


@patch("llm_chat.chat.Chat._make_request")
def test_send_message(mock_make_request) -> None:
    """send_message returns the assistant's reply content as a plain string."""
    mock_make_request.return_value = _fake_completion(1, 1)

    chat = Chat()
    reply = chat.send_message("Hello")

    assert isinstance(reply, str)
    assert reply == "Hello!"


@pytest.mark.parametrize("model,cost", [(Model.GPT3, 0.000043), (Model.GPT4, 0.00105)])
@patch("llm_chat.chat.Chat._make_request")
def test_calculate_cost(mock_make_request, model: Model, cost: float) -> None:
    """The accumulated cost matches the per-model pricing for fixed token usage."""
    mock_make_request.return_value = _fake_completion(10, 15)

    chat = Chat(settings=OpenAISettings(model=model))
    _ = chat.send_message("Hello")

    assert chat.cost == cost