from enum import StrEnum, auto

from pydantic import BaseModel, ConfigDict

from llm_chat.settings import DEFAULT_TEMPERATURE, Model


class Role(StrEnum):
    """Role of a message author in the chat."""

    ASSISTANT = auto()
    SYSTEM = auto()
    USER = auto()
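    # With StrEnum, auto() assigns the lower-cased member name, so these
    # members are the strings "assistant", "system" and "user".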


class Message(BaseModel):
    """Message in the conversation."""

    role: Role
    content: str

    model_config: ConfigDict = ConfigDict(  # type: ignore[misc]
        frozen=True,
        use_enum_values=True,
    )
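
    # frozen=True makes Message instances immutable (and hashable);
    # use_enum_values=True stores the role field as its string value
    # (e.g. "user") rather than as a Role member.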


class Conversation(BaseModel):
    """Conversation in the chat."""

    messages: list[Message]
    model: Model
    temperature: float = DEFAULT_TEMPERATURE
    completion_tokens: int = 0
    prompt_tokens: int = 0
    cost: float = 0.0

    model_config: ConfigDict = ConfigDict(  # type: ignore[misc]
        frozen=False,
        use_enum_values=True,
    )
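
# Illustrative sketch (not part of the module): building a conversation and
# appending the assistant's reply. Conversation is not frozen, so its messages
# and token/cost counters can be updated in place. "Model.GPT_3_5_TURBO" is an
# assumed member of the Model enum in llm_chat.settings; substitute a real one.
#
#     conversation = Conversation(
#         messages=[
#             Message(role=Role.SYSTEM, content="You are a helpful assistant."),
#             Message(role=Role.USER, content="Hello!"),
#         ],
#         model=Model.GPT_3_5_TURBO,
#     )
#     conversation.messages.append(
#         Message(role=Role.ASSISTANT, content="Hi! How can I help?")
#     )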