llm-chat/src/llm_chat/settings.py

from enum import StrEnum
from pathlib import Path

from pydantic import field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict


class Model(StrEnum):
    """Model to use for the LLM Chat application."""

    GPT3 = "gpt-3.5-turbo"
    GPT4 = "gpt-4"
    GPT4_TURBO = "gpt-4-turbo-preview"


DEFAULT_MODEL = Model.GPT3
DEFAULT_TEMPERATURE = 0.7
DEFAULT_HISTORY_DIR = Path.home() / ".llm_chat" / "history"


class OpenAISettings(BaseSettings):
    """Settings for the LLM Chat application."""

    api_key: str = ""
    model: Model = DEFAULT_MODEL
    temperature: float = DEFAULT_TEMPERATURE
    history_dir: Path = DEFAULT_HISTORY_DIR

    model_config: SettingsConfigDict = SettingsConfigDict(  # type: ignore[misc]
        env_file=".env",
        env_file_encoding="utf-8",
        env_prefix="OPENAI_",
        frozen=True,
        use_enum_values=True,
    )

    @field_validator("history_dir")
    def history_dir_must_exist(cls, history_dir: Path) -> Path:
        """Ensure that the history directory exists."""
        if not history_dir.exists():
            history_dir.mkdir(parents=True)
        return history_dir
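

if __name__ == "__main__":
    # Usage sketch, not part of the application code: with pydantic-settings,
    # values are read from OPENAI_-prefixed environment variables
    # (OPENAI_API_KEY, OPENAI_MODEL, OPENAI_TEMPERATURE, OPENAI_HISTORY_DIR)
    # or from a local .env file, falling back to the defaults above.
    # frozen=True makes the loaded settings immutable; use_enum_values=True
    # stores the model as its plain string value.
    settings = OpenAISettings()
    print(settings.model, settings.temperature, settings.history_dir)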