"""Settings module for the LLM Chat application."""

from enum import StrEnum
from pathlib import Path
from pydantic_settings import BaseSettings, SettingsConfigDict
class Model(StrEnum):
"""Model to use for the LLM Chat application."""
GPT3 = "gpt-3.5-turbo"
GPT4 = "gpt-4"
# Fallback model used when no OPENAI_MODEL value is supplied via the
# environment or .env file (see OpenAISettings.model below).
DEFAULT_MODEL: Model = Model.GPT3
# Fallback sampling temperature when OPENAI_TEMPERATURE is not supplied.
DEFAULT_TEMPERATURE: float = 0.7
class OpenAISettings(BaseSettings):
    """Settings for the LLM Chat application.

    Values are read from environment variables with the ``OPENAI_`` prefix
    and/or a local ``.env`` file; the field defaults below apply when a
    value is supplied by neither source.
    """

    # OpenAI API key; defaults to "" — presumably "" means "not configured",
    # TODO(review): confirm callers validate this before making API calls.
    api_key: str = ""
    # Chat model to use; stored as its plain string value because of
    # use_enum_values=True in model_config below.
    model: Model = DEFAULT_MODEL
    # Sampling temperature for the LLM.
    temperature: float = DEFAULT_TEMPERATURE
    # Directory where chat history is kept. Path.cwd() is the idiomatic
    # equivalent of the original Path().absolute(); note it is evaluated
    # once at import time, relative to the process's working directory.
    history_dir: Path = Path.cwd() / ".history"

    model_config: SettingsConfigDict = SettingsConfigDict(  # type: ignore[misc]
        env_file=".env",
        env_file_encoding="utf-8",
        env_prefix="OPENAI_",
        frozen=True,  # instances are immutable after construction
        use_enum_values=True,  # store the enum's str value, not the member
    )