# llm-chat/src/llm_chat/settings.py
from enum import StrEnum
from pydantic_settings import BaseSettings, SettingsConfigDict
class Model(StrEnum):
"""Model to use for the LLM Chat application."""
GPT3 = "gpt-3.5-turbo"
GPT4 = "gpt-4"
class OpenAISettings(BaseSettings):
    """Settings for the LLM Chat application.

    Values are loaded from the environment (and an optional ``.env`` file),
    with each field mapped from an ``OPENAI_``-prefixed variable, e.g.
    ``OPENAI_API_KEY`` -> ``api_key``.
    """

    # OpenAI API key; empty default so the app can start without credentials
    # and fail later at the API call rather than at import time.
    api_key: str = ""
    # Chat model to use; stored as its plain string value (use_enum_values).
    model: Model = Model.GPT3
    # Sampling temperature passed to the chat completion request.
    temperature: float = 0.7

    # Leave `model_config` unannotated: pydantic treats this name specially
    # and infers the type itself, so no `type: ignore[misc]` is needed.
    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        env_prefix="OPENAI_",
        frozen=True,  # settings are immutable after construction
        use_enum_values=True,
    )