From ff836e617c6c2a940facbf64e8627d0de1528072 Mon Sep 17 00:00:00 2001
From: Paul Harrison
Date: Fri, 23 Feb 2024 11:15:09 +0000
Subject: [PATCH] Get default chat parameters from settings file

In the current CLI implementation, default values for the model,
temperature, and history directory are provided as defaults to the
`typer` CLI interface. This bypasses the ability to set values via
environment variables and relies on `typer` rather than `pydantic` to
validate the history directory, which prevents the directory from being
created when it does not exist.

This commit sets the CLI defaults to `None` and instead relies on the
`OpenAISettings` class for default values and parameter validation.
---
 pyproject.toml      |  2 +-
 src/llm_chat/cli.py | 42 ++++++++++++++++--------------------------
 2 files changed, 17 insertions(+), 27 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 03769be..8897fe1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "llm-chat"
-version = "1.1.3"
+version = "1.1.4"
 description = "A general CLI interface for large language models."
 authors = ["Paul Harrison "]
 readme = "README.md"
diff --git a/src/llm_chat/cli.py b/src/llm_chat/cli.py
index 3f9662c..984eff9 100644
--- a/src/llm_chat/cli.py
+++ b/src/llm_chat/cli.py
@@ -8,13 +8,7 @@ from rich.markdown import Markdown
 
 from llm_chat.chat import ChatProtocol, get_chat, get_chat_class
 from llm_chat.models import Message, Role
-from llm_chat.settings import (
-    DEFAULT_HISTORY_DIR,
-    DEFAULT_MODEL,
-    DEFAULT_TEMPERATURE,
-    Model,
-    OpenAISettings,
-)
+from llm_chat.settings import Model, OpenAISettings
 
 app = typer.Typer()
 
@@ -106,15 +100,15 @@ def chat(
         ),
     ] = None,
     model: Annotated[
-        Model,
+        Optional[Model],
         typer.Option(..., "--model", "-m", help="Model to use.", show_choices=True),
-    ] = DEFAULT_MODEL,
+    ] = None,
     temperature: Annotated[
-        float,
+        Optional[float],
         typer.Option(
             ..., "--temperature", "-t", help="Model temperature (i.e. creativeness)."
         ),
-    ] = DEFAULT_TEMPERATURE,
+    ] = None,
     context: Annotated[
         list[Path],
         typer.Option(
@@ ... @@ def chat(
         ),
     ] = [],
     history_dir: Annotated[
-        Path,
+        Optional[Path],
         typer.Option(
             ...,
             "--history-dir",
             "-d",
             help="Path to the directory where conversation history will be saved.",
-            exists=True,
-            dir_okay=True,
-            file_okay=False,
         ),
-    ] = DEFAULT_HISTORY_DIR,
+    ] = None,
     name: Annotated[
         str,
         typer.Option(
@@ ... @@ def chat(
 ) -> None:
     """Start a chat session."""
     # TODO: Add option to provide context string as an argument.
+    args: dict[str, Any] = {}
     if api_key is not None:
-        settings = OpenAISettings(
-            api_key=api_key,
-            model=model,
-            temperature=temperature,
-            history_dir=history_dir,
-        )
-    else:
-        settings = OpenAISettings(
-            model=model, temperature=temperature, history_dir=history_dir
-        )
+        args |= {"api_key": api_key}
+    if model is not None:
+        args |= {"model": model}
+    if temperature is not None:
+        args |= {"temperature": temperature}
+    if history_dir is not None:
+        args |= {"history_dir": history_dir}
+    settings = OpenAISettings(**args)
     context_messages = [load_context(path) for path in context]
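
Note (illustrative only, not part of the patch): the sketch below shows the pattern this commit moves to, with a pydantic-settings class as the single source of defaults and validation while the CLI forwards only the values the user actually supplied. It assumes `OpenAISettings` is a `BaseSettings` subclass; the class name `ExampleSettings`, the `OPENAI_` environment prefix, the default field values, and the directory-creation hook are assumptions for illustration, not a copy of the real `llm_chat.settings` module.

"""Minimal sketch: settings own defaults/validation, CLI passes only explicit values."""
from pathlib import Path
from typing import Any, Optional

from pydantic_settings import BaseSettings, SettingsConfigDict


class ExampleSettings(BaseSettings):
    """Hypothetical stand-in for OpenAISettings; field defaults live here."""

    # Assumed env prefix: OPENAI_MODEL, OPENAI_TEMPERATURE, etc. override
    # these defaults whenever the corresponding CLI option is omitted.
    model_config = SettingsConfigDict(env_prefix="OPENAI_")

    api_key: str = ""
    model: str = "gpt-4"  # placeholder default
    temperature: float = 0.7  # placeholder default
    history_dir: Path = Path.home() / ".llm_chat" / "history"

    def model_post_init(self, __context: Any) -> None:
        # pydantic, not typer, now owns the history directory, so a missing
        # directory can be created on demand instead of being rejected by
        # typer's exists=True/dir_okay=True checks.
        self.history_dir.mkdir(parents=True, exist_ok=True)


def build_settings(
    api_key: Optional[str] = None,
    model: Optional[str] = None,
    temperature: Optional[float] = None,
    history_dir: Optional[Path] = None,
) -> ExampleSettings:
    """Mirror the CLI change: pass only non-None values to the settings class."""
    args: dict[str, Any] = {}
    if api_key is not None:
        args |= {"api_key": api_key}
    if model is not None:
        args |= {"model": model}
    if temperature is not None:
        args |= {"temperature": temperature}
    if history_dir is not None:
        args |= {"history_dir": history_dir}
    return ExampleSettings(**args)


if __name__ == "__main__":
    # With no arguments, every value comes from the environment or the
    # defaults above; an explicit argument always wins over both.
    print(build_settings(temperature=0.2))

Under the assumed `OPENAI_` prefix, something like `OPENAI_TEMPERATURE=0.2` in the environment is honoured again whenever `--temperature` is omitted, which is exactly what the old `DEFAULT_TEMPERATURE` typer default used to shadow.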