Get default chat parameters from settings file

In the current CLI implementation, the defaults for the model,
temperature, and history directory were supplied directly to the
`typer` CLI interface. This bypassed the ability to set values via
environment variables and left validation of the history directory to
`typer` rather than `pydantic`, which meant the directory could not be
created when it did not already exist.

This commit sets the CLI defaults to `None`, relying instead on the
`OpenAISettings` class for default values and parameter validation.
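
For context, a minimal sketch of what the `OpenAISettings` side of this might look like, assuming pydantic v2 with pydantic-settings; the field defaults, the `OPENAI_` environment-variable prefix, and the directory-creating validator are illustrative assumptions rather than the project's actual settings module:

```python
from pathlib import Path

from pydantic import field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict


class OpenAISettings(BaseSettings):
    """Sketch: the settings class owns defaults, env overrides, and validation."""

    model_config = SettingsConfigDict(env_prefix="OPENAI_")  # assumed prefix

    api_key: str = ""
    model: str = "gpt-3.5-turbo"   # the real module exposes a Model enum
    temperature: float = 0.7       # assumed default
    history_dir: Path = Path.home() / ".llm-chat"  # assumed default

    @field_validator("history_dir")
    @classmethod
    def create_history_dir(cls, value: Path) -> Path:
        # Unlike typer's exists=True check, the settings class can create
        # the directory when it is missing instead of rejecting it.
        value.mkdir(parents=True, exist_ok=True)
        return value
```

With the CLI defaults set to `None`, only options the user actually passes reach this class; everything else falls back to environment variables and then to these defaults.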
Paul Harrison 2024-02-23 11:15:09 +00:00
parent b859c8bb95
commit ff836e617c
2 changed files with 17 additions and 27 deletions

View File

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "llm-chat"
-version = "1.1.3"
+version = "1.1.4"
 description = "A general CLI interface for large language models."
 authors = ["Paul Harrison <paul@harrison.sh>"]
 readme = "README.md"

View File

@@ -8,13 +8,7 @@ from rich.markdown import Markdown
 from llm_chat.chat import ChatProtocol, get_chat, get_chat_class
 from llm_chat.models import Message, Role
-from llm_chat.settings import (
-    DEFAULT_HISTORY_DIR,
-    DEFAULT_MODEL,
-    DEFAULT_TEMPERATURE,
-    Model,
-    OpenAISettings,
-)
+from llm_chat.settings import Model, OpenAISettings
 app = typer.Typer()
@@ -106,15 +100,15 @@ def chat(
         ),
     ] = None,
     model: Annotated[
-        Model,
+        Optional[Model],
         typer.Option(..., "--model", "-m", help="Model to use.", show_choices=True),
-    ] = DEFAULT_MODEL,
+    ] = None,
     temperature: Annotated[
-        float,
+        Optional[float],
         typer.Option(
             ..., "--temperature", "-t", help="Model temperature (i.e. creativeness)."
         ),
-    ] = DEFAULT_TEMPERATURE,
+    ] = None,
     context: Annotated[
         list[Path],
         typer.Option(
@@ -132,17 +126,14 @@ def chat(
         ),
     ] = [],
     history_dir: Annotated[
-        Path,
+        Optional[Path],
         typer.Option(
             ...,
             "--history-dir",
             "-d",
             help="Path to the directory where conversation history will be saved.",
-            exists=True,
-            dir_okay=True,
-            file_okay=False,
         ),
-    ] = DEFAULT_HISTORY_DIR,
+    ] = None,
     name: Annotated[
         str,
         typer.Option(
@@ -155,17 +146,16 @@ def chat(
 ) -> None:
     """Start a chat session."""
     # TODO: Add option to provide context string as an argument.
+    args: dict[str, Any] = {}
     if api_key is not None:
-        settings = OpenAISettings(
-            api_key=api_key,
-            model=model,
-            temperature=temperature,
-            history_dir=history_dir,
-        )
-    else:
-        settings = OpenAISettings(
-            model=model, temperature=temperature, history_dir=history_dir
-        )
+        args |= {"api_key": api_key}
+    if model is not None:
+        args |= {"model": model}
+    if temperature is not None:
+        args |= {"temperature": temperature}
+    if history_dir is not None:
+        args |= {"history_dir": history_dir}
+    settings = OpenAISettings(**args)
     context_messages = [load_context(path) for path in context]
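
Assuming a settings class along the lines of the sketch above, the merge logic at the end of the diff behaves as follows: options left at `None` never reach `OpenAISettings`, so environment variables and class defaults apply, while explicitly passed options take precedence.

```python
import os

# No CLI overrides: values come from the environment or the class defaults.
os.environ["OPENAI_TEMPERATURE"] = "0.2"   # assumed variable name (env_prefix="OPENAI_")
settings = OpenAISettings()                # temperature == 0.2

# A non-None CLI option is merged into `args` and wins over the environment.
settings = OpenAISettings(**{"temperature": 0.9})  # temperature == 0.9
```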