Compare commits

2 Commits

Author SHA1 Message Date
Paul Harrison 76bee1aed9 Update default history directory 2024-02-23 11:25:27 +00:00
Paul Harrison ff836e617c Get default chat parameters from settings file
In the current CLI implementation, default values for the model,
temperature, and history directory were supplied directly to the
`typer` CLI interface. This bypassed the ability to set values via
environment variables and relied on `typer` rather than `pydantic` to
validate the history directory, which prevented the directory from
being created when it did not already exist.

This commit sets the default values in the CLI to `None` and thus relies
on the `OpenAISettings` class for default values and parameter
validation.
2024-02-23 11:15:09 +00:00
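
The approach described in this commit message leans on `pydantic` to supply
defaults, read environment variables, and validate (and create) the history
directory once the CLI stops passing its own defaults. A minimal sketch of
that pattern, assuming a pydantic-settings v2 style class; the env prefix,
field types, and validator below are illustrative, not the project's actual
code:

# Illustrative sketch only -- not the project's actual settings module.
from pathlib import Path

from pydantic import field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict

DEFAULT_MODEL = "gpt-3.5-turbo"
DEFAULT_TEMPERATURE = 0.7
DEFAULT_HISTORY_DIR = Path.home() / ".llm_chat" / "history"


class OpenAISettings(BaseSettings):
    # Each field can also be set through an environment variable,
    # e.g. OPENAI_TEMPERATURE=0.2 (prefix assumed here).
    model_config = SettingsConfigDict(env_prefix="OPENAI_")

    api_key: str = ""
    model: str = DEFAULT_MODEL
    temperature: float = DEFAULT_TEMPERATURE
    history_dir: Path = DEFAULT_HISTORY_DIR

    @field_validator("history_dir")
    @classmethod
    def _create_history_dir(cls, value: Path) -> Path:
        # Unlike typer's exists=True check, pydantic validation can create
        # the directory when it is missing instead of rejecting the path.
        value.mkdir(parents=True, exist_ok=True)
        return value
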
3 changed files with 18 additions and 28 deletions


@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "llm-chat"
-version = "1.1.3"
+version = "1.1.5"
 description = "A general CLI interface for large language models."
 authors = ["Paul Harrison <paul@harrison.sh>"]
 readme = "README.md"


@@ -8,13 +8,7 @@ from rich.markdown import Markdown
 from llm_chat.chat import ChatProtocol, get_chat, get_chat_class
 from llm_chat.models import Message, Role
-from llm_chat.settings import (
-    DEFAULT_HISTORY_DIR,
-    DEFAULT_MODEL,
-    DEFAULT_TEMPERATURE,
-    Model,
-    OpenAISettings,
-)
+from llm_chat.settings import Model, OpenAISettings

 app = typer.Typer()
@@ -106,15 +100,15 @@ def chat(
         ),
     ] = None,
     model: Annotated[
-        Model,
+        Optional[Model],
         typer.Option(..., "--model", "-m", help="Model to use.", show_choices=True),
-    ] = DEFAULT_MODEL,
+    ] = None,
     temperature: Annotated[
-        float,
+        Optional[float],
         typer.Option(
             ..., "--temperature", "-t", help="Model temperature (i.e. creativeness)."
         ),
-    ] = DEFAULT_TEMPERATURE,
+    ] = None,
     context: Annotated[
         list[Path],
         typer.Option(
@@ -132,17 +126,14 @@ def chat(
         ),
     ] = [],
     history_dir: Annotated[
-        Path,
+        Optional[Path],
         typer.Option(
             ...,
             "--history-dir",
             "-d",
             help="Path to the directory where conversation history will be saved.",
-            exists=True,
-            dir_okay=True,
-            file_okay=False,
         ),
-    ] = DEFAULT_HISTORY_DIR,
+    ] = None,
     name: Annotated[
         str,
         typer.Option(
@@ -155,17 +146,16 @@ def chat(
 ) -> None:
     """Start a chat session."""
     # TODO: Add option to provide context string as an argument.
+    args: dict[str, Any] = {}
     if api_key is not None:
-        settings = OpenAISettings(
-            api_key=api_key,
-            model=model,
-            temperature=temperature,
-            history_dir=history_dir,
-        )
-    else:
-        settings = OpenAISettings(
-            model=model, temperature=temperature, history_dir=history_dir
-        )
+        args |= {"api_key": api_key}
+    if model is not None:
+        args |= {"model": model}
+    if temperature is not None:
+        args |= {"temperature": temperature}
+    if history_dir is not None:
+        args |= {"history_dir": history_dir}
+    settings = OpenAISettings(**args)

     context_messages = [load_context(path) for path in context]
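
The net effect of the `args` dict above is that only options the user
explicitly passes reach `OpenAISettings`; anything left as `None` falls back
to the settings class, and pydantic-settings gives explicitly passed keyword
arguments precedence over environment variables, which in turn beat the field
defaults. A small sketch of that precedence, reusing the illustrative
settings class above rather than the project's real one:

import os

# Reuses the illustrative OpenAISettings sketched earlier.
os.environ["OPENAI_TEMPERATURE"] = "0.2"

print(OpenAISettings().temperature)                 # 0.2 -- env var beats the field default
print(OpenAISettings(temperature=0.9).temperature)  # 0.9 -- explicit kwarg beats the env var
print(OpenAISettings().model)                       # "gpt-3.5-turbo" -- falls back to the default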


@@ -15,7 +15,7 @@ class Model(StrEnum):
 DEFAULT_MODEL = Model.GPT3
 DEFAULT_TEMPERATURE = 0.7
-DEFAULT_HISTORY_DIR = Path.home() / ".llm_chat"
+DEFAULT_HISTORY_DIR = Path.home() / ".llm_chat" / "history"


 class OpenAISettings(BaseSettings):