From 0ad1634cf6f83c999a7d22df2484b421daf01c1e Mon Sep 17 00:00:00 2001
From: Paul Harrison
Date: Tue, 22 Aug 2023 17:08:41 +0100
Subject: [PATCH] Include one or more context files in chat session

---
 pyproject.toml       |  2 +-
 src/llm_chat/chat.py |  4 ++--
 src/llm_chat/cli.py  | 34 +++++++++++++++++++++++++++++++++-
 3 files changed, 36 insertions(+), 4 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 2849184..92ad0c8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "llm-chat"
-version = "0.1.0"
+version = "0.2.0"
 description = "A general CLI interface for large language models."
 authors = ["Paul Harrison "]
 readme = "README.md"
diff --git a/src/llm_chat/chat.py b/src/llm_chat/chat.py
index 35cf640..1c65b10 100644
--- a/src/llm_chat/chat.py
+++ b/src/llm_chat/chat.py
@@ -77,6 +77,6 @@ class Chat:
         return message
 
 
-def get_chat(settings: OpenAISettings | None = None) -> ChatProtocol:
+def get_chat(settings: OpenAISettings | None = None, context: list[Message] | None = None) -> ChatProtocol:
     """Get a chat object."""
-    return Chat(settings=settings)
+    return Chat(settings=settings, context=context if context is not None else [])
diff --git a/src/llm_chat/cli.py b/src/llm_chat/cli.py
index 7d8cff1..9487113 100644
--- a/src/llm_chat/cli.py
+++ b/src/llm_chat/cli.py
@@ -1,3 +1,4 @@
+from pathlib import Path
 from typing import Annotated, Any, Optional
 
 import typer
@@ -6,6 +7,7 @@ from rich.console import Console
 from rich.markdown import Markdown
 
 from llm_chat.chat import get_chat
+from llm_chat.models import Message, Role
 from llm_chat.settings import DEFAULT_MODEL, DEFAULT_TEMPERATURE, Model, OpenAISettings
 
 app = typer.Typer()
@@ -39,6 +41,20 @@ def read_user_input(session: PromptSession[Any]) -> str:
     return prompt
 
 
+def load_context(path: Path) -> Message:
+    """Load context text from file."""
+    if not path.exists():
+        raise typer.BadParameter(f"File {path} does not exist.")
+
+    if not path.is_file():
+        raise typer.BadParameter(f"Path {path} is not a file.")
+
+    with path.open() as f:
+        content = f.read()
+
+    return Message(role=Role.SYSTEM, content=content)
+
+
 @app.command()
 def chat(
     api_key: Annotated[
@@ -63,6 +79,19 @@ def chat(
         ..., "--temperature", "-t", help="Model temperature (i.e. creativeness)."
         ),
     ] = DEFAULT_TEMPERATURE,
+    context: Annotated[
+        Optional[list[Path]],
+        typer.Option(
+            ...,
+            "--context",
+            "-c",
+            help="Path to a file containing context text.",
+            exists=True,
+            file_okay=True,
+            dir_okay=False,
+            readable=True,
+        ),
+    ] = None,
 ) -> None:
     """Start a chat session."""
     # TODO: Add option to load context from file.
@@ -71,8 +100,11 @@ def chat(
         settings = OpenAISettings(api_key=api_key, model=model, temperature=temperature)
     else:
         settings = OpenAISettings(model=model, temperature=temperature)
+
+    context_messages = [load_context(path) for path in context or []]
 
-    current_chat = get_chat(settings)
+    current_chat = get_chat(settings=settings, context=context_messages)
+    # current_chat = get_chat(settings=settings)
 
     console = get_console()
     session = get_session()