Bot create and load class methods
This commit is contained in:
parent
2868c78184
commit
7968fc0ccf
|
@ -764,4 +764,4 @@ files = [
|
||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "2.0"
|
lock-version = "2.0"
|
||||||
python-versions = ">=3.11,<3.12"
|
python-versions = ">=3.11,<3.12"
|
||||||
content-hash = "e8c5d78c5c95eaadb03e603c5b4ceada8aa27aaa049e6c0d72129c1f2dc53ed9"
|
content-hash = "8d76898eeb53fd3848f3be2f6aa1662517f9dbd80146db8dfd6f2932021ace48"
|
||||||
|
|
|
@ -0,0 +1,108 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import shutil
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from llm_chat.models import Message, Role
|
||||||
|
from llm_chat.utils import kebab_case
|
||||||
|
|
||||||
|
|
||||||
|
def _bot_id_from_name(name: str) -> str:
    """Create bot ID from name.

    The ID is the kebab-case form of the display name (see
    ``llm_chat.utils.kebab_case``) and is used as the bot's directory
    name and config-file stem.
    """
    return kebab_case(name)
|
||||||
|
|
||||||
|
|
||||||
|
class BotConfig(BaseModel):
    """Bot configuration class.

    Persisted as ``<bot_id>.json`` inside the bot's directory (written by
    ``Bot.create`` and read back by ``Bot.load``).
    """

    # Kebab-case identifier derived from ``name``; doubles as the directory name.
    bot_id: str
    # Human-readable display name as given by the user.
    name: str
    # File names (not paths) inside the bot's ``context/`` subdirectory.
    context_files: list[str]
|
||||||
|
|
||||||
|
|
||||||
|
class BotExists(Exception):
    """Raised when creating a bot whose ID already exists on disk."""
|
||||||
|
|
||||||
|
|
||||||
|
class BotDoesNotExists(Exception):
    """Raised when a requested bot does not exist.

    Note: the docstring previously read "Bot already exists error." — a
    copy-paste from ``BotExists``; this exception signals the opposite case.
    """
|
||||||
|
|
||||||
|
|
||||||
|
class Bot:
    """Custom bot interface.

    A bot is described by a :class:`BotConfig` plus a ``context/`` directory of
    text files; each context file is loaded as a SYSTEM message at construction
    time.
    """

    def __init__(self, config: BotConfig, bot_dir: Path) -> None:
        """Initialise the bot and load its context files.

        Args:
            config: The bot's configuration.
            bot_dir: The bot's own directory — the one containing the
                ``context/`` subdirectory (i.e. ``<base>/<bot_id>``), not the
                base directory of all bots.

        Raises:
            ValueError: If a configured context file is missing or not a file.
        """
        self.config = config
        self.context: list[Message] = []
        for context_file in config.context_files:
            path = bot_dir / "context" / context_file
            if not path.exists():
                raise ValueError(f"{path} does not exist.")
            if not path.is_file():
                raise ValueError(f"{path} is not a file")
            with path.open("r") as f:
                content = f.read()
            self.context.append(Message(role=Role.SYSTEM, content=content))

    @property
    def id(self) -> str:
        """Return the bot ID."""
        return self.config.bot_id

    @property
    def name(self) -> str:
        """Return the bot name."""
        return self.config.name

    @classmethod
    def create(
        cls, name: str, bot_dir: Path, context_files: list[Path] | None = None
    ) -> None:
        """Create a custom bot.

        This command creates the directory structure for the custom bot and copies
        the context files. The bot directory is stored within the base bot directory
        (e.g. `~/.llm_chat/bots/<bot_id>`), where `<bot_id>` is the kebab-case
        version of the name argument. The directory contains a settings file
        `<bot_id>.json` and a directory of context files.

        Args:
            name: Name of the custom bot.
            bot_dir: Path to where custom bot contexts are stored.
            context_files: Paths to context files. Defaults to no context files.

        Raises:
            BotExists: If a bot with the same ID already exists.
        """
        # Normalise here rather than using a mutable default argument, which
        # would be shared across calls.
        if context_files is None:
            context_files = []
        bot_id = _bot_id_from_name(name)
        path = bot_dir / bot_id
        if path.exists():
            raise BotExists(f"The bot {name} already exists.")
        (path / "context").mkdir(parents=True)

        config = BotConfig(
            bot_id=bot_id,
            name=name,
            context_files=[context.name for context in context_files],
        )

        with (path / f"{bot_id}.json").open("w") as f:
            f.write(config.model_dump_json() + "\n")

        for context in context_files:
            shutil.copy(context, path / "context" / context.name)

    @classmethod
    def load(cls, name: str, bot_dir: Path) -> Bot:
        """Load an existing bot.

        Args:
            name: Name of the custom bot.
            bot_dir: Base directory in which custom bots are stored.

        Returns:
            The loaded bot.

        Raises:
            BotDoesNotExists: If no bot with the derived ID exists.
        """
        bot_id = _bot_id_from_name(name)
        bot_path = bot_dir / bot_id
        if not bot_path.exists():
            raise BotDoesNotExists(f"Bot {name} does not exist.")

        with (bot_path / f"{bot_id}.json").open("r") as f:
            config = BotConfig(**json.load(f))

        # BUGFIX: pass the bot's own directory, not the base directory.
        # ``create`` copies context files to ``<bot_dir>/<bot_id>/context/``
        # and ``__init__`` resolves them as ``<arg>/context/<file>``, so the
        # original ``cls(config, bot_dir)`` failed for any bot with context.
        return cls(config, bot_path)
|
|
@ -0,0 +1,3 @@
|
||||||
|
from llm_chat.cli.main import app

# Re-export the Typer application as the package's public CLI entry point.
__all__ = ["app"]
|
|
@ -0,0 +1,52 @@
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Annotated, Any, Optional
|
||||||
|
|
||||||
|
import typer
|
||||||
|
|
||||||
|
from llm_chat.bot import Bot
|
||||||
|
from llm_chat.settings import OpenAISettings
|
||||||
|
|
||||||
|
app = typer.Typer()


@app.command("create")
def create(
    name: Annotated[
        str,
        typer.Argument(help="Name of bot."),
    ],
    base_dir: Annotated[
        Optional[Path],
        typer.Option(
            ...,
            "--base-dir",
            "-d",
            help=(
                "Path to the base directory in which conversation "
                "configuration and history will be saved."
            ),
        ),
    ] = None,
    context_files: Annotated[
        list[Path],
        typer.Option(
            ...,
            "--context",
            "-c",
            help=(
                "Path to a file containing context text. "
                "Can provide multiple times for multiple files."
            ),
            exists=True,
            file_okay=True,
            dir_okay=False,
            readable=True,
        ),
    ] = [],
) -> None:
    """Create a new bot."""
    # Only override the settings default when the user supplied --base-dir.
    overrides: dict[str, Any] = {} if base_dir is None else {"base_dir": base_dir}
    settings = OpenAISettings(**overrides)
    Bot.create(name, settings.bot_dir, context_files=context_files)
|
|
@ -7,10 +7,12 @@ from rich.console import Console
|
||||||
from rich.markdown import Markdown
|
from rich.markdown import Markdown
|
||||||
|
|
||||||
from llm_chat.chat import ChatProtocol, get_chat, get_chat_class
|
from llm_chat.chat import ChatProtocol, get_chat, get_chat_class
|
||||||
|
from llm_chat.cli import bot
|
||||||
from llm_chat.models import Message, Role
|
from llm_chat.models import Message, Role
|
||||||
from llm_chat.settings import Model, OpenAISettings
|
from llm_chat.settings import Model, OpenAISettings
|
||||||
|
|
||||||
app = typer.Typer()
|
app = typer.Typer()
|
||||||
|
app.add_typer(bot.app, name="bot", help="Manage custom bots.")
|
||||||
|
|
||||||
|
|
||||||
def prompt_continuation(width: int, *args: Any) -> str:
|
def prompt_continuation(width: int, *args: Any) -> str:
|
||||||
|
@ -117,7 +119,7 @@ def chat(
|
||||||
"-c",
|
"-c",
|
||||||
help=(
|
help=(
|
||||||
"Path to a file containing context text. "
|
"Path to a file containing context text. "
|
||||||
"Can provide multiple time for multiple files."
|
"Can provide multiple times for multiple files."
|
||||||
),
|
),
|
||||||
exists=True,
|
exists=True,
|
||||||
file_okay=True,
|
file_okay=True,
|
||||||
|
@ -157,7 +159,7 @@ def chat(
|
||||||
if temperature is not None:
|
if temperature is not None:
|
||||||
args |= {"temperature": temperature}
|
args |= {"temperature": temperature}
|
||||||
if base_dir is not None:
|
if base_dir is not None:
|
||||||
args |= {"history_dir": base_dir}
|
args |= {"base_dir": base_dir}
|
||||||
settings = OpenAISettings(**args)
|
settings = OpenAISettings(**args)
|
||||||
|
|
||||||
context_messages = [load_context(path) for path in context]
|
context_messages = [load_context(path) for path in context]
|
|
@ -15,7 +15,7 @@ class Model(StrEnum):
|
||||||
|
|
||||||
DEFAULT_MODEL = Model.GPT3
|
DEFAULT_MODEL = Model.GPT3
|
||||||
DEFAULT_TEMPERATURE = 0.7
|
DEFAULT_TEMPERATURE = 0.7
|
||||||
DEFAULT_BASE_DIR = Path.home() / ".llm_chat"
|
DEFAULT_BASE_DIR = Path.home() / ".llm-chat"
|
||||||
DEFAULT_BOT_PATH = "bots"
|
DEFAULT_BOT_PATH = "bots"
|
||||||
DEFAULT_HISTORY_PATH = "history"
|
DEFAULT_HISTORY_PATH = "history"
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,9 @@
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
def kebab_case(string: str) -> str:
    """Convert a string to kebab case.

    Existing hyphens are treated as word separators, and word boundaries are
    inserted before capitalised words and uppercase runs before lowercasing.
    """
    spaced = string.replace("-", " ")
    # Break before TitleCase words, then before any remaining uppercase runs.
    for pattern in (r"([A-Z][a-z]+)", r"([A-Z]+)"):
        spaced = re.sub(pattern, r" \1", spaced)
    return "-".join(spaced.split()).lower()
|
|
@ -9,9 +9,9 @@ from pytest import MonkeyPatch
|
||||||
from rich.console import Console
|
from rich.console import Console
|
||||||
from typer.testing import CliRunner
|
from typer.testing import CliRunner
|
||||||
|
|
||||||
import llm_chat
|
import llm_chat.cli
|
||||||
from llm_chat.chat import ChatProtocol
|
from llm_chat.chat import ChatProtocol
|
||||||
from llm_chat.cli import app
|
from llm_chat.cli.main import app
|
||||||
from llm_chat.models import Conversation, Message, Role
|
from llm_chat.models import Conversation, Message, Role
|
||||||
from llm_chat.settings import Model, OpenAISettings
|
from llm_chat.settings import Model, OpenAISettings
|
||||||
|
|
||||||
|
@ -77,9 +77,9 @@ def test_chat(monkeypatch: MonkeyPatch) -> None:
|
||||||
|
|
||||||
mock_read_user_input = MagicMock(side_effect=["Hello", "/q"])
|
mock_read_user_input = MagicMock(side_effect=["Hello", "/q"])
|
||||||
|
|
||||||
monkeypatch.setattr(llm_chat.cli, "get_chat", mock_get_chat)
|
monkeypatch.setattr(llm_chat.cli.main, "get_chat", mock_get_chat)
|
||||||
monkeypatch.setattr(llm_chat.cli, "get_console", mock_get_console)
|
monkeypatch.setattr(llm_chat.cli.main, "get_console", mock_get_console)
|
||||||
monkeypatch.setattr(llm_chat.cli, "read_user_input", mock_read_user_input)
|
monkeypatch.setattr(llm_chat.cli.main, "read_user_input", mock_read_user_input)
|
||||||
|
|
||||||
result = runner.invoke(app, ["chat"])
|
result = runner.invoke(app, ["chat"])
|
||||||
assert result.exit_code == 0
|
assert result.exit_code == 0
|
||||||
|
@ -107,9 +107,9 @@ def test_chat_with_context(
|
||||||
|
|
||||||
mock_read_user_input = MagicMock(side_effect=["Hello", "/q"])
|
mock_read_user_input = MagicMock(side_effect=["Hello", "/q"])
|
||||||
|
|
||||||
monkeypatch.setattr(llm_chat.cli, "get_chat", mock_get_chat)
|
monkeypatch.setattr(llm_chat.cli.main, "get_chat", mock_get_chat)
|
||||||
monkeypatch.setattr(llm_chat.cli, "get_console", mock_get_console)
|
monkeypatch.setattr(llm_chat.cli.main, "get_console", mock_get_console)
|
||||||
monkeypatch.setattr(llm_chat.cli, "read_user_input", mock_read_user_input)
|
monkeypatch.setattr(llm_chat.cli.main, "read_user_input", mock_read_user_input)
|
||||||
|
|
||||||
result = runner.invoke(app, ["chat", argument, str(tmp_file)])
|
result = runner.invoke(app, ["chat", argument, str(tmp_file)])
|
||||||
assert result.exit_code == 0
|
assert result.exit_code == 0
|
||||||
|
@ -139,9 +139,9 @@ def test_chat_with_name(
|
||||||
|
|
||||||
mock_read_user_input = MagicMock(side_effect=["Hello", "/q"])
|
mock_read_user_input = MagicMock(side_effect=["Hello", "/q"])
|
||||||
|
|
||||||
monkeypatch.setattr(llm_chat.cli, "get_chat", mock_get_chat)
|
monkeypatch.setattr(llm_chat.cli.main, "get_chat", mock_get_chat)
|
||||||
monkeypatch.setattr(llm_chat.cli, "get_console", mock_get_console)
|
monkeypatch.setattr(llm_chat.cli.main, "get_console", mock_get_console)
|
||||||
monkeypatch.setattr(llm_chat.cli, "read_user_input", mock_read_user_input)
|
monkeypatch.setattr(llm_chat.cli.main, "read_user_input", mock_read_user_input)
|
||||||
|
|
||||||
result = runner.invoke(app, ["chat", argument, name])
|
result = runner.invoke(app, ["chat", argument, name])
|
||||||
assert result.exit_code == 0
|
assert result.exit_code == 0
|
||||||
|
@ -179,9 +179,9 @@ def test_load(monkeypatch: MonkeyPatch, tmp_path: Path) -> None:
|
||||||
|
|
||||||
mock_read_user_input = MagicMock(side_effect=["Hello", "/q"])
|
mock_read_user_input = MagicMock(side_effect=["Hello", "/q"])
|
||||||
|
|
||||||
monkeypatch.setattr(llm_chat.cli, "get_chat_class", mock_get_chat)
|
monkeypatch.setattr(llm_chat.cli.main, "get_chat_class", mock_get_chat)
|
||||||
monkeypatch.setattr(llm_chat.cli, "get_console", mock_get_console)
|
monkeypatch.setattr(llm_chat.cli.main, "get_console", mock_get_console)
|
||||||
monkeypatch.setattr(llm_chat.cli, "read_user_input", mock_read_user_input)
|
monkeypatch.setattr(llm_chat.cli.main, "read_user_input", mock_read_user_input)
|
||||||
|
|
||||||
# Load the conversation from the file
|
# Load the conversation from the file
|
||||||
result = runner.invoke(app, ["load", str(file_path)])
|
result = runner.invoke(app, ["load", str(file_path)])
|
||||||
|
|
|
@ -0,0 +1,22 @@
|
||||||
|
import pytest


from llm_chat.utils import kebab_case


@pytest.mark.parametrize(
    "string,expected",
    [
        ("fooBar", "foo-bar"),
        ("FooBar", "foo-bar"),
        ("Foo Bar", "foo-bar"),
        ("1Foo2Bar3", "1-foo2-bar3"),
    ],
    ids=[
        "fooBar",
        "FooBar",
        "Foo Bar",
        "1Foo2Bar3",
    ],
)
def test_kebab_case(string: str, expected: str) -> None:
    """kebab_case normalises camelCase, PascalCase, and spaced names."""
    assert kebab_case(string) == expected
|
Loading…
Reference in New Issue