16 changes: 16 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,19 @@
## Unreleased

### Features

- Make LLM support optional and installable via `litecli[ai]`.

### Bug Fixes

- Avoid completion refresh crashes when no database is connected.

### Internal

- Clean up ty type-checking for optional sqlean/llm imports.
- Add an llm module alias for test patching.
- Avoid ty conflicts for optional sqlite/llm imports.

## 1.18.0

### Internal
1 change: 0 additions & 1 deletion litecli/__init__.py
@@ -1,4 +1,3 @@
# type: ignore
from __future__ import annotations

import importlib.metadata
20 changes: 15 additions & 5 deletions litecli/main.py
@@ -11,11 +11,6 @@
from collections import namedtuple
from datetime import datetime
from io import open

try:
from sqlean import OperationalError, sqlite_version
except ImportError:
from sqlite3 import OperationalError, sqlite_version
from time import time
from typing import Any, Generator, Iterable, cast

@@ -51,6 +46,21 @@
from .sqlcompleter import SQLCompleter
from .sqlexecute import SQLExecute


def _load_sqlite3() -> Any:
try:
import sqlean
except ImportError:
import sqlite3

return sqlite3
return sqlean


_sqlite3 = _load_sqlite3()
OperationalError = _sqlite3.OperationalError
sqlite_version = _sqlite3.sqlite_version

# Query tuples are used for maintaining history
Query = namedtuple("Query", ["query", "successful", "mutating"])

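
Both backends expose the same `OperationalError` and `sqlite_version` attributes, which is what the rebinding above relies on: `_load_sqlite3()` can hand back either module and the rest of main.py uses it unchanged. A standalone illustration using only the stdlib module (sqlean is assumed to mirror these names, exactly as this diff assumes):

```python
import sqlite3  # stand-in for whatever _load_sqlite3() resolves

# The two module-level names main.py re-exports exist on both backends.
OperationalError = sqlite3.OperationalError
print(sqlite3.sqlite_version)  # e.g. "3.45.1"

try:
    sqlite3.connect(":memory:").execute("SELECT * FROM missing_table")
except OperationalError as exc:
    print("caught:", exc)  # caught: no such table: missing_table
```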
64 changes: 58 additions & 6 deletions litecli/packages/special/llm.py
@@ -1,6 +1,7 @@
from __future__ import annotations

import contextlib
import importlib
import io
import logging
import os
Expand All @@ -13,20 +14,53 @@
from typing import Any

import click
import llm
from llm.cli import cli

from . import export
from .main import Verbosity, parse_special_command
from .types import DBCursor


def _load_llm_module() -> Any | None:
try:
return importlib.import_module("llm")
except ImportError:
return None


def _load_llm_cli_module() -> Any | None:
try:
return importlib.import_module("llm.cli")
except ImportError:
return None


llm_module = _load_llm_module()
llm_cli_module = _load_llm_cli_module()

# Alias for tests and patching.
llm = llm_module

LLM_IMPORTED = llm_module is not None

cli: click.Command | None
if llm_cli_module is not None:
llm_cli = getattr(llm_cli_module, "cli", None)
cli = llm_cli if isinstance(llm_cli, click.Command) else None
else:
cli = None

LLM_CLI_IMPORTED = cli is not None

log = logging.getLogger(__name__)

LLM_TEMPLATE_NAME = "litecli-llm-template"
LLM_CLI_COMMANDS: list[str] = list(cli.commands.keys())
LLM_CLI_COMMANDS: list[str] = list(cli.commands.keys()) if isinstance(cli, click.Group) else []
# Mapping of model_id to None used for completion tree leaves.
# the file name is llm.py and module name is llm, hence ty is complaining that get_models is missing.
MODELS: dict[str, None] = {x.model_id: None for x in llm.get_models()} # type: ignore[attr-defined]
if llm_module is not None:
get_models = getattr(llm_module, "get_models", None)
MODELS: dict[str, None] = {x.model_id: None for x in get_models()} if callable(get_models) else {}
else:
MODELS = {}


def run_external_cmd(
@@ -110,7 +144,7 @@ def build_command_tree(cmd: click.Command) -> dict[str, Any] | None:


# Generate the tree
COMMAND_TREE: dict[str, Any] | None = build_command_tree(cli)
COMMAND_TREE: dict[str, Any] | None = build_command_tree(cli) if cli is not None else {}


def get_completions(tokens: list[str], tree: dict[str, Any] | None = COMMAND_TREE) -> list[str]:
Expand All @@ -123,6 +157,8 @@ def get_completions(tokens: list[str], tree: dict[str, Any] | None = COMMAND_TRE
Returns:
list[str]: List of possible completions.
"""
if not LLM_CLI_IMPORTED:
return []
for token in tokens:
if token.startswith("-"):
# Skip options (flags)
@@ -171,6 +207,18 @@ def __init__(self, results: Any | None = None) -> None:
# https://llm.datasette.io/en/stable/plugins/directory.html
"""

NEED_DEPENDENCIES = """
To enable LLM features you need to install litecli with AI support:

pip install 'litecli[ai]'

or install LLM libraries separately

pip install llm

This is required to use the \\llm command.
"""

_SQL_CODE_FENCE = r"```sql\n(.*?)\n```"
PROMPT = """
You are a helpful assistant who is a SQLite expert. You are embedded in a SQLite
Expand Down Expand Up @@ -230,6 +278,10 @@ def handle_llm(text: str, cur: DBCursor) -> tuple[str, str | None, float]:
is_verbose = mode is Verbosity.VERBOSE
is_succinct = mode is Verbosity.SUCCINCT

if not LLM_IMPORTED:
output = [(None, None, None, NEED_DEPENDENCIES)]
raise FinishIteration(output)

if not arg.strip(): # No question provided. Print usage and bail.
output = [(None, None, None, USAGE)]
raise FinishIteration(output)
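
The `getattr` plus `isinstance` pair above is what keeps `cli` typed as `click.Command | None` for ty even though `llm.cli` may not be importable at all; everything downstream branches on that narrowed value instead of importing again. The same shape, condensed into a self-contained sketch (only `click` is needed to run it; the probed module name matches this PR, everything else is illustrative):

```python
from __future__ import annotations

import importlib
from typing import Any

import click


def _load(name: str) -> Any | None:
    """Import an optional module, or return None when it is absent."""
    try:
        return importlib.import_module(name)
    except ImportError:
        return None


llm_cli_module = _load("llm.cli")

cli: click.Command | None
if llm_cli_module is not None:
    candidate = getattr(llm_cli_module, "cli", None)
    # isinstance narrows Any to click.Command, so the attribute access below type-checks.
    cli = candidate if isinstance(candidate, click.Command) else None
else:
    cli = None

# Safe whether or not the optional dependency is installed.
print(list(cli.commands.keys()) if isinstance(cli, click.Group) else [])
```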
29 changes: 21 additions & 8 deletions litecli/packages/special/main.py
@@ -8,6 +8,13 @@

log = logging.getLogger(__name__)

try:
import llm # noqa: F401

LLM_IMPORTED = True
except ImportError:
LLM_IMPORTED = False

NO_QUERY = 0
PARSED_QUERY = 1
RAW_QUERY = 2
@@ -176,13 +183,19 @@ def quit(*_args: Any) -> None:
arg_type=NO_QUERY,
case_sensitive=True,
)
@special_command(
"\\llm",
"\\ai",
"Use LLM to construct a SQL query.",
arg_type=NO_QUERY,
case_sensitive=False,
aliases=(".ai", ".llm"),
)
def stub() -> None:
raise NotImplementedError


if LLM_IMPORTED:

Contributor:
If the user never installed llm, will \llm invocation show the message, NEED_DEPENDENCIES?

Member Author:
Yes. This is what they see:

/Users/amjith/History> \llm

To enable LLM features you need to install litecli with AI support:

    pip install 'litecli[ai]'

or install LLM libraries separately

   pip install llm

This is required to use the \llm command.

Time: 0.000s
/Users/amjith/History>


@special_command(
"\\llm",
"\\ai",
"Use LLM to construct a SQL query.",
arg_type=NO_QUERY,
case_sensitive=False,
aliases=(".ai", ".llm"),
)
def llm_stub() -> None:
raise NotImplementedError
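
Guarding the decorator application behind `if LLM_IMPORTED:` means the `\llm`/`\ai` stub is simply never registered on installs without the extra, so it stays out of the help listing, while the review thread above confirms that typing `\llm` still surfaces the NEED_DEPENDENCIES hint. A toy version of that conditional-registration pattern (the registry and decorator below are illustrative stand-ins, not litecli's `special_command`):

```python
from typing import Any, Callable, Dict

COMMANDS: Dict[str, Callable[..., Any]] = {}


def register(name: str, *aliases: str) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """Toy stand-in for a special-command registration decorator."""
    def wrapper(func: Callable[..., Any]) -> Callable[..., Any]:
        for key in (name, *aliases):
            COMMANDS[key] = func
        return func
    return wrapper


try:
    import llm  # noqa: F401
    LLM_IMPORTED = True
except ImportError:
    LLM_IMPORTED = False

if LLM_IMPORTED:
    @register("\\llm", "\\ai", ".ai", ".llm")
    def llm_stub() -> None:
        raise NotImplementedError

print(sorted(COMMANDS))  # empty when the llm package is absent
```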
30 changes: 17 additions & 13 deletions litecli/sqlexecute.py
@@ -1,25 +1,26 @@
from __future__ import annotations

import logging
import os.path
from contextlib import closing
from typing import Any, Generator, Iterable
from typing import Any, Generator, Iterable, cast
from urllib.parse import urlparse

import sqlparse

try:
import sqlean as sqlite3
from sqlean import OperationalError
import sqlean as _sqlite3

sqlite3.extensions.enable_all()
_sqlite3.extensions.enable_all()
except ImportError:
import sqlite3
from sqlite3 import OperationalError
import os.path
from urllib.parse import urlparse

import sqlparse
import sqlite3 as _sqlite3

from litecli.packages import special
from litecli.packages.special.utils import check_if_sqlitedotcommand

sqlite3 = cast(Any, _sqlite3)
OperationalError = sqlite3.OperationalError

_logger = logging.getLogger(__name__)

# FIELD_TYPES = decoders.copy()
@@ -179,7 +180,8 @@ def get_result(self, cursor: Any) -> tuple[str | None, list | None, list | None,

def tables(self) -> Generator[tuple[str], None, None]:
"""Yields table names"""
assert self.conn is not None
if not self.conn:
return
with closing(self.conn.cursor()) as cur:
_logger.debug("Tables Query. sql: %r", self.tables_query)
cur.execute(self.tables_query)
@@ -188,7 +190,8 @@ def table_columns(self) -> Generator[tuple[str, str], None, None]:

def table_columns(self) -> Generator[tuple[str, str], None, None]:
"""Yields column names"""
assert self.conn is not None
if not self.conn:
return
with closing(self.conn.cursor()) as cur:
_logger.debug("Columns Query. sql: %r", self.table_columns_query)
cur.execute(self.table_columns_query)
@@ -206,7 +209,8 @@ def databases(self) -> Generator[str, None, None]:

def functions(self) -> Iterable[tuple]:
"""Yields tuples of (schema_name, function_name)"""
assert self.conn is not None
if not self.conn:
return
with closing(self.conn.cursor()) as cur:
_logger.debug("Functions Query. sql: %r", self.functions_query)
cur.execute(self.functions_query % self.dbname)
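
Replacing `assert self.conn is not None` with an early `return` turns each of these generators into an empty iterator when no database is attached, which is the fix the changelog describes as "Avoid completion refresh crashes when no database is connected." A standalone sketch of the behavior (a toy class, not litecli's `SQLExecute`):

```python
import sqlite3
from contextlib import closing
from typing import Generator, Optional


class Executor:
    """Toy stand-in for SQLExecute illustrating the early-return pattern."""

    tables_query = "SELECT name FROM sqlite_master WHERE type = 'table' ORDER BY name"

    def __init__(self, conn: Optional[sqlite3.Connection] = None) -> None:
        self.conn = conn

    def tables(self) -> Generator[tuple, None, None]:
        # No connection: yield nothing instead of failing an assert.
        if not self.conn:
            return
        with closing(self.conn.cursor()) as cur:
            cur.execute(self.tables_query)
            yield from cur


print(list(Executor().tables()))      # [] -- completer sees "no tables", no crash
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t (id INTEGER)")
print(list(Executor(conn).tables()))  # [('t',)]
```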
15 changes: 9 additions & 6 deletions pyproject.toml
@@ -13,10 +13,7 @@ dependencies = [
"configobj>=5.0.5",
"prompt-toolkit>=3.0.3,<4.0.0",
"pygments>=1.6",
"sqlparse>=0.4.4",
"setuptools", # Required by llm commands to install models
"pip",
"llm>=0.25.0"
"sqlparse>=0.4.4"
]

[build-system]
@@ -33,7 +30,11 @@ build-backend = "setuptools.build_meta"
litecli = "litecli.main:cli"

[project.optional-dependencies]
ai = ["llm"]
ai = [
"llm>=0.25.0",
"setuptools", # Required by llm commands to install models
"pip",
]
sqlean = ["sqlean-py>=3.47.0",
"sqlean-stubs>=0.0.3"]

@@ -45,7 +46,9 @@ dev = [
"pytest-cov>=4.1.0",
"tox>=4.8.0",
"pdbpp>=0.10.3",
"llm>=0.19.0",
"llm>=0.25.0",
"setuptools",
"pip",
"ty>=0.0.4"
]

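
With `llm`, `setuptools`, and `pip` moved out of the base dependencies into the `ai` extra, a bare `pip install litecli` no longer pulls them in, and code has to detect their presence at runtime. The PR does this with guarded imports; the snippet below is a generic equivalent using `importlib.util.find_spec`, not code from this PR:

```python
import importlib.util

HAS_LLM = importlib.util.find_spec("llm") is not None

if not HAS_LLM:
    # Mirrors the NEED_DEPENDENCIES hint shown to users of the \llm command.
    print("LLM features disabled; install with: pip install 'litecli[ai]'")
```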
7 changes: 7 additions & 0 deletions tests/test_llm_special.py
@@ -2,9 +2,16 @@

import pytest

import litecli.packages.special.llm as llm_module
from litecli.packages.special.llm import USAGE, FinishIteration, handle_llm


@pytest.fixture(autouse=True)
def enable_llm(monkeypatch):
monkeypatch.setattr(llm_module, "LLM_IMPORTED", True)
monkeypatch.setattr(llm_module, "LLM_CLI_COMMANDS", ["models"])


Contributor:
Should we consider testing the missing llm module?

@patch("litecli.packages.special.llm.llm")
def test_llm_command_without_args(mock_llm, executor):
r"""
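
On the reviewer's point above about covering the missing-llm path: a companion test could flip the flag back off and check that `handle_llm` bails out the way the USAGE path does. This is a sketch building on the fixtures in this file; it assumes the guard fires before the cursor is touched (so `None` stands in for it), and any payload assertion would depend on how `FinishIteration` exposes its results:

```python
import pytest

import litecli.packages.special.llm as llm_module
from litecli.packages.special.llm import FinishIteration, handle_llm


def test_llm_command_without_llm_installed(monkeypatch):
    r"""\llm should short-circuit with the install hint when llm is absent."""
    # Override the autouse enable_llm fixture for this one test.
    monkeypatch.setattr(llm_module, "LLM_IMPORTED", False)

    with pytest.raises(FinishIteration):
        # The guard raises before the cursor is used, so None is enough here.
        handle_llm(r"\llm show me the tables", None)
    # If FinishIteration exposes its results, the NEED_DEPENDENCIES text
    # could additionally be asserted on the captured exception.
```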
2 changes: 1 addition & 1 deletion tests/test_main.py
@@ -243,7 +243,7 @@ def stub_terminal_size():
shutil.get_terminal_size = stub_terminal_size # type: ignore[assignment]
lc = LiteCli()
assert isinstance(lc.get_reserved_space(), int)
shutil.get_terminal_size = old_func # type: ignore[assignment]
shutil.get_terminal_size = old_func


@dbtest
19 changes: 15 additions & 4 deletions tests/test_sqlexecute.py
@@ -1,15 +1,26 @@
# coding=UTF-8

import os
from typing import Any

import pytest

from .utils import assert_result_equal, dbtest, is_expanded_output, run, set_expanded_output

try:
from sqlean import OperationalError, ProgrammingError
except ImportError:
from sqlite3 import OperationalError, ProgrammingError

def _load_sqlite3() -> Any:
try:
import sqlean
except ImportError:
import sqlite3

return sqlite3
return sqlean


_sqlite3 = _load_sqlite3()
OperationalError = _sqlite3.OperationalError
ProgrammingError = _sqlite3.ProgrammingError


@dbtest
Expand Down