Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/pylint.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
name: Pylint

on: [ push ]
on: [ pull_request ]

jobs:
build:
Expand Down
25 changes: 18 additions & 7 deletions tests/test_local_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,17 +2,28 @@


class DummyModel:
    """Simple callable model stub for local LLM tests.

    Stands in for the real llama-style model object assigned to
    ``local_llm.LLM_MODEL``: calling it returns a fixed completion payload
    and records the keyword arguments it was called with, so tests can
    assert on what the code under test passed through (e.g. ``max_tokens``).
    """

    def __init__(self):
        # Keyword arguments from the most recent call; starts empty so a
        # test can also assert the model was never invoked.
        self.last_kwargs = {}

    def __call__(self, *_args, **kwargs):
        # Capture the call's kwargs for later inspection, then return a
        # minimal completion dict matching the shape ask_local_llm expects.
        self.last_kwargs = kwargs
        return {"choices": [{"text": "ok"}]}


def test_ask_local_llm_empty_prompt(monkeypatch):
    """A whitespace-only prompt returns the empty-prompt error string.

    The model stub must never matter here: ask_local_llm should reject the
    prompt before invoking the model at all.
    """
    dummy_model = DummyModel()
    monkeypatch.setattr(local_llm, "LLM_MODEL", dummy_model)
    result = local_llm.ask_local_llm(" ")
    assert result == "[Local LLM error: Empty prompt]"


def test_ask_local_llm_normalizes_max_tokens(monkeypatch):
    """An out-of-range max_tokens is normalized before reaching the model.

    Passing ``max_tokens=-1`` must be replaced with the configured default
    ``local_llm.LOCAL_LLM_MAX_TOKENS``; the stub records the kwargs it
    actually received so we can verify the normalized value.
    """
    dummy_model = DummyModel()
    monkeypatch.setattr(local_llm, "LLM_MODEL", dummy_model)

    result = local_llm.ask_local_llm("build plan", max_tokens=-1)

    assert result == "ok"
    assert dummy_model.last_kwargs["max_tokens"] == local_llm.LOCAL_LLM_MAX_TOKENS
Loading