Skip to content

Commit 9975633

Browse files
committed
Add response service tests
1 parent 53ac685 commit 9975633

File tree

1 file changed

+45
-0
lines changed

1 file changed

+45
-0
lines changed
Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
import pytest
2+
from unittest.mock import MagicMock, patch
3+
from agent import ResponseService
4+
5+
6+
@pytest.fixture
def fake_repository():
    """Repository stub whose similarity search always yields three canned chunks."""
    repo = MagicMock()
    # Each chunk only needs a .content attribute for the service under test.
    canned_chunks = [MagicMock(content=f"Chunk {i}") for i in (1, 2, 3)]
    repo.get_top_k_chunks_by_similarity.return_value = canned_chunks
    return repo
15+
16+
17+
@pytest.fixture
def service(fake_repository):
    """Build a ResponseService wired to the stubbed repository fixture."""
    svc = ResponseService(repository=fake_repository)
    return svc
20+
21+
22+
@patch("agent.services.response_service.create_llm_response", return_value="LLM Answer")
@patch("agent.services.response_service.create_prompt", return_value="Generated Prompt")
def test_ask_question(mock_create_prompt, mock_create_llm_response, service, capsys):
    """ask_question retrieves chunks, builds a prompt, queries the LLM, and prints the answer.

    Decorators apply bottom-up, so the first mock parameter is the
    create_prompt patch and the second is the create_llm_response patch.
    """
    question = "What is AI?"

    service.ask_question(question, k=3)

    # Retrieval is delegated to the repository with the raw question and k.
    service.repository.get_top_k_chunks_by_similarity.assert_called_once_with(
        question, 3
    )
    # Chunk contents are joined with blank lines before prompting.
    expected_context = "Chunk 1\n\nChunk 2\n\nChunk 3"
    mock_create_prompt.assert_called_once_with(expected_context, question)
    mock_create_llm_response.assert_called_once_with("Generated Prompt")
    # The LLM answer is printed to stdout.
    assert "LLM Answer" in capsys.readouterr().out
38+
39+
40+
def test_prepare_prompt_context(service):
    """The private context builder joins the stubbed chunk contents with blank lines."""
    # Access the name-mangled private method ResponseService.__prepare_prompt_context.
    build_context = getattr(service, "_ResponseService__prepare_prompt_context")

    result = build_context("test query", k=3)

    assert result == "Chunk 1\n\nChunk 2\n\nChunk 3"

0 commit comments

Comments
 (0)