test: add and extend LLMAgent tests
Written with AI; still needs review. Ref: N25B-449
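For context while reviewing: every test in this diff receives `mock_httpx_client` and `mock_settings` pytest fixtures that are defined elsewhere in the suite and not shown here. The sketch below is only an assumption about their rough shape, inferred from the attributes the tests read (`agent_settings.bdi_core_name`, `.stream`); the real fixtures may differ.

    # conftest.py (hypothetical sketch -- the real fixtures live elsewhere in the repo)
    from types import SimpleNamespace
    from unittest.mock import MagicMock

    import pytest


    @pytest.fixture
    def mock_settings():
        # Expose only the attribute path the tests actually read.
        return SimpleNamespace(
            agent_settings=SimpleNamespace(bdi_core_name="bdi_core")
        )


    @pytest.fixture
    def mock_httpx_client():
        # The tests overwrite .stream themselves, so a bare MagicMock suffices;
        # the real fixture presumably wires this object in as the agent's httpx client.
        return MagicMock()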
@@ -58,17 +58,20 @@ async def test_llm_processing_success(mock_httpx_client, mock_settings):
        to="llm_agent",
        sender=mock_settings.agent_settings.bdi_core_name,
        body=prompt.model_dump_json(),
        thread="prompt_message",  # REQUIRED: thread must match handle_message logic
    )

    await agent.handle_message(msg)

    # Verification
    # "Hello world." constitutes one sentence/chunk based on punctuation split
    # The agent should call send once with the full sentence
    # The agent should call send once with the full sentence, PLUS once more for full reply
    assert agent.send.called
    args = agent.send.call_args_list[0][0][0]
    assert args.to == mock_settings.agent_settings.bdi_core_name
    assert "Hello world." in args.body

    # Check args. We expect at least one call sending "Hello world."
    calls = agent.send.call_args_list
    bodies = [c[0][0].body for c in calls]
    assert any("Hello world." in b for b in bodies)


@pytest.mark.asyncio
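A note on the mock plumbing used in the assertions above: `unittest.mock` records every invocation as a `call` object whose index `[0]` is the positional-argument tuple, so `c[0][0]` pulls out the first positional argument of each `send` call (the outgoing message). A standalone illustration:

    import asyncio
    from unittest.mock import AsyncMock

    send = AsyncMock()
    asyncio.run(send("outgoing-message", retries=1))

    call = send.call_args_list[0]
    assert call[0][0] == "outgoing-message"  # call[0] -> positional args tuple
    assert call[1] == {"retries": 1}         # call[1] -> keyword args dict
    # call.args / call.kwargs are the modern, equivalent accessors.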
@@ -80,18 +83,23 @@ async def test_llm_processing_errors(mock_httpx_client, mock_settings):
        to="llm",
        sender=mock_settings.agent_settings.bdi_core_name,
        body=prompt.model_dump_json(),
        thread="prompt_message",
    )

    # HTTP Error
    # HTTP Error: stream method RAISES exception immediately
    mock_httpx_client.stream = MagicMock(side_effect=httpx.HTTPError("Fail"))

    await agent.handle_message(msg)
    assert "LLM service unavailable." in agent.send.call_args[0][0].body

    # Check that error message was sent
    assert agent.send.called
    assert "LLM service unavailable." in agent.send.call_args_list[0][0][0].body

    # General Exception
    agent.send.reset_mock()
    mock_httpx_client.stream = MagicMock(side_effect=Exception("Boom"))
    await agent.handle_message(msg)
    assert "Error processing the request." in agent.send.call_args[0][0].body
    assert "Error processing the request." in agent.send.call_args_list[0][0][0].body


@pytest.mark.asyncio
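The error branches above rely on a standard `unittest.mock` behaviour: when `side_effect` is an exception class or instance, the mock raises it the moment it is called, so no streaming or HTTP work ever happens. A minimal, agent-independent illustration:

    from unittest.mock import MagicMock

    import httpx
    import pytest

    stream = MagicMock(side_effect=httpx.HTTPError("Fail"))

    with pytest.raises(httpx.HTTPError):
        stream("POST", "http://example.invalid")  # raises immediately; no request is made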
@@ -113,16 +121,19 @@ async def test_llm_json_error(mock_httpx_client, mock_settings):
    agent = LLMAgent("llm_agent")
    agent.send = AsyncMock()
    # Ensure logger is mocked
    agent.logger = MagicMock()

    with patch.object(agent.logger, "error") as log:
        prompt = LLMPromptMessage(text="Hi", norms=[], goals=[])
        msg = InternalMessage(
            to="llm",
            sender=mock_settings.agent_settings.bdi_core_name,
            body=prompt.model_dump_json(),
        )
        await agent.handle_message(msg)
        log.assert_called()  # Should log JSONDecodeError
    prompt = LLMPromptMessage(text="Hi", norms=[], goals=[])
    msg = InternalMessage(
        to="llm",
        sender=mock_settings.agent_settings.bdi_core_name,
        body=prompt.model_dump_json(),
        thread="prompt_message",
    )
    await agent.handle_message(msg)

    agent.logger.error.assert_called()  # Should log JSONDecodeError


def test_llm_instructions():
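The simplified assertion at the end of this hunk works because `agent.logger` is replaced with a `MagicMock()`: attribute access on a `MagicMock` auto-creates child mocks, so `agent.logger.error` is itself a mock and can be asserted on directly, without the `patch.object` wrapper used in the old version. For example:

    from unittest.mock import MagicMock

    logger = MagicMock()
    logger.error("could not parse body: %s", "JSONDecodeError")

    logger.error.assert_called()  # the auto-created child mock recorded the call
    logger.error.assert_called_once_with("could not parse body: %s", "JSONDecodeError")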
@@ -157,6 +168,7 @@ async def test_handle_message_validation_error_branch_no_send(mock_httpx_client,
        to="llm_agent",
        sender=mock_settings.agent_settings.bdi_core_name,
        body=invalid_json,
        thread="prompt_message",
    )

    await agent.handle_message(msg)
@@ -285,3 +297,28 @@ async def test_clear_history_command(mock_settings):
    )
    await agent.handle_message(msg)
    assert len(agent.history) == 0


@pytest.mark.asyncio
async def test_handle_assistant_and_user_messages(mock_settings):
    agent = LLMAgent("llm_agent")

    # Assistant message
    msg_ast = InternalMessage(
        to="llm_agent",
        sender=mock_settings.agent_settings.bdi_core_name,
        thread="assistant_message",
        body="I said this",
    )
    await agent.handle_message(msg_ast)
    assert agent.history[-1] == {"role": "assistant", "content": "I said this"}

    # User message
    msg_usr = InternalMessage(
        to="llm_agent",
        sender=mock_settings.agent_settings.bdi_core_name,
        thread="user_message",
        body="User said this",
    )
    await agent.handle_message(msg_usr)
    assert agent.history[-1] == {"role": "user", "content": "User said this"}
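The new test pins the exact role/content dict shape because that list is presumably what the agent eventually forwards to the model. As a hedged illustration only (LLMAgent's actual request construction is not part of this diff), a history in this shape slots directly into an OpenAI-style chat payload:

    # Hypothetical sketch; the agent's real request-building code is not shown here.
    history = [
        {"role": "assistant", "content": "I said this"},
        {"role": "user", "content": "User said this"},
    ]
    payload = {"model": "some-model", "messages": history}  # typical chat-completions payload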