test: increased cb test coverage
committed by Luijkx,S.O.H. (Storm)
parent de2e56ffce
commit 7f7c658901
@@ -1,4 +1,6 @@
+import asyncio
 import json
+import time
 from unittest.mock import AsyncMock, MagicMock, mock_open, patch
 
 import agentspeak
@@ -77,11 +79,6 @@ async def test_incorrect_belief_collector_message(agent, mock_settings):
     agent.bdi_agent.call.assert_not_called()  # did not set belief
 
 
-@pytest.mark.asyncio
-async def test():
-    pass
-
-
 @pytest.mark.asyncio
 async def test_handle_llm_response(agent):
     """Test that LLM responses are forwarded to the Robot Speech Agent"""
@@ -124,3 +121,148 @@ async def test_custom_actions(agent)
     next(gen)  # Execute
 
     agent._send_to_llm.assert_called_with("Hello", "Norm", "Goal")
+
+
+def test_add_belief_sets_event(agent):
+    """Test that a belief triggers wake event and call()"""
+    agent._wake_bdi_loop = MagicMock()
+
+    belief = Belief(name="test_belief", arguments=["a", "b"])
+    agent._apply_beliefs([belief])
+
+    assert agent.bdi_agent.call.called
+    agent._wake_bdi_loop.set.assert_called()
+
+
+def test_apply_beliefs_empty_returns(agent):
+    """Line: if not beliefs: return"""
+    agent._wake_bdi_loop = MagicMock()
+    agent._apply_beliefs([])
+    agent.bdi_agent.call.assert_not_called()
+    agent._wake_bdi_loop.set.assert_not_called()
+
+
+def test_remove_belief_success_wakes_loop(agent):
+    """Line: if result: wake set"""
+    agent._wake_bdi_loop = MagicMock()
+    agent.bdi_agent.call.return_value = True
+
+    agent._remove_belief("remove_me", ["x"])
+
+    assert agent.bdi_agent.call.called
+    trigger, goaltype, literal, *_ = agent.bdi_agent.call.call_args.args
+
+    assert trigger == agentspeak.Trigger.removal
+    assert goaltype == agentspeak.GoalType.belief
+    assert literal.functor == "remove_me"
+    assert literal.args[0].functor == "x"
+
+    agent._wake_bdi_loop.set.assert_called()
+
+
+def test_remove_belief_failure_does_not_wake(agent):
+    """Line: else result is False"""
+    agent._wake_bdi_loop = MagicMock()
+    agent.bdi_agent.call.return_value = False
+
+    agent._remove_belief("not_there", ["y"])
+
+    assert agent.bdi_agent.call.called  # removal was attempted
+    agent._wake_bdi_loop.set.assert_not_called()
+
+
+def test_remove_all_with_name_wakes_loop(agent):
+    """Cover _remove_all_with_name() removed counter + wake"""
+    agent._wake_bdi_loop = MagicMock()
+
+    fake_literal = agentspeak.Literal("delete_me", (agentspeak.Literal("arg1"),))
+    fake_key = ("delete_me", 1)
+    agent.bdi_agent.beliefs = {fake_key: {fake_literal}}
+
+    agent._remove_all_with_name("delete_me")
+
+    assert agent.bdi_agent.call.called
+    agent._wake_bdi_loop.set.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_bdi_step_true_branch_hits_line_67(agent):
+    """Force step() to return True once so line 67 is actually executed"""
+    # counter that isn't tied to MagicMock.call_count ordering
+    counter = {"i": 0}
+
+    def fake_step():
+        counter["i"] += 1
+        return counter["i"] == 1  # True only first time
+
+    # Important: wrap fake_step into another mock so `.called` still exists
+    agent.bdi_agent.step = MagicMock(side_effect=fake_step)
+    agent.bdi_agent.shortest_deadline = MagicMock(return_value=None)
+
+    agent._running = True
+    agent._wake_bdi_loop = asyncio.Event()
+    agent._wake_bdi_loop.set()
+
+    task = asyncio.create_task(agent._bdi_loop())
+    await asyncio.sleep(0.01)
+    task.cancel()
+    try:
+        await task
+    except asyncio.CancelledError:
+        pass
+
+    assert agent.bdi_agent.step.called
+    assert counter["i"] >= 1  # proves True branch ran
+
+
+def test_replace_belief_calls_remove_all(agent):
+    """Cover: if belief.replace: self._remove_all_with_name()"""
+    agent._remove_all_with_name = MagicMock()
+    agent._wake_bdi_loop = MagicMock()
+
+    belief = Belief(name="user_said", arguments=["Hello"], replace=True)
+    agent._apply_beliefs([belief])
+
+    agent._remove_all_with_name.assert_called_with("user_said")
+
+
+@pytest.mark.asyncio
+async def test_send_to_llm_creates_prompt_and_sends(agent):
+    """Cover entire _send_to_llm() including message send and logger.info"""
+    agent.bdi_agent = MagicMock()  # ensure mocked BDI does not interfere
+    agent._wake_bdi_loop = MagicMock()
+
+    await agent._send_to_llm("hello world", "n1\nn2", "g1")
+
+    # send() was called
+    assert agent.send.called
+    sent_msg: InternalMessage = agent.send.call_args.args[0]
+
+    # Message routing values correct
+    assert sent_msg.to == settings.agent_settings.llm_name
+    assert "hello world" in sent_msg.body
+
+    # JSON contains split norms/goals
+    body = json.loads(sent_msg.body)
+    assert body["norms"] == ["n1", "n2"]
+    assert body["goals"] == ["g1"]
+
+
+@pytest.mark.asyncio
+async def test_deadline_sleep_branch(agent):
+    """Specifically assert the if deadline: sleep → maybe_more_work=True branch"""
+    future_deadline = time.time() + 0.005
+    agent.bdi_agent.step.return_value = False
+    agent.bdi_agent.shortest_deadline.return_value = future_deadline
+
+    start_time = time.time()
+    agent._running = True
+    agent._wake_bdi_loop = asyncio.Event()
+    agent._wake_bdi_loop.set()
+
+    task = asyncio.create_task(agent._bdi_loop())
+    await asyncio.sleep(0.01)
+    task.cancel()
+
+    duration = time.time() - start_time
+    assert duration >= 0.004  # loop slept until deadline
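For reference, the two `_bdi_loop` tests above start the loop as a task, give it a short slice of the event loop, and then cancel it. Below is a minimal, self-contained sketch of that drive-then-cancel pattern, assuming pytest-asyncio; the coroutine and names are illustrative stand-ins, not code from this repository.

# Sketch of the drive-then-cancel pattern against a stand-in coroutine.
import asyncio

import pytest


@pytest.mark.asyncio
async def test_drive_then_cancel_pattern():
    ticks = {"n": 0}

    async def loop_under_test():
        # Stand-in for a long-running agent loop: runs until cancelled.
        while True:
            ticks["n"] += 1
            await asyncio.sleep(0)

    task = asyncio.create_task(loop_under_test())
    await asyncio.sleep(0.01)  # let the loop run a few iterations
    task.cancel()  # then cancel it, as the tests above do
    try:
        await task
    except asyncio.CancelledError:
        pass

    assert ticks["n"] >= 1  # the loop body ran at least once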
test/unit/agents/bdi/test_bdi_program_manager.py (new file, 77 lines)
@@ -0,0 +1,77 @@
+import asyncio
+import json
+import sys
+from unittest.mock import AsyncMock
+
+import pytest
+
+from control_backend.agents.bdi.bdi_program_manager import BDIProgramManager
+from control_backend.core.agent_system import InternalMessage
+from control_backend.schemas.belief_message import BeliefMessage
+from control_backend.schemas.program import Program
+
+# Fix Windows Proactor loop for zmq
+if sys.platform.startswith("win"):
+    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
+
+
+def make_valid_program_json(norm="N1", goal="G1"):
+    return json.dumps(
+        {
+            "phases": [
+                {
+                    "id": "phase1",
+                    "label": "Phase 1",
+                    "triggers": [],
+                    "norms": [{"id": "n1", "label": "Norm 1", "norm": norm}],
+                    "goals": [
+                        {"id": "g1", "label": "Goal 1", "description": goal, "achieved": False}
+                    ],
+                }
+            ]
+        }
+    )
+
+
+@pytest.mark.asyncio
+async def test_send_to_bdi():
+    manager = BDIProgramManager(name="program_manager_test")
+    manager.send = AsyncMock()
+
+    program = Program.model_validate_json(make_valid_program_json())
+    await manager._send_to_bdi(program)
+
+    assert manager.send.await_count == 1
+    msg: InternalMessage = manager.send.await_args[0][0]
+    assert msg.thread == "beliefs"
+
+    beliefs = BeliefMessage.model_validate_json(msg.body)
+    names = {b.name: b.arguments for b in beliefs.beliefs}
+
+    assert "norms" in names and names["norms"] == ["N1"]
+    assert "goals" in names and names["goals"] == ["G1"]
+
+
+@pytest.mark.asyncio
+async def test_receive_programs_valid_and_invalid():
+    sub = AsyncMock()
+    sub.recv_multipart.side_effect = [
+        (b"program", b"{bad json"),
+        (b"program", make_valid_program_json().encode()),
+    ]
+
+    manager = BDIProgramManager(name="program_manager_test")
+    manager.sub_socket = sub
+    manager._send_to_bdi = AsyncMock()
+
+    try:
+        # Will give StopAsyncIteration when the predefined `sub.recv_multipart` side-effects run out
+        await manager._receive_programs()
+    except StopAsyncIteration:
+        pass
+
+    # Only valid Program should have triggered _send_to_bdi
+    assert manager._send_to_bdi.await_count == 1
+    forwarded: Program = manager._send_to_bdi.await_args[0][0]
+    assert forwarded.phases[0].norms[0].norm == "N1"
+    assert forwarded.phases[0].goals[0].description == "G1"
@@ -87,3 +87,49 @@ async def test_send_beliefs_to_bdi(agent):
     assert sent.to == settings.agent_settings.bdi_core_name
     assert sent.thread == "beliefs"
     assert json.loads(sent.body)["beliefs"] == [belief.model_dump() for belief in beliefs]
+
+
+@pytest.mark.asyncio
+async def test_setup_executes(agent):
+    """Covers setup and asserts the agent has a name."""
+    await agent.setup()
+    assert agent.name == "belief_collector_agent"  # simple property assertion
+
+
+@pytest.mark.asyncio
+async def test_handle_message_unrecognized_type_executes(agent):
+    """Covers the else branch for unrecognized message type."""
+    payload = {"type": "unknown_type"}
+    msg = make_msg(payload, sender="tester")
+    # Wrap send to ensure nothing is sent
+    agent.send = AsyncMock()
+    await agent.handle_message(msg)
+    # Assert no messages were sent
+    agent.send.assert_not_awaited()
+
+
+@pytest.mark.asyncio
+async def test_handle_emo_text_executes(agent):
+    """Covers the _handle_emo_text method."""
+    # The method does nothing, but we can assert it returns None
+    result = await agent._handle_emo_text({}, "origin")
+    assert result is None
+
+
+@pytest.mark.asyncio
+async def test_send_beliefs_to_bdi_empty_executes(agent):
+    """Covers early return when beliefs are empty."""
+    agent.send = AsyncMock()
+    await agent._send_beliefs_to_bdi({})
+    # Assert that nothing was sent
+    agent.send.assert_not_awaited()
+
+
+@pytest.mark.asyncio
+async def test_handle_belief_text_invalid_returns_none(agent, mocker):
+    payload = {"type": "belief_extraction_text", "beliefs": {"user_said": "invalid-argument"}}
+
+    result = await agent._handle_belief_text(payload, "origin")
+
+    # The method itself returns None
+    assert result is None
@@ -56,3 +56,10 @@ async def test_process_transcription_demo(agent, mock_settings):
     assert sent.thread == "beliefs"
     parsed = json.loads(sent.body)
     assert parsed["beliefs"]["user_said"] == [transcription]
+
+
+@pytest.mark.asyncio
+async def test_setup_initializes_beliefs(agent):
+    """Covers the setup method and ensures beliefs are initialized."""
+    await agent.setup()
+    assert agent.beliefs == {"mood": ["X"], "car": ["Y"]}
@@ -334,3 +334,13 @@ async def test_listen_loop_ping_sends_internal(zmq_context):
     await agent._listen_loop()
 
     pub_socket.send_multipart.assert_awaited()
+
+
+@pytest.mark.asyncio
+async def test_negotiate_req_socket_none_causes_retry(zmq_context):
+    agent = RICommunicationAgent("ri_comm")
+    agent._req_socket = None
+
+    result = await agent._negotiate_connection(max_retries=1)
+
+    assert result is False
@@ -134,3 +134,128 @@ def test_llm_instructions():
     text_def = instr_def.build_developer_instruction()
     assert "Norms to follow" in text_def
     assert "Goals to reach" in text_def
+
+
+@pytest.mark.asyncio
+async def test_handle_message_validation_error_branch_no_send(mock_httpx_client, mock_settings):
+    """
+    Covers the ValidationError branch:
+        except ValidationError:
+            self.logger.debug("Prompt message from BDI core is invalid.")
+    Assert: no message is sent.
+    """
+    agent = LLMAgent("llm_agent")
+    agent.send = AsyncMock()
+
+    # Invalid JSON that triggers ValidationError in LLMPromptMessage
+    invalid_json = '{"text": "Hi", "wrong_field": 123}'  # field not in schema
+
+    msg = InternalMessage(
+        to="llm_agent",
+        sender=mock_settings.agent_settings.bdi_core_name,
+        body=invalid_json,
+    )
+
+    await agent.handle_message(msg)
+
+    # Should not send any reply
+    agent.send.assert_not_called()
+
+
+@pytest.mark.asyncio
+async def test_handle_message_ignored_sender_branch_no_send(mock_httpx_client, mock_settings):
+    """
+    Covers the else branch for messages not from BDI core:
+        else:
+            self.logger.debug("Message ignored (not from BDI core.")
+    Assert: no message is sent.
+    """
+    agent = LLMAgent("llm_agent")
+    agent.send = AsyncMock()
+
+    msg = InternalMessage(
+        to="llm_agent",
+        sender="some_other_agent",  # Not BDI core
+        body='{"text": "Hi"}',
+    )
+
+    await agent.handle_message(msg)
+
+    # Should not send any reply
+    agent.send.assert_not_called()
+
+
+@pytest.mark.asyncio
+async def test_query_llm_yields_final_tail_chunk(mock_settings):
+    """
+    Covers the branch: if current_chunk: yield current_chunk
+    Ensure that the last partial chunk is emitted.
+    """
+    agent = LLMAgent("llm_agent")
+    agent.send = AsyncMock()
+
+    # Patch _stream_query_llm to yield tokens that do NOT end with punctuation
+    async def fake_stream(messages):
+        yield "Hello"
+        yield " world"  # No punctuation to trigger the normal chunking
+
+    agent._stream_query_llm = fake_stream
+
+    prompt = LLMPromptMessage(text="Hi", norms=[], goals=[])
+
+    # Collect chunks yielded
+    chunks = []
+    async for chunk in agent._query_llm(prompt.text, prompt.norms, prompt.goals):
+        chunks.append(chunk)
+
+    # The final chunk should be yielded
+    assert chunks[-1] == "Hello world"
+    assert any("Hello" in c for c in chunks)
+
+
+@pytest.mark.asyncio
+async def test_stream_query_llm_skips_non_data_lines(mock_httpx_client, mock_settings):
+    """
+    Covers: if not line or not line.startswith("data: "): continue
+    Feed lines that are empty or do not start with 'data:' and check they are skipped.
+    """
+    # Mock response
+    mock_response = MagicMock()
+    mock_response.raise_for_status = MagicMock()
+
+    lines = [
+        "",  # empty line
+        "not data",  # invalid prefix
+        'data: {"choices": [{"delta": {"content": "Hi"}}]}',
+        "data: [DONE]",
+    ]
+
+    async def aiter_lines_gen():
+        for line in lines:
+            yield line
+
+    mock_response.aiter_lines.side_effect = aiter_lines_gen
+
+    # Proper async context manager for stream
+    mock_stream_context = MagicMock()
+    mock_stream_context.__aenter__ = AsyncMock(return_value=mock_response)
+    mock_stream_context.__aexit__ = AsyncMock(return_value=None)
+
+    # Make stream return the async context manager
+    mock_httpx_client.stream = MagicMock(return_value=mock_stream_context)
+
+    agent = LLMAgent("llm_agent")
+    agent.send = AsyncMock()
+
+    # Patch settings for local LLM URL
+    with patch("control_backend.agents.llm.llm_agent.settings") as mock_sett:
+        mock_sett.llm_settings.local_llm_url = "http://localhost"
+        mock_sett.llm_settings.local_llm_model = "test-model"
+
+        # Collect tokens
+        tokens = []
+        async for token in agent._stream_query_llm([]):
+            tokens.append(token)
+
+        # Only the valid 'data:' line should yield content
+        assert tokens == ["Hi"]
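For reference, test_stream_query_llm_skips_non_data_lines above fakes a streaming HTTP client by hand-building an async context manager out of MagicMock and AsyncMock. Below is a minimal sketch of that mocking pattern in isolation, assuming pytest-asyncio; the client, response, and payload names are illustrative stand-ins, not the project's API.

# Sketch: faking an async streaming context manager with unittest.mock.
from unittest.mock import AsyncMock, MagicMock

import pytest


@pytest.mark.asyncio
async def test_fake_async_stream_context():
    async def aiter_lines_gen():
        yield 'data: {"content": "Hi"}'
        yield "data: [DONE]"

    response = MagicMock()
    response.aiter_lines.side_effect = aiter_lines_gen

    stream_ctx = MagicMock()
    stream_ctx.__aenter__ = AsyncMock(return_value=response)
    stream_ctx.__aexit__ = AsyncMock(return_value=None)

    client = MagicMock()
    client.stream = MagicMock(return_value=stream_ctx)

    # Code under test would do: async with client.stream(...) as resp: ...
    async with client.stream("POST", "http://example.invalid") as resp:
        lines = [line async for line in resp.aiter_lines()]

    assert lines == ['data: {"content": "Hi"}', "data: [DONE]"]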
@@ -120,3 +120,83 @@ def test_mlx_recognizer():
     mlx_mock.transcribe.return_value = {"text": "Hi"}
     res = rec.recognize_speech(np.zeros(10))
     assert res == "Hi"
+
+
+@pytest.mark.asyncio
+async def test_transcription_loop_continues_after_error(mock_zmq_context):
+    mock_sub = MagicMock()
+    mock_sub.recv = AsyncMock()
+    mock_zmq_context.instance.return_value.socket.return_value = mock_sub
+
+    fake_audio = np.zeros(16000, dtype=np.float32).tobytes()
+
+    mock_sub.recv.side_effect = [
+        fake_audio,  # first iteration → recognizer fails
+        asyncio.CancelledError(),  # second iteration → stop loop
+    ]
+
+    with patch.object(SpeechRecognizer, "best_type") as mock_best:
+        mock_recognizer = MagicMock()
+        mock_recognizer.recognize_speech.side_effect = RuntimeError("fail")
+        mock_best.return_value = mock_recognizer
+
+        agent = TranscriptionAgent("tcp://in")
+        agent._running = True  # ← REQUIRED to enter the loop
+        agent.send = AsyncMock()  # should never be called
+        agent.add_behavior = AsyncMock()  # match other tests
+
+        await agent.setup()
+
+        try:
+            await agent._transcribing_loop()
+        except asyncio.CancelledError:
+            pass
+
+        # recognizer failed, so we should never send anything
+        agent.send.assert_not_called()
+
+        # recv must have been called twice (audio then CancelledError)
+        assert mock_sub.recv.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_transcription_continue_branch_when_empty(mock_zmq_context):
+    mock_sub = MagicMock()
+    mock_sub.recv = AsyncMock()
+    mock_zmq_context.instance.return_value.socket.return_value = mock_sub
+
+    # First recv → audio chunk
+    # Second recv → Cancel loop → stop iteration
+    fake_audio = np.zeros(16000, dtype=np.float32).tobytes()
+    mock_sub.recv.side_effect = [fake_audio, asyncio.CancelledError()]
+
+    with patch.object(SpeechRecognizer, "best_type") as mock_best:
+        mock_recognizer = MagicMock()
+        mock_recognizer.recognize_speech.return_value = ""  # <— triggers the continue branch
+        mock_best.return_value = mock_recognizer
+
+        agent = TranscriptionAgent("tcp://in")
+
+        # Make loop runnable
+        agent._running = True
+        agent.send = AsyncMock()
+        agent.add_behavior = AsyncMock()
+
+        await agent.setup()
+
+        # Execute loop manually
+        try:
+            await agent._transcribing_loop()
+        except asyncio.CancelledError:
+            pass
+
+        # → Because of "continue", NO sending should occur
+        agent.send.assert_not_called()
+
+        # → Continue was hit, so we must have read exactly 2 times:
+        # - first audio
+        # - second CancelledError
+        assert mock_sub.recv.call_count == 2
+
+        # → recognizer was called once (first iteration)
+        assert mock_recognizer.recognize_speech.call_count == 1
@@ -1,7 +1,8 @@
-from unittest.mock import AsyncMock, MagicMock
+from unittest.mock import AsyncMock, MagicMock, patch
 
 import numpy as np
 import pytest
+import zmq
 
 from control_backend.agents.perception.vad_agent import VADAgent
 
@@ -123,3 +124,44 @@ async def test_no_data(audio_out_socket, vad_agent):
 
     audio_out_socket.send.assert_not_called()
     assert len(vad_agent.audio_buffer) == 0
+
+
+@pytest.mark.asyncio
+async def test_vad_model_load_failure_stops_agent(vad_agent):
+    """
+    Test that if loading the VAD model raises an Exception, it is caught,
+    the agent logs an exception, stops itself, and setup returns.
+    """
+    # Patch torch.hub.load to raise an exception
+    with patch(
+        "control_backend.agents.perception.vad_agent.torch.hub.load",
+        side_effect=Exception("model fail"),
+    ):
+        # Patch stop to an AsyncMock so we can check it was awaited
+        vad_agent.stop = AsyncMock()
+
+        result = await vad_agent.setup()
+
+        # Assert stop was called
+        vad_agent.stop.assert_awaited_once()
+        # Assert setup returned None
+        assert result is None
+
+
+@pytest.mark.asyncio
+async def test_audio_out_bind_failure_sets_none_and_logs(vad_agent, caplog):
+    """
+    Test that if binding the output socket raises ZMQBindError,
+    audio_out_socket is set to None, None is returned, and an error is logged.
+    """
+    mock_socket = MagicMock()
+    mock_socket.bind_to_random_port.side_effect = zmq.ZMQBindError()
+    with patch("control_backend.agents.perception.vad_agent.azmq.Context.instance") as mock_ctx:
+        mock_ctx.return_value.socket.return_value = mock_socket
+
+        with caplog.at_level("ERROR"):
+            port = vad_agent._connect_audio_out_socket()
+
+        assert port is None
+        assert vad_agent.audio_out_socket is None
+        assert caplog.text is not None
test/unit/api/v1/endpoints/test_logs_endpoint.py (new file, 63 lines)
@@ -0,0 +1,63 @@
+from unittest.mock import patch
+
+import pytest
+from fastapi import FastAPI
+from fastapi.testclient import TestClient
+from starlette.responses import StreamingResponse
+
+from control_backend.api.v1.endpoints import logs
+
+
+@pytest.fixture
+def client():
+    """TestClient with logs router included."""
+    app = FastAPI()
+    app.include_router(logs.router)
+    return TestClient(app)
+
+
+@pytest.mark.asyncio
+async def test_log_stream_endpoint_lines(client):
+    """Call /logs/stream with a mocked ZMQ socket to cover all lines."""
+
+    # Dummy socket to mock ZMQ behavior
+    class DummySocket:
+        def __init__(self):
+            self.subscribed = []
+            self.connected = False
+            self.recv_count = 0
+
+        def subscribe(self, topic):
+            self.subscribed.append(topic)
+
+        def connect(self, addr):
+            self.connected = True
+
+        async def recv_multipart(self):
+            # Return one message, then stop generator
+            if self.recv_count == 0:
+                self.recv_count += 1
+                return (b"INFO", b"test message")
+            else:
+                raise StopAsyncIteration
+
+    dummy_socket = DummySocket()
+
+    # Patch Context.instance().socket() to return dummy socket
+    with patch("control_backend.api.v1.endpoints.logs.Context.instance") as mock_context:
+        mock_context.return_value.socket.return_value = dummy_socket
+
+        # Call the endpoint directly
+        response = await logs.log_stream()
+        assert isinstance(response, StreamingResponse)
+
+        # Fetch one chunk from the generator
+        gen = response.body_iterator
+        chunk = await gen.__anext__()
+        if isinstance(chunk, bytes):
+            chunk = chunk.decode("utf-8")
+        assert "data:" in chunk
+
+        # Optional: assert subscribe/connect were called
+        assert dummy_socket.subscribed  # at least some log levels subscribed
+        assert dummy_socket.connected  # connect was called
test/unit/api/v1/endpoints/test_message_endpoint.py (new file, 45 lines)
@@ -0,0 +1,45 @@
+import json
+
+import pytest
+from fastapi.testclient import TestClient
+
+from control_backend.api.v1.endpoints import message
+
+
+@pytest.fixture
+def client():
+    """FastAPI TestClient for the message router."""
+    from fastapi import FastAPI
+
+    app = FastAPI()
+    app.include_router(message.router)
+    return TestClient(app)
+
+
+def test_receive_message_post(client, monkeypatch):
+    """Test POST /message endpoint sends message to pub socket."""
+
+    # Dummy pub socket to capture sent messages
+    class DummyPubSocket:
+        def __init__(self):
+            self.sent = []
+
+        async def send_multipart(self, msg):
+            self.sent.append(msg)
+
+    dummy_socket = DummyPubSocket()
+
+    # Patch app.state.endpoints_pub_socket
+    client.app.state.endpoints_pub_socket = dummy_socket
+
+    data = {"message": "Hello world"}
+    response = client.post("/message", json=data)
+
+    assert response.status_code == 202
+    assert response.json() == {"status": "Message received"}
+
+    # Ensure the message was sent via pub_socket
+    assert len(dummy_socket.sent) == 1
+    topic, body = dummy_socket.sent[0]
+    parsed = json.loads(body.decode("utf-8"))
+    assert parsed["message"] == "Hello world"
test/unit/api/v1/endpoints/test_router.py (new file, 16 lines)
@@ -0,0 +1,16 @@
+from fastapi.routing import APIRoute
+
+from control_backend.api.v1.router import api_router  # <--- corrected import
+
+
+def test_router_includes_expected_paths():
+    """Ensure api_router includes main router prefixes."""
+    routes = [r for r in api_router.routes if isinstance(r, APIRoute)]
+    paths = [r.path for r in routes]
+
+    # Ensure at least one route under each prefix exists
+    assert any(p.startswith("/robot") for p in paths)
+    assert any(p.startswith("/message") for p in paths)
+    assert any(p.startswith("/sse") for p in paths)
+    assert any(p.startswith("/logs") for p in paths)
+    assert any(p.startswith("/program") for p in paths)
test/unit/api/v1/endpoints/test_sse_endpoint.py (new file, 24 lines)
@@ -0,0 +1,24 @@
+import pytest
+from fastapi import FastAPI
+from fastapi.testclient import TestClient
+
+from control_backend.api.v1.endpoints import sse
+
+
+@pytest.fixture
+def app():
+    app = FastAPI()
+    app.include_router(sse.router)
+    return app
+
+
+@pytest.fixture
+def client(app):
+    return TestClient(app)
+
+
+def test_sse_route_exists(client):
+    """Minimal smoke test to ensure /sse route exists and responds."""
+    response = client.get("/sse")
+    # Since implementation is not done, we only assert it doesn't crash
+    assert response.status_code == 200
@@ -2,7 +2,7 @@
 
 import asyncio
 import logging
-from unittest.mock import AsyncMock
+from unittest.mock import AsyncMock, MagicMock
 
 import pytest
 
@@ -70,3 +70,142 @@ async def test_get_agent():
     agent = ConcreteTestAgent("registrant")
     assert AgentDirectory.get("registrant") == agent
     assert AgentDirectory.get("non_existent") is None
+
+
+class DummyAgent(BaseAgent):
+    async def setup(self):
+        pass  # we will test this separately
+
+    async def handle_message(self, msg: InternalMessage):
+        self.last_handled = msg
+
+
+@pytest.mark.asyncio
+async def test_base_agent_setup_is_noop():
+    agent = DummyAgent("dummy")
+
+    # Should simply return without error
+    assert await agent.setup() is None
+
+
+@pytest.mark.asyncio
+async def test_send_to_local_agent(monkeypatch):
+    sender = DummyAgent("sender")
+    target = DummyAgent("receiver")
+
+    # Fake logger
+    sender.logger = MagicMock()
+
+    # Patch inbox.put
+    target.inbox.put = AsyncMock()
+
+    message = InternalMessage(to="receiver", sender="sender", body="hello")
+
+    await sender.send(message)
+
+    target.inbox.put.assert_awaited_once_with(message)
+    sender.logger.debug.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_process_inbox_calls_handle_message(monkeypatch):
+    agent = DummyAgent("dummy")
+    agent.logger = MagicMock()
+
+    # Make agent running so loop triggers
+    agent._running = True
+
+    # Prepare inbox to give one message then stop
+    msg = InternalMessage(to="dummy", sender="x", body="test")
+
+    async def get_once():
+        agent._running = False  # stop after first iteration
+        return msg
+
+    agent.inbox.get = AsyncMock(side_effect=get_once)
+    agent.handle_message = AsyncMock()
+
+    await agent._process_inbox()
+
+    agent.handle_message.assert_awaited_once_with(msg)
+
+
+@pytest.mark.asyncio
+async def test_receive_internal_zmq_loop_success(monkeypatch):
+    agent = DummyAgent("dummy")
+    agent.logger = MagicMock()
+    agent._running = True
+
+    mock_socket = MagicMock()
+    mock_socket.recv_multipart = AsyncMock(
+        side_effect=[
+            (
+                b"topic",
+                InternalMessage(to="dummy", sender="x", body="hi").model_dump_json().encode(),
+            ),
+            asyncio.CancelledError(),  # stop loop
+        ]
+    )
+    agent._internal_sub_socket = mock_socket
+
+    agent.inbox.put = AsyncMock()
+
+    await agent._receive_internal_zmq_loop()
+
+    agent.inbox.put.assert_awaited()  # message forwarded
+
+
+@pytest.mark.asyncio
+async def test_receive_internal_zmq_loop_exception_logs_error():
+    agent = DummyAgent("dummy")
+    agent.logger = MagicMock()
+    agent._running = True
+
+    mock_socket = MagicMock()
+    mock_socket.recv_multipart = AsyncMock(
+        side_effect=[Exception("boom"), asyncio.CancelledError()]
+    )
+    agent._internal_sub_socket = mock_socket
+
+    agent.inbox.put = AsyncMock()
+
+    await agent._receive_internal_zmq_loop()
+
+    agent.logger.exception.assert_called_once()
+    assert "Could not process ZMQ message." in agent.logger.exception.call_args[0][0]
+
+
+@pytest.mark.asyncio
+async def test_base_agent_handle_message_not_implemented():
+    class RawAgent(BaseAgent):
+        async def setup(self):
+            pass
+
+    agent = RawAgent("raw")
+
+    msg = InternalMessage(to="raw", sender="x", body="hi")
+
+    with pytest.raises(NotImplementedError):
+        await BaseAgent.handle_message(agent, msg)
+
+
+@pytest.mark.asyncio
+async def test_base_agent_setup_abstract_method_body_executes():
+    """
+    Covers the 'pass' inside BaseAgent.setup().
+    Since BaseAgent is abstract, we do NOT instantiate it.
+    We call the coroutine function directly on BaseAgent and pass a dummy self.
+    """
+
+    class Dummy:
+        """Minimal stub to act as 'self'."""
+
+        pass
+
+    stub = Dummy()
+
+    # Call BaseAgent.setup() as an unbound coroutine, passing stub as 'self'
+    result = await BaseAgent.setup(stub)
+
+    # The method contains only 'pass', so it returns None
+    assert result is None
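For reference, test_process_inbox_calls_handle_message above ends its polling loop by flipping the running flag from inside the mocked inbox.get. Below is the same pattern in isolation, assuming pytest-asyncio; TinyAgent and its attributes are illustrative stand-ins, not the project's BaseAgent.

# Sketch: stopping a polling loop after one message via a mocked .get().
import asyncio
from unittest.mock import AsyncMock

import pytest


class TinyAgent:
    def __init__(self):
        self._running = True
        self.inbox = asyncio.Queue()
        self.handled = []

    async def handle_message(self, msg):
        self.handled.append(msg)

    async def process_inbox(self):
        while self._running:
            msg = await self.inbox.get()
            await self.handle_message(msg)


@pytest.mark.asyncio
async def test_stop_after_first_message():
    agent = TinyAgent()

    async def get_once():
        agent._running = False  # make the while condition fail on the next pass
        return "hello"

    agent.inbox.get = AsyncMock(side_effect=get_once)

    await agent.process_inbox()

    assert agent.handled == ["hello"]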
@@ -86,3 +86,34 @@ def test_setup_logging_zmq_handler(mock_zmq_context):
 
     args = mock_dict_config.call_args[0][0]
     assert "interface_or_socket" in args["handlers"]["ui"]
+
+
+def test_add_logging_level_method_name_exists_in_logging():
+    # method_name explicitly set to an existing logging method → triggers first hasattr branch
+    with pytest.raises(AttributeError) as exc:
+        add_logging_level("NEWDUPLEVEL", 37, method_name="info")
+    assert "info already defined in logging module" in str(exc.value)
+
+
+def test_add_logging_level_method_name_exists_in_logger_class():
+    # 'makeRecord' exists on Logger class but not on the logging module
+    with pytest.raises(AttributeError) as exc:
+        add_logging_level("ANOTHERLEVEL", 38, method_name="makeRecord")
+    assert "makeRecord already defined in logger class" in str(exc.value)
+
+
+def test_add_logging_level_log_to_root_path_executes_without_error():
+    # Verify log_to_root is installed and callable — without asserting logging output
+    level_name = "ROOTTEST"
+    level_num = 36
+
+    add_logging_level(level_name, level_num)
+
+    # Simply call the injected root logger method
+    # The line is executed even if we don't validate output
+    root_logging_method = getattr(logging, level_name.lower(), None)
+    assert callable(root_logging_method)
+
+    # Execute the method to hit log_to_root in coverage.
+    # No need to verify log output.
+    root_logging_method("some message")
test/unit/schemas/test_message.py (new file, 12 lines)
@@ -0,0 +1,12 @@
+from control_backend.schemas.message import Message
+
+
+def base_message() -> Message:
+    return Message(message="Example")
+
+
+def test_valid_message():
+    mess = base_message()
+    validated = Message.model_validate(mess)
+    assert isinstance(validated, Message)
+    assert validated.message == "Example"
test/unit/test_main.py (new file, 75 lines)
@@ -0,0 +1,75 @@
+import asyncio
+import sys
+from unittest.mock import AsyncMock, patch
+
+import pytest
+from fastapi.testclient import TestClient
+
+from control_backend.api.v1.router import api_router
+from control_backend.main import app, lifespan
+
+# Fix event loop on Windows
+if sys.platform == "win32":
+    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
+
+
+@pytest.fixture
+def client():
+    # Patch setup_logging so it does nothing
+    with patch("control_backend.main.setup_logging"):
+        with TestClient(app) as c:
+            yield c
+
+
+def test_root_fast():
+    # Patch heavy startup code so it doesn't slow down
+    with patch("control_backend.main.setup_logging"), patch("control_backend.main.lifespan"):
+        client = TestClient(app)
+        resp = client.get("/")
+        assert resp.status_code == 200
+        assert resp.json() == {"status": "ok"}
+
+
+def test_cors_middleware_added():
+    """Test that CORSMiddleware is correctly added to the app."""
+    from starlette.middleware.cors import CORSMiddleware
+
+    middleware_classes = [m.cls for m in app.user_middleware]
+    assert CORSMiddleware in middleware_classes
+
+
+def test_api_router_included():
+    """Test that the API router is included in the FastAPI app."""
+
+    route_paths = [r.path for r in app.routes]
+    for route in api_router.routes:
+        assert route.path in route_paths
+
+
+@pytest.mark.asyncio
+async def test_lifespan_agent_start_exception():
+    """
+    Trigger an exception during agent startup to cover the error logging branch.
+    Ensures exceptions are logged properly and re-raised.
+    """
+    with (
+        patch("control_backend.main.VADAgent.start", new_callable=AsyncMock),
+        patch("control_backend.main.VADAgent.reset_stream", new_callable=AsyncMock),
+        patch(
+            "control_backend.main.RICommunicationAgent.start", new_callable=AsyncMock
+        ) as ri_start,
+        patch("control_backend.main.setup_logging"),
+        patch("control_backend.main.threading.Thread"),
+    ):
+        # Force RICommunicationAgent.start to raise an exception
+        ri_start.side_effect = Exception("Test exception")
+
+        with patch("control_backend.main.logger") as mock_logger:
+            with pytest.raises(Exception, match="Test exception"):
+                async with lifespan(app):
+                    pass
+
+            # Verify the error was logged correctly
+            assert mock_logger.error.called
+            args, _ = mock_logger.error.call_args
+            assert isinstance(args[2], Exception)