diff --git a/.gitignore b/.gitignore
index f6ad342..b6322a4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -199,7 +199,7 @@ cython_debug/
 # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
 # and can be added to the global gitignore or merged into this file. However, if you prefer,
 # you could uncomment the following to ignore the entire vscode folder
-# .vscode/
+.vscode/
 
 # Ruff stuff:
 .ruff_cache/
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index f4e1883..f2d3c52 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -22,5 +22,6 @@ test:
   tags:
     - test
   script:
-    - uv run --only-group test pytest
+    # - uv run --group integration-test pytest test/integration
+    - uv run --only-group test pytest test/unit
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..c6ed188
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,10 @@
+repos:
+- repo: https://github.com/astral-sh/ruff-pre-commit
+  # Ruff version.
+  rev: v0.14.2
+  hooks:
+    # Run the linter.
+    - id: ruff-check
+      args: [ --fix ]
+    # Run the formatter.
+    - id: ruff-format
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..b2b8866
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,7 @@
+{
+    "python.testing.pytestArgs": [
+        "test"
+    ],
+    "python.testing.unittestEnabled": false,
+    "python.testing.pytestEnabled": true
+}
\ No newline at end of file
diff --git a/README.md b/README.md
index c2a8702..45f8f98 100644
--- a/README.md
+++ b/README.md
@@ -16,6 +16,17 @@ Using UV, installing the packages and virtual environment is as simple as typing
 uv sync
 ```
 
+## Local LLM
+
+To run an LLM locally, download LM Studio from https://lmstudio.ai
+When installing, select developer mode, download a model (LM Studio will already suggest one) and run it (see the developer window, status: running).
+
+Copy the URL shown at the top right and replace `local_llm_url` in the settings with that URL followed by `v1/chat/completions`.
+This path may differ depending on the model you choose.
+
+Copy the name of the loaded model and replace `local_llm_model` in the settings with it.
+
+
 ## Running
 
 To run the project (development server), execute the following command (while inside the root repository):
@@ -24,10 +35,16 @@ uv run fastapi dev src/control_backend/main.py
 ```
 
 ## Testing
-Testing happens automatically when opening a merge request to any branch. If you want to manually run the test suite, you can do so by running the following:
+Testing happens automatically when opening a merge request to any branch. If you want to manually run the test suite, you can do so by running the following for unit tests:
 
 ```bash
-uv run --only-group test pytest
+uv run --only-group test pytest test/unit
+```
+
+Or for integration tests:
+
+```bash
+uv run --group integration-test pytest test/integration
 ```
 
 ## GitHooks
@@ -42,3 +59,9 @@ If your commit fails its either: branch name != /description-of-branch , commit name != : description of the commit. 
 : N25B-Num's
+
+To add automatic linting and formatting, run:
+
+```shell
+uv run pre-commit install
+```
\ No newline at end of file
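For reference, here is a sketch of how the `LLMSettings` defaults in `src/control_backend/core/config.py` (changed further below in this diff) might look after following the README's Local LLM steps. The URL assumes LM Studio is running on the same machine on its default port 1234, and the model name is only an example; both values will differ per setup:

```python
class LLMSettings(BaseModel):
    # Replace with the URL shown in LM Studio plus the chat completions path,
    # and with the exact name of the model you loaded.
    local_llm_url: str = "http://localhost:1234/v1/chat/completions"
    local_llm_model: str = "openai/gpt-oss-120b"
```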
diff --git a/pyproject.toml b/pyproject.toml
index 6776668..ee3ca08 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,10 +7,15 @@ requires-python = ">=3.13"
 dependencies = [
     "fastapi[all]>=0.115.6",
     "mlx-whisper>=0.4.3 ; sys_platform == 'darwin'",
+    "numpy>=2.3.3",
     "openai-whisper>=20250625",
     "pyaudio>=0.2.14",
     "pydantic>=2.12.0",
     "pydantic-settings>=2.11.0",
+    "pytest>=8.4.2",
+    "pytest-asyncio>=1.2.0",
+    "pytest-cov>=7.0.0",
+    "pytest-mock>=3.15.1",
     "pyzmq>=27.1.0",
     "silero-vad>=6.0.0",
     "spade>=4.1.0",
@@ -20,7 +25,16 @@ dependencies = [
 ]
 
 [dependency-groups]
+dev = [
+    "pre-commit>=4.3.0",
+    "ruff>=0.14.2",
+    "ruff-format>=0.3.0",
+]
+integration-test = [
+    "soundfile>=0.13.1",
+]
 test = [
+    "numpy>=2.3.3",
     "pytest>=8.4.2",
     "pytest-asyncio>=1.2.0",
     "pytest-cov>=7.0.0",
@@ -29,3 +43,21 @@ test = [
 
 [tool.pytest.ini_options]
 pythonpath = ["src"]
+
+[tool.ruff]
+line-length = 100
+
+[tool.ruff.lint]
+extend-select = [
+    "E",  # pycodestyle
+    "F",  # pyflakes
+    "I",  # isort (import sorting)
+    "UP", # pyupgrade (modernize code)
+    "B",  # flake8-bugbear (common bugs)
+    "C4", # flake8-comprehensions (unnecessary comprehensions)
+]
+
+ignore = [
+    "E226", # spaces around operators
+    "E701", # multiple statements on a single line
+]
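The Ruff configuration added to `pyproject.toml` above, together with the `dev` dependency group and the pre-commit hooks, can also be exercised by hand. A sketch of the usual commands (pre-commit normally only checks staged files on commit, so `--all-files` is useful for a first pass over the whole repository):

```shell
# Lint with autofixes, then format, using the [tool.ruff] settings from pyproject.toml
uv run --group dev ruff check --fix .
uv run --group dev ruff format .

# Or run every configured pre-commit hook against all files at once
uv run --group dev pre-commit run --all-files
```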
diff --git a/src/control_backend/agents/bdi/bdi_core.py b/src/control_backend/agents/bdi/bdi_core.py
index 7311061..a9b10d2 100644
--- a/src/control_backend/agents/bdi/bdi_core.py
+++ b/src/control_backend/agents/bdi/bdi_core.py
@@ -1,35 +1,71 @@
 import logging
 
 import agentspeak
+from spade.behaviour import OneShotBehaviour
+from spade.message import Message
 from spade_bdi.bdi import BDIAgent
 
-from control_backend.agents.bdi.behaviours.belief_setter import BeliefSetter
+from control_backend.agents.bdi.behaviours.belief_setter import BeliefSetterBehaviour
+from control_backend.agents.bdi.behaviours.receive_llm_resp_behaviour import (
+    ReceiveLLMResponseBehaviour,
+)
+from control_backend.core.config import settings
+
 
 class BDICoreAgent(BDIAgent):
     """
-    This is the Brain agent that does the belief inference with AgentSpeak.
+    This is the Brain agent that does the belief inference with AgentSpeak. This is a
+    continuous process that happens automatically in the background.
 
     This class contains all the actions that can be called from AgentSpeak plans.
-    It has the BeliefSetter behaviour.
+    It has the BeliefSetter behaviour and can query the LLM agent and receive its replies.
     """
-    logger = logging.getLogger("BDI Core")
-
-    async def setup(self):
-        belief_setter = BeliefSetter()
-        self.add_behaviour(belief_setter)
+
+    logger = logging.getLogger("bdi_core_agent")
+
+    async def setup(self) -> None:
+        """
+        Initializes belief behaviours and message routing.
+        """
+        self.logger.info("BDICoreAgent setup started")
+
+        self.add_behaviour(BeliefSetterBehaviour())
+        self.add_behaviour(ReceiveLLMResponseBehaviour())
+
+        self._send_to_llm("Hi pepper, how are you?")
+        # This is an example message, currently sent to the LLM when the agent starts.
+
+        self.logger.info("BDICoreAgent setup complete")
+
+    def add_custom_actions(self, actions) -> None:
+        """
+        Registers custom AgentSpeak actions callable from plans.
+        """
 
-    def add_custom_actions(self, actions):
         @actions.add(".reply", 1)
-        def _reply(agent, term, intention):
-            message = agentspeak.grounded(term.args[0], intention.scope)
-            self.logger.info(f"Replying to message: {message}")
-            reply = self._send_to_llm(message)
-            self.logger.info(f"Received reply: {reply}")
+        def _reply(agent: "BDICoreAgent", term, intention):
+            """
+            Sends text to the LLM (AgentSpeak action).
+            Example: .reply("Hello LLM!")
+            """
+            message_text = agentspeak.grounded(term.args[0], intention.scope)
+            self.logger.info("Reply action sending: %s", message_text)
+            self._send_to_llm(message_text)
             yield
 
-    def _send_to_llm(self, message) -> str:
-        """TODO: implement"""
-        return f"This is a reply to {message}"
+    def _send_to_llm(self, text: str) -> None:
+        """
+        Queue a text query to the LLM Agent; it is sent asynchronously by a one-shot behaviour.
+        """
+        class SendBehaviour(OneShotBehaviour):
+            async def run(self) -> None:
+                msg = Message(
+                    to=settings.agent_settings.llm_agent_name + '@' + settings.agent_settings.host,
+                    body=text,
+                )
+                await self.send(msg)
+                self.agent.logger.debug("Message sent to LLM: %s", text)
+
+        self.add_behaviour(SendBehaviour())
\ No newline at end of file
diff --git a/src/control_backend/agents/bdi/behaviours/belief_setter.py b/src/control_backend/agents/bdi/behaviours/belief_setter.py
index 777dda3..3155a38 100644
--- a/src/control_backend/agents/bdi/behaviours/belief_setter.py
+++ b/src/control_backend/agents/bdi/behaviours/belief_setter.py
@@ -1,28 +1,28 @@
-import asyncio
 import json
 import logging
 
 from spade.agent import Message
 from spade.behaviour import CyclicBehaviour
-from spade_bdi.bdi import BDIAgent
+from spade_bdi.bdi import BDIAgent, BeliefNotInitiated
 
 from control_backend.core.config import settings
 
-class BeliefSetter(CyclicBehaviour):
+
+class BeliefSetterBehaviour(CyclicBehaviour):
     """
-    This is the behaviour that the BDI agent runs.
-    This behaviour waits for incoming message and processes it based on sender.
-    Currently, t only waits for messages containing beliefs from Belief Collector and adds these to its KB.
+    This is the behaviour that the BDI agent runs. This behaviour waits for incoming
+    messages and processes them based on the sender.
""" + agent: BDIAgent logger = logging.getLogger("BDI/Belief Setter") - + async def run(self): msg = await self.receive(timeout=0.1) if msg: self.logger.info(f"Received message {msg.body}") self._process_message(msg) - await asyncio.sleep(1) + def _process_message(self, message: Message): sender = message.sender.node # removes host from jid and converts to str @@ -33,28 +33,38 @@ class BeliefSetter(CyclicBehaviour): self.logger.debug("Processing message from belief collector.") self._process_belief_message(message) case _: + self.logger.debug("Not the belief agent, discarding message") pass def _process_belief_message(self, message: Message): - if not message.body: return + if not message.body: + return match message.thread: case "beliefs": try: - beliefs: dict[str, list[list[str]]] = json.loads(message.body) + beliefs: dict[str, list[str]] = json.loads(message.body) self._set_beliefs(beliefs) except json.JSONDecodeError as e: self.logger.error("Could not decode beliefs into JSON format: %s", e) case _: pass - - def _set_beliefs(self, beliefs: dict[str, list[list[str]]]): + def _set_beliefs(self, beliefs: dict[str, list[str]]): + """Remove previous values for beliefs and update them with the provided values.""" if self.agent.bdi is None: self.logger.warning("Cannot set beliefs, since agent's BDI is not yet initialized.") return - for belief, arguments_list in beliefs.items(): - for arguments in arguments_list: - self.agent.bdi.set_belief(belief, *arguments) - self.logger.info("Set belief %s with arguments %s", belief, arguments) + # Set new beliefs (outdated beliefs are automatically removed) + for belief, arguments in beliefs.items(): + self.agent.bdi.set_belief(belief, *arguments) + + # Special case: if there's a new user message, flag that we haven't responded yet + if belief == "user_said": + try: + self.agent.bdi.remove_belief("responded") + except BeliefNotInitiated: + pass + + self.logger.info("Set belief %s with arguments %s", belief, arguments) diff --git a/src/control_backend/agents/bdi/behaviours/receive_llm_resp_behaviour.py b/src/control_backend/agents/bdi/behaviours/receive_llm_resp_behaviour.py new file mode 100644 index 0000000..747ab4c --- /dev/null +++ b/src/control_backend/agents/bdi/behaviours/receive_llm_resp_behaviour.py @@ -0,0 +1,26 @@ +import logging + +from spade.behaviour import CyclicBehaviour + +from control_backend.core.config import settings + + +class ReceiveLLMResponseBehaviour(CyclicBehaviour): + """ + Adds behavior to receive responses from the LLM Agent. + """ + logger = logging.getLogger("BDI/LLM Reciever") + async def run(self): + msg = await self.receive(timeout=2) + if not msg: + return + + sender = msg.sender.node + match sender: + case settings.agent_settings.llm_agent_name: + content = msg.body + self.logger.info("Received LLM response: %s", content) + #Here the BDI can pass the message back as a response + case _: + self.logger.debug("Not from the llm, discarding message") + pass \ No newline at end of file diff --git a/src/control_backend/agents/llm/llm.py b/src/control_backend/agents/llm/llm.py new file mode 100644 index 0000000..0f78095 --- /dev/null +++ b/src/control_backend/agents/llm/llm.py @@ -0,0 +1,127 @@ +""" +LLM Agent module for routing text queries from the BDI Core Agent to a local LLM +service and returning its responses back to the BDI Core Agent. 
+""" + +import logging +from typing import Any + +import httpx +from spade.agent import Agent +from spade.behaviour import CyclicBehaviour +from spade.message import Message + +from control_backend.agents.llm.llm_instructions import LLMInstructions +from control_backend.core.config import settings + + +class LLMAgent(Agent): + """ + Agent responsible for processing user text input and querying a locally + hosted LLM for text generation. Receives messages from the BDI Core Agent + and responds with processed LLM output. + """ + + logger = logging.getLogger("llm_agent") + + class ReceiveMessageBehaviour(CyclicBehaviour): + """ + Cyclic behaviour to continuously listen for incoming messages from + the BDI Core Agent and handle them. + """ + + async def run(self): + """ + Receives SPADE messages and processes only those originating from the + configured BDI agent. + """ + msg = await self.receive(timeout=1) + if not msg: + return + + sender = msg.sender.node + self.agent.logger.info( + "Received message: %s from %s", + msg.body, + sender, + ) + + if sender == settings.agent_settings.bdi_core_agent_name: + self.agent.logger.debug("Processing message from BDI Core Agent") + await self._process_bdi_message(msg) + else: + self.agent.logger.debug("Message ignored (not from BDI Core Agent)") + + async def _process_bdi_message(self, message: Message): + """ + Forwards user text to the LLM and replies with the generated text. + """ + user_text = message.body + llm_response = await self._query_llm(user_text) + await self._reply(llm_response) + + async def _reply(self, msg: str): + """ + Sends a response message back to the BDI Core Agent. + """ + reply = Message( + to=settings.agent_settings.bdi_core_agent_name + '@' + settings.agent_settings.host, + body=msg + ) + await self.send(reply) + self.agent.logger.info("Reply sent to BDI Core Agent") + + async def _query_llm(self, prompt: str) -> str: + """ + Sends a chat completion request to the local LLM service. + + :param prompt: Input text prompt to pass to the LLM. + :return: LLM-generated content or fallback message. + """ + async with httpx.AsyncClient(timeout=120.0) as client: + # Example dynamic content for future (optional) + + instructions = LLMInstructions() + developer_instruction = instructions.build_developer_instruction() + + response = await client.post( + settings.llm_settings.local_llm_url, + headers={"Content-Type": "application/json"}, + json={ + "model": settings.llm_settings.local_llm_model, + "messages": [ + { + "role": "developer", + "content": developer_instruction + }, + { + "role": "user", + "content": prompt + } + ], + "temperature": 0.3 + }, + ) + + try: + response.raise_for_status() + data: dict[str, Any] = response.json() + return data.get("choices", [{}])[0].get( + "message", {} + ).get("content", "No response") + except httpx.HTTPError as err: + self.agent.logger.error("HTTP error: %s", err) + return "LLM service unavailable." + except Exception as err: + self.agent.logger.error("Unexpected error: %s", err) + return "Error processing the request." + + async def setup(self): + """ + Sets up the SPADE behaviour to filter and process messages from the + BDI Core Agent. 
+ """ + self.logger.info("LLMAgent setup complete") + + behaviour = self.ReceiveMessageBehaviour() + self.add_behaviour(behaviour) diff --git a/src/control_backend/agents/llm/llm_instructions.py b/src/control_backend/agents/llm/llm_instructions.py new file mode 100644 index 0000000..9636d88 --- /dev/null +++ b/src/control_backend/agents/llm/llm_instructions.py @@ -0,0 +1,44 @@ +class LLMInstructions: + """ + Defines structured instructions that are sent along with each request + to the LLM to guide its behavior (norms, goals, etc.). + """ + + @staticmethod + def default_norms() -> str: + return """ + Be friendly and respectful. + Make the conversation feel natural and engaging. + """.strip() + + @staticmethod + def default_goals() -> str: + return """ + Try to learn the user's name during conversation. + """.strip() + + def __init__(self, norms: str | None = None, goals: str | None = None): + self.norms = norms if norms is not None else self.default_norms() + self.goals = goals if goals is not None else self.default_goals() + + def build_developer_instruction(self) -> str: + """ + Builds a multi-line formatted instruction string for the LLM. + Includes only non-empty structured fields. + """ + sections = [ + "You are a Pepper robot engaging in natural human conversation.", + "Keep responses between 1–5 sentences, unless instructed otherwise.\n", + ] + + if self.norms: + sections.append("Norms to follow:") + sections.append(self.norms) + sections.append("") + + if self.goals: + sections.append("Goals to reach:") + sections.append(self.goals) + sections.append("") + + return "\n".join(sections).strip() diff --git a/src/control_backend/agents/ri_command_agent.py b/src/control_backend/agents/ri_command_agent.py new file mode 100644 index 0000000..01fc824 --- /dev/null +++ b/src/control_backend/agents/ri_command_agent.py @@ -0,0 +1,74 @@ +import json +import logging +from spade.agent import Agent +from spade.behaviour import CyclicBehaviour +import zmq + +from control_backend.core.config import settings +from control_backend.core.zmq_context import context +from control_backend.schemas.ri_message import SpeechCommand + +logger = logging.getLogger(__name__) + + +class RICommandAgent(Agent): + subsocket: zmq.Socket + pubsocket: zmq.Socket + address = "" + bind = False + + def __init__( + self, + jid: str, + password: str, + port: int = 5222, + verify_security: bool = False, + address="tcp://localhost:0000", + bind=False, + ): + super().__init__(jid, password, port, verify_security) + self.address = address + self.bind = bind + + class SendCommandsBehaviour(CyclicBehaviour): + async def run(self): + """ + Run the command publishing loop indefinetely. + """ + assert self.agent is not None + # Get a message internally (with topic command) + topic, body = await self.agent.subsocket.recv_multipart() + + # Try to get body + try: + body = json.loads(body) + message = SpeechCommand.model_validate(body) + + # Send to the robot. 
+                await self.agent.pubsocket.send_json(message.model_dump())
+            except Exception as e:
+                logger.error("Error processing message: %s", e)
+
+    async def setup(self):
+        """
+        Set up the command agent.
+        """
+        logger.info("Setting up %s", self.jid)
+
+        # To the robot
+        self.pubsocket = context.socket(zmq.PUB)
+        if self.bind:
+            self.pubsocket.bind(self.address)
+        else:
+            self.pubsocket.connect(self.address)
+
+        # Receive internal topics regarding commands
+        self.subsocket = context.socket(zmq.SUB)
+        self.subsocket.connect(settings.zmq_settings.internal_comm_address)
+        self.subsocket.setsockopt(zmq.SUBSCRIBE, b"command")
+
+        # Add behaviour to our agent
+        commands_behaviour = self.SendCommandsBehaviour()
+        self.add_behaviour(commands_behaviour)
+
+        logger.info("Finished setting up %s", self.jid)
diff --git a/src/control_backend/agents/ri_communication_agent.py b/src/control_backend/agents/ri_communication_agent.py
new file mode 100644
index 0000000..504c707
--- /dev/null
+++ b/src/control_backend/agents/ri_communication_agent.py
@@ -0,0 +1,166 @@
+import asyncio
+import json
+import logging
+from spade.agent import Agent
+from spade.behaviour import CyclicBehaviour
+import zmq
+
+from control_backend.core.config import settings
+from control_backend.core.zmq_context import context
+from control_backend.schemas.message import Message
+from control_backend.agents.ri_command_agent import RICommandAgent
+
+logger = logging.getLogger(__name__)
+
+
+class RICommunicationAgent(Agent):
+    req_socket: zmq.Socket
+    _address = ""
+    _bind = True
+
+    def __init__(
+        self,
+        jid: str,
+        password: str,
+        port: int = 5222,
+        verify_security: bool = False,
+        address="tcp://localhost:0000",
+        bind=False,
+    ):
+        super().__init__(jid, password, port, verify_security)
+        self._address = address
+        self._bind = bind
+
+    class ListenBehaviour(CyclicBehaviour):
+        async def run(self):
+            """
+            Run the listening (ping) loop indefinitely.
+            """
+            assert self.agent is not None
+
+            # We need to listen and send pings.
+            message = {"endpoint": "ping", "data": {"id": "e.g. some reference id"}}
+            await self.agent.req_socket.send_json(message)
+
+            # Wait up to three seconds for a reply
+            try:
+                message = await asyncio.wait_for(self.agent.req_socket.recv_json(), timeout=3.0)
+
+            # We didn't get a reply, so stop this behaviour.
+            except asyncio.TimeoutError:
+                logger.info("No ping retrieved in 3 seconds, stopping ping behaviour.")
+                self.kill()
+                return
+
+            logger.debug('Received message "%s"', message)
+            if "endpoint" not in message:
+                logger.error("No received endpoint in message, excepted ping endpoint.")
+                return
+
+            # See what endpoint we received
+            match message["endpoint"]:
+                case "ping":
+                    await asyncio.sleep(1)
+                case _:
+                    logger.info(
+                        "Received message with topic different than ping, while ping expected."
+                    )
+
+    async def setup(self, max_retries: int = 5):
+        """
+        Try to set up the communication agent, retrying a few times if there is no response yet.
+ """ + logger.info("Setting up %s", self.jid) + retries = 0 + + # Let's try a certain amount of times before failing connection + while retries < max_retries: + # Bind request socket + self.req_socket = context.socket(zmq.REQ) + if self._bind: + self.req_socket.bind(self._address) + else: + self.req_socket.connect(self._address) + + # Send our message and receive one back:) + message = {"endpoint": "negotiate/ports", "data": None} + await self.req_socket.send_json(message) + + try: + received_message = await asyncio.wait_for(self.req_socket.recv_json(), timeout=20.0) + + except asyncio.TimeoutError: + logger.warning( + "No connection established in 20 seconds (attempt %d/%d)", + retries + 1, + max_retries, + ) + retries += 1 + continue + + except Exception as e: + logger.error("Unexpected error during negotiation: %s", e) + retries += 1 + continue + + # Validate endpoint + endpoint = received_message.get("endpoint") + if endpoint != "negotiate/ports": + # TODO: Should this send a message back? + logger.error( + "Invalid endpoint '%s' received (attempt %d/%d)", + endpoint, + retries + 1, + max_retries, + ) + retries += 1 + continue + + # At this point, we have a valid response + try: + for port_data in received_message["data"]: + id = port_data["id"] + port = port_data["port"] + bind = port_data["bind"] + + if not bind: + addr = f"tcp://localhost:{port}" + else: + addr = f"tcp://*:{port}" + + match id: + case "main": + if addr != self._address: + if not bind: + self.req_socket.connect(addr) + else: + self.req_socket.bind(addr) + case "actuation": + ri_commands_agent = RICommandAgent( + settings.agent_settings.ri_command_agent_name + + "@" + + settings.agent_settings.host, + settings.agent_settings.ri_command_agent_name, + address=addr, + bind=bind, + ) + await ri_commands_agent.start() + case _: + logger.warning("Unhandled negotiation id: %s", id) + + except Exception as e: + logger.error("Error unpacking negotiation data: %s", e) + retries += 1 + continue + + # setup succeeded + break + + else: + logger.error("Failed to set up RICommunicationAgent after %d retries", max_retries) + return + + # Set up ping behaviour + listen_behaviour = self.ListenBehaviour() + self.add_behaviour(listen_behaviour) + logger.info("Finished setting up %s", self.jid) diff --git a/src/control_backend/agents/vad_agent.py b/src/control_backend/agents/vad_agent.py new file mode 100644 index 0000000..7b87fbb --- /dev/null +++ b/src/control_backend/agents/vad_agent.py @@ -0,0 +1,156 @@ +import logging + +import numpy as np +import torch +import zmq +import zmq.asyncio as azmq +from spade.agent import Agent +from spade.behaviour import CyclicBehaviour + +from control_backend.core.config import settings +from control_backend.core.zmq_context import context as zmq_context + +logger = logging.getLogger(__name__) + + +class SocketPoller[T]: + """ + Convenience class for polling a socket for data with a timeout, persisting a zmq.Poller for + multiple usages. + """ + + def __init__(self, socket: azmq.Socket, timeout_ms: int = 100): + """ + :param socket: The socket to poll and get data from. + :param timeout_ms: A timeout in milliseconds to wait for data. + """ + self.socket = socket + self.poller = zmq.Poller() + self.poller.register(self.socket, zmq.POLLIN) + self.timeout_ms = timeout_ms + + async def poll(self, timeout_ms: int | None = None) -> T | None: + """ + Get data from the socket, or None if the timeout is reached. + + :param timeout_ms: If given, the timeout. Otherwise, `self.timeout_ms` is used. 
+ :return: Data from the socket or None. + """ + timeout_ms = timeout_ms or self.timeout_ms + socks = dict(self.poller.poll(timeout_ms)) + if socks.get(self.socket) == zmq.POLLIN: + return await self.socket.recv() + return None + + +class Streaming(CyclicBehaviour): + def __init__(self, audio_in_socket: azmq.Socket, audio_out_socket: azmq.Socket): + super().__init__() + self.audio_in_poller = SocketPoller[bytes](audio_in_socket) + self.model, _ = torch.hub.load( + repo_or_dir="snakers4/silero-vad", model="silero_vad", force_reload=False + ) + self.audio_out_socket = audio_out_socket + + self.audio_buffer = np.array([], dtype=np.float32) + self.i_since_speech = 100 # Used to allow small pauses in speech + + async def run(self) -> None: + data = await self.audio_in_poller.poll() + if data is None: + if len(self.audio_buffer) > 0: + logger.debug("No audio data received. Discarding buffer until new data arrives.") + self.audio_buffer = np.array([], dtype=np.float32) + self.i_since_speech = 100 + return + + # copy otherwise Torch will be sad that it's immutable + chunk = np.frombuffer(data, dtype=np.float32).copy() + prob = self.model(torch.from_numpy(chunk), 16000).item() + + if prob > 0.5: + if self.i_since_speech > 3: + logger.debug("Speech started.") + self.audio_buffer = np.append(self.audio_buffer, chunk) + self.i_since_speech = 0 + return + self.i_since_speech += 1 + + # prob < 0.5, so speech maybe ended. Wait a bit more before to be more certain + if self.i_since_speech <= 3: + self.audio_buffer = np.append(self.audio_buffer, chunk) + return + + # Speech probably ended. Make sure we have a usable amount of data. + if len(self.audio_buffer) >= 3 * len(chunk): + logger.debug("Speech ended.") + await self.audio_out_socket.send(self.audio_buffer[: -2 * len(chunk)].tobytes()) + + # At this point, we know that the speech has ended. + # Prepend the last chunk that had no speech, for a more fluent boundary + self.audio_buffer = chunk + + +class VADAgent(Agent): + """ + An agent which listens to an audio stream, does Voice Activity Detection (VAD), and sends + fragments with detected speech to other agents over ZeroMQ. + """ + + def __init__(self, audio_in_address: str, audio_in_bind: bool): + jid = settings.agent_settings.vad_agent_name + "@" + settings.agent_settings.host + super().__init__(jid, settings.agent_settings.vad_agent_name) + + self.audio_in_address = audio_in_address + self.audio_in_bind = audio_in_bind + + self.audio_in_socket: azmq.Socket | None = None + self.audio_out_socket: azmq.Socket | None = None + + async def stop(self): + """ + Stop listening to audio, stop publishing audio, close sockets. 
+ """ + if self.audio_in_socket is not None: + self.audio_in_socket.close() + self.audio_in_socket = None + if self.audio_out_socket is not None: + self.audio_out_socket.close() + self.audio_out_socket = None + return await super().stop() + + def _connect_audio_in_socket(self): + self.audio_in_socket = zmq_context.socket(zmq.SUB) + self.audio_in_socket.setsockopt_string(zmq.SUBSCRIBE, "") + if self.audio_in_bind: + self.audio_in_socket.bind(self.audio_in_address) + else: + self.audio_in_socket.connect(self.audio_in_address) + self.audio_in_poller = SocketPoller[bytes](self.audio_in_socket) + + def _connect_audio_out_socket(self) -> int | None: + """Returns the port bound, or None if binding failed.""" + try: + self.audio_out_socket = zmq_context.socket(zmq.PUB) + return self.audio_out_socket.bind_to_random_port("tcp://*", max_tries=100) + except zmq.ZMQBindError: + logger.error("Failed to bind an audio output socket after 100 tries.") + self.audio_out_socket = None + return None + + async def setup(self): + logger.info("Setting up %s", self.jid) + + self._connect_audio_in_socket() + + audio_out_port = self._connect_audio_out_socket() + if audio_out_port is None: + await self.stop() + return + + streaming = Streaming(self.audio_in_socket, self.audio_out_socket) + self.add_behaviour(streaming) + + # ... start agents dependent on the output audio fragments here + + logger.info("Finished setting up %s", self.jid) diff --git a/src/control_backend/api/v1/endpoints/command.py b/src/control_backend/api/v1/endpoints/command.py new file mode 100644 index 0000000..badaf90 --- /dev/null +++ b/src/control_backend/api/v1/endpoints/command.py @@ -0,0 +1,22 @@ +from fastapi import APIRouter, Request +import logging + +from zmq import Socket + +from control_backend.schemas.ri_message import SpeechCommand, RIEndpoint + +logger = logging.getLogger(__name__) + +router = APIRouter() + + +@router.post("/command", status_code=202) +async def receive_command(command: SpeechCommand, request: Request): + # Validate and retrieve data. 
+ SpeechCommand.model_validate(command) + topic = b"command" + pub_socket: Socket = request.app.state.internal_comm_socket + pub_socket.send_multipart([topic, command.model_dump_json().encode()]) + + + return {"status": "Command received"} diff --git a/src/control_backend/api/v1/endpoints/message.py b/src/control_backend/api/v1/endpoints/message.py index fef07b8..1053c3c 100644 --- a/src/control_backend/api/v1/endpoints/message.py +++ b/src/control_backend/api/v1/endpoints/message.py @@ -1,6 +1,6 @@ -from fastapi import APIRouter, Request import logging +from fastapi import APIRouter, Request from zmq import Socket from control_backend.schemas.message import Message @@ -9,6 +9,7 @@ logger = logging.getLogger(__name__) router = APIRouter() + @router.post("/message", status_code=202) async def receive_message(message: Message, request: Request): logger.info("Received message: %s", message.message) diff --git a/src/control_backend/api/v1/endpoints/sse.py b/src/control_backend/api/v1/endpoints/sse.py index e16b7e2..190e517 100644 --- a/src/control_backend/api/v1/endpoints/sse.py +++ b/src/control_backend/api/v1/endpoints/sse.py @@ -2,7 +2,8 @@ from fastapi import APIRouter, Request router = APIRouter() + # TODO: implement @router.get("/sse") async def sse(request: Request): - pass \ No newline at end of file + pass diff --git a/src/control_backend/api/v1/router.py b/src/control_backend/api/v1/router.py index 559b4d3..dc7aea9 100644 --- a/src/control_backend/api/v1/router.py +++ b/src/control_backend/api/v1/router.py @@ -1,15 +1,11 @@ from fastapi.routing import APIRouter -from control_backend.api.v1.endpoints import message, sse +from control_backend.api.v1.endpoints import message, sse, command api_router = APIRouter() -api_router.include_router( - message.router, - tags=["Messages"] -) +api_router.include_router(message.router, tags=["Messages"]) -api_router.include_router( - sse.router, - tags=["SSE"] -) +api_router.include_router(sse.router, tags=["SSE"]) + +api_router.include_router(command.router, tags=["Commands"]) diff --git a/src/control_backend/core/config.py b/src/control_backend/core/config.py index 07a828d..5d539d0 100644 --- a/src/control_backend/core/config.py +++ b/src/control_backend/core/config.py @@ -1,16 +1,27 @@ -from re import L from pydantic import BaseModel from pydantic_settings import BaseSettings, SettingsConfigDict + class ZMQSettings(BaseModel): internal_comm_address: str = "tcp://localhost:5560" + class AgentSettings(BaseModel): - host: str = "localhost" + host: str = "localhost" bdi_core_agent_name: str = "bdi_core" belief_collector_agent_name: str = "belief_collector" + vad_agent_name: str = "vad_agent" + llm_agent_name: str = "llm_agent" test_agent_name: str = "test_agent" + ri_communication_agent_name: str = "ri_communication_agent" + ri_command_agent_name: str = "ri_command_agent" + + +class LLMSettings(BaseModel): + local_llm_url: str = "http://145.107.82.68:1234/v1/chat/completions" + local_llm_model: str = "openai/gpt-oss-120b" + class Settings(BaseSettings): app_title: str = "PepperPlus" @@ -19,7 +30,9 @@ class Settings(BaseSettings): zmq_settings: ZMQSettings = ZMQSettings() agent_settings: AgentSettings = AgentSettings() - + + llm_settings: LLMSettings = LLMSettings() + model_config = SettingsConfigDict(env_file=".env") settings = Settings() diff --git a/src/control_backend/main.py b/src/control_backend/main.py index 513f747..998067b 100644 --- a/src/control_backend/main.py +++ b/src/control_backend/main.py @@ -1,27 +1,28 @@ # Standard library imports 
-import asyncio -import json # External imports import contextlib +import logging + +import zmq from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware -import logging -from spade.agent import Agent, Message -from spade.behaviour import OneShotBehaviour -import zmq # Internal imports +from control_backend.agents.ri_communication_agent import RICommunicationAgent from control_backend.agents.bdi.bdi_core import BDICoreAgent +from control_backend.agents.vad_agent import VADAgent +from control_backend.agents.llm.llm import LLMAgent from control_backend.agents.bdi.text_extractor import TBeliefExtractor from control_backend.agents.bdi.test_agent import SenderAgent from control_backend.api.v1.router import api_router -from control_backend.core.config import AgentSettings, settings +from control_backend.core.config import settings from control_backend.core.zmq_context import context logger = logging.getLogger(__name__) logging.basicConfig(level=logging.DEBUG) + @contextlib.asynccontextmanager async def lifespan(app: FastAPI): logger.info("%s starting up.", app.title) @@ -33,29 +34,50 @@ async def lifespan(app: FastAPI): app.state.internal_comm_socket = internal_comm_socket logger.info("Internal publishing socket bound to %s", internal_comm_socket) + # Initiate agents - bdi_core = BDICoreAgent(settings.agent_settings.bdi_core_agent_name + '@' + settings.agent_settings.host, "placeholder", "src/control_backend/agents/bdi/rules.asl") + ri_communication_agent = RICommunicationAgent( + settings.agent_settings.ri_communication_agent_name + "@" + settings.agent_settings.host, + settings.agent_settings.ri_communication_agent_name, + address="tcp://*:5555", + bind=True, + ) + await ri_communication_agent.start() + + llm_agent = LLMAgent(settings.agent_settings.llm_agent_name + '@' + settings.agent_settings.host, + settings.agent_settings.llm_agent_name) + await llm_agent.start() + + bdi_core = BDICoreAgent(settings.agent_settings.bdi_core_agent_name + '@' + settings.agent_settings.host, + settings.agent_settings.bdi_core_agent_name, "src/control_backend/agents/bdi/rules.asl") await bdi_core.start() + text_belief_extractor = TBeliefExtractor(settings.agent_settings.belief_collector_agent_name + '@' + settings.agent_settings.host, "placehodler") await text_belief_extractor.start() + test_agent = SenderAgent(settings.agent_settings.test_agent_name + '@' + settings.agent_settings.host, "placeholder") await test_agent.start() - + + _temp_vad_agent = VADAgent("tcp://localhost:5558", False) + await _temp_vad_agent.start() + yield - + logger.info("%s shutting down.", app.title) + # if __name__ == "__main__": app = FastAPI(title=settings.app_title, lifespan=lifespan) # This middleware allows other origins to communicate with us app.add_middleware( - CORSMiddleware, # https://developer.mozilla.org/en-US/docs/Web/HTTP/Guides/CORS - allow_origins=[settings.ui_url], # address of our UI application - allow_methods=["*"], # GET, POST, etc. + CORSMiddleware, # https://developer.mozilla.org/en-US/docs/Web/HTTP/Guides/CORS + allow_origins=[settings.ui_url], # address of our UI application + allow_methods=["*"], # GET, POST, etc. 
) -app.include_router(api_router, prefix="") # TODO: make prefix /api/v1 +app.include_router(api_router, prefix="") # TODO: make prefix /api/v1 + @app.get("/") async def root(): diff --git a/src/control_backend/schemas/message.py b/src/control_backend/schemas/message.py index a128ae7..8b65c80 100644 --- a/src/control_backend/schemas/message.py +++ b/src/control_backend/schemas/message.py @@ -1,4 +1,5 @@ from pydantic import BaseModel + class Message(BaseModel): - message: str \ No newline at end of file + message: str diff --git a/src/control_backend/schemas/ri_message.py b/src/control_backend/schemas/ri_message.py new file mode 100644 index 0000000..97b7930 --- /dev/null +++ b/src/control_backend/schemas/ri_message.py @@ -0,0 +1,20 @@ +from enum import Enum +from typing import Any, Literal + +from pydantic import BaseModel, Field, ValidationError + + +class RIEndpoint(str, Enum): + SPEECH = "actuate/speech" + PING = "ping" + NEGOTIATE_PORTS = "negotiate/ports" + + +class RIMessage(BaseModel): + endpoint: RIEndpoint + data: Any + + +class SpeechCommand(RIMessage): + endpoint: RIEndpoint = RIEndpoint(RIEndpoint.SPEECH) + data: str diff --git a/test/__init__.py b/test/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/test/integration/agents/test_ri_commands_agent.py b/test/integration/agents/test_ri_commands_agent.py new file mode 100644 index 0000000..219d682 --- /dev/null +++ b/test/integration/agents/test_ri_commands_agent.py @@ -0,0 +1,102 @@ +import asyncio +import zmq +import json +import pytest +from unittest.mock import AsyncMock, MagicMock, patch +from control_backend.agents.ri_command_agent import RICommandAgent +from control_backend.schemas.ri_message import SpeechCommand + + +@pytest.mark.asyncio +async def test_setup_bind(monkeypatch): + """Test setup with bind=True""" + fake_socket = MagicMock() + monkeypatch.setattr( + "control_backend.agents.ri_command_agent.context.socket", lambda _: fake_socket + ) + + agent = RICommandAgent("test@server", "password", address="tcp://localhost:5555", bind=True) + monkeypatch.setattr( + "control_backend.agents.ri_command_agent.settings", + MagicMock(zmq_settings=MagicMock(internal_comm_address="tcp://internal:1234")), + ) + + await agent.setup() + + # Ensure PUB socket bound + fake_socket.bind.assert_any_call("tcp://localhost:5555") + # Ensure SUB socket connected to internal address and subscribed + fake_socket.connect.assert_any_call("tcp://internal:1234") + fake_socket.setsockopt.assert_any_call(zmq.SUBSCRIBE, b"command") + + # Ensure behaviour attached + assert any(isinstance(b, agent.SendCommandsBehaviour) for b in agent.behaviours) + + +@pytest.mark.asyncio +async def test_setup_connect(monkeypatch): + """Test setup with bind=False""" + fake_socket = MagicMock() + monkeypatch.setattr( + "control_backend.agents.ri_command_agent.context.socket", lambda _: fake_socket + ) + + agent = RICommandAgent("test@server", "password", address="tcp://localhost:5555", bind=False) + monkeypatch.setattr( + "control_backend.agents.ri_command_agent.settings", + MagicMock(zmq_settings=MagicMock(internal_comm_address="tcp://internal:1234")), + ) + + await agent.setup() + + # Ensure PUB socket connected + fake_socket.connect.assert_any_call("tcp://localhost:5555") + + +@pytest.mark.asyncio +async def test_send_commands_behaviour_valid_message(): + """Test behaviour with valid JSON message""" + fake_socket = AsyncMock() + message_dict = {"message": "hello"} + fake_socket.recv_multipart = AsyncMock( + return_value=(b"command", 
json.dumps(message_dict).encode("utf-8")) + ) + fake_socket.send_json = AsyncMock() + + agent = RICommandAgent("test@server", "password") + agent.subsocket = fake_socket + agent.pubsocket = fake_socket + + behaviour = agent.SendCommandsBehaviour() + behaviour.agent = agent + + with patch("control_backend.agents.ri_command_agent.SpeechCommand") as MockSpeechCommand: + mock_message = MagicMock() + MockSpeechCommand.model_validate.return_value = mock_message + + await behaviour.run() + + fake_socket.recv_multipart.assert_awaited() + fake_socket.send_json.assert_awaited_with(mock_message.model_dump()) + + +@pytest.mark.asyncio +async def test_send_commands_behaviour_invalid_message(caplog): + """Test behaviour with invalid JSON message triggers error logging""" + fake_socket = AsyncMock() + fake_socket.recv_multipart = AsyncMock(return_value=(b"command", b"{invalid_json}")) + fake_socket.send_json = AsyncMock() + + agent = RICommandAgent("test@server", "password") + agent.subsocket = fake_socket + agent.pubsocket = fake_socket + + behaviour = agent.SendCommandsBehaviour() + behaviour.agent = agent + + with caplog.at_level("ERROR"): + await behaviour.run() + + fake_socket.recv_multipart.assert_awaited() + fake_socket.send_json.assert_not_awaited() + assert "Error processing message" in caplog.text diff --git a/test/integration/agents/test_ri_communication_agent.py b/test/integration/agents/test_ri_communication_agent.py new file mode 100644 index 0000000..3e4a056 --- /dev/null +++ b/test/integration/agents/test_ri_communication_agent.py @@ -0,0 +1,591 @@ +import asyncio +import pytest +from unittest.mock import AsyncMock, MagicMock, patch, ANY +from control_backend.agents.ri_communication_agent import RICommunicationAgent + + +def fake_json_correct_negototiate_1(): + return AsyncMock( + return_value={ + "endpoint": "negotiate/ports", + "data": [ + {"id": "main", "port": 5555, "bind": False}, + {"id": "actuation", "port": 5556, "bind": True}, + ], + } + ) + + +def fake_json_correct_negototiate_2(): + return AsyncMock( + return_value={ + "endpoint": "negotiate/ports", + "data": [ + {"id": "main", "port": 5555, "bind": False}, + {"id": "actuation", "port": 5557, "bind": True}, + ], + } + ) + + +def fake_json_correct_negototiate_3(): + return AsyncMock( + return_value={ + "endpoint": "negotiate/ports", + "data": [ + {"id": "main", "port": 5555, "bind": True}, + {"id": "actuation", "port": 5557, "bind": True}, + ], + } + ) + + +def fake_json_correct_negototiate_4(): + # Different port, do bind + return AsyncMock( + return_value={ + "endpoint": "negotiate/ports", + "data": [ + {"id": "main", "port": 4555, "bind": True}, + {"id": "actuation", "port": 5557, "bind": True}, + ], + } + ) + + +def fake_json_correct_negototiate_5(): + # Different port, dont bind. 
+ return AsyncMock( + return_value={ + "endpoint": "negotiate/ports", + "data": [ + {"id": "main", "port": 4555, "bind": False}, + {"id": "actuation", "port": 5557, "bind": True}, + ], + } + ) + + +def fake_json_wrong_negototiate_1(): + return AsyncMock(return_value={"endpoint": "ping", "data": ""}) + + +def fake_json_invalid_id_negototiate(): + return AsyncMock( + return_value={ + "endpoint": "negotiate/ports", + "data": [ + {"id": "banana", "port": 4555, "bind": False}, + {"id": "tomato", "port": 5557, "bind": True}, + ], + } + ) + + +@pytest.mark.asyncio +async def test_setup_creates_socket_and_negotiate_1(monkeypatch): + """ + Test the setup of the communication agent + """ + # --- Arrange --- + fake_socket = MagicMock() + fake_socket.send_json = AsyncMock() + fake_socket.recv_json = fake_json_correct_negototiate_1() + + # Mock context.socket to return our fake socket + monkeypatch.setattr( + "control_backend.agents.ri_communication_agent.context.socket", lambda _: fake_socket + ) + + # Mock RICommandAgent agent startup + with patch( + "control_backend.agents.ri_communication_agent.RICommandAgent", autospec=True + ) as MockCommandAgent: + fake_agent_instance = MockCommandAgent.return_value + fake_agent_instance.start = AsyncMock() + + # --- Act --- + agent = RICommunicationAgent( + "test@server", "password", address="tcp://localhost:5555", bind=False + ) + await agent.setup() + + # --- Assert --- + fake_socket.connect.assert_any_call("tcp://localhost:5555") + fake_socket.send_json.assert_any_call({"endpoint": "negotiate/ports", "data": None}) + fake_socket.recv_json.assert_awaited() + fake_agent_instance.start.assert_awaited() + MockCommandAgent.assert_called_once_with( + ANY, # Server Name + ANY, # Server Password + address="tcp://*:5556", # derived from the 'port' value in negotiation + bind=True, + ) + # Ensure the agent attached a ListenBehaviour + assert any(isinstance(b, agent.ListenBehaviour) for b in agent.behaviours) + + +@pytest.mark.asyncio +async def test_setup_creates_socket_and_negotiate_2(monkeypatch): + """ + Test the setup of the communication agent + """ + # --- Arrange --- + fake_socket = MagicMock() + fake_socket.send_json = AsyncMock() + fake_socket.recv_json = fake_json_correct_negototiate_2() + + # Mock context.socket to return our fake socket + monkeypatch.setattr( + "control_backend.agents.ri_communication_agent.context.socket", lambda _: fake_socket + ) + + # Mock RICommandAgent agent startup + with patch( + "control_backend.agents.ri_communication_agent.RICommandAgent", autospec=True + ) as MockCommandAgent: + fake_agent_instance = MockCommandAgent.return_value + fake_agent_instance.start = AsyncMock() + + # --- Act --- + agent = RICommunicationAgent( + "test@server", "password", address="tcp://localhost:5555", bind=False + ) + await agent.setup() + + # --- Assert --- + fake_socket.connect.assert_any_call("tcp://localhost:5555") + fake_socket.send_json.assert_any_call({"endpoint": "negotiate/ports", "data": None}) + fake_socket.recv_json.assert_awaited() + fake_agent_instance.start.assert_awaited() + MockCommandAgent.assert_called_once_with( + ANY, # Server Name + ANY, # Server Password + address="tcp://*:5557", # derived from the 'port' value in negotiation + bind=True, + ) + # Ensure the agent attached a ListenBehaviour + assert any(isinstance(b, agent.ListenBehaviour) for b in agent.behaviours) + + +@pytest.mark.asyncio +async def test_setup_creates_socket_and_negotiate_3(monkeypatch, caplog): + """ + Test the functionality of setup with incorrect 
negotiation message + """ + # --- Arrange --- + fake_socket = MagicMock() + fake_socket.send_json = AsyncMock() + fake_socket.recv_json = fake_json_wrong_negototiate_1() + + # Mock context.socket to return our fake socket + monkeypatch.setattr( + "control_backend.agents.ri_communication_agent.context.socket", lambda _: fake_socket + ) + + # Mock RICommandAgent agent startup + + # We are sending wrong negotiation info to the communication agent, so we should retry and expect a + # better response, within a limited time. + with patch( + "control_backend.agents.ri_communication_agent.RICommandAgent", autospec=True + ) as MockCommandAgent: + fake_agent_instance = MockCommandAgent.return_value + fake_agent_instance.start = AsyncMock() + + # --- Act --- + with caplog.at_level("ERROR"): + agent = RICommunicationAgent( + "test@server", "password", address="tcp://localhost:5555", bind=False + ) + await agent.setup(max_retries=1) + + # --- Assert --- + fake_socket.connect.assert_any_call("tcp://localhost:5555") + fake_socket.recv_json.assert_awaited() + + # Since it failed, there should not be any command agent. + fake_agent_instance.start.assert_not_awaited() + assert "Failed to set up RICommunicationAgent" in caplog.text + + # Ensure the agent did not attach a ListenBehaviour + assert not any(isinstance(b, agent.ListenBehaviour) for b in agent.behaviours) + + +@pytest.mark.asyncio +async def test_setup_creates_socket_and_negotiate_4(monkeypatch): + """ + Test the setup of the communication agent with different bind value + """ + # --- Arrange --- + fake_socket = MagicMock() + fake_socket.send_json = AsyncMock() + fake_socket.recv_json = fake_json_correct_negototiate_3() + + # Mock context.socket to return our fake socket + monkeypatch.setattr( + "control_backend.agents.ri_communication_agent.context.socket", lambda _: fake_socket + ) + + # Mock RICommandAgent agent startup + with patch( + "control_backend.agents.ri_communication_agent.RICommandAgent", autospec=True + ) as MockCommandAgent: + fake_agent_instance = MockCommandAgent.return_value + fake_agent_instance.start = AsyncMock() + + # --- Act --- + agent = RICommunicationAgent( + "test@server", "password", address="tcp://localhost:5555", bind=True + ) + await agent.setup() + + # --- Assert --- + fake_socket.bind.assert_any_call("tcp://localhost:5555") + fake_socket.send_json.assert_any_call({"endpoint": "negotiate/ports", "data": None}) + fake_socket.recv_json.assert_awaited() + fake_agent_instance.start.assert_awaited() + MockCommandAgent.assert_called_once_with( + ANY, # Server Name + ANY, # Server Password + address="tcp://*:5557", # derived from the 'port' value in negotiation + bind=True, + ) + # Ensure the agent attached a ListenBehaviour + assert any(isinstance(b, agent.ListenBehaviour) for b in agent.behaviours) + + +@pytest.mark.asyncio +async def test_setup_creates_socket_and_negotiate_5(monkeypatch): + """ + Test the setup of the communication agent + """ + # --- Arrange --- + fake_socket = MagicMock() + fake_socket.send_json = AsyncMock() + fake_socket.recv_json = fake_json_correct_negototiate_4() + + # Mock context.socket to return our fake socket + monkeypatch.setattr( + "control_backend.agents.ri_communication_agent.context.socket", lambda _: fake_socket + ) + + # Mock RICommandAgent agent startup + with patch( + "control_backend.agents.ri_communication_agent.RICommandAgent", autospec=True + ) as MockCommandAgent: + fake_agent_instance = MockCommandAgent.return_value + fake_agent_instance.start = AsyncMock() + + # --- Act --- + 
agent = RICommunicationAgent( + "test@server", "password", address="tcp://localhost:5555", bind=False + ) + await agent.setup() + + # --- Assert --- + fake_socket.connect.assert_any_call("tcp://localhost:5555") + fake_socket.send_json.assert_any_call({"endpoint": "negotiate/ports", "data": None}) + fake_socket.recv_json.assert_awaited() + fake_agent_instance.start.assert_awaited() + MockCommandAgent.assert_called_once_with( + ANY, # Server Name + ANY, # Server Password + address="tcp://*:5557", # derived from the 'port' value in negotiation + bind=True, + ) + # Ensure the agent attached a ListenBehaviour + assert any(isinstance(b, agent.ListenBehaviour) for b in agent.behaviours) + + +@pytest.mark.asyncio +async def test_setup_creates_socket_and_negotiate_6(monkeypatch): + """ + Test the setup of the communication agent + """ + # --- Arrange --- + fake_socket = MagicMock() + fake_socket.send_json = AsyncMock() + fake_socket.recv_json = fake_json_correct_negototiate_5() + + # Mock context.socket to return our fake socket + monkeypatch.setattr( + "control_backend.agents.ri_communication_agent.context.socket", lambda _: fake_socket + ) + + # Mock RICommandAgent agent startup + with patch( + "control_backend.agents.ri_communication_agent.RICommandAgent", autospec=True + ) as MockCommandAgent: + fake_agent_instance = MockCommandAgent.return_value + fake_agent_instance.start = AsyncMock() + + # --- Act --- + agent = RICommunicationAgent( + "test@server", "password", address="tcp://localhost:5555", bind=False + ) + await agent.setup() + + # --- Assert --- + fake_socket.connect.assert_any_call("tcp://localhost:5555") + fake_socket.send_json.assert_any_call({"endpoint": "negotiate/ports", "data": None}) + fake_socket.recv_json.assert_awaited() + fake_agent_instance.start.assert_awaited() + MockCommandAgent.assert_called_once_with( + ANY, # Server Name + ANY, # Server Password + address="tcp://*:5557", # derived from the 'port' value in negotiation + bind=True, + ) + # Ensure the agent attached a ListenBehaviour + assert any(isinstance(b, agent.ListenBehaviour) for b in agent.behaviours) + + +@pytest.mark.asyncio +async def test_setup_creates_socket_and_negotiate_7(monkeypatch, caplog): + """ + Test the functionality of setup with incorrect id + """ + # --- Arrange --- + fake_socket = MagicMock() + fake_socket.send_json = AsyncMock() + fake_socket.recv_json = fake_json_invalid_id_negototiate() + + # Mock context.socket to return our fake socket + monkeypatch.setattr( + "control_backend.agents.ri_communication_agent.context.socket", lambda _: fake_socket + ) + + # Mock RICommandAgent agent startup + + # We are sending wrong negotiation info to the communication agent, so we should retry and expect a + # better response, within a limited time. + with patch( + "control_backend.agents.ri_communication_agent.RICommandAgent", autospec=True + ) as MockCommandAgent: + fake_agent_instance = MockCommandAgent.return_value + fake_agent_instance.start = AsyncMock() + + # --- Act --- + with caplog.at_level("WARNING"): + agent = RICommunicationAgent( + "test@server", "password", address="tcp://localhost:5555", bind=False + ) + await agent.setup(max_retries=1) + + # --- Assert --- + fake_socket.connect.assert_any_call("tcp://localhost:5555") + fake_socket.recv_json.assert_awaited() + + # Since it failed, there should not be any command agent. 
+ fake_agent_instance.start.assert_not_awaited() + assert "Unhandled negotiation id:" in caplog.text + + +@pytest.mark.asyncio +async def test_setup_creates_socket_and_negotiate_timeout(monkeypatch, caplog): + """ + Test the functionality of setup with incorrect negotiation message + """ + # --- Arrange --- + fake_socket = MagicMock() + fake_socket.send_json = AsyncMock() + fake_socket.recv_json = AsyncMock(side_effect=asyncio.TimeoutError) + + # Mock context.socket to return our fake socket + monkeypatch.setattr( + "control_backend.agents.ri_communication_agent.context.socket", lambda _: fake_socket + ) + + with patch( + "control_backend.agents.ri_communication_agent.RICommandAgent", autospec=True + ) as MockCommandAgent: + fake_agent_instance = MockCommandAgent.return_value + fake_agent_instance.start = AsyncMock() + + # --- Act --- + with caplog.at_level("WARNING"): + agent = RICommunicationAgent( + "test@server", "password", address="tcp://localhost:5555", bind=False + ) + await agent.setup(max_retries=1) + + # --- Assert --- + fake_socket.connect.assert_any_call("tcp://localhost:5555") + + # Since it failed, there should not be any command agent. + fake_agent_instance.start.assert_not_awaited() + assert "No connection established in 20 seconds" in caplog.text + + # Ensure the agent did not attach a ListenBehaviour + assert not any(isinstance(b, agent.ListenBehaviour) for b in agent.behaviours) + + +@pytest.mark.asyncio +async def test_listen_behaviour_ping_correct(caplog): + fake_socket = AsyncMock() + fake_socket.send_json = AsyncMock() + fake_socket.recv_json = AsyncMock(return_value={"endpoint": "ping", "data": {}}) + + # TODO: Integration test between actual server and password needed for spade agents + agent = RICommunicationAgent("test@server", "password") + agent.req_socket = fake_socket + + behaviour = agent.ListenBehaviour() + agent.add_behaviour(behaviour) + + # Run once (CyclicBehaviour normally loops) + with caplog.at_level("DEBUG"): + await behaviour.run() + + fake_socket.send_json.assert_awaited() + fake_socket.recv_json.assert_awaited() + assert "Received message" in caplog.text + + +@pytest.mark.asyncio +async def test_listen_behaviour_ping_wrong_endpoint(caplog): + """ + Test if our listen behaviour can work with wrong messages (wrong endpoint) + """ + fake_socket = AsyncMock() + fake_socket.send_json = AsyncMock() + + # This is a message for the wrong endpoint >:( + fake_socket.recv_json = AsyncMock( + return_value={ + "endpoint": "negotiate/ports", + "data": [ + {"id": "main", "port": 5555, "bind": False}, + {"id": "actuation", "port": 5556, "bind": True}, + ], + } + ) + + agent = RICommunicationAgent("test@server", "password") + agent.req_socket = fake_socket + + behaviour = agent.ListenBehaviour() + agent.add_behaviour(behaviour) + + # Run once (CyclicBehaviour normally loops) + with caplog.at_level("INFO"): + await behaviour.run() + + assert "Received message with topic different than ping, while ping expected." 
in caplog.text + fake_socket.send_json.assert_awaited() + fake_socket.recv_json.assert_awaited() + + +@pytest.mark.asyncio +async def test_listen_behaviour_timeout(caplog): + fake_socket = AsyncMock() + fake_socket.send_json = AsyncMock() + # recv_json will never resolve, simulate timeout + fake_socket.recv_json = AsyncMock(side_effect=asyncio.TimeoutError) + + agent = RICommunicationAgent("test@server", "password") + agent.req_socket = fake_socket + + behaviour = agent.ListenBehaviour() + agent.add_behaviour(behaviour) + + with caplog.at_level("INFO"): + await behaviour.run() + + assert "No ping retrieved in 3 seconds" in caplog.text + + +@pytest.mark.asyncio +async def test_listen_behaviour_ping_no_endpoint(caplog): + """ + Test if our listen behaviour can work with wrong messages (wrong endpoint) + """ + fake_socket = AsyncMock() + fake_socket.send_json = AsyncMock() + + # This is a message without endpoint >:( + fake_socket.recv_json = AsyncMock( + return_value={ + "data": "I dont have an endpoint >:)", + } + ) + + agent = RICommunicationAgent("test@server", "password") + agent.req_socket = fake_socket + + behaviour = agent.ListenBehaviour() + agent.add_behaviour(behaviour) + + # Run once (CyclicBehaviour normally loops) + with caplog.at_level("ERROR"): + await behaviour.run() + + assert "No received endpoint in message, excepted ping endpoint." in caplog.text + fake_socket.send_json.assert_awaited() + fake_socket.recv_json.assert_awaited() + + +@pytest.mark.asyncio +async def test_setup_unexpected_exception(monkeypatch, caplog): + fake_socket = MagicMock() + fake_socket.send_json = AsyncMock() + # Simulate unexpected exception during recv_json() + fake_socket.recv_json = AsyncMock(side_effect=Exception("boom!")) + + monkeypatch.setattr( + "control_backend.agents.ri_communication_agent.context.socket", lambda _: fake_socket + ) + + agent = RICommunicationAgent( + "test@server", "password", address="tcp://localhost:5555", bind=False + ) + + with caplog.at_level("ERROR"): + await agent.setup(max_retries=1) + + # Ensure that the error was logged + assert "Unexpected error during negotiation: boom!" 
in caplog.text + + +@pytest.mark.asyncio +async def test_setup_unpacking_exception(monkeypatch, caplog): + # --- Arrange --- + fake_socket = MagicMock() + fake_socket.send_json = AsyncMock() + + # Make recv_json return malformed negotiation data to trigger unpacking exception + malformed_data = { + "endpoint": "negotiate/ports", + "data": [{"id": "main"}], + } # missing 'port' and 'bind' + fake_socket.recv_json = AsyncMock(return_value=malformed_data) + + # Patch context.socket + monkeypatch.setattr( + "control_backend.agents.ri_communication_agent.context.socket", lambda _: fake_socket + ) + + # Patch RICommandAgent so it won't actually start + with patch( + "control_backend.agents.ri_communication_agent.RICommandAgent", autospec=True + ) as MockCommandAgent: + fake_agent_instance = MockCommandAgent.return_value + fake_agent_instance.start = AsyncMock() + + agent = RICommunicationAgent( + "test@server", "password", address="tcp://localhost:5555", bind=False + ) + + # --- Act & Assert --- + with caplog.at_level("ERROR"): + await agent.setup(max_retries=1) + + # Ensure the unpacking exception was logged + assert "Error unpacking negotiation data" in caplog.text + + # Ensure no command agent was started + fake_agent_instance.start.assert_not_awaited() + + # Ensure no behaviour was attached + assert not any(isinstance(b, agent.ListenBehaviour) for b in agent.behaviours) diff --git a/test/integration/agents/vad_agent/speech_with_pauses_16k_1c_float32.wav b/test/integration/agents/vad_agent/speech_with_pauses_16k_1c_float32.wav new file mode 100644 index 0000000..530bc0a Binary files /dev/null and b/test/integration/agents/vad_agent/speech_with_pauses_16k_1c_float32.wav differ diff --git a/test/integration/agents/vad_agent/test_vad_agent.py b/test/integration/agents/vad_agent/test_vad_agent.py new file mode 100644 index 0000000..293912e --- /dev/null +++ b/test/integration/agents/vad_agent/test_vad_agent.py @@ -0,0 +1,99 @@ +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +import zmq +from spade.agent import Agent + +from control_backend.agents.vad_agent import VADAgent + + +@pytest.fixture +def zmq_context(mocker): + return mocker.patch("control_backend.agents.vad_agent.zmq_context") + + +@pytest.fixture +def streaming(mocker): + return mocker.patch("control_backend.agents.vad_agent.Streaming") + + +@pytest.mark.asyncio +async def test_normal_setup(streaming): + """ + Test that during normal setup, the VAD agent creates a Streaming behavior and creates audio + sockets. + """ + vad_agent = VADAgent("tcp://localhost:12345", False) + vad_agent.add_behaviour = MagicMock() + + await vad_agent.setup() + + streaming.assert_called_once() + vad_agent.add_behaviour.assert_called_once_with(streaming.return_value) + assert vad_agent.audio_in_socket is not None + assert vad_agent.audio_out_socket is not None + + +@pytest.mark.parametrize("do_bind", [True, False]) +def test_in_socket_creation(zmq_context, do_bind: bool): + """ + Test that the VAD agent creates an audio input socket, differentiating between binding and + connecting. 
+ """ + vad_agent = VADAgent(f"tcp://{'*' if do_bind else 'localhost'}:12345", do_bind) + + vad_agent._connect_audio_in_socket() + + assert vad_agent.audio_in_socket is not None + + zmq_context.socket.assert_called_once_with(zmq.SUB) + zmq_context.socket.return_value.setsockopt_string.assert_called_once_with(zmq.SUBSCRIBE, "") + + if do_bind: + zmq_context.socket.return_value.bind.assert_called_once_with("tcp://*:12345") + else: + zmq_context.socket.return_value.connect.assert_called_once_with("tcp://localhost:12345") + + +def test_out_socket_creation(zmq_context): + """ + Test that the VAD agent creates an audio output socket correctly. + """ + vad_agent = VADAgent("tcp://localhost:12345", False) + + vad_agent._connect_audio_out_socket() + + assert vad_agent.audio_out_socket is not None + + zmq_context.socket.assert_called_once_with(zmq.PUB) + zmq_context.socket.return_value.bind_to_random_port.assert_called_once() + + +@pytest.mark.asyncio +async def test_out_socket_creation_failure(zmq_context): + """ + Test setup failure when the audio output socket cannot be created. + """ + with patch.object(Agent, "stop", new_callable=AsyncMock) as mock_super_stop: + zmq_context.socket.return_value.bind_to_random_port.side_effect = zmq.ZMQBindError + vad_agent = VADAgent("tcp://localhost:12345", False) + + await vad_agent.setup() + + assert vad_agent.audio_out_socket is None + mock_super_stop.assert_called_once() + + +@pytest.mark.asyncio +async def test_stop(zmq_context): + """ + Test that when the VAD agent is stopped, the sockets are closed correctly. + """ + vad_agent = VADAgent("tcp://localhost:12345", False) + + await vad_agent.setup() + await vad_agent.stop() + + assert zmq_context.socket.return_value.close.call_count == 2 + assert vad_agent.audio_in_socket is None + assert vad_agent.audio_out_socket is None diff --git a/test/integration/agents/vad_agent/test_vad_with_audio.py b/test/integration/agents/vad_agent/test_vad_with_audio.py new file mode 100644 index 0000000..7d10aa3 --- /dev/null +++ b/test/integration/agents/vad_agent/test_vad_with_audio.py @@ -0,0 +1,57 @@ +import os +from unittest.mock import AsyncMock, MagicMock + +import pytest +import soundfile as sf +import zmq + +from control_backend.agents.vad_agent import Streaming + + +def get_audio_chunks() -> list[bytes]: + curr_file = os.path.realpath(__file__) + curr_dir = os.path.dirname(curr_file) + file = f"{curr_dir}/speech_with_pauses_16k_1c_float32.wav" + + chunk_size = 512 + + chunks = [] + + with sf.SoundFile(file, "r") as f: + assert f.samplerate == 16000 + assert f.channels == 1 + assert f.subtype == "FLOAT" + + while True: + data = f.read(chunk_size, dtype="float32") + if len(data) != chunk_size: + break + + chunks.append(data.tobytes()) + + return chunks + + +@pytest.mark.asyncio +async def test_real_audio(mocker): + """ + Test the VAD agent with only input and output mocked. Using the real model, using real audio as + input. Ensure that it outputs some fragments with audio. 
+ """ + audio_chunks = get_audio_chunks() + audio_in_socket = AsyncMock() + audio_in_socket.recv.side_effect = audio_chunks + + mock_poller: MagicMock = mocker.patch("control_backend.agents.vad_agent.zmq.Poller") + mock_poller.return_value.poll.return_value = [(audio_in_socket, zmq.POLLIN)] + + audio_out_socket = AsyncMock() + + vad_streamer = Streaming(audio_in_socket, audio_out_socket) + for _ in audio_chunks: + await vad_streamer.run() + + audio_out_socket.send.assert_called() + for args in audio_out_socket.send.call_args_list: + assert isinstance(args[0][0], bytes) + assert len(args[0][0]) >= 512 * 4 * 3 # Should be at least 3 chunks of audio diff --git a/test/integration/api/endpoints/test_command_endpoint.py b/test/integration/api/endpoints/test_command_endpoint.py new file mode 100644 index 0000000..07bd866 --- /dev/null +++ b/test/integration/api/endpoints/test_command_endpoint.py @@ -0,0 +1,63 @@ +import pytest +from fastapi import FastAPI +from fastapi.testclient import TestClient +from unittest.mock import MagicMock + +from control_backend.api.v1.endpoints import command +from control_backend.schemas.ri_message import SpeechCommand + + +@pytest.fixture +def app(): + """ + Creates a FastAPI test app and attaches the router under test. + Also sets up a mock internal_comm_socket. + """ + app = FastAPI() + app.include_router(command.router) + app.state.internal_comm_socket = MagicMock() # mock ZMQ socket + return app + + +@pytest.fixture +def client(app): + """Create a test client for the app.""" + return TestClient(app) + + +def test_receive_command_endpoint(client, app): + """ + Test that a POST to /command sends the right multipart message + and returns a 202 with the expected JSON body. + """ + mock_socket = app.state.internal_comm_socket + + # Prepare test payload that matches SpeechCommand + payload = {"endpoint": "actuate/speech", "data": "yooo"} + + # Send POST request + response = client.post("/command", json=payload) + + # Check response + assert response.status_code == 202 + assert response.json() == {"status": "Command received"} + + # Verify that the socket was called with the correct data + assert mock_socket.send_multipart.called, "Socket should be used to send data" + + args, kwargs = mock_socket.send_multipart.call_args + sent_data = args[0] + + assert sent_data[0] == b"command" + # Check JSON encoding roughly matches + assert isinstance(SpeechCommand.model_validate_json(sent_data[1].decode()), SpeechCommand) + + +def test_receive_command_invalid_payload(client): + """ + Test invalid data handling (schema validation). 
+ """ + # Missing required field(s) + bad_payload = {"invalid": "data"} + response = client.post("/command", json=bad_payload) + assert response.status_code == 422 # validation error diff --git a/test/integration/schemas/test_ri_message.py b/test/integration/schemas/test_ri_message.py new file mode 100644 index 0000000..aef9ae6 --- /dev/null +++ b/test/integration/schemas/test_ri_message.py @@ -0,0 +1,36 @@ +import pytest +from control_backend.schemas.ri_message import RIMessage, RIEndpoint, SpeechCommand +from pydantic import ValidationError + + +def valid_command_1(): + return SpeechCommand(data="Hallo?") + + +def invalid_command_1(): + return RIMessage(endpoint=RIEndpoint.PING, data="Hello again.") + + +def test_valid_speech_command_1(): + command = valid_command_1() + try: + RIMessage.model_validate(command) + SpeechCommand.model_validate(command) + assert True + except ValidationError: + assert False + + +def test_invalid_speech_command_1(): + command = invalid_command_1() + passed_ri_message_validation = False + try: + # Should succeed, still. + RIMessage.model_validate(command) + passed_ri_message_validation = True + + # Should fail. + SpeechCommand.model_validate(command) + assert False + except ValidationError: + assert passed_ri_message_validation diff --git a/test/unit/agents/bdi/behaviours/test_belief_setter.py b/test/unit/agents/bdi/behaviours/test_belief_setter.py index 8932834..85277da 100644 --- a/test/unit/agents/bdi/behaviours/test_belief_setter.py +++ b/test/unit/agents/bdi/behaviours/test_belief_setter.py @@ -1,10 +1,10 @@ import json import logging -from unittest.mock import MagicMock, AsyncMock, call +from unittest.mock import AsyncMock, MagicMock, call import pytest -from control_backend.agents.bdi.behaviours.belief_setter import BeliefSetter +from control_backend.agents.bdi.behaviours.belief_setter import BeliefSetterBehaviour # Define a constant for the collector agent name to use in tests COLLECTOR_AGENT_NAME = "belief_collector" @@ -22,16 +22,14 @@ def mock_agent(mocker): @pytest.fixture def belief_setter(mock_agent, mocker): - """Fixture to create an instance of BeliefSetter with a mocked agent.""" + """Fixture to create an instance of BeliefSetterBehaviour with a mocked agent.""" # Patch the settings to use a predictable agent name mocker.patch( "control_backend.agents.bdi.behaviours.belief_setter.settings.agent_settings.belief_collector_agent_name", - COLLECTOR_AGENT_NAME + COLLECTOR_AGENT_NAME, ) - # Patch asyncio.sleep to prevent tests from actually waiting - mocker.patch("asyncio.sleep", return_value=None) - - setter = BeliefSetter() + + setter = BeliefSetterBehaviour() setter.agent = mock_agent # Mock the receive method, we will control its return value in each test setter.receive = AsyncMock() @@ -69,7 +67,7 @@ async def test_run_message_received(belief_setter, mocker): Test that when a message is received, _process_message is called. """ # Arrange - msg = MagicMock(); + msg = MagicMock() belief_setter.receive.return_value = msg mocker.patch.object(belief_setter, "_process_message") @@ -115,14 +113,9 @@ def test_process_belief_message_valid_json(belief_setter, mocker): Test processing a valid belief message with correct thread and JSON body. 
""" # Arrange - beliefs_payload = { - "is_hot": [["kitchen"]], - "is_clean": [["kitchen"], ["bathroom"]] - } + beliefs_payload = {"is_hot": ["kitchen"], "is_clean": ["kitchen", "bathroom"]} msg = create_mock_message( - sender_node=COLLECTOR_AGENT_JID, - body=json.dumps(beliefs_payload), - thread="beliefs" + sender_node=COLLECTOR_AGENT_JID, body=json.dumps(beliefs_payload), thread="beliefs" ) mock_set_beliefs = mocker.patch.object(belief_setter, "_set_beliefs") @@ -139,9 +132,7 @@ def test_process_belief_message_invalid_json(belief_setter, mocker, caplog): """ # Arrange msg = create_mock_message( - sender_node=COLLECTOR_AGENT_JID, - body="this is not a json string", - thread="beliefs" + sender_node=COLLECTOR_AGENT_JID, body="this is not a json string", thread="beliefs" ) mock_set_beliefs = mocker.patch.object(belief_setter, "_set_beliefs") @@ -160,9 +151,7 @@ def test_process_belief_message_wrong_thread(belief_setter, mocker): """ # Arrange msg = create_mock_message( - sender_node=COLLECTOR_AGENT_JID, - body='{"some": "data"}', - thread="not_beliefs" + sender_node=COLLECTOR_AGENT_JID, body='{"some": "data"}', thread="not_beliefs" ) mock_set_beliefs = mocker.patch.object(belief_setter, "_set_beliefs") @@ -172,16 +161,13 @@ def test_process_belief_message_wrong_thread(belief_setter, mocker): # Assert mock_set_beliefs.assert_not_called() + def test_process_belief_message_empty_body(belief_setter, mocker): """ Test that a message with an empty body is ignored. """ # Arrange - msg = create_mock_message( - sender_node=COLLECTOR_AGENT_JID, - body="", - thread="beliefs" - ) + msg = create_mock_message(sender_node=COLLECTOR_AGENT_JID, body="", thread="beliefs") mock_set_beliefs = mocker.patch.object(belief_setter, "_set_beliefs") # Act @@ -197,10 +183,10 @@ def test_set_beliefs_success(belief_setter, mock_agent, caplog): """ # Arrange beliefs_to_set = { - "is_hot": [["kitchen"], ["living_room"]], - "door_is": [["front_door", "closed"]] + "is_hot": ["kitchen"], + "door_opened": ["front_door", "back_door"], } - + # Act with caplog.at_level(logging.INFO): belief_setter._set_beliefs(beliefs_to_set) @@ -208,17 +194,25 @@ def test_set_beliefs_success(belief_setter, mock_agent, caplog): # Assert expected_calls = [ call("is_hot", "kitchen"), - call("is_hot", "living_room"), - call("door_is", "front_door", "closed") + call("door_opened", "front_door", "back_door"), ] mock_agent.bdi.set_belief.assert_has_calls(expected_calls, any_order=True) - assert mock_agent.bdi.set_belief.call_count == 3 - + assert mock_agent.bdi.set_belief.call_count == 2 + # Check logs assert "Set belief is_hot with arguments ['kitchen']" in caplog.text - assert "Set belief is_hot with arguments ['living_room']" in caplog.text - assert "Set belief door_is with arguments ['front_door', 'closed']" in caplog.text + assert "Set belief door_opened with arguments ['front_door', 'back_door']" in caplog.text +def test_responded_unset(belief_setter, mock_agent): + # Arrange + new_beliefs = {"user_said": ["message"]} + + # Act + belief_setter._set_beliefs(new_beliefs) + + # Assert + mock_agent.bdi.set_belief.assert_has_calls([call("user_said", "message")]) + mock_agent.bdi.remove_belief.assert_has_calls([call("responded")]) def test_set_beliefs_bdi_not_initialized(belief_setter, mock_agent, caplog): """ @@ -226,7 +220,7 @@ def test_set_beliefs_bdi_not_initialized(belief_setter, mock_agent, caplog): """ # Arrange mock_agent.bdi = None # Simulate BDI not being ready - beliefs_to_set = {"is_hot": [["kitchen"]]} + beliefs_to_set = {"is_hot": 
["kitchen"]} # Act with caplog.at_level(logging.WARNING): diff --git a/test/unit/agents/test_vad_socket_poller.py b/test/unit/agents/test_vad_socket_poller.py new file mode 100644 index 0000000..aaf8d0f --- /dev/null +++ b/test/unit/agents/test_vad_socket_poller.py @@ -0,0 +1,46 @@ +from unittest.mock import AsyncMock, MagicMock + +import pytest +import zmq + +from control_backend.agents.vad_agent import SocketPoller + + +@pytest.fixture +def socket(): + return AsyncMock() + + +@pytest.mark.asyncio +async def test_socket_poller_with_data(socket, mocker): + socket_data = b"test" + socket.recv.return_value = socket_data + + mock_poller: MagicMock = mocker.patch("control_backend.agents.vad_agent.zmq.Poller") + mock_poller.return_value.poll.return_value = [(socket, zmq.POLLIN)] + + poller = SocketPoller(socket) + # Calling `poll` twice to be able to check that the poller is reused + await poller.poll() + data = await poller.poll() + + assert data == socket_data + + # Ensure that the poller was reused + mock_poller.assert_called_once_with() + mock_poller.return_value.register.assert_called_once_with(socket, zmq.POLLIN) + + assert socket.recv.call_count == 2 + + +@pytest.mark.asyncio +async def test_socket_poller_no_data(socket, mocker): + mock_poller: MagicMock = mocker.patch("control_backend.agents.vad_agent.zmq.Poller") + mock_poller.return_value.poll.return_value = [] + + poller = SocketPoller(socket) + data = await poller.poll() + + assert data is None + + socket.recv.assert_not_called() diff --git a/test/unit/agents/test_vad_streaming.py b/test/unit/agents/test_vad_streaming.py new file mode 100644 index 0000000..9b38cd0 --- /dev/null +++ b/test/unit/agents/test_vad_streaming.py @@ -0,0 +1,95 @@ +from unittest.mock import AsyncMock, MagicMock + +import numpy as np +import pytest + +from control_backend.agents.vad_agent import Streaming + + +@pytest.fixture +def audio_in_socket(): + return AsyncMock() + + +@pytest.fixture +def audio_out_socket(): + return AsyncMock() + + +@pytest.fixture +def streaming(audio_in_socket, audio_out_socket): + import torch + + torch.hub.load.return_value = (..., ...) # Mock + return Streaming(audio_in_socket, audio_out_socket) + + +async def simulate_streaming_with_probabilities(streaming, probabilities: list[float]): + """ + Simulates a streaming scenario with given VAD model probabilities for testing purposes. + + :param streaming: The streaming component to be tested. + :param probabilities: A list of probabilities representing the outputs of the VAD model. + """ + model_item = MagicMock() + model_item.item.side_effect = probabilities + streaming.model = MagicMock() + streaming.model.return_value = model_item + + audio_in_poller = AsyncMock() + audio_in_poller.poll.return_value = np.empty(shape=512, dtype=np.float32) + streaming.audio_in_poller = audio_in_poller + + for _ in probabilities: + await streaming.run() + + +@pytest.mark.asyncio +async def test_voice_activity_detected(audio_in_socket, audio_out_socket, streaming): + """ + Test a scenario where there is voice activity detected between silences. 
+ :return: + """ + speech_chunk_count = 5 + probabilities = [0.0] * 5 + [1.0] * speech_chunk_count + [0.0] * 5 + await simulate_streaming_with_probabilities(streaming, probabilities) + + audio_out_socket.send.assert_called_once() + data = audio_out_socket.send.call_args[0][0] + assert isinstance(data, bytes) + # each sample has 512 frames of 4 bytes, expecting 7 chunks (5 with speech, 2 as padding) + assert len(data) == 512 * 4 * (speech_chunk_count + 2) + + +@pytest.mark.asyncio +async def test_voice_activity_short_pause(audio_in_socket, audio_out_socket, streaming): + """ + Test a scenario where there is a short pause between speech, checking whether it ignores the + short pause. + """ + speech_chunk_count = 5 + probabilities = ( + [0.0] * 5 + [1.0] * speech_chunk_count + [0.0] + [1.0] * speech_chunk_count + [0.0] * 5 + ) + await simulate_streaming_with_probabilities(streaming, probabilities) + + audio_out_socket.send.assert_called_once() + data = audio_out_socket.send.call_args[0][0] + assert isinstance(data, bytes) + # Expecting 13 chunks (2*5 with speech, 1 pause between, 2 as padding) + assert len(data) == 512 * 4 * (speech_chunk_count * 2 + 1 + 2) + + +@pytest.mark.asyncio +async def test_no_data(audio_in_socket, audio_out_socket, streaming): + """ + Test a scenario where there is no data received. This should not cause errors. + """ + audio_in_poller = AsyncMock() + audio_in_poller.poll.return_value = None + streaming.audio_in_poller = audio_in_poller + + await streaming.run() + + audio_out_socket.send.assert_not_called() + assert len(streaming.audio_buffer) == 0 diff --git a/test/conftest.py b/test/unit/conftest.py similarity index 52% rename from test/conftest.py rename to test/unit/conftest.py index 1e51aca..76ef272 100644 --- a/test/conftest.py +++ b/test/unit/conftest.py @@ -1,8 +1,6 @@ import sys from unittest.mock import MagicMock -import sys -from unittest.mock import MagicMock def pytest_configure(config): """ @@ -17,21 +15,31 @@ def pytest_configure(config): mock_spade_bdi.bdi = MagicMock() mock_spade.agent.Message = MagicMock() - mock_spade.behaviour.CyclicBehaviour = type('CyclicBehaviour', (object,), {}) - mock_spade_bdi.bdi.BDIAgent = type('BDIAgent', (object,), {}) + mock_spade.behaviour.CyclicBehaviour = type("CyclicBehaviour", (object,), {}) + mock_spade_bdi.bdi.BDIAgent = type("BDIAgent", (object,), {}) - sys.modules['spade'] = mock_spade - sys.modules['spade.agent'] = mock_spade.agent - sys.modules['spade.behaviour'] = mock_spade.behaviour - sys.modules['spade_bdi'] = mock_spade_bdi - sys.modules['spade_bdi.bdi'] = mock_spade_bdi.bdi + sys.modules["spade"] = mock_spade + sys.modules["spade.agent"] = mock_spade.agent + sys.modules["spade.behaviour"] = mock_spade.behaviour + sys.modules["spade_bdi"] = mock_spade_bdi + sys.modules["spade_bdi.bdi"] = mock_spade_bdi.bdi # --- Mock the config module to prevent Pydantic ImportError --- mock_config_module = MagicMock() - + # The code under test does `from ... import settings`, so our mock module # must have a `settings` attribute. We'll make it a MagicMock so we can # configure it later in our tests using mocker.patch. 
mock_config_module.settings = MagicMock() - - sys.modules['control_backend.core.config'] = mock_config_module + + sys.modules["control_backend.core.config"] = mock_config_module + + # --- Mock torch and zmq for VAD --- + mock_torch = MagicMock() + mock_zmq = MagicMock() + mock_zmq.asyncio = mock_zmq + + # In individual tests, these can be imported and the return values changed + sys.modules["torch"] = mock_torch + sys.modules["zmq"] = mock_zmq + sys.modules["zmq.asyncio"] = mock_zmq.asyncio diff --git a/uv.lock b/uv.lock index 050aa28..c2bb61a 100644 --- a/uv.lock +++ b/uv.lock @@ -240,6 +240,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, ] +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, +] + [[package]] name = "charset-normalizer" version = "3.4.3" @@ -394,6 +403,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/c6/c09cee6968add5ff868525c3815e5dccc0e3c6e89eec58dc9135d3c40e88/cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb", size = 3070445, upload-time = "2024-09-03T20:03:21.179Z" }, ] +[[package]] +name = "distlib" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, +] + [[package]] name = "dnspython" version = "2.8.0" @@ -701,6 +719,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794, upload-time = "2021-09-17T21:40:39.897Z" }, ] +[[package]] +name = "identify" +version = "2.6.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" }, +] + [[package]] name = "idna" version = "3.10" @@ -1014,6 +1041,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/eb/8d/776adee7bbf76365fdd7f2552710282c79a4ead5d2a46408c9043a2b70ba/networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec", size = 2034406, upload-time = "2025-05-29T11:35:04.961Z" }, ] +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + [[package]] name = "numba" version = "0.62.1" @@ -1296,10 +1332,15 @@ source = { virtual = "." } dependencies = [ { name = "fastapi", extra = ["all"] }, { name = "mlx-whisper", marker = "sys_platform == 'darwin'" }, + { name = "numpy" }, { name = "openai-whisper" }, { name = "pyaudio" }, { name = "pydantic" }, { name = "pydantic-settings" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { name = "pytest-mock" }, { name = "pyzmq" }, { name = "silero-vad" }, { name = "spade" }, @@ -1309,7 +1350,16 @@ dependencies = [ ] [package.dev-dependencies] +dev = [ + { name = "pre-commit" }, + { name = "ruff" }, + { name = "ruff-format" }, +] +integration-test = [ + { name = "soundfile" }, +] test = [ + { name = "numpy" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-cov" }, @@ -1320,10 +1370,15 @@ test = [ requires-dist = [ { name = "fastapi", extras = ["all"], specifier = ">=0.115.6" }, { name = "mlx-whisper", marker = "sys_platform == 'darwin'", specifier = ">=0.4.3" }, + { name = "numpy", specifier = ">=2.3.3" }, { name = "openai-whisper", specifier = ">=20250625" }, { name = "pyaudio", specifier = ">=0.2.14" }, { name = "pydantic", specifier = ">=2.12.0" }, { name = "pydantic-settings", specifier = ">=2.11.0" }, + { name = "pytest", specifier = ">=8.4.2" }, + { name = "pytest-asyncio", specifier = ">=1.2.0" }, + { name = "pytest-cov", specifier = ">=7.0.0" }, + { name = "pytest-mock", specifier = ">=3.15.1" }, { name = "pyzmq", specifier = ">=27.1.0" }, { name = "silero-vad", specifier = ">=6.0.0" }, { name = "spade", specifier = ">=4.1.0" }, @@ -1333,13 +1388,29 @@ requires-dist = [ ] [package.metadata.requires-dev] +dev = [ + { name = "pre-commit", specifier = ">=4.3.0" }, + { name = "ruff", specifier = ">=0.14.2" }, + { name = "ruff-format", specifier = ">=0.3.0" }, +] +integration-test = [{ name = "soundfile", specifier = ">=0.13.1" }] test = [ + { name = "numpy", specifier = ">=2.3.3" }, { name = "pytest", specifier = ">=8.4.2" }, { name = "pytest-asyncio", specifier = ">=1.2.0" }, { name = "pytest-cov", specifier = ">=7.0.0" }, { name = "pytest-mock", specifier = ">=3.15.1" }, ] +[[package]] +name = 
"platformdirs" +version = "4.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" }, +] + [[package]] name = "pluggy" version = "1.6.0" @@ -1349,6 +1420,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] +[[package]] +name = "pre-commit" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/29/7cf5bbc236333876e4b41f56e06857a87937ce4bf91e117a6991a2dbb02a/pre_commit-4.3.0.tar.gz", hash = "sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16", size = 193792, upload-time = "2025-08-09T18:56:14.651Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5b/a5/987a405322d78a73b66e39e4a90e4ef156fd7141bf71df987e50717c321b/pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8", size = 220965, upload-time = "2025-08-09T18:56:13.192Z" }, +] + [[package]] name = "propcache" version = "0.4.0" @@ -1967,6 +2054,62 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1c/63/0d7df1237c6353d1a85d8a0bc1797ac766c68e8bc6fbca241db74124eb61/rignore-0.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2401637dc8ab074f5e642295f8225d2572db395ae504ffc272a8d21e9fe77b2c", size = 717404, upload-time = "2025-10-02T13:26:29.936Z" }, ] +[[package]] +name = "ruff" +version = "0.14.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/34/8218a19b2055b80601e8fd201ec723c74c7fe1ca06d525a43ed07b6d8e85/ruff-0.14.2.tar.gz", hash = "sha256:98da787668f239313d9c902ca7c523fe11b8ec3f39345553a51b25abc4629c96", size = 5539663, upload-time = "2025-10-23T19:37:00.956Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/dd/23eb2db5ad9acae7c845700493b72d3ae214dce0b226f27df89216110f2b/ruff-0.14.2-py3-none-linux_armv6l.whl", hash = "sha256:7cbe4e593505bdec5884c2d0a4d791a90301bc23e49a6b1eb642dd85ef9c64f1", size = 12533390, upload-time = "2025-10-23T19:36:18.044Z" }, + { url = "https://files.pythonhosted.org/packages/5a/8c/5f9acff43ddcf3f85130d0146d0477e28ccecc495f9f684f8f7119b74c0d/ruff-0.14.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8d54b561729cee92f8d89c316ad7a3f9705533f5903b042399b6ae0ddfc62e11", size = 12887187, upload-time = "2025-10-23T19:36:22.664Z" }, + { url = "https://files.pythonhosted.org/packages/99/fa/047646491479074029665022e9f3dc6f0515797f40a4b6014ea8474c539d/ruff-0.14.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5c8753dfa44ebb2cde10ce5b4d2ef55a41fb9d9b16732a2c5df64620dbda44a3", size = 
11925177, upload-time = "2025-10-23T19:36:24.778Z" }, + { url = "https://files.pythonhosted.org/packages/15/8b/c44cf7fe6e59ab24a9d939493a11030b503bdc2a16622cede8b7b1df0114/ruff-0.14.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d0bbeffb8d9f4fccf7b5198d566d0bad99a9cb622f1fc3467af96cb8773c9e3", size = 12358285, upload-time = "2025-10-23T19:36:26.979Z" }, + { url = "https://files.pythonhosted.org/packages/45/01/47701b26254267ef40369aea3acb62a7b23e921c27372d127e0f3af48092/ruff-0.14.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7047f0c5a713a401e43a88d36843d9c83a19c584e63d664474675620aaa634a8", size = 12303832, upload-time = "2025-10-23T19:36:29.192Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5c/ae7244ca4fbdf2bee9d6405dcd5bc6ae51ee1df66eb7a9884b77b8af856d/ruff-0.14.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bf8d2f9aa1602599217d82e8e0af7fd33e5878c4d98f37906b7c93f46f9a839", size = 13036995, upload-time = "2025-10-23T19:36:31.861Z" }, + { url = "https://files.pythonhosted.org/packages/27/4c/0860a79ce6fd4c709ac01173f76f929d53f59748d0dcdd662519835dae43/ruff-0.14.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1c505b389e19c57a317cf4b42db824e2fca96ffb3d86766c1c9f8b96d32048a7", size = 14512649, upload-time = "2025-10-23T19:36:33.915Z" }, + { url = "https://files.pythonhosted.org/packages/7f/7f/d365de998069720a3abfc250ddd876fc4b81a403a766c74ff9bde15b5378/ruff-0.14.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a307fc45ebd887b3f26b36d9326bb70bf69b01561950cdcc6c0bdf7bb8e0f7cc", size = 14088182, upload-time = "2025-10-23T19:36:36.983Z" }, + { url = "https://files.pythonhosted.org/packages/6c/ea/d8e3e6b209162000a7be1faa41b0a0c16a133010311edc3329753cc6596a/ruff-0.14.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61ae91a32c853172f832c2f40bd05fd69f491db7289fb85a9b941ebdd549781a", size = 13599516, upload-time = "2025-10-23T19:36:39.208Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ea/c7810322086db68989fb20a8d5221dd3b79e49e396b01badca07b433ab45/ruff-0.14.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1967e40286f63ee23c615e8e7e98098dedc7301568bd88991f6e544d8ae096", size = 13272690, upload-time = "2025-10-23T19:36:41.453Z" }, + { url = "https://files.pythonhosted.org/packages/a9/39/10b05acf8c45786ef501d454e00937e1b97964f846bf28883d1f9619928a/ruff-0.14.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:2877f02119cdebf52a632d743a2e302dea422bfae152ebe2f193d3285a3a65df", size = 13496497, upload-time = "2025-10-23T19:36:43.61Z" }, + { url = "https://files.pythonhosted.org/packages/59/a1/1f25f8301e13751c30895092485fada29076e5e14264bdacc37202e85d24/ruff-0.14.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e681c5bc777de5af898decdcb6ba3321d0d466f4cb43c3e7cc2c3b4e7b843a05", size = 12266116, upload-time = "2025-10-23T19:36:45.625Z" }, + { url = "https://files.pythonhosted.org/packages/5c/fa/0029bfc9ce16ae78164e6923ef392e5f173b793b26cc39aa1d8b366cf9dc/ruff-0.14.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e21be42d72e224736f0c992cdb9959a2fa53c7e943b97ef5d081e13170e3ffc5", size = 12281345, upload-time = "2025-10-23T19:36:47.618Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ab/ece7baa3c0f29b7683be868c024f0838770c16607bea6852e46b202f1ff6/ruff-0.14.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b8264016f6f209fac16262882dbebf3f8be1629777cf0f37e7aff071b3e9b92e", size = 
12629296, upload-time = "2025-10-23T19:36:49.789Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7f/638f54b43f3d4e48c6a68062794e5b367ddac778051806b9e235dfb7aa81/ruff-0.14.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5ca36b4cb4db3067a3b24444463ceea5565ea78b95fe9a07ca7cb7fd16948770", size = 13371610, upload-time = "2025-10-23T19:36:51.882Z" }, + { url = "https://files.pythonhosted.org/packages/8d/35/3654a973ebe5b32e1fd4a08ed2d46755af7267da7ac710d97420d7b8657d/ruff-0.14.2-py3-none-win32.whl", hash = "sha256:41775927d287685e08f48d8eb3f765625ab0b7042cc9377e20e64f4eb0056ee9", size = 12415318, upload-time = "2025-10-23T19:36:53.961Z" }, + { url = "https://files.pythonhosted.org/packages/71/30/3758bcf9e0b6a4193a6f51abf84254aba00887dfa8c20aba18aa366c5f57/ruff-0.14.2-py3-none-win_amd64.whl", hash = "sha256:0df3424aa5c3c08b34ed8ce099df1021e3adaca6e90229273496b839e5a7e1af", size = 13565279, upload-time = "2025-10-23T19:36:56.578Z" }, + { url = "https://files.pythonhosted.org/packages/2e/5d/aa883766f8ef9ffbe6aa24f7192fb71632f31a30e77eb39aa2b0dc4290ac/ruff-0.14.2-py3-none-win_arm64.whl", hash = "sha256:ea9d635e83ba21569fbacda7e78afbfeb94911c9434aff06192d9bc23fd5495a", size = 12554956, upload-time = "2025-10-23T19:36:58.714Z" }, +] + +[[package]] +name = "ruff-format" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/3c/71dfce0e8269271969381b1a629772aeeb62c693f8aca8560bf145e413ca/ruff_format-0.3.0.tar.gz", hash = "sha256:f579b32b9dd041b0fe7b04da9ba932ff5d108f7ce4c763bd58e659a03f1d408a", size = 15541, upload-time = "2025-10-10T03:13:11.805Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/b9/5866b53f870231f61716753b471cca1c79042678b96d25bff75ca1ee361a/ruff_format-0.3.0-cp311-abi3-macosx_10_12_x86_64.whl", hash = "sha256:46e543b0c6c858d963ca337ded9e37887ba6fc903caf13bd7200274faef9178c", size = 2127810, upload-time = "2025-10-10T03:12:42.416Z" }, + { url = "https://files.pythonhosted.org/packages/42/0a/311803a69bb9302749eb22b4a193cc87dfe172a5ee6940d3e4c9362418f5/ruff_format-0.3.0-cp311-abi3-macosx_11_0_arm64.whl", hash = "sha256:d549c4cd5e6ae1fac9c4c083b5c3d51bca5b1fdb622384bd5dd2c1d01f99dc66", size = 2059792, upload-time = "2025-10-10T03:12:40.849Z" }, + { url = "https://files.pythonhosted.org/packages/17/bb/7e09e91464291dc1f4b947d858d1206b3df618fdb96cda17fad3bc245977/ruff_format-0.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cb10f784ff0dc8f57183d7edbf33ce32d8efd8582794e9415c8a53a0e6d0e0b", size = 2247834, upload-time = "2025-10-10T03:12:06.404Z" }, + { url = "https://files.pythonhosted.org/packages/6d/20/8d1d5c63acacee481e7a92e8d5a9cfa1fa6266082bf844f66c981033b43b/ruff_format-0.3.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:adf38aae1b1468c55f4f8732d077bb30dd705599875cf6783bbb1808373d9fa4", size = 2187813, upload-time = "2025-10-10T03:12:13.535Z" }, + { url = "https://files.pythonhosted.org/packages/bd/87/c23b0ef5efa4624882601fbcacc8e64f4f1687387acb1873babb82413e27/ruff_format-0.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:642e8edadbc348718ef4aaf750ffa993376338669d5bf7c085c66d1a181ea26f", size = 3076735, upload-time = "2025-10-10T03:12:20.593Z" }, + { url = "https://files.pythonhosted.org/packages/df/60/2dd758eac6f835505de4bdcf7be5c993a930e6f6c475bec21e92df1359e5/ruff_format-0.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:1e894da47f72e538731793953b213c80e17aeea5635067e2054c9a8ffe71331b", size = 2393207, upload-time = "2025-10-10T03:12:28.3Z" }, + { url = "https://files.pythonhosted.org/packages/29/8c/f55bcc419596929da754ffa59f415e498a17be1a32b2a59c472440526625/ruff_format-0.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2c73eabe1f9a08ca7430f317c358bb31c3e0017b262488bac636a50cc7d7948d", size = 2429534, upload-time = "2025-10-10T03:12:43.675Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ae/24e1bf20a13d67fd4b4629efa8c015a20de9fa09ec3767b27a5e0beec4c7/ruff_format-0.3.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:470ca14276c98eb06404c0966d3b306c63c1560fd926416fd5c6c00f24f3410c", size = 2445547, upload-time = "2025-10-10T03:12:50.626Z" }, + { url = "https://files.pythonhosted.org/packages/c8/aa/5c343854a1d6c74a1db7ecd345f7fa6712f7b73adabd9c6ceb5db4356a69/ruff_format-0.3.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ebdf4a35223860e7a697ef3a2d5dc0cf1c94656b09ba9139b400c1602c18db3a", size = 2452623, upload-time = "2025-10-10T03:12:57.66Z" }, + { url = "https://files.pythonhosted.org/packages/fd/0f/8ffaa38f228176478ca6f1e9faf23749220f3fd97ad804559ac85e3cfc98/ruff_format-0.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f3bf308531ad99a745438701df88d306a416d002a36143b23c5b5dad85965a42", size = 2473830, upload-time = "2025-10-10T03:13:05.376Z" }, + { url = "https://files.pythonhosted.org/packages/13/2f/3f53cfb6f14d2f2bfcf29fef41712ee04caa84155334e4602db1e08523d8/ruff_format-0.3.0-cp314-abi3-win32.whl", hash = "sha256:cc9e2bf654290999a2d0bdac8dd289302dcbc8cced2db5e1600f1d1850b4066e", size = 1785021, upload-time = "2025-10-10T03:13:13.785Z" }, + { url = "https://files.pythonhosted.org/packages/64/49/81c0ebc86540f856e0f1ffa6d47a95111328306650f63d6a453d34f05295/ruff_format-0.3.0-cp314-abi3-win_amd64.whl", hash = "sha256:52d47afcf18cd070e9ea8eb7701b6942a28323089fdd4a7a8934c68e57228475", size = 1892439, upload-time = "2025-10-10T03:13:12.546Z" }, + { url = "https://files.pythonhosted.org/packages/e0/5e/bfaf109bb50cc1c108d494288072419ba3acf0e9bfcf3be587b707454c50/ruff_format-0.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:623156d3a1e2ef8ece2b7195aa64f122c036605ce495e06e99c53a52927b7871", size = 2249416, upload-time = "2025-10-10T03:12:08.096Z" }, + { url = "https://files.pythonhosted.org/packages/8c/01/113a0e8f15dc1309b6331695a084bc36207b26fad065c26abfadbf24f5a7/ruff_format-0.3.0-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:073d4be5fb2fbb6668e14fb9a3aae1b03bbb2ef6d63622979e5657d22a69fb36", size = 2190621, upload-time = "2025-10-10T03:12:14.806Z" }, + { url = "https://files.pythonhosted.org/packages/66/8d/979b6ccde9fe4018b01a9a4215cc4c3455519465943c9862876311e239da/ruff_format-0.3.0-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45e34fe85e7bc833f85e873f6cb9e3606510e678760c7128c737b009e3b9fdfd", size = 3077988, upload-time = "2025-10-10T03:12:22.204Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/791ce063a6bf17c783fe036f302bfcec8a9e1f99bf591e8b0cc73a25b719/ruff_format-0.3.0-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:135f1306e51198790fcf402c6574539e51dc1bcfa6d8c67e8b51c701d9ebab11", size = 2395129, upload-time = "2025-10-10T03:12:29.808Z" }, + { url = "https://files.pythonhosted.org/packages/ed/7b/08df01b8925ea4fdf7959199ccffc599314a179695fa8bc886146971b30b/ruff_format-0.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:451d3502ccd85ec055fdc1ce52f60f6c8d469bda3b8c7a3e9ac5fa99a64fde9c", size = 2302808, upload-time = "2025-10-10T03:12:38.299Z" }, + { url = "https://files.pythonhosted.org/packages/b4/0d/24d3616081e283b38cf228a6765b913fd1320e780febd4ea3ec98a0db5ff/ruff_format-0.3.0-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:76e0c088e18bd23b124d225926b8d64db6419a7f86b3a123346e2bacae679940", size = 2364885, upload-time = "2025-10-10T03:12:35.341Z" }, + { url = "https://files.pythonhosted.org/packages/05/2f/3efec36107cd974ed48ab63b61b15e49139575ff305daf0c52c24ea14cdb/ruff_format-0.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:81651ba409a6de07f5c6b25ac609401649a3cccdd19c7cb76e735481e6ed859a", size = 2431420, upload-time = "2025-10-10T03:12:45.127Z" }, + { url = "https://files.pythonhosted.org/packages/f7/bb/9ec44a9203f668974a896efc9cf26c9e332226b578f7ae6ca3449642e7cb/ruff_format-0.3.0-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:da2d9cc4d0c4cfd5b8180a19f0b8eda86cc2cffc0e5d01dd2b6133eb85e7e76f", size = 2447058, upload-time = "2025-10-10T03:12:51.926Z" }, + { url = "https://files.pythonhosted.org/packages/a0/57/be709bc005ec1008773a9361b0d1dac23fc0425ea2510b3b575cb3d44865/ruff_format-0.3.0-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c0f1c971a9eb50b7145158fd96ac29d5d5aaf4373c9d4c438113a1a09a97be03", size = 2453965, upload-time = "2025-10-10T03:12:59.07Z" }, + { url = "https://files.pythonhosted.org/packages/d3/a4/3a09b363d5bf7c4e2b97f770b308973759dce2acdf296b4023c3239ae7a7/ruff_format-0.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:c905725e0dad3016a0c7cd16eea64edec7bc42cd60036378a4e206a56ee565fd", size = 2475816, upload-time = "2025-10-10T03:13:06.68Z" }, +] + [[package]] name = "scipy" version = "1.16.2" @@ -2081,6 +2224,25 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, ] +[[package]] +name = "soundfile" +version = "0.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi" }, + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e1/41/9b873a8c055582859b239be17902a85339bec6a30ad162f98c9b0288a2cc/soundfile-0.13.1.tar.gz", hash = "sha256:b2c68dab1e30297317080a5b43df57e302584c49e2942defdde0acccc53f0e5b", size = 46156, upload-time = "2025-01-25T09:17:04.831Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/28/e2a36573ccbcf3d57c00626a21fe51989380636e821b341d36ccca0c1c3a/soundfile-0.13.1-py2.py3-none-any.whl", hash = "sha256:a23c717560da2cf4c7b5ae1142514e0fd82d6bbd9dfc93a50423447142f2c445", size = 25751, upload-time = "2025-01-25T09:16:44.235Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ab/73e97a5b3cc46bba7ff8650a1504348fa1863a6f9d57d7001c6b67c5f20e/soundfile-0.13.1-py2.py3-none-macosx_10_9_x86_64.whl", hash = "sha256:82dc664d19831933fe59adad199bf3945ad06d84bc111a5b4c0d3089a5b9ec33", size = 1142250, upload-time = "2025-01-25T09:16:47.583Z" }, + { url = "https://files.pythonhosted.org/packages/a0/e5/58fd1a8d7b26fc113af244f966ee3aecf03cb9293cb935daaddc1e455e18/soundfile-0.13.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:743f12c12c4054921e15736c6be09ac26b3b3d603aef6fd69f9dde68748f2593", size = 1101406, upload-time = "2025-01-25T09:16:49.662Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/ae/c0e4a53d77cf6e9a04179535766b3321b0b9ced5f70522e4caf9329f0046/soundfile-0.13.1-py2.py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:9c9e855f5a4d06ce4213f31918653ab7de0c5a8d8107cd2427e44b42df547deb", size = 1235729, upload-time = "2025-01-25T09:16:53.018Z" }, + { url = "https://files.pythonhosted.org/packages/57/5e/70bdd9579b35003a489fc850b5047beeda26328053ebadc1fb60f320f7db/soundfile-0.13.1-py2.py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:03267c4e493315294834a0870f31dbb3b28a95561b80b134f0bd3cf2d5f0e618", size = 1313646, upload-time = "2025-01-25T09:16:54.872Z" }, + { url = "https://files.pythonhosted.org/packages/fe/df/8c11dc4dfceda14e3003bb81a0d0edcaaf0796dd7b4f826ea3e532146bba/soundfile-0.13.1-py2.py3-none-win32.whl", hash = "sha256:c734564fab7c5ddf8e9be5bf70bab68042cd17e9c214c06e365e20d64f9a69d5", size = 899881, upload-time = "2025-01-25T09:16:56.663Z" }, + { url = "https://files.pythonhosted.org/packages/14/e9/6b761de83277f2f02ded7e7ea6f07828ec78e4b229b80e4ca55dd205b9dc/soundfile-0.13.1-py2.py3-none-win_amd64.whl", hash = "sha256:1e70a05a0626524a69e9f0f4dd2ec174b4e9567f4d8b6c11d38b5c289be36ee9", size = 1019162, upload-time = "2025-01-25T09:16:59.573Z" }, +] + [[package]] name = "spade" version = "4.1.0" @@ -2425,6 +2587,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018, upload-time = "2024-10-14T23:38:10.888Z" }, ] +[[package]] +name = "virtualenv" +version = "20.35.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a4/d5/b0ccd381d55c8f45d46f77df6ae59fbc23d19e901e2d523395598e5f4c93/virtualenv-20.35.3.tar.gz", hash = "sha256:4f1a845d131133bdff10590489610c98c168ff99dc75d6c96853801f7f67af44", size = 6002907, upload-time = "2025-10-10T21:23:33.178Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/73/d9a94da0e9d470a543c1b9d3ccbceb0f59455983088e727b8a1824ed90fb/virtualenv-20.35.3-py3-none-any.whl", hash = "sha256:63d106565078d8c8d0b206d48080f938a8b25361e19432d2c9db40d2899c810a", size = 5981061, upload-time = "2025-10-10T21:23:30.433Z" }, +] + [[package]] name = "watchfiles" version = "1.1.0"