"""
|
|
LLM Agent module for routing text queries from the BDI Core Agent to a local LLM
|
|
service and returning its responses back to the BDI Core Agent.
|
|
"""
|
|
|
|
import logging
from typing import Any

import httpx
from spade.agent import Agent
from spade.behaviour import CyclicBehaviour
from spade.message import Message

from control_backend.agents.llm.llm_instructions import LLMInstructions
from control_backend.core.config import settings


class LLMAgent(Agent):
    """
    Agent responsible for processing user text input and querying a locally
    hosted LLM for text generation. Receives messages from the BDI Core Agent
    and responds with processed LLM output.
    """

    logger = logging.getLogger("llm_agent")

    class ReceiveMessageBehaviour(CyclicBehaviour):
        """
        Cyclic behaviour to continuously listen for incoming messages from
        the BDI Core Agent and handle them.
        """

        async def run(self):
            """
            Receives SPADE messages and processes only those originating from
            the configured BDI agent.
            """
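            # A short timeout keeps this cyclic behaviour responsive;
            # receive() returns None when no message arrives in time.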
            msg = await self.receive(timeout=1)
            if not msg:
                return

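            # Only the node (local part) of the sender JID is compared,
            # since the configured agent name does not include a host.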
            sender = msg.sender.node
            self.agent.logger.info(
                "Received message: %s from %s",
                msg.body,
                sender,
            )

            if sender == settings.agent_settings.bdi_core_agent_name:
                self.agent.logger.debug("Processing message from BDI Core Agent")
                await self._process_bdi_message(msg)
            else:
                self.agent.logger.debug("Message ignored (not from BDI Core Agent)")

        async def _process_bdi_message(self, message: Message):
            """
            Forwards user text to the LLM and replies with the generated text.
            """
            user_text = message.body
            llm_response = await self._query_llm(user_text)
            await self._reply(llm_response)

        async def _reply(self, msg: str):
            """
            Sends a response message back to the BDI Core Agent.
            """
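            # The recipient JID is rebuilt from the configured agent name and
            # XMPP host, mirroring how the sender was matched in run().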
            reply = Message(
                to=f"{settings.agent_settings.bdi_core_agent_name}@{settings.agent_settings.host}",
                body=msg,
            )
            await self.send(reply)
            self.agent.logger.info("Reply sent to BDI Core Agent")

        async def _query_llm(self, prompt: str) -> str:
            """
            Sends a chat completion request to the local LLM service.

            :param prompt: Input text prompt to pass to the LLM.
            :return: LLM-generated content, or a fallback message on error.
            """
            async with httpx.AsyncClient(timeout=120.0) as client:
                # Build the developer instruction that frames how the model
                # should answer the user's prompt.
                instructions = LLMInstructions()
                developer_instruction = instructions.build_developer_instruction()

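                # OpenAI-style chat completions payload; the low temperature
                # keeps replies relatively deterministic.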
                # The request itself sits inside the try block so that
                # transport errors (connection refused, timeouts) are caught
                # by the httpx.HTTPError handler, not just bad status codes.
                try:
                    response = await client.post(
                        settings.llm_settings.local_llm_url,
                        headers={"Content-Type": "application/json"},
                        json={
                            "model": settings.llm_settings.local_llm_model,
                            "messages": [
                                {
                                    "role": "developer",
                                    "content": developer_instruction,
                                },
                                {
                                    "role": "user",
                                    "content": prompt,
                                },
                            ],
                            "temperature": 0.3,
                        },
                    )
                    response.raise_for_status()
                    data: dict[str, Any] = response.json()
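                    # Unpack defensively: missing keys fall back to defaults,
                    # and anything else lands in the generic handler below.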
return data.get("choices", [{}])[0].get(
|
|
"message", {}
|
|
).get("content", "No response")
|
|
except httpx.HTTPError as err:
|
|
self.agent.logger.error("HTTP error: %s", err)
|
|
return "LLM service unavailable."
|
|
except Exception as err:
|
|
self.agent.logger.error("Unexpected error: %s", err)
|
|
return "Error processing the request."
|
|
|
|
    async def setup(self):
        """
        Sets up the SPADE behaviour to filter and process messages from the
        BDI Core Agent.
        """
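        # Register the receive loop so it runs for the agent's lifetime.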
        behaviour = self.ReceiveMessageBehaviour()
        self.add_behaviour(behaviour)

        self.logger.info("LLMAgent setup complete")
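

# Usage sketch (illustrative only; the JID and password below are
# placeholders, not values defined by this project):
#
#     agent = LLMAgent("llm_agent@localhost", "secret")
#     await agent.start()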