chore: move receive-LLM behaviour into the behaviours folder
Moved ReceiveLLMResponseBehaviour into the behaviours folder. ref: N25B-207
@@ -7,6 +7,7 @@ from spade.template import Template
 from spade_bdi.bdi import BDIAgent

 from control_backend.agents.bdi.behaviours.belief_setter import BeliefSetter
+from control_backend.agents.bdi.behaviours.recieve_llm_resp_behavoir import ReceiveLLMResponseBehaviour
 from control_backend.core.config import settings


@@ -27,7 +28,7 @@ class BDICoreAgent(BDIAgent):
         self.logger.info("BDICoreAgent setup started")

         self.add_behaviour(BeliefSetter())
-        self._add_llm_response_receiver()
+        self.add_behaviour(ReceiveLLMResponseBehaviour())

         await self._send_to_llm("Hello we are the Pepper plus team")
         # This is the example message currently sent to the llm at the start of the Program
@@ -63,36 +64,9 @@ class BDICoreAgent(BDIAgent):
                     body= text,
                     thread= "llm_request",
                 )
                 msg.set_metadata("performative", "inform")

                 await self.send(msg)
                 self.agent.logger.debug("Message sent to LLM: %s", text)

         self.add_behaviour(SendBehaviour())
         return "LLM message dispatch scheduled"

-    def _add_llm_response_receiver(self) -> None:
-        """
-        Adds behavior to receive responses from the LLM Agent.
-        """
-
-        class ReceiveLLMResponseBehaviour(CyclicBehaviour):
-            async def run(self) -> None:
-                msg = await self.receive(timeout=2)
-                if not msg:
-                    return
-
-                sender = msg.sender.node
-                match sender:
-                    case settings.agent_settings.llm_agent_name:
-                        content = msg.body
-                        self.agent.logger.info("Received LLM response: %s", content)
-                        #Here the BDI can pass the message back as a response
-                    case _:
-                        self.logger.debug("Not from the llm, discarding message")
-                        pass
-
-        template = Template()
-        template.thread = "llm_response"
-
-        self.add_behaviour(ReceiveLLMResponseBehaviour(), template)
-        return "LLM message dispatch scheduled"
@@ -0,0 +1,29 @@
+import asyncio
+import json
+import logging
+
+from spade.agent import Message
+from spade.behaviour import CyclicBehaviour
+from spade_bdi.bdi import BDIAgent
+
+from control_backend.core.config import settings
+
+class ReceiveLLMResponseBehaviour(CyclicBehaviour):
+    """
+    Adds behavior to receive responses from the LLM Agent.
+    """
+    logger = logging.getLogger("BDI/LLM Reciever")
+    async def run(self):
+        msg = await self.receive(timeout=2)
+        if not msg:
+            return
+
+        sender = msg.sender.node
+        match sender:
+            case settings.agent_settings.llm_agent_name:
+                content = msg.body
+                self.logger.info("Received LLM response: %s", content)
+                #Here the BDI can pass the message back as a response
+            case _:
+                self.logger.debug("Not from the llm, discarding message")
+                pass
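
For reference, the relocated behaviour (the new module is imported in the first hunk as control_backend.agents.bdi.behaviours.recieve_llm_resp_behavoir) only logs a message when msg.sender.node equals settings.agent_settings.llm_agent_name; anything else is discarded. A minimal sketch of a reply that would match, assuming the LLM agent answers on the same "llm_response" thread the old template filtered on and using a made-up recipient JID (illustrative, not part of the commit):

    from spade.message import Message

    # Hypothetical reply as the LLM agent might build it; the JID and body text
    # are invented for illustration.
    reply = Message(
        to="bdi_core@localhost",
        body="Hello Pepper plus team, how can I help?",
        thread="llm_response",
    )
    reply.set_metadata("performative", "inform")
    # The sending account's node (the part before the "@") must equal
    # settings.agent_settings.llm_agent_name for the case branch above to log
    # the body instead of discarding the message.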