chore: cleanup

made the BDI agent match incoming messages against their sender
changed the LLM request target from the test agent to the LLM agent in settings

ref: N25B-207
JobvAlewijk
2025-10-28 12:03:30 +01:00
parent 17056da832
commit 281bc57b6e
2 changed files with 12 additions and 9 deletions


@@ -21,10 +21,10 @@ uv sync
 To run an LLM locally, download https://lmstudio.ai
 When installing, select developer mode, download a model (it will already suggest one), and run it (see the developer window, status: running).
-Copy the URL at the top right and replace LOCAL_LLM_URL with it + v1/chat/completions.
+Copy the URL at the top right and replace local_llm_url in settings with it + v1/chat/completions.
 The appended v1/chat/completions part might differ based on which model you choose.
-Copy the name of the loaded model and replace LOCAL_LLM_MODEL.
+Copy the name of the loaded model and replace local_llm_model in settings.
 ## Running
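For context, the settings module the README now points at might look something like the sketch below. This is a minimal, hypothetical example assuming a pydantic-settings style config; only the field names (`agent_settings`, `host`, `test_agent_name`, `llm_agent_name`, `local_llm_url`, `local_llm_model`) appear in the commit, everything else is illustrative.

```python
# Minimal sketch of a settings module matching the names used in this commit.
# The pydantic-settings structure and all defaults are assumptions, not the
# repo's actual code; only the field names appear in the diff.
from pydantic import BaseModel
from pydantic_settings import BaseSettings


class AgentSettings(BaseModel):
    host: str = "localhost"
    test_agent_name: str = "test_agent"
    llm_agent_name: str = "llm_agent"
    # LM Studio server URL plus the chat completions path (the "+ part"):
    local_llm_url: str = "http://localhost:1234/v1/chat/completions"
    # Model identifier as shown in LM Studio for the loaded model:
    local_llm_model: str = "qwen2.5-7b-instruct"


class Settings(BaseSettings):
    agent_settings: AgentSettings = AgentSettings()


settings = Settings()
```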


@@ -59,7 +59,7 @@ class BDICoreAgent(BDIAgent):
     class SendBehaviour(OneShotBehaviour):
         async def run(self) -> None:
             msg = Message(
-                to=settings.agent_settings.test_agent_name + '@' + settings.agent_settings.host,
+                to=settings.agent_settings.llm_agent_name + '@' + settings.agent_settings.host,
                 body=text,
                 thread="llm_request",
             )
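The hunk stops at the Message constructor; the send itself is outside the diff. For reference, a hedged sketch of the conventional SPADE pattern around it follows. Where `text` comes from and how the behaviour is scheduled are assumptions, not the repo's actual code; `to=` must be a full JID ("name@host").

```python
# Hedged sketch of the full one-shot send; only the Message(...) call is in
# the diff, the rest is the usual SPADE pattern, not the repo's actual code.
from spade.behaviour import OneShotBehaviour
from spade.message import Message


class SendBehaviour(OneShotBehaviour):
    def __init__(self, text: str) -> None:
        super().__init__()
        self.text = text

    async def run(self) -> None:
        msg = Message(
            to=settings.agent_settings.llm_agent_name + "@" + settings.agent_settings.host,
            body=self.text,
            thread="llm_request",  # the LLM agent replies on thread "llm_response"
        )
        await self.send(msg)  # SPADE behaviours send through self.send
```

The agent would then schedule it with something like `self.add_behaviour(SendBehaviour("some prompt"))`.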
@@ -81,13 +81,16 @@ class BDICoreAgent(BDIAgent):
             if not msg:
                 return
-            content = msg.body
-            self.agent.logger.info("Received LLM response: %s", content)
+            sender = msg.sender.node
+            match sender:
+                case settings.agent_settings.llm_agent_name:
+                    content = msg.body
+                    self.agent.logger.info("Received LLM response: %s", content)
+                    # Here the BDI can pass the message back as a response
+                case _:
+                    self.agent.logger.debug("Not from the LLM agent, discarding message")
             # TODO: Convert response into a belief (optional future feature)
             # Example:
             # self.agent.add_belief("llm_response", content)
             # self.agent.logger.debug("Added belief: llm_response(%s)", content)

         template = Template()
         template.thread = "llm_response"
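The Template at the end of the hunk is presumably passed to add_behaviour so that SPADE only delivers messages on the llm_response thread to the receive behaviour. A hedged sketch of that wiring follows; the setup() framing and the name ReceiveLLMBehaviour are assumptions, only the Template lines appear in the diff.

```python
# Hedged sketch: attaching the thread template so SPADE delivers only matching
# messages to this behaviour; ReceiveLLMBehaviour is a hypothetical name.
from spade.agent import Agent
from spade.behaviour import CyclicBehaviour
from spade.template import Template


class BDICoreAgentSketch(Agent):
    class ReceiveLLMBehaviour(CyclicBehaviour):
        async def run(self) -> None:
            msg = await self.receive(timeout=10)  # returns None on timeout
            if not msg:
                return
            # ... sender matching from the hunk above goes here ...

    async def setup(self) -> None:
        template = Template()
        template.thread = "llm_response"  # match only llm_response-thread messages
        self.add_behaviour(self.ReceiveLLMBehaviour(), template)
```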