chore: cleanup
Make the LLM agent read its URL from settings; clean up unnecessary f-string. ref: N25B-207
This commit is contained in:
@@ -32,9 +32,6 @@ class LLMAgent(Agent):
|
||||
the BDI Core Agent and handle them.
|
||||
"""
|
||||
|
||||
LOCAL_LLM_URL: str = "http://127.0.0.1:1234/v1/chat/completions"
|
||||
LOCAL_LLM_MODEL: str = "openai/gpt-oss-20b"
|
||||
|
||||
async def run(self) -> None:
|
||||
"""
|
||||
Receives SPADE messages and processes only those originating from the
|
||||
@@ -70,10 +67,9 @@ class LLMAgent(Agent):
|
||||
Sends a response message back to the BDI Core Agent.
|
||||
"""
|
||||
reply = Message(
|
||||
to=f"{settings.agent_settings.bdi_core_agent_name}@"
|
||||
f"{settings.agent_settings.host}",
|
||||
body=msg,
|
||||
thread="llm_response",
|
||||
to= settings.agent_settings.bdi_core_agent_name + '@' + settings.agent_settings.host,
|
||||
body= msg,
|
||||
thread= "llm_response",
|
||||
)
|
||||
await self.send(reply)
|
||||
self.agent.logger.info("Reply sent to BDI Core Agent")
|
||||
@@ -87,10 +83,10 @@ class LLMAgent(Agent):
|
||||
"""
|
||||
async with httpx.AsyncClient(timeout=120.0) as client:
|
||||
response = await client.post(
|
||||
self.LOCAL_LLM_URL,
|
||||
settings.llm_settings.local_llm_url,
|
||||
headers={"Content-Type": "application/json"},
|
||||
json={
|
||||
"model": self.LOCAL_LLM_MODEL,
|
||||
"model": settings.llm_settings.local_llm_model,
|
||||
"messages": [{"role": "user", "content": prompt}],
|
||||
"temperature": 0.3,
|
||||
},
|
||||
@@ -116,10 +112,7 @@ class LLMAgent(Agent):
|
||||
self.logger.info("LLMAgent setup complete")
|
||||
|
||||
template = Template()
|
||||
template.sender = (
|
||||
f"{settings.agent_settings.bdi_core_agent_name}@"
|
||||
f"{settings.agent_settings.host}"
|
||||
)
|
||||
template.sender = settings.agent_settings.bdi_core_agent_name + '@' + settings.agent_settings.host
|
||||
|
||||
behaviour = self.ReceiveMessageBehaviour()
|
||||
self.add_behaviour(behaviour, template)
|
||||
|
||||
Reference in New Issue
Block a user