chore: cleanup

Made the LLM agent read its URL from settings
Cleaned up unnecessary f-strings

ref: N25B-207
This commit is contained in:
JobvAlewijk
2025-10-28 11:07:28 +01:00
parent c5b71450fc
commit 17056da832
3 changed files with 17 additions and 17 deletions

View File

@@ -59,10 +59,9 @@ class BDICoreAgent(BDIAgent):
class SendBehaviour(OneShotBehaviour):
async def run(self) -> None:
msg = Message(
to=f"{settings.agent_settings.test_agent_name}@"
f"{settings.agent_settings.host}",
body=text,
thread="llm_request",
to= settings.agent_settings.test_agent_name + '@' + settings.agent_settings.host,
body= text,
thread= "llm_request",
)
msg.set_metadata("performative", "inform")
await self.send(msg)

View File

@@ -32,9 +32,6 @@ class LLMAgent(Agent):
the BDI Core Agent and handle them.
"""
LOCAL_LLM_URL: str = "http://127.0.0.1:1234/v1/chat/completions"
LOCAL_LLM_MODEL: str = "openai/gpt-oss-20b"
async def run(self) -> None:
"""
Receives SPADE messages and processes only those originating from the
@@ -70,10 +67,9 @@ class LLMAgent(Agent):
Sends a response message back to the BDI Core Agent.
"""
reply = Message(
to=f"{settings.agent_settings.bdi_core_agent_name}@"
f"{settings.agent_settings.host}",
body=msg,
thread="llm_response",
to= settings.agent_settings.bdi_core_agent_name + '@' + settings.agent_settings.host,
body= msg,
thread= "llm_response",
)
await self.send(reply)
self.agent.logger.info("Reply sent to BDI Core Agent")
@@ -87,10 +83,10 @@ class LLMAgent(Agent):
"""
async with httpx.AsyncClient(timeout=120.0) as client:
response = await client.post(
self.LOCAL_LLM_URL,
settings.llm_settings.local_llm_url,
headers={"Content-Type": "application/json"},
json={
"model": self.LOCAL_LLM_MODEL,
"model": settings.llm_settings.local_llm_model,
"messages": [{"role": "user", "content": prompt}],
"temperature": 0.3,
},
@@ -116,10 +112,7 @@ class LLMAgent(Agent):
self.logger.info("LLMAgent setup complete")
template = Template()
template.sender = (
f"{settings.agent_settings.bdi_core_agent_name}@"
f"{settings.agent_settings.host}"
)
template.sender = settings.agent_settings.bdi_core_agent_name + '@' + settings.agent_settings.host
behaviour = self.ReceiveMessageBehaviour()
self.add_behaviour(behaviour, template)

View File

@@ -12,6 +12,10 @@ class AgentSettings(BaseModel):
llm_agent_name: str = "llm_agent"
test_agent_name: str = "test_agent"
class LLMSettings(BaseModel):
local_llm_url: str = "http://127.0.0.1:1234/v1/chat/completions"
local_llm_model: str = "openai/gpt-oss-20b"
class Settings(BaseSettings):
app_title: str = "PepperPlus"
@@ -20,7 +24,11 @@ class Settings(BaseSettings):
zmq_settings: ZMQSettings = ZMQSettings()
agent_settings: AgentSettings = AgentSettings()
llm_settings: LLMSettings = LLMSettings()
model_config = SettingsConfigDict(env_file=".env")
settings = Settings()