feat: implemented extra log level for LLM token stream

Author: Luijkx, S.O.H. (Storm)
Date: 2025-12-16 11:26:35 +00:00
Committed by: JobvAlewijk
Parent: 2366255b92
Commit: 78abad55d3
4 changed files with 23 additions and 6 deletions
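
The commit introduces a dedicated log level for the LLM token stream, so individual streamed tokens can be logged without flooding the regular DEBUG output. Below is a minimal sketch of how such a level is typically registered with Python's logging module; the level name TOKEN_STREAM, its numeric value, and the token_stream helper are illustrative assumptions, not names taken from this commit.

```python
import logging

# Assumed level name and numeric value; a value below DEBUG (10) keeps
# per-token output hidden unless the level is explicitly enabled.
TOKEN_STREAM_LEVEL = 5
logging.addLevelName(TOKEN_STREAM_LEVEL, "TOKEN_STREAM")

def token_stream(self, message, *args, **kwargs):
    """Log a single streamed LLM token at the custom TOKEN_STREAM level."""
    if self.isEnabledFor(TOKEN_STREAM_LEVEL):
        self._log(TOKEN_STREAM_LEVEL, message, args, **kwargs)

# Attach the helper so agent.logger.token_stream(...) becomes available.
logging.Logger.token_stream = token_stream
```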


@@ -49,6 +49,9 @@ async def test_llm_processing_success(mock_httpx_client, mock_settings):
    agent = LLMAgent("llm_agent")
    agent.send = AsyncMock()  # Mock the send method to verify replies
    mock_logger = MagicMock()
    agent.logger = mock_logger
    # Simulate receiving a message from BDI
    prompt = LLMPromptMessage(text="Hi", norms=[], goals=[])
    msg = InternalMessage(
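
The excerpt cuts off while the InternalMessage is being constructed. Since the test replaces agent.logger with a MagicMock, it can assert that the new level is actually used during streaming. The sketch below shows one way such an assertion could look; the token_stream method name and the token values are assumptions for illustration, not taken from the actual test.

```python
from unittest.mock import MagicMock, call

mock_logger = MagicMock()

# Stand-in for exercising the agent so it streams tokens through the logger
# (illustrative calls; the real agent would emit these internally).
mock_logger.token_stream("Hel")
mock_logger.token_stream("lo")

# Verify that each streamed token was logged at the custom level.
mock_logger.token_stream.assert_has_calls([call("Hel"), call("lo")])
```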