feat: implement extra log level for LLM token stream
committed by JobvAlewijk
commit 78abad55d3
parent 2366255b92
@@ -49,6 +49,9 @@ async def test_llm_processing_success(mock_httpx_client, mock_settings):
     agent = LLMAgent("llm_agent")
     agent.send = AsyncMock()  # Mock the send method to verify replies

+    mock_logger = MagicMock()
+    agent.logger = mock_logger
+
     # Simulate receiving a message from BDI
     prompt = LLMPromptMessage(text="Hi", norms=[], goals=[])
     msg = InternalMessage(
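The hunk above only attaches a mock logger in the test; the implementation of the extra log level itself is not shown in this diff. As a minimal sketch, assuming the project uses Python's standard logging module, an extra level for per-token output could be registered as below. The level name "TOKEN", the numeric value 5, the token() helper, and the logger name "llm_agent" are illustrative assumptions, not taken from this commit.

    import logging

    # Assumption: a custom level below DEBUG (10) for per-token stream output.
    TOKEN_LEVEL = 5
    logging.addLevelName(TOKEN_LEVEL, "TOKEN")

    def token(self, message, *args, **kwargs):
        # Only format and emit when the level is enabled, so streaming
        # tokens cost almost nothing when token logging is switched off.
        if self.isEnabledFor(TOKEN_LEVEL):
            self._log(TOKEN_LEVEL, message, args, **kwargs)

    # Attach the helper so any logger (including the one mocked in the test)
    # exposes logger.token(...).
    logging.Logger.token = token

    # Usage sketch: emit each streamed token at the custom level.
    logging.basicConfig(level=TOKEN_LEVEL)
    logger = logging.getLogger("llm_agent")
    logger.token("received token: %s", "Hi")

With this shape, the mocked logger in the test could be asserted against in the usual unittest.mock way once the agent emits token-level records.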