Use ZMQ's global context instance and set up an XPUB/XSUB proxy intermediary to allow for easier multi-pubs. close: N25B-217
44 lines · 1.2 KiB · Python
from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict


class ZMQSettings(BaseModel):
    internal_pub_address: str = "tcp://localhost:5560"
    internal_sub_address: str = "tcp://localhost:5561"


class AgentSettings(BaseModel):
    host: str = "localhost"
    bdi_core_agent_name: str = "bdi_core"
    belief_collector_agent_name: str = "belief_collector"
    text_belief_extractor_agent_name: str = "text_belief_extractor"
    vad_agent_name: str = "vad_agent"
    llm_agent_name: str = "llm_agent"
    test_agent_name: str = "test_agent"
    transcription_agent_name: str = "transcription_agent"

    ri_communication_agent_name: str = "ri_communication_agent"
    ri_command_agent_name: str = "ri_command_agent"


class LLMSettings(BaseModel):
    local_llm_url: str = "http://localhost:1234/v1/chat/completions"
    local_llm_model: str = "openai/gpt-oss-20b"


class Settings(BaseSettings):
    app_title: str = "PepperPlus"

    ui_url: str = "http://localhost:5173"

    zmq_settings: ZMQSettings = ZMQSettings()

    agent_settings: AgentSettings = AgentSettings()

    llm_settings: LLMSettings = LLMSettings()

    model_config = SettingsConfigDict(env_file=".env")


settings = Settings()
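
The XPUB/XSUB proxy intermediary mentioned in the commit message is not part of this file; only its two endpoints are configured here. A minimal sketch of how such a forwarder might be wired to these settings follows. It assumes the proxy binds both addresses while publishers connect their PUB sockets to internal_pub_address and subscribers connect their SUB sockets to internal_sub_address, and the import path (config) and function name (run_proxy) are placeholders, not names from this repository.

# Sketch only: an XSUB/XPUB forwarder so multiple publishers can share one bus.
# Assumes the proxy is the binding side and agents connect to it.
import zmq

from config import settings  # hypothetical module path for the settings file above


def run_proxy() -> None:
    ctx = zmq.Context.instance()  # reuse ZMQ's global context instance

    # Publishers connect their PUB sockets here; the proxy receives on XSUB.
    xsub = ctx.socket(zmq.XSUB)
    xsub.bind(settings.zmq_settings.internal_pub_address)

    # Subscribers connect their SUB sockets here; the proxy sends on XPUB.
    xpub = ctx.socket(zmq.XPUB)
    xpub.bind(settings.zmq_settings.internal_sub_address)

    # Blocks forever, forwarding messages one way and subscription frames the other.
    zmq.proxy(xsub, xpub)


if __name__ == "__main__":
    run_proxy()

With an intermediary like this, each agent only needs the two addresses from ZMQSettings rather than knowing about every other publisher, which is what makes adding further publishers straightforward.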