"""Runtime configuration for the AI virtual-streamer service.

Groups the Whisper ASR settings, TTS voice, Ollama model name, server
bind address, and optional MCP tool-server launch settings (several of
which can be overridden via environment variables).
"""
import os
import shlex


def _env_bool(name: str, default: bool) -> bool:
|
||
value = os.getenv(name)
|
||
if value is None:
|
||
return default
|
||
return value.strip().lower() in {"1", "true", "yes", "on"}
|
||
|
||
|
||
def _env_args(name: str, default: str = "") -> list[str]:
|
||
value = os.getenv(name, default)
|
||
if not value.strip():
|
||
return []
|
||
return shlex.split(value)
|
||
|
||
|
||
# LLM persona prompt (user-facing, intentionally in Chinese: a friendly,
# humorous AI virtual streamer that answers briefly in spoken Mandarin and
# prefers tools for weather/news/web queries).
SYSTEM_MESSAGE = (
    "你是一个友好、幽默的AI虚拟主播。你可以看到用户摄像头传来的画面,也能听到他们的话。"
    "请用简短、自然、热情的中文口语回答,每次回答控制在两三句话以内,不要输出任何 Markdown 格式。"
    "当用户询问实时天气、最新新闻或网页信息时,优先使用可用工具先查询再回答。"
)

# Whisper speech-to-text settings.  NOTE(review): "compute_type"/"beam_size"
# suggest the faster-whisper API — confirm against the consumer of these.
WHISPER_MODEL_NAME = "base"
WHISPER_DEVICE = "cpu"
WHISPER_COMPUTE_TYPE = "int8"
WHISPER_LANGUAGE = "zh"
WHISPER_BEAM_SIZE = 5

# Text-to-speech voice id (Azure/edge-tts style name — presumably edge-tts;
# verify against the TTS client) and the Ollama chat model tag.
TTS_VOICE = "zh-CN-XiaoxiaoNeural"
OLLAMA_MODEL = "qwen3-vl:latest"

# HTTP server binding.  "0.0.0.0" listens on every interface.
SERVER_HOST = "0.0.0.0"
SERVER_PORT = 8000

# MCP (tool-server) integration — all overridable via environment variables.
ENABLE_MCP_TOOLS = _env_bool("ENABLE_MCP_TOOLS", True)
# Read timeout for MCP server calls, in seconds.
# NOTE(review): float() raises ValueError at import time on a malformed value.
MCP_SERVER_READ_TIMEOUT_SECONDS = float(os.getenv("MCP_SERVER_READ_TIMEOUT_SECONDS", "30"))

# Weather tool server: launch command plus shell-style argument list.
# Presumably an empty command disables the server — confirm against the caller.
MCP_WEATHER_SERVER_COMMAND = os.getenv("MCP_WEATHER_SERVER_COMMAND", "")
MCP_WEATHER_SERVER_ARGS = _env_args("MCP_WEATHER_SERVER_ARGS")

# Web-search tool server: launch command plus shell-style argument list.
MCP_WEBSEARCH_SERVER_COMMAND = os.getenv("MCP_WEBSEARCH_SERVER_COMMAND", "")
MCP_WEBSEARCH_SERVER_ARGS = _env_args("MCP_WEBSEARCH_SERVER_ARGS")