配置 OpenClaw
注意
推荐使用 openclaw 命令行工具去修改配置,这样能实时更新缓存,旧数据也会自动备份。
在 openclaw 2026.3.11 版本中,如果直接修改 openclaw.json 会出现很多其他生成的文件内容不一致问题。
配置大模型
1. 启用配置覆盖模式
默认是合并模式,这里推荐使用覆盖模式
bash
# 1. 设置为覆盖模式(替换旧配置,不合并)
openclaw config set models.mode replace
# 2. 验证是否生效
openclaw config get models.mode
# 应输出:replace
2. 命令行添加模型
命令行添加 Ollama 提供商 + 模型
提示
OpenClaw 的配置指令基于 Playbooks, 不支持追加,只能做覆盖!
bash
# 覆盖式添加/更新 Ollama 提供商(直接替换整个配置)
openclaw config set models.providers.ollama '{
"baseUrl": "http://127.0.0.1:11434",
"apiKey": "OLLAMA_API_KEY",
"api": "ollama",
"models": [
{
"id": "qwen3.5:0.8b",
"name": "qwen3.5:0.8b",
"reasoning": false,
"input": [
"text"
],
"cost": {
"input": 0,
"output": 0,
"cacheRead": 0,
"cacheWrite": 0
},
"contextWindow": 262144,
"maxTokens": 8192
}
]
}'
bash
# 覆盖式添加/更新 百炼(bailian)提供商(直接替换整个配置)
openclaw config set models.providers.bailian '{
"baseUrl": "https://dashscope.aliyuncs.com/compatible-mode/v1",
"apiKey": "<你的 DashScope API Key>",
"api": "openai-completions",
"models": [{
"id": "qwen-plus-2025-12-01",
"name": "qwen-plus-2025-12-01",
"reasoning": false,
"input": ["text"],
"contextWindow": 1048576,
"maxTokens": 32768
}, {
"id": "qwen-plus-1220",
"name": "qwen-plus-1220",
"reasoning": false,
"input": ["text"],
"contextWindow": 131072,
"maxTokens": 8192
}, {
"id": "qwen-plus-latest",
"name": "qwen-plus-latest",
"reasoning": false,
"input": ["text"],
"contextWindow": 1048576,
"maxTokens": 32768
}, {
"id": "qwen-plus-2025-09-11",
"name": "qwen-plus-2025-09-11",
"reasoning": false,
"input": ["text"],
"contextWindow": 1048576,
"maxTokens": 32768
}, {
"id": "qwen-plus-2025-01-25",
"name": "qwen-plus-2025-01-25",
"reasoning": false,
"input": ["text"],
"contextWindow": 131072,
"maxTokens": 8192
}, {
"id": "qwen-plus-2025-04-28",
"name": "qwen-plus-2025-04-28",
"reasoning": false,
"input": ["text"],
"contextWindow": 131072,
"maxTokens": 32768
}, {
"id": "qwen-plus-2025-07-14",
"name": "qwen-plus-2025-07-14",
"reasoning": false,
"input": ["text"],
"contextWindow": 1048576,
"maxTokens": 16384
}, {
"id": "qwen-plus-character",
"name": "qwen-plus-character",
"reasoning": false,
"input": ["text"],
"contextWindow": 32768,
"maxTokens": 4096
}, {
"id": "qwen-max-0919",
"name": "qwen-max-0919",
"reasoning": false,
"input": ["text"],
"contextWindow": 32768,
"maxTokens": 8192
}, {
"id": "qwen-max-2025-01-25",
"name": "qwen-max-2025-01-25",
"reasoning": false,
"input": ["text"],
"contextWindow": 131072,
"maxTokens": 8192
}, {
"id": "qwen-max-latest",
"name": "qwen-max-latest",
"reasoning": false,
"input": ["text"],
"contextWindow": 131072,
"maxTokens": 8192
}, {
"id": "qwen-max",
"name": "qwen-max",
"reasoning": false,
"input": ["text"],
"contextWindow": 32768,
"maxTokens": 8192
}
]
}'
bash
# 设为默认主模型(必须执行,才算“启用”)
openclaw models set bailian/qwen-plus-latest
# 查看当前默认模型
openclaw models status
bash
# 先获取已经加入的白名单
openclaw config get agents.defaults.models
# 将所有模型加入agents的模型白名单中
openclaw config set agents.defaults.models '{
"bailian/qwen-plus-2025-12-01": {},
"bailian/qwen-plus-1220": {},
"bailian/qwen-plus-latest": {},
"bailian/qwen-plus-2025-09-11": {},
"bailian/qwen-plus-2025-01-25": {},
"bailian/qwen-plus-2025-04-28": {},
"bailian/qwen-plus-2025-07-14": {},
"bailian/qwen-plus-character": {},
"bailian/qwen-max-0919": {},
"bailian/qwen-max-2025-01-25": {},
"bailian/qwen-max-latest": {},
"bailian/qwen-max": {}
}'
bash
# 先获取已设置信息
openclaw config get agents.defaults.model.fallbacks
# Fallback回退机制,允许上一个模型的 Token 消耗完(或报错/限额)后,自动切换调用下一个模型
openclaw config set agents.defaults.model.fallbacks '[
"bailian/qwen-plus-2025-12-01",
"bailian/qwen-plus-1220",
"bailian/qwen-plus-latest",
"bailian/qwen-plus-2025-09-11",
"bailian/qwen-plus-2025-01-25",
"bailian/qwen-plus-2025-04-28",
"bailian/qwen-plus-2025-07-14",
"bailian/qwen-plus-character",
"bailian/qwen-max-0919",
"bailian/qwen-max-2025-01-25",
"bailian/qwen-max-latest",
"bailian/qwen-max"
]'
bash
# 配置完记得重启服务
systemctl --user restart openclaw-gateway.service
便捷指令
bash
# 修改登录 Token,生成官方标准 48 位(和安装时位数一样)
openclaw config set gateway.auth.token "$(openssl rand -hex 24)"
# 查看新 Token,使用 openclaw config get 会加密
cat ~/.openclaw/openclaw.json | grep -C 3 '"token":'
# 修改远程大模型API Token
# 将 <Api Token> 替换为你的真实密钥
openclaw config set models.providers.bailian.apiKey "<Api Token>"
# 查看新 Token
cat ~/.openclaw/openclaw.json | grep -C 3 '"apiKey":'
本地大模型
本地大模型推荐使用 ollama 管理。具体内容见 👉 ollama使用指南
管理大模型
bash
# 查看本地模型
openclaw models list --local
# 查看指定提供商的模型列表
openclaw models list --provider ollama
# 查看模型列表更多指令帮助
openclaw models list --help
bash
# 获取 ollama 提供商的模型列表
openclaw config get models.providers.ollama.models --json
# 移除指定模型
# 注意:索引从 0 开始,[1] 代表列表中的第2项
openclaw config unset "models.providers.ollama.models[0]"
gateway 重置 token
bash
systemctl --user stop openclaw-gateway.service
# 生成官方标准 48 位 Token(和安装时一模一样)
openclaw config set gateway.auth.token "$(openssl rand -hex 24)"
# 查看新 Token
cat ~/.openclaw/openclaw.json | grep -C 3 '"token":'
systemctl --user start openclaw-gateway.service
升级 OpenClaw
bash
systemctl --user stop openclaw-gateway.service
# 推荐使用 openclaw 升级
openclaw update
# 使用 pnpm 升级到最新版
pnpm add openclaw@latest -g
# 小版本更新
pnpm update openclaw -g
systemctl --user start openclaw-gateway.service
修复
升级后需要修复,OpenClaw遇到问题,也可能需要修复
bash
# 从正在运行的网关获取健康情况
# → 前提:需要先启动 gateway
# → --verbose 详细模式
# → --json 使用 JSON 格式输出(终端上不够直观)
openclaw health --verbose --json
# 修复+迁移工具
# → 前提:不正常再执行修复
openclaw doctor