下载 oMLX
https://github.com/jundot/omlx
模型下载
https://huggingface.co/Qwen/Qwen3.5-4B
局域网请求测试
curl -H "Authorization: Bearer ApiKey" \
-H "Content-Type: application/json" \
-X POST http://192.168.2.20:8000/v1/completions \
-d '{
"model": "Qwen3.5-4B-MLX-4bit",
"prompt": "你好啊",
"max_tokens": 50
}'
OpenClaw 配置本地
"models": {
"mode": "merge",
"providers": {
"anthropic": {
"baseUrl": "http://192.168.2.20:8000/v1",
"apiKey": "ApiKey",
"api": "openai-completions",
"models": [
{
"id": "Qwen3.5-4B-MLX-4bit",
"name": "Qwen3.5-4B-MLX-4bit",
"reasoning": true,
"input": [
"text"
],
"contextWindow": 32000,
"maxTokens": 2048
}
]
}
}
},
"agents": {
"defaults": {
"model": {
"primary": "anthropic/Qwen3.5-4B-MLX-4bit"
},
"models": {
"anthropic/Qwen3.5-4B-MLX-4bit": {
"alias": "Qwen3.5-4B-MLX-4bit"
}
},
"workspace": "/Users/abin/.openclaw/workspace"
}
},
同一个url不同模型
"models": {
"mode": "merge",
"providers": {
"anthropic": {
"baseUrl": "http://192.168.2.20:8000/v1",
"apiKey": "ApiKey",
"api": "openai-completions",
"models": [
{
"id": "Qwen3.5-4B-MLX-4bit",
"name": "Qwen3.5-4B-MLX-4bit",
"reasoning": true,
"input": [
"text"
],
"contextWindow": 32000,
"maxTokens": 2048
},
{
"id": "Qwen3.5-4B",
"name": "Qwen3.5-4B",
"reasoning": true,
"input": [
"text"
],
"contextWindow": 32000,
"maxTokens": 2048
}
]
}
}
},
"agents": {
"defaults": {
"model": {
"primary": "anthropic/Qwen3.5-4B-MLX-4bit"
},
"models": {
"anthropic/Qwen3.5-4B-MLX-4bit": {
"alias": "Qwen3.5-4B-MLX-4bit"
},
"anthropic/Qwen3.5-4B": {
"alias": "Qwen3.5-4B"
}
},
"workspace": "/Users/abin/.openclaw/workspace"
}
},
评论区