feat(docker): OpenClaw agent fleet compose + real configs (MS22-P1a)
All checks were successful
ci/woodpecker/push/infra Pipeline was successful
This commit is contained in:
40
docker/openclaw-instances/jarvis-operations.json
Normal file
40
docker/openclaw-instances/jarvis-operations.json
Normal file
@@ -0,0 +1,40 @@
{
  "gateway": {
    "mode": "local",
    "port": 18789,
    "bind": "lan",
    "auth": { "mode": "token" },
    "http": {
      "endpoints": {
        "chatCompletions": { "enabled": true }
      }
    }
  },
  "agents": {
    "defaults": {
      "workspace": "/home/node/workspace",
      "model": { "primary": "ollama/cogito" }
    }
  },
  // Operations uses local Ollama Cogito as the primary model.
  "models": {
    "mode": "merge",
    "providers": {
      "ollama": {
        "baseUrl": "http://10.1.1.42:11434/v1",
        "api": "openai-completions",
        "models": [
          {
            "id": "cogito",
            "name": "Cogito (Local Reasoning)",
            "reasoning": false,
            "input": ["text"],
            "cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
            "contextWindow": 128000,
            "maxTokens": 8192
          }
        ]
      }
    }
  }
}
Reference in New Issue
Block a user