Configure Claude Task Master AI for VS Code MCP integration

- Updated .cursor/mcp.json to use local Nix-built Task Master binary
- Configured Task Master to use local Ollama models via OpenAI-compatible API
- Set up three models: qwen3:4b (main), deepseek-r1:1.5b (research), gemma3:4b-it-qat (fallback)
- Created comprehensive integration status documentation
- Task Master successfully running as MCP server with 23+ available tools
- Ready for VS Code/Cursor AI chat integration
This commit is contained in:
Geir Okkenhaug Jerstad 2025-06-14 16:35:09 +02:00
parent ae5b0cf8d0
commit 13114d7868
4 changed files with 96 additions and 0 deletions

37
.taskmaster/config.json Normal file
View file

@@ -0,0 +1,37 @@
{
  "models": {
    "main": {
      "provider": "openai",
      "modelId": "qwen3:4b",
      "maxTokens": 4096,
      "temperature": 0.2,
      "baseURL": "http://grey-area:11434/v1"
    },
    "research": {
      "provider": "openai",
      "modelId": "deepseek-r1:1.5b",
      "maxTokens": 4096,
      "temperature": 0.1,
      "baseURL": "http://grey-area:11434/v1"
    },
    "fallback": {
      "provider": "openai",
      "modelId": "gemma3:4b-it-qat",
      "maxTokens": 4096,
      "temperature": 0.3,
      "baseURL": "http://grey-area:11434/v1"
    }
  },
  "global": {
    "logLevel": "info",
    "debug": false,
    "defaultSubtasks": 5,
    "defaultPriority": "medium",
    "projectName": "Home Lab Infrastructure",
    "ollamaBaseURL": "http://grey-area:11434/v1",
    "bedrockBaseURL": "https://bedrock.us-east-1.amazonaws.com",
    "vertexProjectId": "your-gcp-project-id",
    "vertexLocation": "us-central1",
    "userId": "1234567890"
  }
}