
- Fix provider configuration from 'openai' to 'ollama' in .taskmaster/config.json
- Remove conflicting MCP configurations (.cursor/mcp.json, packages/.cursor/mcp.json)
- Standardize on a single .vscode/mcp.json configuration for VS Code
- Update environment variables for proper Ollama integration
- Add .env.taskmaster for easy environment setup (sketched below)
- Verify AI functionality: task creation, expansion, and research all working
- All models (qwen2.5-coder:7b, deepseek-r1:7b, llama3.1:8b) operational
- Cost: $0 (using the local Ollama server at grey-area:11434)

Resolves configuration conflicts and enables fully AI-powered task management with local models instead of external API dependencies.
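The exact contents of .env.taskmaster are not shown in this commit; the sketch below is a hypothetical example of what such a file might contain, assuming a variable name like OLLAMA_BASE_URL (the key names Task Master actually reads may differ, so check its documentation):

    # .env.taskmaster -- hypothetical sketch; the variable name below is an
    # assumption, not taken from this commit
    OLLAMA_BASE_URL=http://grey-area:11434/api
    # A local Ollama server needs no API key, so none is set here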
.taskmaster/config.json (35 lines, 874 B):
{
  "models": {
    "main": {
      "provider": "ollama",
      "modelId": "qwen2.5-coder:7b",
      "maxTokens": 8192,
      "temperature": 0.3
    },
    "research": {
      "provider": "ollama",
      "modelId": "deepseek-r1:7b",
      "maxTokens": 8192,
      "temperature": 0.1
    },
    "fallback": {
      "provider": "ollama",
      "modelId": "llama3.1:8b",
      "maxTokens": 8192,
      "temperature": 0.1
    }
  },
  "global": {
    "logLevel": "info",
    "debug": false,
    "defaultSubtasks": 5,
    "defaultPriority": "medium",
    "projectName": "Taskmaster",
    "ollamaBaseURL": "http://grey-area:11434/api",
    "openaiBaseURL": "http://grey-area:11434/v1",
    "bedrockBaseURL": "https://bedrock.us-east-1.amazonaws.com",
    "defaultTag": "master",
    "azureOpenaiBaseURL": "https://your-endpoint.openai.azure.com/",
    "userId": "1234567890"
  }
}
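To sanity-check this configuration, one can verify that the Ollama server referenced by ollamaBaseURL is reachable and has the three configured models pulled. A minimal check from any machine that can reach grey-area, using Ollama's standard HTTP API:

    # List the models available on the Ollama server; the output should
    # include qwen2.5-coder:7b, deepseek-r1:7b, and llama3.1:8b
    curl http://grey-area:11434/api/tags

    # Quick generation test against the main model
    curl http://grey-area:11434/api/generate \
      -d '{"model": "qwen2.5-coder:7b", "prompt": "Say hello.", "stream": false}'

Note that openaiBaseURL points at http://grey-area:11434/v1, Ollama's OpenAI-compatible endpoint, so OpenAI-style clients are served by the same local server rather than by an external API.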