fix: resolve Taskmaster AI MCP integration with local Ollama models

- Fix provider configuration from 'anthropic'/'perplexity' to 'ollama' in .taskmaster/config.json
- Remove conflicting MCP configurations (.cursor/mcp.json, packages/.cursor/mcp.json)
- Standardize on single .vscode/mcp.json configuration for VS Code
- Update environment variables for proper Ollama integration
- Add .env.taskmaster for easy environment setup
- Verify AI functionality: task creation, expansion, and research working
- All models (qwen2.5-coder:7b, deepseek-r1:7b, llama3.1:8b) operational
- Cost: $0 (using local Ollama server at grey-area:11434)

Resolves configuration conflicts and enables full AI-powered task management
with local models instead of external API dependencies.
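
As a quick sanity check (a sketch, assuming grey-area resolves from the client and the models are already pulled), the Ollama server can be probed directly before wiring up the MCP server:

    # List the models the Ollama server exposes (native API)
    curl http://grey-area:11434/api/tags

    # Exercise the OpenAI-compatible endpoint that taskmaster is pointed at
    curl http://grey-area:11434/v1/chat/completions \
      -H "Content-Type: application/json" \
      -d '{"model": "qwen2.5-coder:7b", "messages": [{"role": "user", "content": "ping"}]}'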
Geir Okkenhaug Jerstad 2025-06-18 16:16:27 +02:00
parent 2e193e00e9
commit 54e80f5c13
4 changed files with 25 additions and 45 deletions

.cursor/mcp.json Deleted file

@@ -1,12 +0,0 @@
-{
-  "mcpServers": {
-    "task-master-ai": {
-      "command": "/home/geir/Home-lab/result/bin/task-master-ai",
-      "args": [],
-      "env": {
-        "OPENAI_API_KEY": "fake-key-for-local-ollama",
-        "OPENAI_BASE_URL": "http://grey-area:11434/v1"
-      }
-    }
-  }
-}
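
The retained .vscode/mcp.json is not part of this diff; a hypothetical sketch of what it might contain, mirroring the deleted file above but using the "ollama" placeholder key from .env.taskmaster:

    {
      "mcpServers": {
        "task-master-ai": {
          "command": "/home/geir/Home-lab/result/bin/task-master-ai",
          "args": [],
          "env": {
            "OPENAI_API_KEY": "ollama",
            "OPENAI_BASE_URL": "http://grey-area:11434/v1"
          }
        }
      }
    }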

.env.taskmaster Normal file

@@ -0,0 +1,14 @@
+# Taskmaster AI Environment Variables
+# Source this file to set up Ollama integration for taskmaster
+export OPENAI_API_BASE="http://grey-area:11434/v1"
+export OPENAI_API_KEY="ollama"
+export OPENAI_BASE_URL="http://grey-area:11434/v1"
+export OLLAMA_BASE_URL="http://grey-area:11434/api"
+export MODEL="qwen2.5-coder:7b"
+export RESEARCH_MODEL="deepseek-r1:7b"
+export FALLBACK_MODEL="llama3.1:8b"
+export MAX_TOKENS="8192"
+export TEMPERATURE="0.3"
+
+echo "✅ Taskmaster AI environment variables loaded"
+echo "🤖 Using Ollama models at grey-area:11434"

.taskmaster/config.json

@ -1,20 +1,20 @@
{
"models": {
"main": {
"provider": "anthropic",
"modelId": "claude-3-7-sonnet-20250219",
"maxTokens": 120000,
"temperature": 0.2
"provider": "ollama",
"modelId": "qwen2.5-coder:7b",
"maxTokens": 8192,
"temperature": 0.3
},
"research": {
"provider": "perplexity",
"modelId": "sonar-pro",
"maxTokens": 8700,
"provider": "ollama",
"modelId": "deepseek-r1:7b",
"maxTokens": 8192,
"temperature": 0.1
},
"fallback": {
"provider": "anthropic",
"modelId": "claude-3-5-sonnet-20240620",
"provider": "ollama",
"modelId": "llama3.1:8b",
"maxTokens": 8192,
"temperature": 0.1
}
@@ -25,7 +25,8 @@
     "defaultSubtasks": 5,
     "defaultPriority": "medium",
     "projectName": "Taskmaster",
-    "ollamaBaseURL": "http://localhost:11434/api",
+    "ollamaBaseURL": "http://grey-area:11434/api",
+    "openaiBaseURL": "http://grey-area:11434/v1",
     "bedrockBaseURL": "https://bedrock.us-east-1.amazonaws.com",
     "defaultTag": "master",
    "azureOpenaiBaseURL": "https://your-endpoint.openai.azure.com/",

packages/.cursor/mcp.json Deleted file

@@ -1,23 +0,0 @@
-{
-  "mcpServers": {
-    "task-master-ai": {
-      "command": "npx",
-      "args": [
-        "-y",
-        "--package=task-master-ai",
-        "task-master-ai"
-      ],
-      "env": {
-        "ANTHROPIC_API_KEY": "ANTHROPIC_API_KEY_HERE",
-        "PERPLEXITY_API_KEY": "PERPLEXITY_API_KEY_HERE",
-        "OPENAI_API_KEY": "OPENAI_API_KEY_HERE",
-        "GOOGLE_API_KEY": "GOOGLE_API_KEY_HERE",
-        "XAI_API_KEY": "XAI_API_KEY_HERE",
-        "OPENROUTER_API_KEY": "OPENROUTER_API_KEY_HERE",
-        "MISTRAL_API_KEY": "MISTRAL_API_KEY_HERE",
-        "AZURE_OPENAI_API_KEY": "AZURE_OPENAI_API_KEY_HERE",
-        "OLLAMA_API_KEY": "OLLAMA_API_KEY_HERE"
-      }
-    }
-  }
-}