---
# ~/.claude/conclave.yaml
# Configuration for /conclave command - Multi-LLM task dispatch

# API Keys (preferred over environment variables)
# Uncomment and add your keys:
# api_keys:
#   gemini: "your-gemini-api-key"
#   openai: "your-openai-api-key"
#   anthropic: "your-anthropic-api-key"
#   perplexity: "your-perplexity-api-key"

# Provider Configuration
providers:
  gemini:
    model: gemini-2.5-pro  # Recommended: 1M context, best for code analysis
    # model: gemini-2.5-flash  # Faster, still 1M context
    # model: gemini-3-pro  # Requires Ultra subscription (waitlist for free tier)
    default_flags:
      - --output-format
      - text
    # IMPORTANT: Use stdin piping for file content, not @file.md syntax
    # Example: cat file.md | gemini -m gemini-2.5-pro "prompt"

  openai:
    model: gpt-5.2  # Strongest (requires ChatGPT login)
    # model: gpt-5.1-codex-max  # Default for Pro subscription
    # model: gpt-4o  # API key only (pay per token)
    auth: chatgpt  # chatgpt (subscription) or api-key
    default_flags:
      - --json

  claude:
    model: sonnet  # Best balance of speed and capability
    # model: opus  # Most capable - complex reasoning
    # model: haiku  # Fastest - quick analysis
    default_flags:
      - --print
      - --output-format
      - text
    # Uses ANTHROPIC_API_KEY env var or existing Claude Code login

  perplexity:
    model: sonar-pro  # Best balance: complex queries, more citations
    # model: sonar  # Fast, cost-effective for quick facts
    # model: sonar-reasoning  # Multi-step problem solving
    # model: sonar-reasoning-pro  # Deep reasoning (DeepSeek-R1 based)
    default_flags: []
    # Unique: returns web citations with every response
    # Get API key at: https://www.perplexity.ai/settings/api

# Consensus Mode (--all) Settings
consensus:
  providers: [gemini, openai, claude]  # Which providers participate
  # providers: [gemini, openai, claude, perplexity]  # Include Perplexity for web-grounded consensus
  require_consensus: true  # All must agree for YES/NO verdicts

# Default behavior
defaults:
  provider: gemini  # Default provider when none specified
  output: text  # text or json
  verbosity: default  # brief, default, or detailed