opencode/opencode-schema.json

{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "agent": {
      "description": "Agent configuration",
      "properties": {
        "maxTokens": {
          "description": "Maximum tokens for the agent",
          "minimum": 1,
          "type": "integer"
        },
        "model": {
          "description": "Model ID for the agent",
          "enum": [
            "bedrock.claude-3.7-sonnet",
            "claude-3-haiku",
            "claude-3.7-sonnet",
            "claude-3.5-haiku",
            "o3",
            "azure.o3",
            "gpt-4.5-preview",
            "azure.gpt-4.5-preview",
            "o1-pro",
            "o4-mini",
            "azure.o4-mini",
            "gpt-4.1",
            "azure.gpt-4.1",
            "o3-mini",
            "azure.o3-mini",
            "gpt-4.1-nano",
            "azure.gpt-4.1-nano",
            "gpt-4o-mini",
            "azure.gpt-4o-mini",
            "o1",
            "azure.o1",
            "gemini-2.5-flash",
            "qwen-qwq",
            "meta-llama/llama-4-maverick-17b-128e-instruct",
            "claude-3-opus",
            "gpt-4o",
            "azure.gpt-4o",
            "gemini-2.0-flash-lite",
            "gemini-2.0-flash",
            "deepseek-r1-distill-llama-70b",
            "llama-3.3-70b-versatile",
            "claude-3.5-sonnet",
            "o1-mini",
            "azure.o1-mini",
            "gpt-4.1-mini",
            "azure.gpt-4.1-mini",
            "gemini-2.5",
            "meta-llama/llama-4-scout-17b-16e-instruct"
          ],
          "type": "string"
        },
        "reasoningEffort": {
          "description": "Reasoning effort for models that support it (OpenAI, Anthropic)",
          "enum": [
            "low",
            "medium",
            "high"
          ],
          "type": "string"
        }
      },
      "required": [
        "model"
      ],
      "type": "object"
    }
  },
  "description": "Configuration schema for the OpenCode application",
  "properties": {
    "agents": {
      "additionalProperties": {
        "$ref": "#/definitions/agent"
      },
      "description": "Agent configurations",
      "properties": {
        "coder": {
          "$ref": "#/definitions/agent"
        },
        "task": {
          "$ref": "#/definitions/agent"
        },
        "title": {
          "$ref": "#/definitions/agent"
        }
      },
      "type": "object"
    },
    "contextPaths": {
      "default": [
        ".github/copilot-instructions.md",
        ".cursorrules",
        ".cursor/rules/",
        "CLAUDE.md",
        "CLAUDE.local.md",
        "opencode.md",
        "opencode.local.md",
        "OpenCode.md",
        "OpenCode.local.md",
        "OPENCODE.md",
        "OPENCODE.local.md"
      ],
      "description": "Context paths for the application",
      "items": {
        "type": "string"
      },
      "type": "array"
    },
    "data": {
      "description": "Storage configuration",
      "properties": {
        "directory": {
          "default": ".opencode",
          "description": "Directory where application data is stored",
          "type": "string"
        }
      },
      "required": [
        "directory"
      ],
      "type": "object"
    },
    "debug": {
      "default": false,
      "description": "Enable debug mode",
      "type": "boolean"
    },
    "debugLSP": {
      "default": false,
      "description": "Enable LSP debug mode",
      "type": "boolean"
    },
    "lsp": {
      "additionalProperties": {
        "description": "LSP configuration for a language",
        "properties": {
          "args": {
            "description": "Command arguments for the LSP server",
            "items": {
              "type": "string"
            },
            "type": "array"
          },
          "command": {
            "description": "Command to execute for the LSP server",
            "type": "string"
          },
          "disabled": {
            "default": false,
            "description": "Whether the LSP is disabled",
            "type": "boolean"
          },
          "options": {
            "description": "Additional options for the LSP server",
            "type": "object"
          }
        },
        "required": [
          "command"
        ],
        "type": "object"
      },
      "description": "Language Server Protocol configurations",
      "type": "object"
    },
    "mcpServers": {
      "additionalProperties": {
        "description": "MCP server configuration",
        "properties": {
          "args": {
            "description": "Command arguments for the MCP server",
            "items": {
              "type": "string"
            },
            "type": "array"
          },
          "command": {
            "description": "Command to execute for the MCP server",
            "type": "string"
          },
          "env": {
            "description": "Environment variables for the MCP server",
            "items": {
              "type": "string"
            },
            "type": "array"
          },
          "headers": {
            "additionalProperties": {
              "type": "string"
            },
            "description": "HTTP headers for SSE type MCP servers",
            "type": "object"
          },
          "type": {
            "default": "stdio",
            "description": "Type of MCP server",
            "enum": [
              "stdio",
              "sse"
            ],
            "type": "string"
          },
          "url": {
            "description": "URL for SSE type MCP servers",
            "type": "string"
          }
        },
        "required": [
          "command"
        ],
        "type": "object"
      },
      "description": "Model Context Protocol (MCP) server configurations",
      "type": "object"
    },
    "providers": {
      "additionalProperties": {
        "description": "Provider configuration",
        "properties": {
          "apiKey": {
            "description": "API key for the provider",
            "type": "string"
          },
          "disabled": {
            "default": false,
            "description": "Whether the provider is disabled",
            "type": "boolean"
          },
          "provider": {
            "description": "Provider type",
            "enum": [
              "anthropic",
              "openai",
              "gemini",
              "groq",
              "bedrock",
              "azure"
            ],
            "type": "string"
          }
        },
        "type": "object"
      },
      "description": "LLM provider configurations",
      "type": "object"
    },
    "wd": {
      "description": "Working directory for the application",
      "type": "string"
    }
  },
  "title": "OpenCode Configuration",
  "type": "object"
}
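
For reference, a minimal configuration that should validate against the schema above might look like the following. This is a sketch, not the project's official sample: the file name .opencode.json, the chosen models and token limits, the gopls entry, and the MCP server name are illustrative assumptions, and the API key is a placeholder.

{
  "data": {
    "directory": ".opencode"
  },
  "providers": {
    "anthropic": {
      "apiKey": "YOUR_ANTHROPIC_API_KEY",
      "disabled": false
    }
  },
  "agents": {
    "coder": {
      "model": "claude-3.7-sonnet",
      "maxTokens": 8000
    },
    "title": {
      "model": "claude-3.5-haiku",
      "maxTokens": 80
    }
  },
  "lsp": {
    "go": {
      "command": "gopls"
    }
  },
  "mcpServers": {
    "example-stdio-server": {
      "type": "stdio",
      "command": "npx",
      "args": ["-y", "example-mcp-server"]
    }
  },
  "debug": false
}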