packages/opencode/src/provider/provider.ts (256 additions, 0 deletions)
@@ -387,6 +387,18 @@ export namespace Provider {
},
}
},
maple: async () => {
// Maple AI uses a local proxy (maple-proxy) that handles TEE attestation
// and encryption. The proxy runs on localhost:8080 by default.
// Users need to run the maple-proxy or use the Maple desktop app.
// See: https://blog.trymaple.ai/maple-proxy-documentation/
return {
autoload: false,
options: {
baseURL: "http://localhost:8080/v1",
},
}
},
}

export const Model = z
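
The loader above points the provider at a local maple-proxy and leaves `autoload` off, so nothing in this hunk verifies the proxy is actually reachable. Below is a minimal, hedged sketch of how a caller could probe it before selecting the provider; the `isMapleProxyUp` helper name is hypothetical, and treating the OpenAI-compatible `GET /v1/models` route (with `MAPLE_API_KEY` as a bearer token) as available on maple-proxy is an assumption, not something this PR establishes.

```ts
// Hypothetical helper -- not part of this PR. Probes the local maple-proxy
// before the "maple" provider is offered, since `autoload: false` means the
// proxy is never checked at startup. Assumes maple-proxy exposes the
// OpenAI-compatible GET /v1/models route on localhost:8080.
async function isMapleProxyUp(baseURL = "http://localhost:8080/v1"): Promise<boolean> {
  try {
    const res = await fetch(`${baseURL}/models`, {
      headers: { Authorization: `Bearer ${process.env["MAPLE_API_KEY"] ?? ""}` },
    })
    return res.ok
  } catch {
    // fetch throws on network errors, e.g. when maple-proxy or the Maple
    // desktop app is not running -- treat the provider as unavailable.
    return false
  }
}
```
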
@@ -585,6 +597,250 @@ export namespace Provider {
}
}

// Add Maple AI provider - uses maple-proxy for TEE attestation
// Maple AI provides private, encrypted LLM access via Trusted Execution Environments
// See: https://blog.trymaple.ai/maple-proxy-documentation/
database["maple"] = {
id: "maple",
name: "Maple AI",
source: "custom",
env: ["MAPLE_API_KEY"],
options: {},
models: {
"llama-3.3-70b": {
id: "llama-3.3-70b",
providerID: "maple",
name: "Llama 3.3 70B",
family: "llama",
api: {
id: "llama-3.3-70b",
url: "http://localhost:8080/v1",
npm: "@ai-sdk/openai-compatible",
},
status: "active",
headers: {},
options: {},
cost: {
input: 4,
output: 4,
cache: { read: 0, write: 0 },
},
limit: {
context: 128000,
output: 8192,
},
capabilities: {
temperature: true,
reasoning: false,
attachment: false,
toolcall: true,
input: { text: true, audio: false, image: false, video: false, pdf: false },
output: { text: true, audio: false, image: false, video: false, pdf: false },
interleaved: false,
},
release_date: "2024-12-01",
},
"deepseek-r1-0528": {
id: "deepseek-r1-0528",
providerID: "maple",
name: "DeepSeek R1",
family: "deepseek",
api: {
id: "deepseek-r1-0528",
url: "http://localhost:8080/v1",
npm: "@ai-sdk/openai-compatible",
},
status: "active",
headers: {},
options: {},
cost: {
input: 4,
output: 4,
cache: { read: 0, write: 0 },
},
limit: {
context: 128000,
output: 8192,
},
capabilities: {
temperature: true,
reasoning: true,
attachment: false,
toolcall: true,
input: { text: true, audio: false, image: false, video: false, pdf: false },
output: { text: true, audio: false, image: false, video: false, pdf: false },
interleaved: false,
},
release_date: "2025-05-28",
},
"gpt-oss-120b": {
id: "gpt-oss-120b",
providerID: "maple",
name: "GPT-OSS 120B",
family: "gpt-oss",
api: {
id: "gpt-oss-120b",
url: "http://localhost:8080/v1",
npm: "@ai-sdk/openai-compatible",
},
status: "active",
headers: {},
options: {},
cost: {
input: 4,
output: 4,
cache: { read: 0, write: 0 },
},
limit: {
context: 128000,
output: 8192,
},
capabilities: {
temperature: true,
reasoning: false,
attachment: false,
toolcall: true,
input: { text: true, audio: false, image: false, video: false, pdf: false },
output: { text: true, audio: false, image: false, video: false, pdf: false },
interleaved: false,
},
release_date: "2025-01-01",
},
"qwen3-coder-480b": {
id: "qwen3-coder-480b",
providerID: "maple",
name: "Qwen3 Coder 480B",
family: "qwen3",
api: {
id: "qwen3-coder-480b",
url: "http://localhost:8080/v1",
npm: "@ai-sdk/openai-compatible",
},
status: "active",
headers: {},
options: {},
cost: {
input: 4,
output: 4,
cache: { read: 0, write: 0 },
},
limit: {
context: 200000,
output: 65536,
},
capabilities: {
temperature: true,
reasoning: false,
attachment: false,
toolcall: true,
input: { text: true, audio: false, image: false, video: false, pdf: false },
output: { text: true, audio: false, image: false, video: false, pdf: false },
interleaved: false,
},
release_date: "2025-07-22",
},
"qwen2-5-72b": {
id: "qwen2-5-72b",
providerID: "maple",
name: "Qwen 2.5 72B",
family: "qwen2.5",
api: {
id: "qwen2-5-72b",
url: "http://localhost:8080/v1",
npm: "@ai-sdk/openai-compatible",
},
status: "active",
headers: {},
options: {},
cost: {
input: 4,
output: 4,
cache: { read: 0, write: 0 },
},
limit: {
context: 128000,
output: 8192,
},
capabilities: {
temperature: true,
reasoning: false,
attachment: false,
toolcall: true,
input: { text: true, audio: false, image: false, video: false, pdf: false },
output: { text: true, audio: false, image: false, video: false, pdf: false },
interleaved: false,
},
release_date: "2024-09-01",
},
"mistral-small-3-1-24b": {
id: "mistral-small-3-1-24b",
providerID: "maple",
name: "Mistral Small 3.1 24B",
family: "mistral",
api: {
id: "mistral-small-3-1-24b",
url: "http://localhost:8080/v1",
npm: "@ai-sdk/openai-compatible",
},
status: "active",
headers: {},
options: {},
cost: {
input: 4,
output: 4,
cache: { read: 0, write: 0 },
},
limit: {
context: 128000,
output: 8192,
},
capabilities: {
temperature: true,
reasoning: false,
attachment: true,
toolcall: true,
input: { text: true, audio: false, image: true, video: false, pdf: false },
output: { text: true, audio: false, image: false, video: false, pdf: false },
interleaved: false,
},
release_date: "2025-01-01",
},
"gemma-3-27b-it": {
id: "leon-se/gemma-3-27b-it-fp8-dynamic",
providerID: "maple",
name: "Gemma 3 27B (Image Analysis)",
family: "gemma",
api: {
id: "leon-se/gemma-3-27b-it-fp8-dynamic",
url: "http://localhost:8080/v1",
npm: "@ai-sdk/openai-compatible",
},
status: "active",
headers: {},
options: {},
cost: {
input: 10,
output: 10,
cache: { read: 0, write: 0 },
},
limit: {
context: 128000,
output: 8192,
},
capabilities: {
temperature: true,
reasoning: false,
attachment: true,
toolcall: true,
input: { text: true, audio: false, image: true, video: false, pdf: false },
output: { text: true, audio: false, image: false, video: false, pdf: false },
interleaved: false,
},
release_date: "2025-01-01",
},
},
}

function mergeProvider(providerID: string, provider: Partial<Info>) {
const existing = providers[providerID]
if (existing) {
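
For context on how the model entries above are exercised at request time, here is an illustrative sketch of a chat completion call sent directly to maple-proxy's OpenAI-compatible endpoint using one of the registered model IDs. It is a sketch only: in opencode the actual requests go through `@ai-sdk/openai-compatible`, and sending `MAPLE_API_KEY` as a bearer token is an assumption based on the provider's `env` entry.

```ts
// Illustrative only -- not part of this PR. A direct chat completion request
// against the local maple-proxy, which handles TEE attestation and encryption
// on the client's behalf.
async function mapleChatExample() {
  const res = await fetch("http://localhost:8080/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      // Assumption: maple-proxy accepts the Maple API key as a bearer token.
      Authorization: `Bearer ${process.env["MAPLE_API_KEY"]}`,
    },
    body: JSON.stringify({
      model: "llama-3.3-70b", // any model ID from the database entry above
      messages: [{ role: "user", content: "Hello from opencode" }],
    }),
  })
  if (!res.ok) throw new Error(`maple-proxy returned ${res.status}`)
  const data = await res.json()
  console.log(data.choices?.[0]?.message?.content)
}
```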