1 change: 1 addition & 0 deletions .github/ISSUE_TEMPLATE/bug_report.yml
@@ -15,6 +15,7 @@ body:
        - AWS Bedrock
        - OpenAI
        - OpenAI Compatible
        - LM Studio
        - Ollama
    validations:
      required: true
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,13 @@
# Change Log

## [2.1.6]

- Add LM Studio as an API provider option (make sure to start the LM Studio server to use it with the extension!)

## [2.1.5]

- Add support for prompt caching for new Claude model IDs on OpenRouter (e.g. `anthropic/claude-3.5-sonnet-20240620`)

## [2.1.4]

- AWS Bedrock fixes (add missing regions, support for cross-region inference, and older Sonnet model for regions where new model is not available)
4 changes: 2 additions & 2 deletions README.md
@@ -78,7 +78,7 @@ Thanks to [Claude 3.5 Sonnet's agentic coding capabilities](https://www-cdn.ant

### Use any API and Model

Cline supports API providers like OpenRouter, Anthropic, OpenAI, Google Gemini, AWS Bedrock, Azure, and GCP Vertex. You can also configure any OpenAI compatible API, or use a local model through Ollama. If you're using OpenRouter, the extension fetches their latest model list, allowing you to use the newest models as soon as they're available.
Cline supports API providers like OpenRouter, Anthropic, OpenAI, Google Gemini, AWS Bedrock, Azure, and GCP Vertex. You can also configure any OpenAI compatible API, or use a local model through LM Studio/Ollama. If you're using OpenRouter, the extension fetches their latest model list, allowing you to use the newest models as soon as they're available.

The extension also keeps track of total tokens and API usage cost for the entire task loop and individual requests, keeping you informed of spend every step of the way.

@@ -138,7 +138,7 @@ To contribute to the project, start by exploring [open issues](https://github.co
<details>
<summary>Local Development Instructions</summary>

1. Clone the repository:
1. Clone the repository _(Requires [git-lfs](https://git-lfs.com/))_:
```bash
git clone https://github.com/cline/cline.git
```
Binary file added bin/roo-cline-2.0.1.vsix
Binary file not shown.
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -2,7 +2,7 @@
	"name": "roo-cline",
	"displayName": "Roo Cline",
	"description": "Autonomous coding agent right in your IDE, capable of creating/editing files, running commands, using the browser, and more with your permission every step of the way.",
	"version": "2.0.0",
	"version": "2.0.1",
	"icon": "assets/icons/icon.png",
	"galleryBanner": {
		"color": "#617A91",
3 changes: 3 additions & 0 deletions src/api/index.ts
@@ -6,6 +6,7 @@ import { OpenRouterHandler } from "./providers/openrouter"
import { VertexHandler } from "./providers/vertex"
import { OpenAiHandler } from "./providers/openai"
import { OllamaHandler } from "./providers/ollama"
import { LmStudioHandler } from "./providers/lmstudio"
import { GeminiHandler } from "./providers/gemini"
import { OpenAiNativeHandler } from "./providers/openai-native"
import { ApiStream } from "./transform/stream"
@@ -30,6 +31,8 @@ export function buildApiHandler(configuration: ApiConfiguration): ApiHandler {
			return new OpenAiHandler(options)
		case "ollama":
			return new OllamaHandler(options)
		case "lmstudio":
			return new LmStudioHandler(options)
		case "gemini":
			return new GeminiHandler(options)
		case "openai-native":
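The factory now routes `"lmstudio"` to the new handler alongside the existing providers. A minimal sketch of selecting it from calling code, assuming the `ApiConfiguration` fields added in `src/shared/api.ts` (the model ID is a placeholder for whatever is loaded in LM Studio):

```typescript
import { buildApiHandler } from "./api"

// Hypothetical configuration: buildApiHandler dispatches on apiProvider.
const handler = buildApiHandler({
	apiProvider: "lmstudio",
	lmStudioBaseUrl: "http://localhost:1234", // the handler's default when omitted
	lmStudioModelId: "llama-3.2-3b-instruct", // placeholder for the loaded model
})
```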
56 changes: 56 additions & 0 deletions src/api/providers/lmstudio.ts
@@ -0,0 +1,56 @@
import { Anthropic } from "@anthropic-ai/sdk"
import OpenAI from "openai"
import { ApiHandler } from "../"
import { ApiHandlerOptions, ModelInfo, openAiModelInfoSaneDefaults } from "../../shared/api"
import { convertToOpenAiMessages } from "../transform/openai-format"
import { ApiStream } from "../transform/stream"

export class LmStudioHandler implements ApiHandler {
	private options: ApiHandlerOptions
	private client: OpenAI

	constructor(options: ApiHandlerOptions) {
		this.options = options
		this.client = new OpenAI({
			baseURL: (this.options.lmStudioBaseUrl || "http://localhost:1234") + "/v1",
			apiKey: "noop",
		})
	}

	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
			{ role: "system", content: systemPrompt },
			...convertToOpenAiMessages(messages),
		]

		try {
			const stream = await this.client.chat.completions.create({
				model: this.getModel().id,
				messages: openAiMessages,
				temperature: 0,
				stream: true,
			})
			for await (const chunk of stream) {
				const delta = chunk.choices[0]?.delta
				if (delta?.content) {
					yield {
						type: "text",
						text: delta.content,
					}
				}
			}
		} catch (error) {
			// LM Studio doesn't return an error code/body for now
			throw new Error(
				"Please check the LM Studio developer logs to debug what went wrong. You may need to load the model with a larger context length to work with Cline's prompts."
			)
		}
	}

	getModel(): { id: string; info: ModelInfo } {
		return {
			id: this.options.lmStudioModelId || "",
			info: openAiModelInfoSaneDefaults,
		}
	}
}
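Since LM Studio's local server needs no authentication, the handler passes a `"noop"` API key and only varies the base URL. `createMessage` is an async generator, so callers drain the returned `ApiStream` with `for await`. A minimal usage sketch, assuming the server is running with a model loaded (the model ID is illustrative):

```typescript
// Sketch only: assumes LM Studio is serving on its default port.
const handler = new LmStudioHandler({ lmStudioModelId: "llama-3.2-3b-instruct" })

async function run() {
	const stream = handler.createMessage("You are a concise assistant.", [
		{ role: "user", content: "Say hello." },
	])
	for await (const chunk of stream) {
		if (chunk.type === "text") {
			process.stdout.write(chunk.text) // print tokens as they stream in
		}
	}
}

run()
```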
16 changes: 16 additions & 0 deletions src/api/providers/openrouter.ts
@@ -31,9 +31,19 @@ export class OpenRouterHandler implements ApiHandler {
		]

		// prompt caching: https://openrouter.ai/docs/prompt-caching
		// this is specifically for claude models (some models may 'support prompt caching' automatically without this)
		switch (this.getModel().id) {
			case "anthropic/claude-3.5-sonnet":
			case "anthropic/claude-3.5-sonnet:beta":
			case "anthropic/claude-3.5-sonnet-20240620":
			case "anthropic/claude-3.5-sonnet-20240620:beta":
			case "anthropic/claude-3-5-haiku":
			case "anthropic/claude-3-5-haiku:beta":
			case "anthropic/claude-3-5-haiku-20241022":
			case "anthropic/claude-3-5-haiku-20241022:beta":
			case "anthropic/claude-3-haiku":
			case "anthropic/claude-3-haiku:beta":
			case "anthropic/claude-3-opus":
			case "anthropic/claude-3-opus:beta":
				openAiMessages[0] = {
					role: "system",
@@ -76,6 +86,12 @@
		switch (this.getModel().id) {
			case "anthropic/claude-3.5-sonnet":
			case "anthropic/claude-3.5-sonnet:beta":
			case "anthropic/claude-3.5-sonnet-20240620":
			case "anthropic/claude-3.5-sonnet-20240620:beta":
			case "anthropic/claude-3-5-haiku":
			case "anthropic/claude-3-5-haiku:beta":
			case "anthropic/claude-3-5-haiku-20241022":
			case "anthropic/claude-3-5-haiku-20241022:beta":
				maxTokens = 8_192
				break
		}
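The first switch rewrites the plain-string system message into a content array carrying a cache breakpoint; the assignment is truncated in the diff above. A sketch of the presumed shape, assumed from the OpenRouter prompt-caching docs linked in the comment:

```typescript
// Assumed continuation of the truncated assignment: the system prompt
// becomes a text block with an ephemeral cache_control breakpoint.
openAiMessages[0] = {
	role: "system",
	content: [
		{
			type: "text",
			text: systemPrompt,
			// @ts-ignore-next-line -- cache_control is an OpenRouter extension, not in the OpenAI SDK types
			cache_control: { type: "ephemeral" },
		},
	],
}
```

The second switch raises `maxTokens` to 8,192 for the Sonnet and Haiku variants, matching the higher output limit those Anthropic models support.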
35 changes: 35 additions & 0 deletions src/core/webview/ClineProvider.ts
@@ -53,6 +53,8 @@ type GlobalStateKey =
| "openAiModelId"
| "ollamaModelId"
| "ollamaBaseUrl"
| "lmStudioModelId"
| "lmStudioBaseUrl"
| "anthropicBaseUrl"
| "azureApiVersion"
| "openRouterModelId"
@@ -363,6 +365,8 @@
			openAiModelId,
			ollamaModelId,
			ollamaBaseUrl,
			lmStudioModelId,
			lmStudioBaseUrl,
			anthropicBaseUrl,
			geminiApiKey,
			openAiNativeApiKey,
@@ -386,6 +390,8 @@
await this.updateGlobalState("openAiModelId", openAiModelId)
await this.updateGlobalState("ollamaModelId", ollamaModelId)
await this.updateGlobalState("ollamaBaseUrl", ollamaBaseUrl)
await this.updateGlobalState("lmStudioModelId", lmStudioModelId)
await this.updateGlobalState("lmStudioBaseUrl", lmStudioBaseUrl)
await this.updateGlobalState("anthropicBaseUrl", anthropicBaseUrl)
await this.storeSecret("geminiApiKey", geminiApiKey)
await this.storeSecret("openAiNativeApiKey", openAiNativeApiKey)
@@ -460,6 +466,10 @@
					const ollamaModels = await this.getOllamaModels(message.text)
					this.postMessageToWebview({ type: "ollamaModels", ollamaModels })
					break
				case "requestLmStudioModels":
					const lmStudioModels = await this.getLmStudioModels(message.text)
					this.postMessageToWebview({ type: "lmStudioModels", lmStudioModels })
					break
				case "refreshOpenRouterModels":
					await this.refreshOpenRouterModels()
					break
@@ -527,6 +537,25 @@
		}
	}

	// LM Studio

	async getLmStudioModels(baseUrl?: string) {
		try {
			if (!baseUrl) {
				baseUrl = "http://localhost:1234"
			}
			if (!URL.canParse(baseUrl)) {
				return []
			}
			const response = await axios.get(`${baseUrl}/v1/models`)
			const modelsArray = response.data?.data?.map((model: any) => model.id) || []
			const models = [...new Set<string>(modelsArray)]
			return models
		} catch (error) {
			return []
		}
	}

	// OpenRouter

	async handleOpenRouterCallback(code: string) {
@@ -855,6 +884,8 @@
			openAiModelId,
			ollamaModelId,
			ollamaBaseUrl,
			lmStudioModelId,
			lmStudioBaseUrl,
			anthropicBaseUrl,
			geminiApiKey,
			openAiNativeApiKey,
@@ -884,6 +915,8 @@
this.getGlobalState("openAiModelId") as Promise<string | undefined>,
this.getGlobalState("ollamaModelId") as Promise<string | undefined>,
this.getGlobalState("ollamaBaseUrl") as Promise<string | undefined>,
this.getGlobalState("lmStudioModelId") as Promise<string | undefined>,
this.getGlobalState("lmStudioBaseUrl") as Promise<string | undefined>,
this.getGlobalState("anthropicBaseUrl") as Promise<string | undefined>,
this.getSecret("geminiApiKey") as Promise<string | undefined>,
this.getSecret("openAiNativeApiKey") as Promise<string | undefined>,
@@ -930,6 +963,8 @@
			openAiModelId,
			ollamaModelId,
			ollamaBaseUrl,
			lmStudioModelId,
			lmStudioBaseUrl,
			anthropicBaseUrl,
			geminiApiKey,
			openAiNativeApiKey,
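`getLmStudioModels` queries LM Studio's OpenAI-compatible `GET {baseUrl}/v1/models` endpoint, keeps only the `data[].id` fields, and deduplicates them. A sketch of the response shape being parsed (model IDs are illustrative):

```typescript
// Illustrative /v1/models response body; only data[].id is consumed.
const exampleModelsResponse = {
	object: "list",
	data: [
		{ id: "llama-3.2-3b-instruct", object: "model" },
		{ id: "qwen2.5-coder-7b-instruct", object: "model" },
	],
}
```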
2 changes: 2 additions & 0 deletions src/shared/ExtensionMessage.ts
@@ -10,6 +10,7 @@ export interface ExtensionMessage {
| "state"
| "selectedImages"
| "ollamaModels"
| "lmStudioModels"
| "theme"
| "workspaceUpdated"
| "invoke"
@@ -21,6 +22,7 @@
	state?: ExtensionState
	images?: string[]
	ollamaModels?: string[]
	lmStudioModels?: string[]
	filePaths?: string[]
	partialMessage?: ClineMessage
	openRouterModels?: Record<string, ModelInfo>
1 change: 1 addition & 0 deletions src/shared/WebviewMessage.ts
@@ -19,6 +19,7 @@ export interface WebviewMessage {
| "exportTaskWithId"
| "resetState"
| "requestOllamaModels"
| "requestLmStudioModels"
| "openImage"
| "openFile"
| "openMention"
3 changes: 3 additions & 0 deletions src/shared/api.ts
@@ -5,6 +5,7 @@ export type ApiProvider =
| "vertex"
| "openai"
| "ollama"
| "lmstudio"
| "gemini"
| "openai-native"

@@ -27,6 +28,8 @@
	openAiModelId?: string
	ollamaModelId?: string
	ollamaBaseUrl?: string
	lmStudioModelId?: string
	lmStudioBaseUrl?: string
	geminiApiKey?: string
	openAiNativeApiKey?: string
	azureApiVersion?: string
1 change: 1 addition & 0 deletions webview-ui/src/components/chat/TaskHeader.tsx
@@ -96,6 +96,7 @@ const TaskHeader: React.FC<TaskHeaderProps> = ({
		return (
			apiConfiguration?.apiProvider !== "openai" &&
			apiConfiguration?.apiProvider !== "ollama" &&
			apiConfiguration?.apiProvider !== "lmstudio" &&
			apiConfiguration?.apiProvider !== "gemini"
		)
	}, [apiConfiguration?.apiProvider])