[PROTOBUS] Move refreshOpenRouterModels to protobus (#3401)

* refreshOpenRouterModels protobus migration

* changeset

* cleanup

* Ellipsis-inspired changes

* one small change
canvrno 2025-05-08 20:14:49 -07:00 committed by GitHub
parent e1389a62c7
commit 8827b167ca
8 changed files with 609 additions and 146 deletions

View File

@@ -0,0 +1,5 @@
---
"claude-dev": patch
---
refreshOpenRouterModels protobus migration

View File

@@ -14,6 +14,8 @@ service ModelsService {
rpc getLmStudioModels(StringRequest) returns (StringArray);
// Fetches available models from VS Code LM API
rpc getVsCodeLmModels(EmptyRequest) returns (VsCodeLmModelsArray);
// Refreshes and returns OpenRouter models
rpc refreshOpenRouterModels(EmptyRequest) returns (OpenRouterModels);
}
// List of VS Code LM models
@@ -28,3 +30,22 @@ message VsCodeLmModel {
string version = 3;
string id = 4;
}
// For ModelInfo structure in OpenRouterModels
message OpenRouterModelInfo {
int32 max_tokens = 1;
int32 context_window = 2;
bool supports_images = 3;
bool supports_prompt_cache = 4;
double input_price = 5;
double output_price = 6;
double cache_writes_price = 7;
double cache_reads_price = 8;
string description = 9;
}
// Response message for OpenRouter models
message OpenRouterModels {
map<string, OpenRouterModelInfo> models = 1;
}
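
For context, a minimal sketch of how the webview invokes this new RPC once the TypeScript client is generated (the ModelsServiceClient wrapper appears later in this diff; error handling is elided, and the snippet assumes an async context):

import { ModelsServiceClient } from "@/services/grpc-client"

// EmptyRequest has no fields, so an empty object literal is enough.
const response = await ModelsServiceClient.refreshOpenRouterModels({})
// response.models maps model ids to OpenRouterModelInfo entries.
console.log(Object.keys(response.models).length, "OpenRouter models available")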

View File

@@ -8,6 +8,8 @@ import pWaitFor from "p-wait-for"
import * as path from "path"
import * as vscode from "vscode"
import { handleGrpcRequest, handleGrpcRequestCancel } from "./grpc-handler"
import { handleModelsServiceRequest } from "./models"
import { EmptyRequest } from "@shared/proto/common"
import { buildApiHandler } from "@api/index"
import { cleanupLegacyCheckpoints } from "@integrations/checkpoints/CheckpointMigration"
import { downloadTask } from "@integrations/misc/export-markdown"
@@ -230,15 +232,15 @@ export class Controller {
}
})
this.silentlyRefreshMcpMarketplace()
this.refreshOpenRouterModels().then(async (openRouterModels) => {
if (openRouterModels) {
handleModelsServiceRequest(this, "refreshOpenRouterModels", EmptyRequest.create()).then(async (response) => {
if (response && response.models) {
// update model info in state (this needs to be done here since we don't want to update state while settings is open, and we may refresh models there)
const { apiConfiguration } = await getAllExtensionState(this.context)
if (apiConfiguration.openRouterModelId) {
if (apiConfiguration.openRouterModelId && response.models[apiConfiguration.openRouterModelId]) {
await updateGlobalState(
this.context,
"openRouterModelInfo",
openRouterModels[apiConfiguration.openRouterModelId],
response.models[apiConfiguration.openRouterModelId],
)
await this.postStateToWebview()
}
@@ -330,9 +332,6 @@
case "resetState":
await this.resetState()
break
case "refreshOpenRouterModels":
await this.refreshOpenRouterModels()
break
case "refreshRequestyModels":
await this.refreshRequestyModels()
break
@@ -1189,143 +1188,6 @@ Here is the project's README to help you get started:\n\n${mcpDetails.readmeContent}
return undefined
}
async refreshOpenRouterModels() {
const openRouterModelsFilePath = path.join(await this.ensureCacheDirectoryExists(), GlobalFileNames.openRouterModels)
let models: Record<string, ModelInfo> = {}
try {
const response = await axios.get("https://openrouter.ai/api/v1/models")
/*
{
"id": "anthropic/claude-3.5-sonnet",
"name": "Anthropic: Claude 3.5 Sonnet",
"created": 1718841600,
"description": "Claude 3.5 Sonnet delivers better-than-Opus capabilities, faster-than-Sonnet speeds, at the same Sonnet prices. Sonnet is particularly good at:\n\n- Coding: Autonomously writes, edits, and runs code with reasoning and troubleshooting\n- Data science: Augments human data science expertise; navigates unstructured data while using multiple tools for insights\n- Visual processing: excelling at interpreting charts, graphs, and images, accurately transcribing text to derive insights beyond just the text alone\n- Agentic tasks: exceptional tool use, making it great at agentic tasks (i.e. complex, multi-step problem solving tasks that require engaging with other systems)\n\n#multimodal",
"context_length": 200000,
"architecture": {
"modality": "text+image-\u003Etext",
"tokenizer": "Claude",
"instruct_type": null
},
"pricing": {
"prompt": "0.000003",
"completion": "0.000015",
"image": "0.0048",
"request": "0"
},
"top_provider": {
"context_length": 200000,
"max_completion_tokens": 8192,
"is_moderated": true
},
"per_request_limits": null
},
*/
if (response.data?.data) {
const rawModels = response.data.data
const parsePrice = (price: any) => {
if (price) {
return parseFloat(price) * 1_000_000
}
return undefined
}
for (const rawModel of rawModels) {
const modelInfo: ModelInfo = {
maxTokens: rawModel.top_provider?.max_completion_tokens,
contextWindow: rawModel.context_length,
supportsImages: rawModel.architecture?.modality?.includes("image"),
supportsPromptCache: false,
inputPrice: parsePrice(rawModel.pricing?.prompt),
outputPrice: parsePrice(rawModel.pricing?.completion),
description: rawModel.description,
}
switch (rawModel.id) {
case "anthropic/claude-3-7-sonnet":
case "anthropic/claude-3-7-sonnet:beta":
case "anthropic/claude-3.7-sonnet":
case "anthropic/claude-3.7-sonnet:beta":
case "anthropic/claude-3.7-sonnet:thinking":
case "anthropic/claude-3.5-sonnet":
case "anthropic/claude-3.5-sonnet:beta":
// NOTE: this needs to be synced with api.ts/openrouter default model info
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = 3.75
modelInfo.cacheReadsPrice = 0.3
break
case "anthropic/claude-3.5-sonnet-20240620":
case "anthropic/claude-3.5-sonnet-20240620:beta":
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = 3.75
modelInfo.cacheReadsPrice = 0.3
break
case "anthropic/claude-3-5-haiku":
case "anthropic/claude-3-5-haiku:beta":
case "anthropic/claude-3-5-haiku-20241022":
case "anthropic/claude-3-5-haiku-20241022:beta":
case "anthropic/claude-3.5-haiku":
case "anthropic/claude-3.5-haiku:beta":
case "anthropic/claude-3.5-haiku-20241022":
case "anthropic/claude-3.5-haiku-20241022:beta":
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = 1.25
modelInfo.cacheReadsPrice = 0.1
break
case "anthropic/claude-3-opus":
case "anthropic/claude-3-opus:beta":
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = 18.75
modelInfo.cacheReadsPrice = 1.5
break
case "anthropic/claude-3-haiku":
case "anthropic/claude-3-haiku:beta":
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = 0.3
modelInfo.cacheReadsPrice = 0.03
break
case "deepseek/deepseek-chat":
modelInfo.supportsPromptCache = true
// see api.ts/deepSeekModels for more info
modelInfo.inputPrice = 0
modelInfo.cacheWritesPrice = 0.14
modelInfo.cacheReadsPrice = 0.014
break
default:
if (rawModel.id.startsWith("openai/")) {
modelInfo.cacheReadsPrice = parsePrice(rawModel.pricing?.input_cache_read)
if (modelInfo.cacheReadsPrice) {
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = parsePrice(rawModel.pricing?.input_cache_write)
// OpenRouter charges no cache write pricing for OpenAI models
}
} else if (rawModel.id.startsWith("google/")) {
modelInfo.cacheReadsPrice = parsePrice(rawModel.pricing?.input_cache_read)
if (modelInfo.cacheReadsPrice) {
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = parsePrice(rawModel.pricing?.input_cache_write)
}
}
break
}
models[rawModel.id] = modelInfo
}
} else {
console.error("Invalid response from OpenRouter API")
}
await fs.writeFile(openRouterModelsFilePath, JSON.stringify(models))
console.log("OpenRouter models fetched and saved", models)
} catch (error) {
console.error("Error fetching OpenRouter models:", error)
}
await this.postMessageToWebview({
type: "openRouterModels",
openRouterModels: models,
})
return models
}
async refreshRequestyModels() {
const parsePrice = (price: any) => {
if (price) {

View File

@@ -6,6 +6,7 @@ import { registerMethod } from "./index"
import { getLmStudioModels } from "./getLmStudioModels"
import { getOllamaModels } from "./getOllamaModels"
import { getVsCodeLmModels } from "./getVsCodeLmModels"
import { refreshOpenRouterModels } from "./refreshOpenRouterModels"
// Register all models service methods
export function registerAllMethods(): void {
@@ -13,4 +14,5 @@ export function registerAllMethods(): void {
registerMethod("getLmStudioModels", getLmStudioModels)
registerMethod("getOllamaModels", getOllamaModels)
registerMethod("getVsCodeLmModels", getVsCodeLmModels)
registerMethod("refreshOpenRouterModels", refreshOpenRouterModels)
}
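
The registerMethod and handleModelsServiceRequest helpers referenced here live in ./index and the gRPC handler; a rough sketch of the dispatch pattern these registrations imply, with names and signatures assumed from their usage elsewhere in this diff:

import { Controller } from ".."

type ServiceMethodHandler = (controller: Controller, request: any) => Promise<any>

// Registry populated by registerAllMethods() at startup.
const methods = new Map<string, ServiceMethodHandler>()

export function registerMethod(name: string, handler: ServiceMethodHandler): void {
	methods.set(name, handler)
}

// The Controller's handleModelsServiceRequest(this, "refreshOpenRouterModels",
// EmptyRequest.create()) call resolves to the handler registered above.
export async function handleModelsServiceRequest(controller: Controller, method: string, request: any): Promise<any> {
	const handler = methods.get(method)
	if (!handler) {
		throw new Error(`Unknown models service method: ${method}`)
	}
	return handler(controller, request)
}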

View File

@@ -0,0 +1,178 @@
import { Controller } from ".."
import { EmptyRequest } from "../../../shared/proto/common"
import { OpenRouterModels, OpenRouterModelInfo } from "../../../shared/proto/models"
import axios from "axios"
import path from "path"
import fs from "fs/promises"
import { fileExistsAtPath } from "@utils/fs"
import { GlobalFileNames } from "@core/storage/disk"
/**
* Refreshes the OpenRouter models and returns the updated model list
* @param controller The controller instance
* @param request Empty request object
* @returns Response containing the OpenRouter models
*/
export async function refreshOpenRouterModels(controller: Controller, request: EmptyRequest): Promise<OpenRouterModels> {
const openRouterModelsFilePath = path.join(await ensureCacheDirectoryExists(controller), GlobalFileNames.openRouterModels)
let models: Record<string, Partial<OpenRouterModelInfo>> = {}
try {
const response = await axios.get("https://openrouter.ai/api/v1/models")
if (response.data?.data) {
const rawModels = response.data.data
const parsePrice = (price: any) => {
if (price) {
return parseFloat(price) * 1_000_000
}
return undefined
}
for (const rawModel of rawModels) {
const modelInfo: Partial<OpenRouterModelInfo> = {
maxTokens: rawModel.top_provider?.max_completion_tokens,
contextWindow: rawModel.context_length,
supportsImages: rawModel.architecture?.modality?.includes("image"),
supportsPromptCache: false,
inputPrice: parsePrice(rawModel.pricing?.prompt),
outputPrice: parsePrice(rawModel.pricing?.completion),
description: rawModel.description,
}
switch (rawModel.id) {
case "anthropic/claude-3-7-sonnet":
case "anthropic/claude-3-7-sonnet:beta":
case "anthropic/claude-3.7-sonnet":
case "anthropic/claude-3.7-sonnet:beta":
case "anthropic/claude-3.7-sonnet:thinking":
case "anthropic/claude-3.5-sonnet":
case "anthropic/claude-3.5-sonnet:beta":
// NOTE: this needs to be synced with api.ts/openrouter default model info
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = 3.75
modelInfo.cacheReadsPrice = 0.3
break
case "anthropic/claude-3.5-sonnet-20240620":
case "anthropic/claude-3.5-sonnet-20240620:beta":
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = 3.75
modelInfo.cacheReadsPrice = 0.3
break
case "anthropic/claude-3-5-haiku":
case "anthropic/claude-3-5-haiku:beta":
case "anthropic/claude-3-5-haiku-20241022":
case "anthropic/claude-3-5-haiku-20241022:beta":
case "anthropic/claude-3.5-haiku":
case "anthropic/claude-3.5-haiku:beta":
case "anthropic/claude-3.5-haiku-20241022":
case "anthropic/claude-3.5-haiku-20241022:beta":
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = 1.25
modelInfo.cacheReadsPrice = 0.1
break
case "anthropic/claude-3-opus":
case "anthropic/claude-3-opus:beta":
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = 18.75
modelInfo.cacheReadsPrice = 1.5
break
case "anthropic/claude-3-haiku":
case "anthropic/claude-3-haiku:beta":
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = 0.3
modelInfo.cacheReadsPrice = 0.03
break
case "deepseek/deepseek-chat":
modelInfo.supportsPromptCache = true
// see api.ts/deepSeekModels for more info
modelInfo.inputPrice = 0
modelInfo.cacheWritesPrice = 0.14
modelInfo.cacheReadsPrice = 0.014
break
default:
if (rawModel.id.startsWith("openai/")) {
modelInfo.cacheReadsPrice = parsePrice(rawModel.pricing?.input_cache_read)
if (modelInfo.cacheReadsPrice) {
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = parsePrice(rawModel.pricing?.input_cache_write)
// OpenRouter charges no cache write pricing for OpenAI models
}
} else if (rawModel.id.startsWith("google/")) {
modelInfo.cacheReadsPrice = parsePrice(rawModel.pricing?.input_cache_read)
if (modelInfo.cacheReadsPrice) {
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = parsePrice(rawModel.pricing?.input_cache_write)
}
}
break
}
models[rawModel.id] = modelInfo
}
} else {
console.error("Invalid response from OpenRouter API")
}
await fs.writeFile(openRouterModelsFilePath, JSON.stringify(models))
console.log("OpenRouter models fetched and saved", models)
} catch (error) {
console.error("Error fetching OpenRouter models:", error)
// If we failed to fetch models, try to read cached models
const cachedModels = await readOpenRouterModels(controller)
if (cachedModels) {
models = cachedModels
}
}
// Convert the Record<string, Partial<OpenRouterModelInfo>> to Record<string, OpenRouterModelInfo>
// by filling in any missing required fields with defaults
const typedModels: Record<string, OpenRouterModelInfo> = {}
for (const [key, model] of Object.entries(models)) {
typedModels[key] = {
maxTokens: model.maxTokens ?? 0,
contextWindow: model.contextWindow ?? 0,
supportsImages: model.supportsImages ?? false,
supportsPromptCache: model.supportsPromptCache ?? false,
inputPrice: model.inputPrice ?? 0,
outputPrice: model.outputPrice ?? 0,
cacheWritesPrice: model.cacheWritesPrice ?? 0,
cacheReadsPrice: model.cacheReadsPrice ?? 0,
description: model.description ?? "",
}
}
// Send models to webview
await controller.postMessageToWebview({
type: "openRouterModels",
openRouterModels: typedModels,
})
return OpenRouterModels.create({ models: typedModels })
}
/**
* Reads cached OpenRouter models from disk
*/
async function readOpenRouterModels(controller: Controller): Promise<Record<string, Partial<OpenRouterModelInfo>> | undefined> {
const openRouterModelsFilePath = path.join(await ensureCacheDirectoryExists(controller), GlobalFileNames.openRouterModels)
const fileExists = await fileExistsAtPath(openRouterModelsFilePath)
if (fileExists) {
try {
const fileContents = await fs.readFile(openRouterModelsFilePath, "utf8")
return JSON.parse(fileContents)
} catch (error) {
console.error("Error reading cached OpenRouter models:", error)
return undefined
}
}
return undefined
}
/**
* Ensures the cache directory exists and returns its path
*/
async function ensureCacheDirectoryExists(controller: Controller): Promise<string> {
const cacheDir = path.join(controller.context.globalStorageUri.fsPath, "cache")
await fs.mkdir(cacheDir, { recursive: true })
return cacheDir
}
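
Worth calling out: OpenRouter reports prices as per-token USD strings, and parsePrice rescales them to dollars per million tokens, the same unit as the hard-coded cache prices above. Using the sample response embedded in the old Controller code:

const parsePrice = (price: any) => (price ? parseFloat(price) * 1_000_000 : undefined)
parsePrice("0.000003") // 3  -> $3 per million input tokens (Claude 3.5 Sonnet prompt price)
parsePrice("0.000015") // 15 -> $15 per million output tokens
parsePrice(undefined)  // undefined -> later defaulted to 0 by the ?? fallbacks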

View File

@@ -20,7 +20,6 @@ export interface WebviewMessage {
| "openInBrowser"
| "openMention"
| "showChatView"
| "refreshOpenRouterModels"
| "refreshRequestyModels"
| "refreshOpenAiModels"
| "refreshClineRules"

View File

@@ -23,6 +23,29 @@ export interface VsCodeLmModel {
id: string
}
/** For ModelInfo structure in OpenRouterModels */
export interface OpenRouterModelInfo {
maxTokens: number
contextWindow: number
supportsImages: boolean
supportsPromptCache: boolean
inputPrice: number
outputPrice: number
cacheWritesPrice: number
cacheReadsPrice: number
description: string
}
/** Response message for OpenRouter models */
export interface OpenRouterModels {
models: { [key: string]: OpenRouterModelInfo }
}
export interface OpenRouterModels_ModelsEntry {
key: string
value?: OpenRouterModelInfo | undefined
}
function createBaseVsCodeLmModelsArray(): VsCodeLmModelsArray {
return { models: [] }
}
@@ -191,6 +214,363 @@ export const VsCodeLmModel: MessageFns<VsCodeLmModel> = {
},
}
function createBaseOpenRouterModelInfo(): OpenRouterModelInfo {
return {
maxTokens: 0,
contextWindow: 0,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 0,
outputPrice: 0,
cacheWritesPrice: 0,
cacheReadsPrice: 0,
description: "",
}
}
export const OpenRouterModelInfo: MessageFns<OpenRouterModelInfo> = {
encode(message: OpenRouterModelInfo, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
if (message.maxTokens !== 0) {
writer.uint32(8).int32(message.maxTokens)
}
if (message.contextWindow !== 0) {
writer.uint32(16).int32(message.contextWindow)
}
if (message.supportsImages !== false) {
writer.uint32(24).bool(message.supportsImages)
}
if (message.supportsPromptCache !== false) {
writer.uint32(32).bool(message.supportsPromptCache)
}
if (message.inputPrice !== 0) {
writer.uint32(41).double(message.inputPrice)
}
if (message.outputPrice !== 0) {
writer.uint32(49).double(message.outputPrice)
}
if (message.cacheWritesPrice !== 0) {
writer.uint32(57).double(message.cacheWritesPrice)
}
if (message.cacheReadsPrice !== 0) {
writer.uint32(65).double(message.cacheReadsPrice)
}
if (message.description !== "") {
writer.uint32(74).string(message.description)
}
return writer
},
decode(input: BinaryReader | Uint8Array, length?: number): OpenRouterModelInfo {
const reader = input instanceof BinaryReader ? input : new BinaryReader(input)
let end = length === undefined ? reader.len : reader.pos + length
const message = createBaseOpenRouterModelInfo()
while (reader.pos < end) {
const tag = reader.uint32()
switch (tag >>> 3) {
case 1: {
if (tag !== 8) {
break
}
message.maxTokens = reader.int32()
continue
}
case 2: {
if (tag !== 16) {
break
}
message.contextWindow = reader.int32()
continue
}
case 3: {
if (tag !== 24) {
break
}
message.supportsImages = reader.bool()
continue
}
case 4: {
if (tag !== 32) {
break
}
message.supportsPromptCache = reader.bool()
continue
}
case 5: {
if (tag !== 41) {
break
}
message.inputPrice = reader.double()
continue
}
case 6: {
if (tag !== 49) {
break
}
message.outputPrice = reader.double()
continue
}
case 7: {
if (tag !== 57) {
break
}
message.cacheWritesPrice = reader.double()
continue
}
case 8: {
if (tag !== 65) {
break
}
message.cacheReadsPrice = reader.double()
continue
}
case 9: {
if (tag !== 74) {
break
}
message.description = reader.string()
continue
}
}
if ((tag & 7) === 4 || tag === 0) {
break
}
reader.skip(tag & 7)
}
return message
},
fromJSON(object: any): OpenRouterModelInfo {
return {
maxTokens: isSet(object.maxTokens) ? globalThis.Number(object.maxTokens) : 0,
contextWindow: isSet(object.contextWindow) ? globalThis.Number(object.contextWindow) : 0,
supportsImages: isSet(object.supportsImages) ? globalThis.Boolean(object.supportsImages) : false,
supportsPromptCache: isSet(object.supportsPromptCache) ? globalThis.Boolean(object.supportsPromptCache) : false,
inputPrice: isSet(object.inputPrice) ? globalThis.Number(object.inputPrice) : 0,
outputPrice: isSet(object.outputPrice) ? globalThis.Number(object.outputPrice) : 0,
cacheWritesPrice: isSet(object.cacheWritesPrice) ? globalThis.Number(object.cacheWritesPrice) : 0,
cacheReadsPrice: isSet(object.cacheReadsPrice) ? globalThis.Number(object.cacheReadsPrice) : 0,
description: isSet(object.description) ? globalThis.String(object.description) : "",
}
},
toJSON(message: OpenRouterModelInfo): unknown {
const obj: any = {}
if (message.maxTokens !== 0) {
obj.maxTokens = Math.round(message.maxTokens)
}
if (message.contextWindow !== 0) {
obj.contextWindow = Math.round(message.contextWindow)
}
if (message.supportsImages !== false) {
obj.supportsImages = message.supportsImages
}
if (message.supportsPromptCache !== false) {
obj.supportsPromptCache = message.supportsPromptCache
}
if (message.inputPrice !== 0) {
obj.inputPrice = message.inputPrice
}
if (message.outputPrice !== 0) {
obj.outputPrice = message.outputPrice
}
if (message.cacheWritesPrice !== 0) {
obj.cacheWritesPrice = message.cacheWritesPrice
}
if (message.cacheReadsPrice !== 0) {
obj.cacheReadsPrice = message.cacheReadsPrice
}
if (message.description !== "") {
obj.description = message.description
}
return obj
},
create<I extends Exact<DeepPartial<OpenRouterModelInfo>, I>>(base?: I): OpenRouterModelInfo {
return OpenRouterModelInfo.fromPartial(base ?? ({} as any))
},
fromPartial<I extends Exact<DeepPartial<OpenRouterModelInfo>, I>>(object: I): OpenRouterModelInfo {
const message = createBaseOpenRouterModelInfo()
message.maxTokens = object.maxTokens ?? 0
message.contextWindow = object.contextWindow ?? 0
message.supportsImages = object.supportsImages ?? false
message.supportsPromptCache = object.supportsPromptCache ?? false
message.inputPrice = object.inputPrice ?? 0
message.outputPrice = object.outputPrice ?? 0
message.cacheWritesPrice = object.cacheWritesPrice ?? 0
message.cacheReadsPrice = object.cacheReadsPrice ?? 0
message.description = object.description ?? ""
return message
},
}
function createBaseOpenRouterModels(): OpenRouterModels {
return { models: {} }
}
export const OpenRouterModels: MessageFns<OpenRouterModels> = {
encode(message: OpenRouterModels, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
Object.entries(message.models).forEach(([key, value]) => {
OpenRouterModels_ModelsEntry.encode({ key: key as any, value }, writer.uint32(10).fork()).join()
})
return writer
},
decode(input: BinaryReader | Uint8Array, length?: number): OpenRouterModels {
const reader = input instanceof BinaryReader ? input : new BinaryReader(input)
let end = length === undefined ? reader.len : reader.pos + length
const message = createBaseOpenRouterModels()
while (reader.pos < end) {
const tag = reader.uint32()
switch (tag >>> 3) {
case 1: {
if (tag !== 10) {
break
}
const entry1 = OpenRouterModels_ModelsEntry.decode(reader, reader.uint32())
if (entry1.value !== undefined) {
message.models[entry1.key] = entry1.value
}
continue
}
}
if ((tag & 7) === 4 || tag === 0) {
break
}
reader.skip(tag & 7)
}
return message
},
fromJSON(object: any): OpenRouterModels {
return {
models: isObject(object.models)
? Object.entries(object.models).reduce<{ [key: string]: OpenRouterModelInfo }>((acc, [key, value]) => {
acc[key] = OpenRouterModelInfo.fromJSON(value)
return acc
}, {})
: {},
}
},
toJSON(message: OpenRouterModels): unknown {
const obj: any = {}
if (message.models) {
const entries = Object.entries(message.models)
if (entries.length > 0) {
obj.models = {}
entries.forEach(([k, v]) => {
obj.models[k] = OpenRouterModelInfo.toJSON(v)
})
}
}
return obj
},
create<I extends Exact<DeepPartial<OpenRouterModels>, I>>(base?: I): OpenRouterModels {
return OpenRouterModels.fromPartial(base ?? ({} as any))
},
fromPartial<I extends Exact<DeepPartial<OpenRouterModels>, I>>(object: I): OpenRouterModels {
const message = createBaseOpenRouterModels()
message.models = Object.entries(object.models ?? {}).reduce<{ [key: string]: OpenRouterModelInfo }>(
(acc, [key, value]) => {
if (value !== undefined) {
acc[key] = OpenRouterModelInfo.fromPartial(value)
}
return acc
},
{},
)
return message
},
}
function createBaseOpenRouterModels_ModelsEntry(): OpenRouterModels_ModelsEntry {
return { key: "", value: undefined }
}
export const OpenRouterModels_ModelsEntry: MessageFns<OpenRouterModels_ModelsEntry> = {
encode(message: OpenRouterModels_ModelsEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
if (message.key !== "") {
writer.uint32(10).string(message.key)
}
if (message.value !== undefined) {
OpenRouterModelInfo.encode(message.value, writer.uint32(18).fork()).join()
}
return writer
},
decode(input: BinaryReader | Uint8Array, length?: number): OpenRouterModels_ModelsEntry {
const reader = input instanceof BinaryReader ? input : new BinaryReader(input)
let end = length === undefined ? reader.len : reader.pos + length
const message = createBaseOpenRouterModels_ModelsEntry()
while (reader.pos < end) {
const tag = reader.uint32()
switch (tag >>> 3) {
case 1: {
if (tag !== 10) {
break
}
message.key = reader.string()
continue
}
case 2: {
if (tag !== 18) {
break
}
message.value = OpenRouterModelInfo.decode(reader, reader.uint32())
continue
}
}
if ((tag & 7) === 4 || tag === 0) {
break
}
reader.skip(tag & 7)
}
return message
},
fromJSON(object: any): OpenRouterModels_ModelsEntry {
return {
key: isSet(object.key) ? globalThis.String(object.key) : "",
value: isSet(object.value) ? OpenRouterModelInfo.fromJSON(object.value) : undefined,
}
},
toJSON(message: OpenRouterModels_ModelsEntry): unknown {
const obj: any = {}
if (message.key !== "") {
obj.key = message.key
}
if (message.value !== undefined) {
obj.value = OpenRouterModelInfo.toJSON(message.value)
}
return obj
},
create<I extends Exact<DeepPartial<OpenRouterModels_ModelsEntry>, I>>(base?: I): OpenRouterModels_ModelsEntry {
return OpenRouterModels_ModelsEntry.fromPartial(base ?? ({} as any))
},
fromPartial<I extends Exact<DeepPartial<OpenRouterModels_ModelsEntry>, I>>(object: I): OpenRouterModels_ModelsEntry {
const message = createBaseOpenRouterModels_ModelsEntry()
message.key = object.key ?? ""
message.value =
object.value !== undefined && object.value !== null ? OpenRouterModelInfo.fromPartial(object.value) : undefined
return message
},
}
/** Service for model-related operations */
export type ModelsServiceDefinition = typeof ModelsServiceDefinition
export const ModelsServiceDefinition = {
@@ -224,6 +604,15 @@ export const ModelsServiceDefinition = {
responseStream: false,
options: {},
},
/** Refreshes and returns OpenRouter models */
refreshOpenRouterModels: {
name: "refreshOpenRouterModels",
requestType: EmptyRequest,
requestStream: false,
responseType: OpenRouterModels,
responseStream: false,
options: {},
},
},
} as const
@@ -244,6 +633,10 @@ export type Exact<P, I extends P> = P extends Builtin
? P
: P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never }
function isObject(value: any): boolean {
return typeof value === "object" && value !== null
}
function isSet(value: any): boolean {
return value !== null && value !== undefined
}
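
Since these codecs are standard ts-proto output, a quick round-trip sanity check (illustrative only; values taken from the sample API response quoted earlier in this diff) would look like:

const original = OpenRouterModels.create({
	models: {
		"anthropic/claude-3.5-sonnet": OpenRouterModelInfo.create({
			maxTokens: 8192,
			contextWindow: 200_000,
			supportsImages: true,
			inputPrice: 3,
			outputPrice: 15,
		}),
	},
})
const bytes = OpenRouterModels.encode(original).finish()
const decoded = OpenRouterModels.decode(bytes)
// decoded.models["anthropic/claude-3.5-sonnet"].contextWindow === 200000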

View File

@@ -6,6 +6,7 @@ import { useMount } from "react-use"
import styled from "styled-components"
import { openRouterDefaultModelId } from "@shared/api"
import { useExtensionState } from "@/context/ExtensionStateContext"
import { ModelsServiceClient } from "@/services/grpc-client"
import { vscode } from "@/utils/vscode"
import { highlight } from "../history/HistoryView"
import { ModelInfoView, normalizeApiConfiguration } from "./ApiOptions"
@@ -84,7 +85,9 @@ const OpenRouterModelPicker: React.FC<OpenRouterModelPickerProps> = ({ isPopup }
}, [apiConfiguration])
useMount(() => {
vscode.postMessage({ type: "refreshOpenRouterModels" })
ModelsServiceClient.refreshOpenRouterModels({}).catch((error: Error) =>
console.error("Failed to refresh OpenRouter models:", error),
)
})
useEffect(() => {