refreshRequestyModels protobus migration (#3422)

This commit is contained in:
canvrno 2025-05-11 22:42:14 -07:00 committed by GitHub
parent 801c59e75e
commit 33413e91c6
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
10 changed files with 121 additions and 97 deletions

View File

@ -0,0 +1,5 @@
---
"claude-dev": patch
---
refreshRequestyModels protobus migration

View File

@ -15,9 +15,11 @@ service ModelsService {
// Fetches available models from VS Code LM API
rpc getVsCodeLmModels(EmptyRequest) returns (VsCodeLmModelsArray);
// Refreshes and returns OpenRouter models
rpc refreshOpenRouterModels(EmptyRequest) returns (OpenRouterModels);
rpc refreshOpenRouterModels(EmptyRequest) returns (OpenRouterCompatibleModelInfo);
// Refreshes and returns OpenAI models
rpc refreshOpenAiModels(OpenAiModelsRequest) returns (StringArray);
// Refreshes and returns Requesty models
rpc refreshRequestyModels(EmptyRequest) returns (OpenRouterCompatibleModelInfo);
}
// List of VS Code LM models
@ -33,7 +35,7 @@ message VsCodeLmModel {
string id = 4;
}
// For ModelInfo structure in OpenRouterModels
// For ModelInfo structure in OpenRouterCompatibleModelInfo
message OpenRouterModelInfo {
int32 max_tokens = 1;
int32 context_window = 2;
@ -46,8 +48,8 @@ message OpenRouterModelInfo {
string description = 9;
}
// Response message for OpenRouter models
message OpenRouterModels {
// Shared response message for model information
message OpenRouterCompatibleModelInfo {
map<string, OpenRouterModelInfo> models = 1;
}
@ -57,4 +59,3 @@ message OpenAiModelsRequest {
string baseUrl = 2;
string apiKey = 3;
}

View File

@ -334,9 +334,6 @@ export class Controller {
case "resetState":
await this.resetState()
break
case "refreshRequestyModels":
await this.refreshRequestyModels()
break
case "refreshClineRules":
await refreshClineRulesToggles(this.context, cwd)
await refreshExternalRulesToggles(this.context, cwd)
@ -1149,6 +1146,7 @@ Here is the project's README to help you get started:\n\n${mcpDetails.readmeCont
return cacheDir
}
// Read OpenRouter models from disk cache
async readOpenRouterModels(): Promise<Record<string, ModelInfo> | undefined> {
const openRouterModelsFilePath = path.join(await this.ensureCacheDirectoryExists(), GlobalFileNames.openRouterModels)
const fileExists = await fileExistsAtPath(openRouterModelsFilePath)
@ -1159,51 +1157,6 @@ Here is the project's README to help you get started:\n\n${mcpDetails.readmeCont
return undefined
}
// Fetches the Requesty model catalog, pushes the result to the webview,
// and returns the map of model id -> ModelInfo (empty on any failure).
async refreshRequestyModels() {
	// Requesty reports per-token prices; convert to USD per million tokens.
	// Falsy/absent prices map to undefined so downstream defaults apply.
	const toPerMillion = (raw: any) => (raw ? parseFloat(raw) * 1_000_000 : undefined)

	const fetchedModels: Record<string, ModelInfo> = {}
	try {
		const apiKey = await getSecret(this.context, "requestyApiKey")
		const response = await axios.get("https://router.requesty.ai/v1/models", {
			headers: { Authorization: `Bearer ${apiKey}` },
		})
		const rawModels = response.data?.data
		if (rawModels) {
			for (const raw of rawModels) {
				// Map Requesty's snake_case payload onto our ModelInfo shape.
				fetchedModels[raw.id] = {
					maxTokens: raw.max_output_tokens || undefined,
					contextWindow: raw.context_window,
					supportsImages: raw.supports_vision || undefined,
					supportsPromptCache: raw.supports_caching || undefined,
					inputPrice: toPerMillion(raw.input_price),
					outputPrice: toPerMillion(raw.output_price),
					cacheWritesPrice: toPerMillion(raw.caching_price),
					cacheReadsPrice: toPerMillion(raw.cached_price),
					description: raw.description,
				}
			}
			console.log("Requesty models fetched", fetchedModels)
		} else {
			console.error("Invalid response from Requesty API")
		}
	} catch (error) {
		// Best-effort refresh: log and fall through with whatever we have.
		console.error("Error fetching Requesty models:", error)
	}
	// Always notify the webview, even with an empty map, so stale UI clears.
	await this.postMessageToWebview({
		type: "requestyModels",
		requestyModels: fetchedModels,
	})
	return fetchedModels
}
// Context menus and code actions
getFileMentionFromPath(filePath: string) {

View File

@ -8,6 +8,7 @@ import { getOllamaModels } from "./getOllamaModels"
import { getVsCodeLmModels } from "./getVsCodeLmModels"
import { refreshOpenAiModels } from "./refreshOpenAiModels"
import { refreshOpenRouterModels } from "./refreshOpenRouterModels"
import { refreshRequestyModels } from "./refreshRequestyModels"
// Register all models service methods
export function registerAllMethods(): void {
@ -17,4 +18,5 @@ export function registerAllMethods(): void {
registerMethod("getVsCodeLmModels", getVsCodeLmModels)
registerMethod("refreshOpenAiModels", refreshOpenAiModels)
registerMethod("refreshOpenRouterModels", refreshOpenRouterModels)
registerMethod("refreshRequestyModels", refreshRequestyModels)
}

View File

@ -29,9 +29,6 @@ export async function refreshOpenAiModels(controller: Controller, request: OpenA
const modelsArray = response.data?.data?.map((model: any) => model.id) || []
const models = [...new Set<string>(modelsArray)]
// Send models to webview
controller.postMessageToWebview({ type: "openAiModels", openAiModels: models })
return StringArray.create({ values: models })
} catch (error) {
console.error("Error fetching OpenAI models:", error)

View File

@ -1,6 +1,6 @@
import { Controller } from ".."
import { EmptyRequest } from "../../../shared/proto/common"
import { OpenRouterModels, OpenRouterModelInfo } from "../../../shared/proto/models"
import { OpenRouterCompatibleModelInfo, OpenRouterModelInfo } from "../../../shared/proto/models"
import axios from "axios"
import path from "path"
import fs from "fs/promises"
@ -13,7 +13,10 @@ import { GlobalFileNames } from "@core/storage/disk"
* @param request Empty request object
* @returns Response containing the OpenRouter models
*/
export async function refreshOpenRouterModels(controller: Controller, request: EmptyRequest): Promise<OpenRouterModels> {
export async function refreshOpenRouterModels(
controller: Controller,
request: EmptyRequest,
): Promise<OpenRouterCompatibleModelInfo> {
const openRouterModelsFilePath = path.join(await ensureCacheDirectoryExists(controller), GlobalFileNames.openRouterModels)
let models: Record<string, Partial<OpenRouterModelInfo>> = {}
@ -141,13 +144,7 @@ export async function refreshOpenRouterModels(controller: Controller, request: E
}
}
// Send models to webview
await controller.postMessageToWebview({
type: "openRouterModels",
openRouterModels: typedModels,
})
return OpenRouterModels.create({ models: typedModels })
return OpenRouterCompatibleModelInfo.create({ models: typedModels })
}
/**

View File

@ -0,0 +1,55 @@
import { Controller } from ".."
import { EmptyRequest } from "../../../shared/proto/common"
import { OpenRouterCompatibleModelInfo, OpenRouterModelInfo } from "../../../shared/proto/models"
import axios from "axios"
import { getSecret } from "@core/storage/state"
/**
 * Refreshes the Requesty models and returns the updated model list.
 * Best-effort: network or parsing failures are logged and an empty model
 * map is returned rather than throwing.
 * @param controller The controller instance (provides access to stored secrets)
 * @param request Empty request object
 * @returns Response containing the Requesty models keyed by model id
 */
export async function refreshRequestyModels(
	controller: Controller,
	request: EmptyRequest,
): Promise<OpenRouterCompatibleModelInfo> {
	// Requesty reports per-token prices; convert to USD per million tokens.
	// Missing/falsy prices yield undefined so the caller-side default applies.
	const parsePrice = (price: any) => {
		if (price) {
			return parseFloat(price) * 1_000_000
		}
		return undefined
	}

	const models: Record<string, OpenRouterModelInfo> = {}
	try {
		const apiKey = await getSecret(controller.context, "requestyApiKey")
		const headers = {
			Authorization: `Bearer ${apiKey}`,
		}
		const response = await axios.get("https://router.requesty.ai/v1/models", { headers })
		if (response.data?.data) {
			for (const model of response.data.data) {
				// Build via create() so every proto field carries its proto3
				// default (0/false/"") instead of `undefined`: the generated
				// OpenRouterModelInfo fields are non-optional (e.g.
				// `maxTokens: number`), and feeding `undefined` into the
				// generated encoder is not type-safe. Over the wire, proto3
				// defaults and absent values are indistinguishable, so this
				// is behavior-compatible for consumers.
				models[model.id] = OpenRouterModelInfo.create({
					maxTokens: model.max_output_tokens ?? 0,
					contextWindow: model.context_window ?? 0,
					supportsImages: model.supports_vision ?? false,
					supportsPromptCache: model.supports_caching ?? false,
					inputPrice: parsePrice(model.input_price) ?? 0,
					outputPrice: parsePrice(model.output_price) ?? 0,
					cacheWritesPrice: parsePrice(model.caching_price) ?? 0,
					cacheReadsPrice: parsePrice(model.cached_price) ?? 0,
					description: model.description ?? "",
				})
			}
			console.log("Requesty models fetched", models)
		} else {
			console.error("Invalid response from Requesty API")
		}
	} catch (error) {
		console.error("Error fetching Requesty models:", error)
	}
	return OpenRouterCompatibleModelInfo.create({ models })
}

View File

@ -21,7 +21,6 @@ export interface WebviewMessage {
| "openInBrowser"
| "openMention"
| "showChatView"
| "refreshRequestyModels"
| "refreshClineRules"
| "openMcpSettings"
| "restartMcpServer"

View File

@ -23,7 +23,7 @@ export interface VsCodeLmModel {
id: string
}
/** For ModelInfo structure in OpenRouterModels */
/** For ModelInfo structure in OpenRouterCompatibleModelInfo */
export interface OpenRouterModelInfo {
maxTokens: number
contextWindow: number
@ -36,12 +36,12 @@ export interface OpenRouterModelInfo {
description: string
}
/** Response message for OpenRouter models */
export interface OpenRouterModels {
/** Shared response message for model information */
export interface OpenRouterCompatibleModelInfo {
models: { [key: string]: OpenRouterModelInfo }
}
export interface OpenRouterModels_ModelsEntry {
export interface OpenRouterCompatibleModelInfo_ModelsEntry {
key: string
value?: OpenRouterModelInfo | undefined
}
@ -419,22 +419,22 @@ export const OpenRouterModelInfo: MessageFns<OpenRouterModelInfo> = {
},
}
function createBaseOpenRouterModels(): OpenRouterModels {
function createBaseOpenRouterCompatibleModelInfo(): OpenRouterCompatibleModelInfo {
return { models: {} }
}
export const OpenRouterModels: MessageFns<OpenRouterModels> = {
encode(message: OpenRouterModels, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
export const OpenRouterCompatibleModelInfo: MessageFns<OpenRouterCompatibleModelInfo> = {
encode(message: OpenRouterCompatibleModelInfo, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
Object.entries(message.models).forEach(([key, value]) => {
OpenRouterModels_ModelsEntry.encode({ key: key as any, value }, writer.uint32(10).fork()).join()
OpenRouterCompatibleModelInfo_ModelsEntry.encode({ key: key as any, value }, writer.uint32(10).fork()).join()
})
return writer
},
decode(input: BinaryReader | Uint8Array, length?: number): OpenRouterModels {
decode(input: BinaryReader | Uint8Array, length?: number): OpenRouterCompatibleModelInfo {
const reader = input instanceof BinaryReader ? input : new BinaryReader(input)
let end = length === undefined ? reader.len : reader.pos + length
const message = createBaseOpenRouterModels()
const message = createBaseOpenRouterCompatibleModelInfo()
while (reader.pos < end) {
const tag = reader.uint32()
switch (tag >>> 3) {
@ -443,7 +443,7 @@ export const OpenRouterModels: MessageFns<OpenRouterModels> = {
break
}
const entry1 = OpenRouterModels_ModelsEntry.decode(reader, reader.uint32())
const entry1 = OpenRouterCompatibleModelInfo_ModelsEntry.decode(reader, reader.uint32())
if (entry1.value !== undefined) {
message.models[entry1.key] = entry1.value
}
@ -458,7 +458,7 @@ export const OpenRouterModels: MessageFns<OpenRouterModels> = {
return message
},
fromJSON(object: any): OpenRouterModels {
fromJSON(object: any): OpenRouterCompatibleModelInfo {
return {
models: isObject(object.models)
? Object.entries(object.models).reduce<{ [key: string]: OpenRouterModelInfo }>((acc, [key, value]) => {
@ -469,7 +469,7 @@ export const OpenRouterModels: MessageFns<OpenRouterModels> = {
}
},
toJSON(message: OpenRouterModels): unknown {
toJSON(message: OpenRouterCompatibleModelInfo): unknown {
const obj: any = {}
if (message.models) {
const entries = Object.entries(message.models)
@ -483,11 +483,11 @@ export const OpenRouterModels: MessageFns<OpenRouterModels> = {
return obj
},
create<I extends Exact<DeepPartial<OpenRouterModels>, I>>(base?: I): OpenRouterModels {
return OpenRouterModels.fromPartial(base ?? ({} as any))
create<I extends Exact<DeepPartial<OpenRouterCompatibleModelInfo>, I>>(base?: I): OpenRouterCompatibleModelInfo {
return OpenRouterCompatibleModelInfo.fromPartial(base ?? ({} as any))
},
fromPartial<I extends Exact<DeepPartial<OpenRouterModels>, I>>(object: I): OpenRouterModels {
const message = createBaseOpenRouterModels()
fromPartial<I extends Exact<DeepPartial<OpenRouterCompatibleModelInfo>, I>>(object: I): OpenRouterCompatibleModelInfo {
const message = createBaseOpenRouterCompatibleModelInfo()
message.models = Object.entries(object.models ?? {}).reduce<{ [key: string]: OpenRouterModelInfo }>(
(acc, [key, value]) => {
if (value !== undefined) {
@ -501,12 +501,12 @@ export const OpenRouterModels: MessageFns<OpenRouterModels> = {
},
}
function createBaseOpenRouterModels_ModelsEntry(): OpenRouterModels_ModelsEntry {
function createBaseOpenRouterCompatibleModelInfo_ModelsEntry(): OpenRouterCompatibleModelInfo_ModelsEntry {
return { key: "", value: undefined }
}
export const OpenRouterModels_ModelsEntry: MessageFns<OpenRouterModels_ModelsEntry> = {
encode(message: OpenRouterModels_ModelsEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
export const OpenRouterCompatibleModelInfo_ModelsEntry: MessageFns<OpenRouterCompatibleModelInfo_ModelsEntry> = {
encode(message: OpenRouterCompatibleModelInfo_ModelsEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
if (message.key !== "") {
writer.uint32(10).string(message.key)
}
@ -516,10 +516,10 @@ export const OpenRouterModels_ModelsEntry: MessageFns<OpenRouterModels_ModelsEnt
return writer
},
decode(input: BinaryReader | Uint8Array, length?: number): OpenRouterModels_ModelsEntry {
decode(input: BinaryReader | Uint8Array, length?: number): OpenRouterCompatibleModelInfo_ModelsEntry {
const reader = input instanceof BinaryReader ? input : new BinaryReader(input)
let end = length === undefined ? reader.len : reader.pos + length
const message = createBaseOpenRouterModels_ModelsEntry()
const message = createBaseOpenRouterCompatibleModelInfo_ModelsEntry()
while (reader.pos < end) {
const tag = reader.uint32()
switch (tag >>> 3) {
@ -548,14 +548,14 @@ export const OpenRouterModels_ModelsEntry: MessageFns<OpenRouterModels_ModelsEnt
return message
},
fromJSON(object: any): OpenRouterModels_ModelsEntry {
fromJSON(object: any): OpenRouterCompatibleModelInfo_ModelsEntry {
return {
key: isSet(object.key) ? globalThis.String(object.key) : "",
value: isSet(object.value) ? OpenRouterModelInfo.fromJSON(object.value) : undefined,
}
},
toJSON(message: OpenRouterModels_ModelsEntry): unknown {
toJSON(message: OpenRouterCompatibleModelInfo_ModelsEntry): unknown {
const obj: any = {}
if (message.key !== "") {
obj.key = message.key
@ -566,11 +566,15 @@ export const OpenRouterModels_ModelsEntry: MessageFns<OpenRouterModels_ModelsEnt
return obj
},
create<I extends Exact<DeepPartial<OpenRouterModels_ModelsEntry>, I>>(base?: I): OpenRouterModels_ModelsEntry {
return OpenRouterModels_ModelsEntry.fromPartial(base ?? ({} as any))
create<I extends Exact<DeepPartial<OpenRouterCompatibleModelInfo_ModelsEntry>, I>>(
base?: I,
): OpenRouterCompatibleModelInfo_ModelsEntry {
return OpenRouterCompatibleModelInfo_ModelsEntry.fromPartial(base ?? ({} as any))
},
fromPartial<I extends Exact<DeepPartial<OpenRouterModels_ModelsEntry>, I>>(object: I): OpenRouterModels_ModelsEntry {
const message = createBaseOpenRouterModels_ModelsEntry()
fromPartial<I extends Exact<DeepPartial<OpenRouterCompatibleModelInfo_ModelsEntry>, I>>(
object: I,
): OpenRouterCompatibleModelInfo_ModelsEntry {
const message = createBaseOpenRouterCompatibleModelInfo_ModelsEntry()
message.key = object.key ?? ""
message.value =
object.value !== undefined && object.value !== null ? OpenRouterModelInfo.fromPartial(object.value) : undefined
@ -709,7 +713,7 @@ export const ModelsServiceDefinition = {
name: "refreshOpenRouterModels",
requestType: EmptyRequest,
requestStream: false,
responseType: OpenRouterModels,
responseType: OpenRouterCompatibleModelInfo,
responseStream: false,
options: {},
},
@ -722,6 +726,15 @@ export const ModelsServiceDefinition = {
responseStream: false,
options: {},
},
/** Refreshes and returns Requesty models */
refreshRequestyModels: {
name: "refreshRequestyModels",
requestType: EmptyRequest,
requestStream: false,
responseType: OpenRouterCompatibleModelInfo,
responseStream: false,
options: {},
},
},
} as const

View File

@ -6,7 +6,7 @@ import { useMount } from "react-use"
import styled from "styled-components"
import { requestyDefaultModelId } from "../../../../src/shared/api"
import { useExtensionState } from "../../context/ExtensionStateContext"
import { vscode } from "../../utils/vscode"
import { ModelsServiceClient } from "../../services/grpc-client"
import { highlight } from "../history/HistoryView"
import { ModelInfoView, normalizeApiConfiguration } from "./ApiOptions"
import { CODE_BLOCK_BG_COLOR } from "../common/CodeBlock"
@ -43,7 +43,9 @@ const RequestyModelPicker: React.FC<RequestyModelPickerProps> = ({ isPopup }) =>
}, [apiConfiguration])
useMount(() => {
vscode.postMessage({ type: "refreshRequestyModels" })
ModelsServiceClient.refreshRequestyModels({}).catch((err) => {
console.error("Failed to refresh Requesty models:", err)
})
})
useEffect(() => {