Fix o1 on openai native

Saoud Rizwan 2024-10-16 09:59:30 -04:00
parent 3761023b60
commit dff81b9d09
2 changed files with 13 additions and 3 deletions

package-lock.json (generated)

@@ -1,12 +1,12 @@
 {
 	"name": "claude-dev",
-	"version": "2.0.5",
+	"version": "2.0.7",
 	"lockfileVersion": 3,
 	"requires": true,
 	"packages": {
 		"": {
 			"name": "claude-dev",
-			"version": "2.0.5",
+			"version": "2.0.7",
 			"license": "Apache-2.0",
 			"dependencies": {
 				"@anthropic-ai/bedrock-sdk": "^0.10.2",


@@ -23,8 +23,18 @@ export class OpenAiNativeHandler implements ApiHandler {
 	}
 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
+		let systemPromptMessage: OpenAI.Chat.ChatCompletionMessageParam
+		switch (this.getModel().id) {
+			case "o1-preview":
+			case "o1-mini":
+				systemPromptMessage = { role: "user", content: systemPrompt }
+				break
+			default:
+				systemPromptMessage = { role: "system", content: systemPrompt }
+		}
 		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
-			{ role: "system", content: systemPrompt },
+			systemPromptMessage,
 			...convertToOpenAiMessages(messages),
 		]
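
For context, here is a minimal sketch of the message-shaping logic this commit introduces, written as a standalone helper rather than as part of the handler. The helper name buildOpenAiMessages and the pre-converted conversation parameter are assumptions for illustration; the model IDs and the role fallback mirror the diff above. The motivation is that o1-preview and o1-mini rejected the "system" role at the time, so the system prompt has to be sent as a user message for those models.

import OpenAI from "openai"

// Hypothetical helper mirroring the patched createMessage logic: o1 models
// do not accept a "system" role message, so the system prompt is sent with
// the "user" role for o1-preview and o1-mini, and as a normal system
// message for every other model.
function buildOpenAiMessages(
	modelId: string,
	systemPrompt: string,
	conversation: OpenAI.Chat.ChatCompletionMessageParam[]
): OpenAI.Chat.ChatCompletionMessageParam[] {
	let systemPromptMessage: OpenAI.Chat.ChatCompletionMessageParam
	switch (modelId) {
		case "o1-preview":
		case "o1-mini":
			systemPromptMessage = { role: "user", content: systemPrompt }
			break
		default:
			systemPromptMessage = { role: "system", content: systemPrompt }
	}
	// The system prompt (in whichever role) always leads the request payload.
	return [systemPromptMessage, ...conversation]
}

// Example: buildOpenAiMessages("o1-mini", "You are a coding assistant.", history)
// puts a user-role prompt first, whereas "gpt-4o" would still get a system-role prompt.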