4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default.

148 changes: 95 additions & 53 deletions src/api/providers/openai.ts
@@ -5,61 +5,103 @@ import { ApiHandler } from "../index"
import { convertToOpenAiMessages } from "../transform/openai-format"
import { ApiStream } from "../transform/stream"

export class AOAI_O1Handler implements ApiHandler {
	private options: ApiHandlerOptions
	private client: OpenAI

	constructor(options: ApiHandlerOptions) {
		this.options = options
		this.client = new OpenAI({
			baseURL: this.options.openAiBaseUrl,
			apiKey: this.options.openAiApiKey,
		})
	}

	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
			{ role: "user", content: systemPrompt },
			...convertToOpenAiMessages(messages),
		]
		const response = await this.client.chat.completions.create({
			model: this.options.openAiModelId ?? "",
			messages: openAiMessages,
		})
		yield {
			type: "text",
			text: response.choices[0]?.message?.content || "",
		}
		if (response.usage) {
			yield {
				type: "usage",
				inputTokens: response.usage.prompt_tokens || 0,
				outputTokens: response.usage.completion_tokens || 0,
			}
		}
	}

	getModel(): { id: string; info: ModelInfo } {
		return {
			id: this.options.openAiModelId ?? "",
			info: openAiModelInfoSaneDefaults,
		}
	}
}
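// Note: the handler above sends the system prompt as a "user" message and requests a
// non-streaming completion with no temperature option; this presumably reflects the o1
// models' API restrictions at the time (no "system" role, no streaming, fixed temperature).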

export class OpenAiHandler implements ApiHandler {
	private options: ApiHandlerOptions
	private client: OpenAI

	constructor(options: ApiHandlerOptions) {
		this.options = options
		// Azure API shape slightly differs from the core API shape: https://github.com/openai/openai-node?tab=readme-ov-file#microsoft-azure-openai
		if (this.options.openAiBaseUrl?.toLowerCase().includes("azure.com")) {
			this.client = new AzureOpenAI({
				baseURL: this.options.openAiBaseUrl,
				apiKey: this.options.openAiApiKey,
				apiVersion: this.options.azureApiVersion || azureOpenAiDefaultApiVersion,
			})
		} else {
			this.client = new OpenAI({
				baseURL: this.options.openAiBaseUrl,
				apiKey: this.options.openAiApiKey,
			})
		}
	}

	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
			{ role: "system", content: systemPrompt },
			...convertToOpenAiMessages(messages),
		]
		const stream = await this.client.chat.completions.create({
			model: this.options.openAiModelId ?? "",
			messages: openAiMessages,
			temperature: 0,
			stream: true,
			stream_options: { include_usage: true },
		})
		for await (const chunk of stream) {
			const delta = chunk.choices[0]?.delta
			if (delta?.content) {
				yield {
					type: "text",
					text: delta.content,
				}
			}
			if (chunk.usage) {
				yield {
					type: "usage",
					inputTokens: chunk.usage.prompt_tokens || 0,
					outputTokens: chunk.usage.completion_tokens || 0,
				}
			}
		}
	}

	getModel(): { id: string; info: ModelInfo } {
		return {
			id: this.options.openAiModelId ?? "",
			info: openAiModelInfoSaneDefaults,
		}
	}
}
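
For local testing of the streaming handler, the sketch below shows one way it might be exercised end to end. It is not part of the diff: the import path, the model id, and the assumption that ApiHandlerOptions accepts only these three fields are placeholders; the chunk shapes follow the yields visible in createMessage above.

import { Anthropic } from "@anthropic-ai/sdk"
import { OpenAiHandler } from "./src/api/providers/openai"

async function main() {
	// Placeholder option values; the field names match those the handler reads above.
	const handler = new OpenAiHandler({
		openAiBaseUrl: "https://api.openai.com/v1",
		openAiApiKey: process.env.OPENAI_API_KEY ?? "",
		openAiModelId: "gpt-4o",
	})

	const messages: Anthropic.Messages.MessageParam[] = [{ role: "user", content: "Hello" }]

	// createMessage is an async generator: text chunks stream first, then a usage chunk.
	for await (const chunk of handler.createMessage("You are a helpful assistant.", messages)) {
		if (chunk.type === "text") {
			process.stdout.write(chunk.text)
		} else if (chunk.type === "usage") {
			console.log(`\nTokens in: ${chunk.inputTokens}, out: ${chunk.outputTokens}`)
		}
	}
}

main()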