diff --git a/extensions/cli/src/util/exponentialBackoff.ts b/extensions/cli/src/util/exponentialBackoff.ts
index c44aee6e818..df8c170fc4f 100644
--- a/extensions/cli/src/util/exponentialBackoff.ts
+++ b/extensions/cli/src/util/exponentialBackoff.ts
@@ -1,4 +1,4 @@
-import { BaseLlmApi, isResponsesModel } from "@continuedev/openai-adapters";
+import { BaseLlmApi } from "@continuedev/openai-adapters";
 import type { ChatCompletionCreateParamsStreaming } from "openai/resources.mjs";
 
 import { error, warn } from "../logging.js";
@@ -173,14 +173,6 @@ export async function chatCompletionStreamWithBackoff(
         throw new Error("Request aborted");
       }
 
-      const useResponses =
-        typeof llmApi.responsesStream === "function" &&
-        isResponsesModel(params.model);
-
-      if (useResponses) {
-        return llmApi.responsesStream!(params, abortSignal);
-      }
-
       return llmApi.chatCompletionStream(params, abortSignal);
     } catch (err: any) {
       lastError = err;