Commit 66e06527 authored by Alex's avatar Alex Committed by 陈曦
Browse files

fix(openai): do not normalize /completions API model names for token-based accounts

parent e7439c32
...@@ -46,7 +46,7 @@ func (s *OpenAIGatewayService) ForwardAsChatCompletions( ...@@ -46,7 +46,7 @@ func (s *OpenAIGatewayService) ForwardAsChatCompletions(
// 2. Resolve model mapping early so compat prompt_cache_key injection can // 2. Resolve model mapping early so compat prompt_cache_key injection can
// derive a stable seed from the final upstream model family. // derive a stable seed from the final upstream model family.
billingModel := resolveOpenAIForwardModel(account, originalModel, defaultMappedModel) billingModel := resolveOpenAIForwardModel(account, originalModel, defaultMappedModel)
upstreamModel := normalizeCodexModel(billingModel) upstreamModel := normalizeOpenAIModelForUpstream(account, billingModel)
promptCacheKey = strings.TrimSpace(promptCacheKey) promptCacheKey = strings.TrimSpace(promptCacheKey)
compatPromptCacheInjected := false compatPromptCacheInjected := false
......
...@@ -62,7 +62,7 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic( ...@@ -62,7 +62,7 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
// 3. Model mapping // 3. Model mapping
billingModel := resolveOpenAIForwardModel(account, normalizedModel, defaultMappedModel) billingModel := resolveOpenAIForwardModel(account, normalizedModel, defaultMappedModel)
upstreamModel := normalizeCodexModel(billingModel) upstreamModel := normalizeOpenAIModelForUpstream(account, billingModel)
responsesReq.Model = upstreamModel responsesReq.Model = upstreamModel
logger.L().Debug("openai messages: model mapping applied", logger.L().Debug("openai messages: model mapping applied",
......
...@@ -2515,7 +2515,7 @@ func (s *OpenAIGatewayService) ProxyResponsesWebSocketFromClient( ...@@ -2515,7 +2515,7 @@ func (s *OpenAIGatewayService) ProxyResponsesWebSocketFromClient(
} }
normalized = next normalized = next
} }
upstreamModel := normalizeCodexModel(account.GetMappedModel(originalModel)) upstreamModel := normalizeOpenAIModelForUpstream(account, account.GetMappedModel(originalModel))
if upstreamModel != originalModel { if upstreamModel != originalModel {
next, setErr := applyPayloadMutation(normalized, "model", upstreamModel) next, setErr := applyPayloadMutation(normalized, "model", upstreamModel)
if setErr != nil { if setErr != nil {
...@@ -2773,7 +2773,7 @@ func (s *OpenAIGatewayService) ProxyResponsesWebSocketFromClient( ...@@ -2773,7 +2773,7 @@ func (s *OpenAIGatewayService) ProxyResponsesWebSocketFromClient(
mappedModel := "" mappedModel := ""
var mappedModelBytes []byte var mappedModelBytes []byte
if originalModel != "" { if originalModel != "" {
mappedModel = normalizeCodexModel(account.GetMappedModel(originalModel)) mappedModel = normalizeOpenAIModelForUpstream(account, account.GetMappedModel(originalModel))
needModelReplace = mappedModel != "" && mappedModel != originalModel needModelReplace = mappedModel != "" && mappedModel != originalModel
if needModelReplace { if needModelReplace {
mappedModelBytes = []byte(mappedModel) mappedModelBytes = []byte(mappedModel)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment