Commit e27b0adb authored by erio's avatar erio
Browse files

refactor: remove resolveOpenAIUpstreamModel, use normalizeCodexModel directly

Eliminates an unnecessary indirection layer. The wrapper function only
called normalizeCodexModel, adding a special case for "gpt 5.3 codex spark"
(the space-separated variant) that is no longer needed.

All call sites now use normalizeCodexModel directly.
parent e59fa863
...@@ -10,8 +10,8 @@ import ( ...@@ -10,8 +10,8 @@ import (
const compatPromptCacheKeyPrefix = "compat_cc_" const compatPromptCacheKeyPrefix = "compat_cc_"
func shouldAutoInjectPromptCacheKeyForCompat(model string) bool { func shouldAutoInjectPromptCacheKeyForCompat(model string) bool {
switch resolveOpenAIUpstreamModel(strings.TrimSpace(model)) { switch normalizeCodexModel(strings.TrimSpace(model)) {
case "gpt-5.4", "gpt-5.3-codex", "gpt-5.3-codex-spark": case "gpt-5.4", "gpt-5.3-codex":
return true return true
default: default:
return false return false
...@@ -23,9 +23,9 @@ func deriveCompatPromptCacheKey(req *apicompat.ChatCompletionsRequest, mappedMod ...@@ -23,9 +23,9 @@ func deriveCompatPromptCacheKey(req *apicompat.ChatCompletionsRequest, mappedMod
return "" return ""
} }
normalizedModel := resolveOpenAIUpstreamModel(strings.TrimSpace(mappedModel)) normalizedModel := normalizeCodexModel(strings.TrimSpace(mappedModel))
if normalizedModel == "" { if normalizedModel == "" {
normalizedModel = resolveOpenAIUpstreamModel(strings.TrimSpace(req.Model)) normalizedModel = normalizeCodexModel(strings.TrimSpace(req.Model))
} }
if normalizedModel == "" { if normalizedModel == "" {
normalizedModel = strings.TrimSpace(req.Model) normalizedModel = strings.TrimSpace(req.Model)
......
...@@ -46,7 +46,7 @@ func (s *OpenAIGatewayService) ForwardAsChatCompletions( ...@@ -46,7 +46,7 @@ func (s *OpenAIGatewayService) ForwardAsChatCompletions(
// 2. Resolve model mapping early so compat prompt_cache_key injection can // 2. Resolve model mapping early so compat prompt_cache_key injection can
// derive a stable seed from the final upstream model family. // derive a stable seed from the final upstream model family.
billingModel := resolveOpenAIForwardModel(account, originalModel, defaultMappedModel) billingModel := resolveOpenAIForwardModel(account, originalModel, defaultMappedModel)
upstreamModel := resolveOpenAIUpstreamModel(billingModel) upstreamModel := normalizeCodexModel(billingModel)
promptCacheKey = strings.TrimSpace(promptCacheKey) promptCacheKey = strings.TrimSpace(promptCacheKey)
compatPromptCacheInjected := false compatPromptCacheInjected := false
......
...@@ -62,7 +62,7 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic( ...@@ -62,7 +62,7 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
// 3. Model mapping // 3. Model mapping
billingModel := resolveOpenAIForwardModel(account, normalizedModel, defaultMappedModel) billingModel := resolveOpenAIForwardModel(account, normalizedModel, defaultMappedModel)
upstreamModel := resolveOpenAIUpstreamModel(billingModel) upstreamModel := normalizeCodexModel(billingModel)
responsesReq.Model = upstreamModel responsesReq.Model = upstreamModel
logger.L().Debug("openai messages: model mapping applied", logger.L().Debug("openai messages: model mapping applied",
......
...@@ -1939,7 +1939,7 @@ func (s *OpenAIGatewayService) Forward(ctx context.Context, c *gin.Context, acco ...@@ -1939,7 +1939,7 @@ func (s *OpenAIGatewayService) Forward(ctx context.Context, c *gin.Context, acco
// 针对所有 OpenAI 账号执行 Codex 模型名规范化,确保上游识别一致。 // 针对所有 OpenAI 账号执行 Codex 模型名规范化,确保上游识别一致。
if model, ok := reqBody["model"].(string); ok { if model, ok := reqBody["model"].(string); ok {
upstreamModel = resolveOpenAIUpstreamModel(model) upstreamModel = normalizeCodexModel(model)
if upstreamModel != "" && upstreamModel != model { if upstreamModel != "" && upstreamModel != model {
logger.LegacyPrintf("service.openai_gateway", "[OpenAI] Upstream model resolved: %s -> %s (account: %s, type: %s, isCodexCLI: %v)", logger.LegacyPrintf("service.openai_gateway", "[OpenAI] Upstream model resolved: %s -> %s (account: %s, type: %s, isCodexCLI: %v)",
model, upstreamModel, account.Name, account.Type, isCodexCLI) model, upstreamModel, account.Name, account.Type, isCodexCLI)
......
package service package service
import "strings" // resolveOpenAIForwardModel determines the upstream model for OpenAI-compatible
// forwarding. Group-level default mapping only applies when the account itself
// resolveOpenAIForwardModel resolves the account/group mapping result for // did not match any explicit model_mapping rule.
// OpenAI-compatible forwarding. Group-level default mapping only applies when
// the account itself did not match any explicit model_mapping rule.
func resolveOpenAIForwardModel(account *Account, requestedModel, defaultMappedModel string) string { func resolveOpenAIForwardModel(account *Account, requestedModel, defaultMappedModel string) string {
if account == nil { if account == nil {
if defaultMappedModel != "" { if defaultMappedModel != "" {
...@@ -19,23 +17,3 @@ func resolveOpenAIForwardModel(account *Account, requestedModel, defaultMappedMo ...@@ -19,23 +17,3 @@ func resolveOpenAIForwardModel(account *Account, requestedModel, defaultMappedMo
} }
return mappedModel return mappedModel
} }
// resolveOpenAIUpstreamModel maps a requested model name to the upstream
// OpenAI model identifier. The bare "gpt-5.3-codex-spark" family is pinned
// to its canonical dash-separated spelling; every other name is delegated
// to normalizeCodexModel after trimming surrounding whitespace.
func resolveOpenAIUpstreamModel(model string) string {
	if !isBareGPT53CodexSparkModel(model) {
		return normalizeCodexModel(strings.TrimSpace(model))
	}
	return "gpt-5.3-codex-spark"
}
// isBareGPT53CodexSparkModel reports whether model names exactly the bare
// "gpt-5.3-codex-spark" variant — either dash- or space-separated — possibly
// preceded by provider segments such as "openai/". The comparison ignores
// surrounding whitespace and letter case; suffixed variants (e.g.
// "gpt-5.3-codex-spark-high") do not match.
func isBareGPT53CodexSparkModel(model string) bool {
	name := strings.TrimSpace(model)
	if name == "" {
		return false
	}
	// Keep only the segment after the final "/" so provider-prefixed
	// identifiers like "openai/gpt-5.3-codex-spark" still match.
	if idx := strings.LastIndex(name, "/"); idx >= 0 {
		name = name[idx+1:]
	}
	name = strings.ToLower(strings.TrimSpace(name))
	return name == "gpt-5.3-codex-spark" || name == "gpt 5.3 codex spark"
}
...@@ -74,30 +74,28 @@ func TestResolveOpenAIForwardModel_PreventsClaudeModelFromFallingBackToGpt51(t * ...@@ -74,30 +74,28 @@ func TestResolveOpenAIForwardModel_PreventsClaudeModelFromFallingBackToGpt51(t *
Credentials: map[string]any{}, Credentials: map[string]any{},
} }
withoutDefault := resolveOpenAIUpstreamModel(resolveOpenAIForwardModel(account, "claude-opus-4-6", "")) withoutDefault := normalizeCodexModel(resolveOpenAIForwardModel(account, "claude-opus-4-6", ""))
if withoutDefault != "gpt-5.1" { if withoutDefault != "gpt-5.1" {
t.Fatalf("resolveOpenAIUpstreamModel(...) = %q, want %q", withoutDefault, "gpt-5.1") t.Fatalf("normalizeCodexModel(...) = %q, want %q", withoutDefault, "gpt-5.1")
} }
withDefault := resolveOpenAIUpstreamModel(resolveOpenAIForwardModel(account, "claude-opus-4-6", "gpt-5.4")) withDefault := normalizeCodexModel(resolveOpenAIForwardModel(account, "claude-opus-4-6", "gpt-5.4"))
if withDefault != "gpt-5.4" { if withDefault != "gpt-5.4" {
t.Fatalf("resolveOpenAIUpstreamModel(...) = %q, want %q", withDefault, "gpt-5.4") t.Fatalf("normalizeCodexModel(...) = %q, want %q", withDefault, "gpt-5.4")
} }
} }
func TestResolveOpenAIUpstreamModel(t *testing.T) { func TestNormalizeCodexModel(t *testing.T) {
cases := map[string]string{ cases := map[string]string{
"gpt-5.3-codex-spark": "gpt-5.3-codex-spark", "gpt-5.3-codex-spark": "gpt-5.3-codex",
"gpt 5.3 codex spark": "gpt-5.3-codex-spark",
" openai/gpt-5.3-codex-spark ": "gpt-5.3-codex-spark",
"gpt-5.3-codex-spark-high": "gpt-5.3-codex", "gpt-5.3-codex-spark-high": "gpt-5.3-codex",
"gpt-5.3-codex-spark-xhigh": "gpt-5.3-codex", "gpt-5.3-codex-spark-xhigh": "gpt-5.3-codex",
"gpt-5.3": "gpt-5.3-codex", "gpt-5.3": "gpt-5.3-codex",
} }
for input, expected := range cases { for input, expected := range cases {
if got := resolveOpenAIUpstreamModel(input); got != expected { if got := normalizeCodexModel(input); got != expected {
t.Fatalf("resolveOpenAIUpstreamModel(%q) = %q, want %q", input, got, expected) t.Fatalf("normalizeCodexModel(%q) = %q, want %q", input, got, expected)
} }
} }
} }
...@@ -2515,7 +2515,7 @@ func (s *OpenAIGatewayService) ProxyResponsesWebSocketFromClient( ...@@ -2515,7 +2515,7 @@ func (s *OpenAIGatewayService) ProxyResponsesWebSocketFromClient(
} }
normalized = next normalized = next
} }
upstreamModel := resolveOpenAIUpstreamModel(account.GetMappedModel(originalModel)) upstreamModel := normalizeCodexModel(account.GetMappedModel(originalModel))
if upstreamModel != originalModel { if upstreamModel != originalModel {
next, setErr := applyPayloadMutation(normalized, "model", upstreamModel) next, setErr := applyPayloadMutation(normalized, "model", upstreamModel)
if setErr != nil { if setErr != nil {
...@@ -2773,7 +2773,7 @@ func (s *OpenAIGatewayService) ProxyResponsesWebSocketFromClient( ...@@ -2773,7 +2773,7 @@ func (s *OpenAIGatewayService) ProxyResponsesWebSocketFromClient(
mappedModel := "" mappedModel := ""
var mappedModelBytes []byte var mappedModelBytes []byte
if originalModel != "" { if originalModel != "" {
mappedModel = resolveOpenAIUpstreamModel(account.GetMappedModel(originalModel)) mappedModel = normalizeCodexModel(account.GetMappedModel(originalModel))
needModelReplace = mappedModel != "" && mappedModel != originalModel needModelReplace = mappedModel != "" && mappedModel != originalModel
if needModelReplace { if needModelReplace {
mappedModelBytes = []byte(mappedModel) mappedModelBytes = []byte(mappedModel)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment