Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Sign in / Register
Toggle navigation
Menu
Open sidebar
陈曦
sub2api
Commits
8c109411
Commit
8c109411
authored
Mar 29, 2026
by
YanzheL
Browse files
fix(openai): normalize gpt-5.4-xhigh compat mapping
parent
fdd8499f
Changes
5
Hide whitespace changes
Inline
Side-by-side
backend/internal/service/openai_compat_model.go
0 → 100644
View file @
8c109411
package
service
import
(
"strings"
"github.com/Wei-Shaw/sub2api/internal/pkg/apicompat"
)
func
NormalizeOpenAICompatRequestedModel
(
model
string
)
string
{
trimmed
:=
strings
.
TrimSpace
(
model
)
if
trimmed
==
""
{
return
""
}
normalized
,
_
,
ok
:=
splitOpenAICompatReasoningModel
(
trimmed
)
if
!
ok
||
normalized
==
""
{
return
trimmed
}
return
normalized
}
func
applyOpenAICompatModelNormalization
(
req
*
apicompat
.
AnthropicRequest
)
{
if
req
==
nil
{
return
}
originalModel
:=
strings
.
TrimSpace
(
req
.
Model
)
if
originalModel
==
""
{
return
}
normalizedModel
,
derivedEffort
,
hasReasoningSuffix
:=
splitOpenAICompatReasoningModel
(
originalModel
)
if
hasReasoningSuffix
&&
normalizedModel
!=
""
{
req
.
Model
=
normalizedModel
}
if
req
.
OutputConfig
!=
nil
&&
strings
.
TrimSpace
(
req
.
OutputConfig
.
Effort
)
!=
""
{
return
}
claudeEffort
:=
openAIReasoningEffortToClaudeOutputEffort
(
derivedEffort
)
if
claudeEffort
==
""
{
return
}
if
req
.
OutputConfig
==
nil
{
req
.
OutputConfig
=
&
apicompat
.
AnthropicOutputConfig
{}
}
req
.
OutputConfig
.
Effort
=
claudeEffort
}
func
splitOpenAICompatReasoningModel
(
model
string
)
(
normalizedModel
string
,
reasoningEffort
string
,
ok
bool
)
{
trimmed
:=
strings
.
TrimSpace
(
model
)
if
trimmed
==
""
{
return
""
,
""
,
false
}
modelID
:=
trimmed
if
strings
.
Contains
(
modelID
,
"/"
)
{
parts
:=
strings
.
Split
(
modelID
,
"/"
)
modelID
=
parts
[
len
(
parts
)
-
1
]
}
modelID
=
strings
.
TrimSpace
(
modelID
)
if
!
strings
.
HasPrefix
(
strings
.
ToLower
(
modelID
),
"gpt-"
)
{
return
trimmed
,
""
,
false
}
parts
:=
strings
.
FieldsFunc
(
strings
.
ToLower
(
modelID
),
func
(
r
rune
)
bool
{
switch
r
{
case
'-'
,
'_'
,
' '
:
return
true
default
:
return
false
}
})
if
len
(
parts
)
==
0
{
return
trimmed
,
""
,
false
}
last
:=
strings
.
NewReplacer
(
"-"
,
""
,
"_"
,
""
,
" "
,
""
)
.
Replace
(
parts
[
len
(
parts
)
-
1
])
switch
last
{
case
"none"
,
"minimal"
:
case
"low"
,
"medium"
,
"high"
:
reasoningEffort
=
last
case
"xhigh"
,
"extrahigh"
:
reasoningEffort
=
"xhigh"
default
:
return
trimmed
,
""
,
false
}
return
normalizeCodexModel
(
modelID
),
reasoningEffort
,
true
}
// openAIReasoningEffortToClaudeOutputEffort maps an OpenAI reasoning-effort
// token to the Claude output-config effort value: "low"/"medium"/"high" map
// to themselves, "xhigh" maps to "max", and anything else (including "")
// maps to "". Input is trimmed before matching, and the *trimmed* token is
// returned — the original returned the untrimmed input on the passthrough
// cases, which was inconsistent with matching on the trimmed value.
func openAIReasoningEffortToClaudeOutputEffort(effort string) string {
	switch normalized := strings.TrimSpace(effort); normalized {
	case "low", "medium", "high":
		return normalized
	case "xhigh":
		return "max"
	default:
		return ""
	}
}
backend/internal/service/openai_compat_model_test.go
0 → 100644
View file @
8c109411
package
service
import
(
"bytes"
"context"
"io"
"net/http"
"net/http/httptest"
"strings"
"testing"
"github.com/Wei-Shaw/sub2api/internal/pkg/apicompat"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/require"
"github.com/tidwall/gjson"
)
func
TestNormalizeOpenAICompatRequestedModel
(
t
*
testing
.
T
)
{
t
.
Parallel
()
tests
:=
[]
struct
{
name
string
input
string
want
string
}{
{
name
:
"gpt reasoning alias strips xhigh"
,
input
:
"gpt-5.4-xhigh"
,
want
:
"gpt-5.4"
},
{
name
:
"gpt reasoning alias strips none"
,
input
:
"gpt-5.4-none"
,
want
:
"gpt-5.4"
},
{
name
:
"codex max model stays intact"
,
input
:
"gpt-5.1-codex-max"
,
want
:
"gpt-5.1-codex-max"
},
{
name
:
"non openai model unchanged"
,
input
:
"claude-opus-4-6"
,
want
:
"claude-opus-4-6"
},
}
for
_
,
tt
:=
range
tests
{
t
.
Run
(
tt
.
name
,
func
(
t
*
testing
.
T
)
{
require
.
Equal
(
t
,
tt
.
want
,
NormalizeOpenAICompatRequestedModel
(
tt
.
input
))
})
}
}
// TestApplyOpenAICompatModelNormalization verifies the three contract points
// of applyOpenAICompatModelNormalization: effort derived from a "-xhigh"
// model suffix, precedence of an explicit OutputConfig over the suffix, and
// complete passthrough for non-GPT models.
func TestApplyOpenAICompatModelNormalization(t *testing.T) {
	t.Parallel()
	t.Run("derives xhigh from model suffix when output config missing", func(t *testing.T) {
		req := &apicompat.AnthropicRequest{Model: "gpt-5.4-xhigh"}
		applyOpenAICompatModelNormalization(req)
		// Suffix is stripped from the model and surfaces as effort "max".
		require.Equal(t, "gpt-5.4", req.Model)
		require.NotNil(t, req.OutputConfig)
		require.Equal(t, "max", req.OutputConfig.Effort)
	})
	t.Run("explicit output config wins over model suffix", func(t *testing.T) {
		req := &apicompat.AnthropicRequest{
			Model:        "gpt-5.4-xhigh",
			OutputConfig: &apicompat.AnthropicOutputConfig{Effort: "low"},
		}
		applyOpenAICompatModelNormalization(req)
		// Model is still normalized, but the pre-set effort is preserved.
		require.Equal(t, "gpt-5.4", req.Model)
		require.NotNil(t, req.OutputConfig)
		require.Equal(t, "low", req.OutputConfig.Effort)
	})
	t.Run("non openai model is untouched", func(t *testing.T) {
		req := &apicompat.AnthropicRequest{Model: "claude-opus-4-6"}
		applyOpenAICompatModelNormalization(req)
		// No rewrite and no OutputConfig is materialized.
		require.Equal(t, "claude-opus-4-6", req.Model)
		require.Nil(t, req.OutputConfig)
	})
}
// TestForwardAsAnthropic_NormalizesRoutingAndEffortForGpt54XHigh drives the
// full ForwardAsAnthropic path with a "gpt-5.4-xhigh" request against a
// canned SSE upstream and checks that: the upstream payload uses the
// normalized model plus reasoning.effort "xhigh"; the result reports the
// client model, upstream model, billing model, and derived effort; and the
// client response echoes the originally requested model name.
func TestForwardAsAnthropic_NormalizesRoutingAndEffortForGpt54XHigh(t *testing.T) {
	t.Parallel()
	gin.SetMode(gin.TestMode)
	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	// Client request: non-streaming Anthropic-style body with the alias model.
	body := []byte(`{"model":"gpt-5.4-xhigh","max_tokens":16,"messages":[{"role":"user","content":"hello"}],"stream":false}`)
	c.Request = httptest.NewRequest(http.MethodPost, "/v1/messages", bytes.NewReader(body))
	c.Request.Header.Set("Content-Type", "application/json")
	// Canned upstream SSE stream: one completed response event, then [DONE].
	upstreamBody := strings.Join([]string{
		`data: {"type":"response.completed","response":{"id":"resp_1","object":"response","model":"gpt-5.4","status":"completed","output":[{"type":"message","id":"msg_1","role":"assistant","status":"completed","content":[{"type":"output_text","text":"ok"}]}],"usage":{"input_tokens":5,"output_tokens":2,"total_tokens":7}}}`,
		"",
		"data: [DONE]",
		"",
	}, "\n")
	upstream := &httpUpstreamRecorder{resp: &http.Response{
		StatusCode: http.StatusOK,
		Header:     http.Header{"Content-Type": []string{"text/event-stream"}, "x-request-id": []string{"rid_compat"}},
		Body:       io.NopCloser(strings.NewReader(upstreamBody)),
	}}
	svc := &OpenAIGatewayService{httpUpstream: upstream}
	// OAuth account whose mapping only knows the normalized "gpt-5.4" key,
	// so routing works only if the "-xhigh" suffix was stripped first.
	account := &Account{
		ID:          1,
		Name:        "openai-oauth",
		Platform:    PlatformOpenAI,
		Type:        AccountTypeOAuth,
		Concurrency: 1,
		Credentials: map[string]any{
			"access_token":       "oauth-token",
			"chatgpt_account_id": "chatgpt-acc",
			"model_mapping": map[string]any{
				"gpt-5.4": "gpt-5.4",
			},
		},
	}
	result, err := svc.ForwardAsAnthropic(context.Background(), c, account, body, "", "gpt-5.1")
	require.NoError(t, err)
	require.NotNil(t, result)
	// Result metadata: client-facing model keeps the alias; upstream and
	// billing use the normalized name; effort was derived from the suffix.
	require.Equal(t, "gpt-5.4-xhigh", result.Model)
	require.Equal(t, "gpt-5.4", result.UpstreamModel)
	require.Equal(t, "gpt-5.4", result.BillingModel)
	require.NotNil(t, result.ReasoningEffort)
	require.Equal(t, "xhigh", *result.ReasoningEffort)
	// Upstream payload carries the normalized model and the raw effort token.
	require.Equal(t, "gpt-5.4", gjson.GetBytes(upstream.lastBody, "model").String())
	require.Equal(t, "xhigh", gjson.GetBytes(upstream.lastBody, "reasoning.effort").String())
	// Client response echoes the requested alias and the upstream text.
	require.Equal(t, http.StatusOK, rec.Code)
	require.Equal(t, "gpt-5.4-xhigh", gjson.GetBytes(rec.Body.Bytes(), "model").String())
	require.Equal(t, "ok", gjson.GetBytes(rec.Body.Bytes(), "content.0.text").String())
	t.Logf("upstream body: %s", string(upstream.lastBody))
	t.Logf("response body: %s", rec.Body.String())
}
backend/internal/service/openai_gateway_messages.go
View file @
8c109411
...
@@ -40,6 +40,7 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
...
@@ -40,6 +40,7 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
return
nil
,
fmt
.
Errorf
(
"parse anthropic request: %w"
,
err
)
return
nil
,
fmt
.
Errorf
(
"parse anthropic request: %w"
,
err
)
}
}
originalModel
:=
anthropicReq
.
Model
originalModel
:=
anthropicReq
.
Model
applyOpenAICompatModelNormalization
(
&
anthropicReq
)
clientStream
:=
anthropicReq
.
Stream
// client's original stream preference
clientStream
:=
anthropicReq
.
Stream
// client's original stream preference
// 2. Convert Anthropic → Responses
// 2. Convert Anthropic → Responses
...
@@ -59,7 +60,7 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
...
@@ -59,7 +60,7 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
}
}
// 3. Model mapping
// 3. Model mapping
mappedModel
:=
resolveOpenAIForwardModel
(
account
,
original
Model
,
defaultMappedModel
)
mappedModel
:=
resolveOpenAIForwardModel
(
account
,
anthropicReq
.
Model
,
defaultMappedModel
)
responsesReq
.
Model
=
mappedModel
responsesReq
.
Model
=
mappedModel
logger
.
L
()
.
Debug
(
"openai messages: model mapping applied"
,
logger
.
L
()
.
Debug
(
"openai messages: model mapping applied"
,
...
...
backend/internal/service/openai_model_mapping.go
View file @
8c109411
...
@@ -4,6 +4,7 @@ package service
...
@@ -4,6 +4,7 @@ package service
// forwarding. Group-level default mapping only applies when the account itself
// forwarding. Group-level default mapping only applies when the account itself
// did not match any explicit model_mapping rule.
// did not match any explicit model_mapping rule.
func
resolveOpenAIForwardModel
(
account
*
Account
,
requestedModel
,
defaultMappedModel
string
)
string
{
func
resolveOpenAIForwardModel
(
account
*
Account
,
requestedModel
,
defaultMappedModel
string
)
string
{
requestedModel
=
NormalizeOpenAICompatRequestedModel
(
requestedModel
)
if
account
==
nil
{
if
account
==
nil
{
if
defaultMappedModel
!=
""
{
if
defaultMappedModel
!=
""
{
return
defaultMappedModel
return
defaultMappedModel
...
...
backend/internal/service/openai_model_mapping_test.go
View file @
8c109411
...
@@ -58,6 +58,19 @@ func TestResolveOpenAIForwardModel(t *testing.T) {
...
@@ -58,6 +58,19 @@ func TestResolveOpenAIForwardModel(t *testing.T) {
defaultMappedModel
:
"gpt-4o-mini"
,
defaultMappedModel
:
"gpt-4o-mini"
,
expectedModel
:
"gpt-5.4"
,
expectedModel
:
"gpt-5.4"
,
},
},
{
name
:
"normalizes gpt reasoning alias before passthrough mapping"
,
account
:
&
Account
{
Credentials
:
map
[
string
]
any
{
"model_mapping"
:
map
[
string
]
any
{
"gpt-5.4"
:
"gpt-5.4"
,
},
},
},
requestedModel
:
"gpt-5.4-xhigh"
,
defaultMappedModel
:
"gpt-5.1"
,
expectedModel
:
"gpt-5.4"
,
},
}
}
for
_
,
tt
:=
range
tests
{
for
_
,
tt
:=
range
tests
{
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment