Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Sign in / Register
Toggle navigation
Menu
Open sidebar
陈曦
sub2api
Commits
ebe6f418
Commit
ebe6f418
authored
Mar 09, 2026
by
shaw
Browse files
fix: gpt->claude格式转换对齐effort映射和fast
parent
391e79f8
Changes
6
Hide whitespace changes
Inline
Side-by-side
backend/internal/pkg/apicompat/anthropic_responses_test.go
View file @
ebe6f418
...
@@ -631,7 +631,8 @@ func TestAnthropicToResponses_ThinkingEnabled(t *testing.T) {
...
@@ -631,7 +631,8 @@ func TestAnthropicToResponses_ThinkingEnabled(t *testing.T) {
resp
,
err
:=
AnthropicToResponses
(
req
)
resp
,
err
:=
AnthropicToResponses
(
req
)
require
.
NoError
(
t
,
err
)
require
.
NoError
(
t
,
err
)
require
.
NotNil
(
t
,
resp
.
Reasoning
)
require
.
NotNil
(
t
,
resp
.
Reasoning
)
assert
.
Equal
(
t
,
"high"
,
resp
.
Reasoning
.
Effort
)
// thinking.type is ignored for effort; default xhigh applies.
assert
.
Equal
(
t
,
"xhigh"
,
resp
.
Reasoning
.
Effort
)
assert
.
Equal
(
t
,
"auto"
,
resp
.
Reasoning
.
Summary
)
assert
.
Equal
(
t
,
"auto"
,
resp
.
Reasoning
.
Summary
)
assert
.
Contains
(
t
,
resp
.
Include
,
"reasoning.encrypted_content"
)
assert
.
Contains
(
t
,
resp
.
Include
,
"reasoning.encrypted_content"
)
assert
.
NotContains
(
t
,
resp
.
Include
,
"reasoning.summary"
)
assert
.
NotContains
(
t
,
resp
.
Include
,
"reasoning.summary"
)
...
@@ -648,7 +649,8 @@ func TestAnthropicToResponses_ThinkingAdaptive(t *testing.T) {
...
@@ -648,7 +649,8 @@ func TestAnthropicToResponses_ThinkingAdaptive(t *testing.T) {
resp
,
err
:=
AnthropicToResponses
(
req
)
resp
,
err
:=
AnthropicToResponses
(
req
)
require
.
NoError
(
t
,
err
)
require
.
NoError
(
t
,
err
)
require
.
NotNil
(
t
,
resp
.
Reasoning
)
require
.
NotNil
(
t
,
resp
.
Reasoning
)
assert
.
Equal
(
t
,
"medium"
,
resp
.
Reasoning
.
Effort
)
// thinking.type is ignored for effort; default xhigh applies.
assert
.
Equal
(
t
,
"xhigh"
,
resp
.
Reasoning
.
Effort
)
assert
.
Equal
(
t
,
"auto"
,
resp
.
Reasoning
.
Summary
)
assert
.
Equal
(
t
,
"auto"
,
resp
.
Reasoning
.
Summary
)
assert
.
NotContains
(
t
,
resp
.
Include
,
"reasoning.summary"
)
assert
.
NotContains
(
t
,
resp
.
Include
,
"reasoning.summary"
)
}
}
...
@@ -663,8 +665,9 @@ func TestAnthropicToResponses_ThinkingDisabled(t *testing.T) {
...
@@ -663,8 +665,9 @@ func TestAnthropicToResponses_ThinkingDisabled(t *testing.T) {
resp
,
err
:=
AnthropicToResponses
(
req
)
resp
,
err
:=
AnthropicToResponses
(
req
)
require
.
NoError
(
t
,
err
)
require
.
NoError
(
t
,
err
)
assert
.
Nil
(
t
,
resp
.
Reasoning
)
// Default effort applies (high → xhigh) even when thinking is disabled.
assert
.
NotContains
(
t
,
resp
.
Include
,
"reasoning.summary"
)
require
.
NotNil
(
t
,
resp
.
Reasoning
)
assert
.
Equal
(
t
,
"xhigh"
,
resp
.
Reasoning
.
Effort
)
}
}
func
TestAnthropicToResponses_NoThinking
(
t
*
testing
.
T
)
{
func
TestAnthropicToResponses_NoThinking
(
t
*
testing
.
T
)
{
...
@@ -676,7 +679,93 @@ func TestAnthropicToResponses_NoThinking(t *testing.T) {
...
@@ -676,7 +679,93 @@ func TestAnthropicToResponses_NoThinking(t *testing.T) {
resp
,
err
:=
AnthropicToResponses
(
req
)
resp
,
err
:=
AnthropicToResponses
(
req
)
require
.
NoError
(
t
,
err
)
require
.
NoError
(
t
,
err
)
assert
.
Nil
(
t
,
resp
.
Reasoning
)
// Default effort applies (high → xhigh) when no thinking/output_config is set.
require
.
NotNil
(
t
,
resp
.
Reasoning
)
assert
.
Equal
(
t
,
"xhigh"
,
resp
.
Reasoning
.
Effort
)
}
// ---------------------------------------------------------------------------
// output_config.effort override tests
// ---------------------------------------------------------------------------
func
TestAnthropicToResponses_OutputConfigOverridesDefault
(
t
*
testing
.
T
)
{
// Default is xhigh, but output_config.effort="low" overrides. low→low after mapping.
req
:=
&
AnthropicRequest
{
Model
:
"gpt-5.2"
,
MaxTokens
:
1024
,
Messages
:
[]
AnthropicMessage
{{
Role
:
"user"
,
Content
:
json
.
RawMessage
(
`"Hello"`
)}},
Thinking
:
&
AnthropicThinking
{
Type
:
"enabled"
,
BudgetTokens
:
10000
},
OutputConfig
:
&
AnthropicOutputConfig
{
Effort
:
"low"
},
}
resp
,
err
:=
AnthropicToResponses
(
req
)
require
.
NoError
(
t
,
err
)
require
.
NotNil
(
t
,
resp
.
Reasoning
)
assert
.
Equal
(
t
,
"low"
,
resp
.
Reasoning
.
Effort
)
assert
.
Equal
(
t
,
"auto"
,
resp
.
Reasoning
.
Summary
)
}
func
TestAnthropicToResponses_OutputConfigWithoutThinking
(
t
*
testing
.
T
)
{
// No thinking field, but output_config.effort="medium" → creates reasoning.
// medium→high after mapping.
req
:=
&
AnthropicRequest
{
Model
:
"gpt-5.2"
,
MaxTokens
:
1024
,
Messages
:
[]
AnthropicMessage
{{
Role
:
"user"
,
Content
:
json
.
RawMessage
(
`"Hello"`
)}},
OutputConfig
:
&
AnthropicOutputConfig
{
Effort
:
"medium"
},
}
resp
,
err
:=
AnthropicToResponses
(
req
)
require
.
NoError
(
t
,
err
)
require
.
NotNil
(
t
,
resp
.
Reasoning
)
assert
.
Equal
(
t
,
"high"
,
resp
.
Reasoning
.
Effort
)
assert
.
Equal
(
t
,
"auto"
,
resp
.
Reasoning
.
Summary
)
}
func
TestAnthropicToResponses_OutputConfigHigh
(
t
*
testing
.
T
)
{
// output_config.effort="high" → mapped to "xhigh".
req
:=
&
AnthropicRequest
{
Model
:
"gpt-5.2"
,
MaxTokens
:
1024
,
Messages
:
[]
AnthropicMessage
{{
Role
:
"user"
,
Content
:
json
.
RawMessage
(
`"Hello"`
)}},
OutputConfig
:
&
AnthropicOutputConfig
{
Effort
:
"high"
},
}
resp
,
err
:=
AnthropicToResponses
(
req
)
require
.
NoError
(
t
,
err
)
require
.
NotNil
(
t
,
resp
.
Reasoning
)
assert
.
Equal
(
t
,
"xhigh"
,
resp
.
Reasoning
.
Effort
)
assert
.
Equal
(
t
,
"auto"
,
resp
.
Reasoning
.
Summary
)
}
func
TestAnthropicToResponses_NoOutputConfig
(
t
*
testing
.
T
)
{
// No output_config → default xhigh regardless of thinking.type.
req
:=
&
AnthropicRequest
{
Model
:
"gpt-5.2"
,
MaxTokens
:
1024
,
Messages
:
[]
AnthropicMessage
{{
Role
:
"user"
,
Content
:
json
.
RawMessage
(
`"Hello"`
)}},
Thinking
:
&
AnthropicThinking
{
Type
:
"enabled"
,
BudgetTokens
:
10000
},
}
resp
,
err
:=
AnthropicToResponses
(
req
)
require
.
NoError
(
t
,
err
)
require
.
NotNil
(
t
,
resp
.
Reasoning
)
assert
.
Equal
(
t
,
"xhigh"
,
resp
.
Reasoning
.
Effort
)
}
func
TestAnthropicToResponses_OutputConfigWithoutEffort
(
t
*
testing
.
T
)
{
// output_config present but effort empty (e.g. only format set) → default xhigh.
req
:=
&
AnthropicRequest
{
Model
:
"gpt-5.2"
,
MaxTokens
:
1024
,
Messages
:
[]
AnthropicMessage
{{
Role
:
"user"
,
Content
:
json
.
RawMessage
(
`"Hello"`
)}},
OutputConfig
:
&
AnthropicOutputConfig
{},
}
resp
,
err
:=
AnthropicToResponses
(
req
)
require
.
NoError
(
t
,
err
)
require
.
NotNil
(
t
,
resp
.
Reasoning
)
assert
.
Equal
(
t
,
"xhigh"
,
resp
.
Reasoning
.
Effort
)
}
}
// ---------------------------------------------------------------------------
// ---------------------------------------------------------------------------
...
...
backend/internal/pkg/apicompat/anthropic_to_responses.go
View file @
ebe6f418
...
@@ -45,18 +45,16 @@ func AnthropicToResponses(req *AnthropicRequest) (*ResponsesRequest, error) {
...
@@ -45,18 +45,16 @@ func AnthropicToResponses(req *AnthropicRequest) (*ResponsesRequest, error) {
out
.
Tools
=
convertAnthropicToolsToResponses
(
req
.
Tools
)
out
.
Tools
=
convertAnthropicToolsToResponses
(
req
.
Tools
)
}
}
// Convert thinking → reasoning.
// Determine reasoning effort: only output_config.effort controls the
// generate_summary="auto" causes the upstream to emit reasoning_summary_text
// level; thinking.type is ignored. Default is xhigh when unset.
// streaming events; the include array only needs reasoning.encrypted_content
// Anthropic levels map to OpenAI: low→low, medium→high, high→xhigh.
// (already set above) for content continuity.
effort
:=
"high"
// default → maps to xhigh
if
req
.
Thinking
!=
nil
{
if
req
.
OutputConfig
!=
nil
&&
req
.
OutputConfig
.
Effort
!=
""
{
switch
req
.
Thinking
.
Type
{
effort
=
req
.
OutputConfig
.
Effort
case
"enabled"
:
}
out
.
Reasoning
=
&
ResponsesReasoning
{
Effort
:
"high"
,
Summary
:
"auto"
}
out
.
Reasoning
=
&
ResponsesReasoning
{
case
"adaptive"
:
Effort
:
mapAnthropicEffortToResponses
(
effort
),
out
.
Reasoning
=
&
ResponsesReasoning
{
Effort
:
"medium"
,
Summary
:
"auto"
}
Summary
:
"auto"
,
}
// "disabled" or unknown → omit reasoning
}
}
// Convert tool_choice
// Convert tool_choice
...
@@ -380,6 +378,23 @@ func extractAnthropicTextFromBlocks(blocks []AnthropicContentBlock) string {
...
@@ -380,6 +378,23 @@ func extractAnthropicTextFromBlocks(blocks []AnthropicContentBlock) string {
return
strings
.
Join
(
parts
,
"
\n\n
"
)
return
strings
.
Join
(
parts
,
"
\n\n
"
)
}
}
// mapAnthropicEffortToResponses translates an Anthropic reasoning effort
// level into the equivalent OpenAI Responses API level:
//
//	low    → low
//	medium → high
//	high   → xhigh
//
// "low" and any unrecognized value are returned unchanged.
func mapAnthropicEffortToResponses(effort string) string {
	if effort == "medium" {
		return "high"
	}
	if effort == "high" {
		return "xhigh"
	}
	// "low" and unknown values pass through as-is.
	return effort
}
// convertAnthropicToolsToResponses maps Anthropic tool definitions to
// convertAnthropicToolsToResponses maps Anthropic tool definitions to
// Responses API tools. Server-side tools like web_search are mapped to their
// Responses API tools. Server-side tools like web_search are mapped to their
// OpenAI equivalents; regular tools become function tools.
// OpenAI equivalents; regular tools become function tools.
...
...
backend/internal/pkg/apicompat/types.go
View file @
ebe6f418
...
@@ -12,17 +12,23 @@ import "encoding/json"
...
@@ -12,17 +12,23 @@ import "encoding/json"
// AnthropicRequest is the request body for POST /v1/messages.
// AnthropicRequest is the request body for POST /v1/messages.
type
AnthropicRequest
struct
{
type
AnthropicRequest
struct
{
Model
string
`json:"model"`
Model
string
`json:"model"`
MaxTokens
int
`json:"max_tokens"`
MaxTokens
int
`json:"max_tokens"`
System
json
.
RawMessage
`json:"system,omitempty"`
// string or []AnthropicContentBlock
System
json
.
RawMessage
`json:"system,omitempty"`
// string or []AnthropicContentBlock
Messages
[]
AnthropicMessage
`json:"messages"`
Messages
[]
AnthropicMessage
`json:"messages"`
Tools
[]
AnthropicTool
`json:"tools,omitempty"`
Tools
[]
AnthropicTool
`json:"tools,omitempty"`
Stream
bool
`json:"stream,omitempty"`
Stream
bool
`json:"stream,omitempty"`
Temperature
*
float64
`json:"temperature,omitempty"`
Temperature
*
float64
`json:"temperature,omitempty"`
TopP
*
float64
`json:"top_p,omitempty"`
TopP
*
float64
`json:"top_p,omitempty"`
StopSeqs
[]
string
`json:"stop_sequences,omitempty"`
StopSeqs
[]
string
`json:"stop_sequences,omitempty"`
Thinking
*
AnthropicThinking
`json:"thinking,omitempty"`
Thinking
*
AnthropicThinking
`json:"thinking,omitempty"`
ToolChoice
json
.
RawMessage
`json:"tool_choice,omitempty"`
ToolChoice
json
.
RawMessage
`json:"tool_choice,omitempty"`
OutputConfig
*
AnthropicOutputConfig
`json:"output_config,omitempty"`
}
// AnthropicOutputConfig controls output generation parameters.
//
// An empty Effort leaves the converter's default effort in place (the
// override in AnthropicToResponses only fires when Effort is non-empty).
type AnthropicOutputConfig struct {
	Effort string `json:"effort,omitempty"` // "low" | "medium" | "high"
}
}
// AnthropicThinking configures extended thinking in the Anthropic API.
// AnthropicThinking configures extended thinking in the Anthropic API.
...
@@ -156,6 +162,7 @@ type ResponsesRequest struct {
...
@@ -156,6 +162,7 @@ type ResponsesRequest struct {
Store
*
bool
`json:"store,omitempty"`
Store
*
bool
`json:"store,omitempty"`
Reasoning
*
ResponsesReasoning
`json:"reasoning,omitempty"`
Reasoning
*
ResponsesReasoning
`json:"reasoning,omitempty"`
ToolChoice
json
.
RawMessage
`json:"tool_choice,omitempty"`
ToolChoice
json
.
RawMessage
`json:"tool_choice,omitempty"`
ServiceTier
string
`json:"service_tier,omitempty"`
}
}
// ResponsesReasoning configures reasoning effort in the Responses API.
// ResponsesReasoning configures reasoning effort in the Responses API.
...
...
backend/internal/service/gateway_beta_test.go
View file @
ebe6f418
...
@@ -148,6 +148,32 @@ func TestBuildBetaTokenSet(t *testing.T) {
...
@@ -148,6 +148,32 @@ func TestBuildBetaTokenSet(t *testing.T) {
require
.
Empty
(
t
,
empty
)
require
.
Empty
(
t
,
empty
)
}
}
func
TestContainsBetaToken
(
t
*
testing
.
T
)
{
tests
:=
[]
struct
{
name
string
header
string
token
string
want
bool
}{
{
"present in middle"
,
"oauth-2025-04-20,fast-mode-2026-02-01,interleaved-thinking-2025-05-14"
,
"fast-mode-2026-02-01"
,
true
},
{
"present at start"
,
"fast-mode-2026-02-01,oauth-2025-04-20"
,
"fast-mode-2026-02-01"
,
true
},
{
"present at end"
,
"oauth-2025-04-20,fast-mode-2026-02-01"
,
"fast-mode-2026-02-01"
,
true
},
{
"only token"
,
"fast-mode-2026-02-01"
,
"fast-mode-2026-02-01"
,
true
},
{
"not present"
,
"oauth-2025-04-20,interleaved-thinking-2025-05-14"
,
"fast-mode-2026-02-01"
,
false
},
{
"with spaces"
,
"oauth-2025-04-20, fast-mode-2026-02-01 , interleaved-thinking-2025-05-14"
,
"fast-mode-2026-02-01"
,
true
},
{
"empty header"
,
""
,
"fast-mode-2026-02-01"
,
false
},
{
"empty token"
,
"fast-mode-2026-02-01"
,
""
,
false
},
{
"partial match"
,
"fast-mode-2026-02-01-extra"
,
"fast-mode-2026-02-01"
,
false
},
}
for
_
,
tt
:=
range
tests
{
t
.
Run
(
tt
.
name
,
func
(
t
*
testing
.
T
)
{
got
:=
containsBetaToken
(
tt
.
header
,
tt
.
token
)
require
.
Equal
(
t
,
tt
.
want
,
got
)
})
}
}
func
TestStripBetaTokensWithSet_EmptyDropSet
(
t
*
testing
.
T
)
{
func
TestStripBetaTokensWithSet_EmptyDropSet
(
t
*
testing
.
T
)
{
header
:=
"oauth-2025-04-20,interleaved-thinking-2025-05-14"
header
:=
"oauth-2025-04-20,interleaved-thinking-2025-05-14"
got
:=
stripBetaTokensWithSet
(
header
,
map
[
string
]
struct
{}{})
got
:=
stripBetaTokensWithSet
(
header
,
map
[
string
]
struct
{}{})
...
...
backend/internal/service/gateway_service.go
View file @
ebe6f418
...
@@ -5341,6 +5341,19 @@ func droppedBetaSet(extra ...string) map[string]struct{} {
...
@@ -5341,6 +5341,19 @@ func droppedBetaSet(extra ...string) map[string]struct{} {
return
m
return
m
}
}
// containsBetaToken reports whether a comma-separated header value contains
// the given token as a whole element (surrounding whitespace is ignored).
// An empty header or empty token never matches.
func containsBetaToken(header, token string) bool {
	if header == "" || token == "" {
		return false
	}
	rest := header
	for {
		part, tail, found := strings.Cut(rest, ",")
		if strings.TrimSpace(part) == token {
			return true
		}
		if !found {
			// Last element consumed without a match.
			return false
		}
		rest = tail
	}
}
func
buildBetaTokenSet
(
tokens
[]
string
)
map
[
string
]
struct
{}
{
func
buildBetaTokenSet
(
tokens
[]
string
)
map
[
string
]
struct
{}
{
m
:=
make
(
map
[
string
]
struct
{},
len
(
tokens
))
m
:=
make
(
map
[
string
]
struct
{},
len
(
tokens
))
for
_
,
t
:=
range
tokens
{
for
_
,
t
:=
range
tokens
{
...
...
backend/internal/service/openai_gateway_messages.go
View file @
ebe6f418
...
@@ -12,6 +12,7 @@ import (
...
@@ -12,6 +12,7 @@ import (
"time"
"time"
"github.com/Wei-Shaw/sub2api/internal/pkg/apicompat"
"github.com/Wei-Shaw/sub2api/internal/pkg/apicompat"
"github.com/Wei-Shaw/sub2api/internal/pkg/claude"
"github.com/Wei-Shaw/sub2api/internal/pkg/logger"
"github.com/Wei-Shaw/sub2api/internal/pkg/logger"
"github.com/Wei-Shaw/sub2api/internal/util/responseheaders"
"github.com/Wei-Shaw/sub2api/internal/util/responseheaders"
"github.com/gin-gonic/gin"
"github.com/gin-gonic/gin"
...
@@ -46,6 +47,11 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
...
@@ -46,6 +47,11 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
return
nil
,
fmt
.
Errorf
(
"convert anthropic to responses: %w"
,
err
)
return
nil
,
fmt
.
Errorf
(
"convert anthropic to responses: %w"
,
err
)
}
}
// 2b. Handle BetaFastMode → service_tier: "priority"
if
containsBetaToken
(
c
.
GetHeader
(
"anthropic-beta"
),
claude
.
BetaFastMode
)
{
responsesReq
.
ServiceTier
=
"priority"
}
// 3. Model mapping
// 3. Model mapping
mappedModel
:=
account
.
GetMappedModel
(
originalModel
)
mappedModel
:=
account
.
GetMappedModel
(
originalModel
)
// 分组级降级:账号未映射时使用分组默认映射模型
// 分组级降级:账号未映射时使用分组默认映射模型
...
@@ -94,6 +100,12 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
...
@@ -94,6 +100,12 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
return
nil
,
fmt
.
Errorf
(
"build upstream request: %w"
,
err
)
return
nil
,
fmt
.
Errorf
(
"build upstream request: %w"
,
err
)
}
}
// Override session_id with a deterministic UUID derived from the sticky
// session key (buildUpstreamRequest may have set it to the raw value).
if
promptCacheKey
!=
""
{
upstreamReq
.
Header
.
Set
(
"session_id"
,
generateSessionUUID
(
promptCacheKey
))
}
// 7. Send request
// 7. Send request
proxyURL
:=
""
proxyURL
:=
""
if
account
.
Proxy
!=
nil
{
if
account
.
Proxy
!=
nil
{
...
@@ -160,6 +172,18 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
...
@@ -160,6 +172,18 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
result
,
handleErr
=
s
.
handleAnthropicNonStreamingResponse
(
resp
,
c
,
originalModel
,
mappedModel
,
startTime
)
result
,
handleErr
=
s
.
handleAnthropicNonStreamingResponse
(
resp
,
c
,
originalModel
,
mappedModel
,
startTime
)
}
}
// Propagate ServiceTier and ReasoningEffort to result for billing
if
handleErr
==
nil
&&
result
!=
nil
{
if
responsesReq
.
ServiceTier
!=
""
{
st
:=
responsesReq
.
ServiceTier
result
.
ServiceTier
=
&
st
}
if
responsesReq
.
Reasoning
!=
nil
&&
responsesReq
.
Reasoning
.
Effort
!=
""
{
re
:=
responsesReq
.
Reasoning
.
Effort
result
.
ReasoningEffort
=
&
re
}
}
// Extract and save Codex usage snapshot from response headers (for OAuth accounts)
// Extract and save Codex usage snapshot from response headers (for OAuth accounts)
if
handleErr
==
nil
&&
account
.
Type
==
AccountTypeOAuth
{
if
handleErr
==
nil
&&
account
.
Type
==
AccountTypeOAuth
{
if
snapshot
:=
ParseCodexRateLimitHeaders
(
resp
.
Header
);
snapshot
!=
nil
{
if
snapshot
:=
ParseCodexRateLimitHeaders
(
resp
.
Header
);
snapshot
!=
nil
{
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment