Unverified Commit 55a7fa1e authored by Wesley Liddick's avatar Wesley Liddick Committed by GitHub
Browse files

Merge pull request #2005 from gaoren002/pr/openai-strip-passthrough-fields

fix(openai): strip unsupported passthrough fields
parents 5e54d492 9fe02bba
......@@ -53,6 +53,23 @@ const (
codexSparkImageUnsupportedText = codexSparkImageUnsupportedMarker + "\nThe current model is gpt-5.3-codex-spark, which does not support image generation, image editing, image input, the `image_generation` tool, or Codex `image_gen`/`$imagegen` workflows. If the user asks for image generation or image editing, clearly explain this model limitation and ask them to switch to a non-Spark Codex model such as gpt-5.3-codex or gpt-5.4. Do not claim that the local environment merely lacks image_gen tooling, and do not suggest CLI fallback as the primary fix while the model remains Spark.\n</sub2api-codex-spark-image-unsupported>"
)
// openAIChatGPTInternalUnsupportedFields lists top-level Responses API
// request parameters that the ChatGPT internal Codex endpoint rejects
// (e.g. "Unsupported parameter: prompt_cache_retention"). Both the OAuth
// transform and the passthrough normalizer delete these keys from request
// bodies before forwarding upstream.
var openAIChatGPTInternalUnsupportedFields = []string{
	"user",
	"metadata",
	"prompt_cache_retention",
	"safety_identifier",
	"stream_options",
}
// openAICodexOAuthUnsupportedFields is the full strip list for the codex
// OAuth path: sampling/limit parameters rejected by codex models via the
// Responses API, followed by every entry of
// openAIChatGPTInternalUnsupportedFields. Built at init time; the base list
// is never mutated because append here allocates a fresh backing array.
var openAICodexOAuthUnsupportedFields = func() []string {
	codexOnly := []string{
		"max_output_tokens",
		"max_completion_tokens",
		"temperature",
		"top_p",
		"frequency_penalty",
		"presence_penalty",
	}
	return append(codexOnly, openAIChatGPTInternalUnsupportedFields...)
}()
func applyCodexOAuthTransform(reqBody map[string]any, isCodexCLI bool, isCompact bool) codexTransformResult {
result := codexTransformResult{}
// 工具续链需求会影响存储策略与 input 过滤逻辑。
......@@ -93,23 +110,8 @@ func applyCodexOAuthTransform(reqBody map[string]any, isCodexCLI bool, isCompact
}
}
// Strip parameters unsupported by codex models via the Responses API.
for _, key := range []string{
"max_output_tokens",
"max_completion_tokens",
"temperature",
"top_p",
"frequency_penalty",
"presence_penalty",
// prompt_cache_retention is a newer Responses API parameter (cache TTL).
// The ChatGPT internal Codex endpoint rejects it with
// "Unsupported parameter: prompt_cache_retention". Defense-in-depth
// for any OAuth path that reaches this transform — the Cursor
// Responses-shape short-circuit in ForwardAsChatCompletions strips
// it earlier too, but we keep this line so other OAuth callers are
// equally protected.
"prompt_cache_retention",
} {
// Strip parameters unsupported by ChatGPT internal Codex endpoint.
for _, key := range openAICodexOAuthUnsupportedFields {
if _, ok := reqBody[key]; ok {
delete(reqBody, key)
result.Modified = true
......
......@@ -1088,6 +1088,27 @@ func TestApplyCodexOAuthTransform_StripsPromptCacheRetention(t *testing.T) {
"prompt_cache_retention must be stripped before forwarding to Codex upstream")
}
// TestApplyCodexOAuthTransform_StripsChatGPTInternalUnsupportedFields verifies
// that every field in openAIChatGPTInternalUnsupportedFields is deleted from
// the request body, and that supported fields are not collateral damage of the
// strip loop.
func TestApplyCodexOAuthTransform_StripsChatGPTInternalUnsupportedFields(t *testing.T) {
	reqBody := map[string]any{
		"model":                  "gpt-5.4",
		"user":                   "user_123",
		"metadata":               map[string]any{"trace_id": "abc"},
		"prompt_cache_retention": "24h",
		"safety_identifier":      "sid",
		"stream_options":         map[string]any{"include_usage": true},
		"input": []any{
			map[string]any{"role": "user", "content": "hi"},
		},
	}
	result := applyCodexOAuthTransform(reqBody, true, false)
	require.True(t, result.Modified)
	for _, field := range openAIChatGPTInternalUnsupportedFields {
		require.NotContains(t, reqBody, field, "%s must be stripped", field)
	}
	// Supported keys must survive the transform.
	require.Contains(t, reqBody, "model")
	require.Contains(t, reqBody, "input")
}
func TestApplyCodexOAuthTransform_ExtractsSystemMessages(t *testing.T) {
reqBody := map[string]any{
"model": "gpt-5.1",
......
......@@ -5553,7 +5553,8 @@ func extractOpenAIRequestMetaFromBody(body []byte) (model string, stream bool, p
}
// normalizeOpenAIPassthroughOAuthBody 将透传 OAuth 请求体收敛为旧链路关键行为:
// 1) store=false 2) 非 compact 保持 stream=true;compact 强制 stream=false
// 1) 删除 ChatGPT internal API 不支持的顶层 Responses 参数
// 2) store=false 3) 非 compact 保持 stream=true;compact 强制 stream=false
func normalizeOpenAIPassthroughOAuthBody(body []byte, compact bool) ([]byte, bool, error) {
if len(body) == 0 {
return body, false, nil
......@@ -5562,6 +5563,18 @@ func normalizeOpenAIPassthroughOAuthBody(body []byte, compact bool) ([]byte, boo
normalized := body
changed := false
for _, field := range openAIChatGPTInternalUnsupportedFields {
if value := gjson.GetBytes(normalized, field); !value.Exists() {
continue
}
next, err := sjson.DeleteBytes(normalized, field)
if err != nil {
return body, false, fmt.Errorf("normalize passthrough body delete %s: %w", field, err)
}
normalized = next
changed = true
}
if compact {
if store := gjson.GetBytes(normalized, "store"); store.Exists() {
next, err := sjson.DeleteBytes(normalized, "store")
......
package service
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/tidwall/gjson"
)
// TestNormalizeOpenAIPassthroughOAuthBody_RemovesUnsupportedUser verifies the
// non-compact passthrough path: every ChatGPT-internal unsupported field is
// stripped, streaming stays enabled, and store is pinned to an explicit false.
func TestNormalizeOpenAIPassthroughOAuthBody_RemovesUnsupportedUser(t *testing.T) {
	body := []byte(`{"model":"gpt-5.4","input":"hello","user":"user_123","metadata":{"user_id":"user_123"},"prompt_cache_retention":"24h","safety_identifier":"sid","stream_options":{"include_usage":true}}`)
	normalized, changed, err := normalizeOpenAIPassthroughOAuthBody(body, false)
	require.NoError(t, err)
	require.True(t, changed)
	for _, field := range openAIChatGPTInternalUnsupportedFields {
		require.False(t, gjson.GetBytes(normalized, field).Exists(), "%s should be stripped", field)
	}
	require.True(t, gjson.GetBytes(normalized, "stream").Bool())
	// gjson's Bool() returns false for a missing key, so assert the key exists
	// before asserting its value — the contract is store=false, not store-absent.
	store := gjson.GetBytes(normalized, "store")
	require.True(t, store.Exists(), "store should be explicitly set")
	require.False(t, store.Bool())
}
// TestNormalizeOpenAIPassthroughOAuthBody_CompactRemovesUnsupportedUser
// verifies the compact passthrough path. The body seeds every entry of
// openAIChatGPTInternalUnsupportedFields (not just user/metadata) so a
// regression in any single entry of the strip list is caught here too,
// mirroring the non-compact test.
func TestNormalizeOpenAIPassthroughOAuthBody_CompactRemovesUnsupportedUser(t *testing.T) {
	body := []byte(`{"model":"gpt-5.4","input":"hello","user":"user_123","metadata":{"user_id":"user_123"},"prompt_cache_retention":"24h","safety_identifier":"sid","stream_options":{"include_usage":true},"stream":true,"store":true}`)
	normalized, changed, err := normalizeOpenAIPassthroughOAuthBody(body, true)
	require.NoError(t, err)
	require.True(t, changed)
	for _, field := range openAIChatGPTInternalUnsupportedFields {
		require.False(t, gjson.GetBytes(normalized, field).Exists(), "%s should be stripped", field)
	}
	// Compact mode drops stream/store entirely rather than rewriting them.
	require.False(t, gjson.GetBytes(normalized, "stream").Exists())
	require.False(t, gjson.GetBytes(normalized, "store").Exists())
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment