Unverified Commit 46f06b24 authored by Wesley Liddick's avatar Wesley Liddick Committed by GitHub
Browse files

Merge pull request #2050 from zvensmoluya/fix/openai-compact-payload-fields

fix(openai): preserve current Codex compact payload fields
parents 7ce5b832 3d4ca5e8
......@@ -4929,7 +4929,18 @@ func normalizeOpenAICompactRequestBody(body []byte) ([]byte, bool, error) {
}
normalized := []byte(`{}`)
for _, field := range []string{"model", "input", "instructions", "previous_response_id"} {
// Keep the current Codex /compact schema while still dropping request-scoped
// fields such as prompt_cache_key, store, and stream.
for _, field := range []string{
"model",
"input",
"instructions",
"tools",
"parallel_tool_calls",
"reasoning",
"text",
"previous_response_id",
} {
value := gjson.GetBytes(body, field)
if !value.Exists() {
continue
......
......@@ -1767,6 +1767,24 @@ func TestOpenAIResponsesRequestPathSuffix(t *testing.T) {
}
}
func TestNormalizeOpenAICompactRequestBodyPreservesCurrentCodexPayloadFields(t *testing.T) {
	// A representative Codex /compact request: schema fields that must survive
	// normalization alongside request-scoped fields that must be stripped.
	body := []byte(`{"model":"gpt-5.5","input":[{"type":"message","role":"user","content":"compact me"}],"instructions":"compact-test","tools":[{"type":"function","name":"shell"}],"parallel_tool_calls":true,"reasoning":{"effort":"high"},"text":{"verbosity":"low"},"previous_response_id":"resp_123","store":true,"stream":true,"prompt_cache_key":"cache_123"}`)

	normalized, changed, err := normalizeOpenAICompactRequestBody(body)
	require.NoError(t, err)
	require.True(t, changed)

	// Current Codex payload fields are carried through unchanged.
	for path, want := range map[string]string{
		"model":                "gpt-5.5",
		"reasoning.effort":     "high",
		"text.verbosity":       "low",
		"previous_response_id": "resp_123",
	} {
		require.Equal(t, want, gjson.GetBytes(normalized, path).String(), path)
	}
	require.True(t, gjson.GetBytes(normalized, "tools").Exists())
	require.True(t, gjson.GetBytes(normalized, "parallel_tool_calls").Bool())

	// Request-scoped fields must not leak into the normalized body.
	for _, dropped := range []string{"store", "stream", "prompt_cache_key"} {
		require.False(t, gjson.GetBytes(normalized, dropped).Exists(), dropped)
	}
}
func TestOpenAIBuildUpstreamRequestOpenAIPassthroughPreservesCompactPath(t *testing.T) {
gin.SetMode(gin.TestMode)
rec := httptest.NewRecorder()
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment