Commit eea6f388 authored by wx-11's avatar wx-11
Browse files

使用codex的生图接口代替web2api

parent 0b85a8da
......@@ -930,10 +930,8 @@ func (a *Account) SupportsOpenAIImageCapability(capability OpenAIImagesCapabilit
return false
}
switch capability {
case OpenAIImagesCapabilityBasic:
case OpenAIImagesCapabilityBasic, OpenAIImagesCapabilityNative:
return a.Type == AccountTypeOAuth || a.Type == AccountTypeAPIKey
case OpenAIImagesCapabilityNative:
return a.Type == AccountTypeAPIKey
default:
return true
}
......
......@@ -5,7 +5,6 @@ import (
"bytes"
"context"
"crypto/rand"
"encoding/base64"
"encoding/hex"
"encoding/json"
"errors"
......@@ -1138,7 +1137,7 @@ func (s *AccountTestService) testOpenAIImageAPIKey(c *gin.Context, ctx context.C
return nil
}
// testOpenAIImageOAuth tests OpenAI image generation using an OAuth account via ChatGPT backend API.
// testOpenAIImageOAuth tests OpenAI image generation using an OAuth account via Codex /responses API.
func (s *AccountTestService) testOpenAIImageOAuth(c *gin.Context, ctx context.Context, account *Account, modelID, prompt string) error {
authToken := account.GetOpenAIAccessToken()
if authToken == "" {
......@@ -1153,69 +1152,46 @@ func (s *AccountTestService) testOpenAIImageOAuth(c *gin.Context, ctx context.Co
c.Writer.Flush()
s.sendEvent(c, TestEvent{Type: "test_start", Model: modelID})
s.sendEvent(c, TestEvent{Type: "content", Text: "Initializing ChatGPT backend...\n"})
s.sendEvent(c, TestEvent{Type: "content", Text: "Calling Codex /responses image tool...\n"})
// Build headers (replicating buildOpenAIBackendAPIHeaders logic)
headers := buildOpenAIBackendAPIHeadersForTest(ctx, account, authToken, s.accountRepo)
proxyURL := ""
if account.ProxyID != nil && account.Proxy != nil {
proxyURL = account.Proxy.URL()
parsed := &OpenAIImagesRequest{
Endpoint: openAIImagesGenerationsEndpoint,
Model: strings.TrimSpace(modelID),
Prompt: prompt,
}
applyOpenAIImagesDefaults(parsed)
client, err := newOpenAIBackendAPIClient(proxyURL)
responsesBody, err := buildOpenAIImagesResponsesRequest(parsed, parsed.Model)
if err != nil {
return s.sendErrorAndEnd(c, fmt.Sprintf("Failed to create client: %s", err.Error()))
return s.sendErrorAndEnd(c, fmt.Sprintf("Failed to build image request: %s", err.Error()))
}
// Bootstrap
if bootstrapErr := bootstrapOpenAIBackendAPI(ctx, client, headers); bootstrapErr != nil {
log.Printf("OpenAI image test bootstrap warning: %v", bootstrapErr)
}
// Fetch chat requirements
s.sendEvent(c, TestEvent{Type: "content", Text: "Fetching chat requirements...\n"})
chatReqs, err := fetchOpenAIChatRequirements(ctx, client, headers)
req, err := http.NewRequestWithContext(ctx, http.MethodPost, chatgptCodexAPIURL, bytes.NewReader(responsesBody))
if err != nil {
return s.sendErrorAndEnd(c, fmt.Sprintf("Chat requirements failed: %s", err.Error()))
return s.sendErrorAndEnd(c, "Failed to create request")
}
if chatReqs.Arkose.Required {
return s.sendErrorAndEnd(c, "Unsupported challenge: arkose required")
req.Host = "chatgpt.com"
req.Header.Set("Authorization", "Bearer "+authToken)
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Accept", "text/event-stream")
req.Header.Set("OpenAI-Beta", "responses=experimental")
req.Header.Set("originator", "opencode")
if customUA := strings.TrimSpace(account.GetOpenAIUserAgent()); customUA != "" {
req.Header.Set("User-Agent", customUA)
} else {
req.Header.Set("User-Agent", codexCLIUserAgent)
}
// Initialize and prepare conversation
s.sendEvent(c, TestEvent{Type: "content", Text: "Preparing image conversation...\n"})
parentMessageID := uuid.NewString()
proofToken := generateOpenAIProofToken(chatReqs.ProofOfWork.Required, chatReqs.ProofOfWork.Seed, chatReqs.ProofOfWork.Difficulty, headers.Get("User-Agent"))
_ = initializeOpenAIImageConversation(ctx, client, headers)
conduitToken, err := prepareOpenAIImageConversation(ctx, client, headers, prompt, parentMessageID, chatReqs.Token, proofToken)
if err != nil {
return s.sendErrorAndEnd(c, fmt.Sprintf("Conversation prepare failed: %s", err.Error()))
if chatgptAccountID := strings.TrimSpace(account.GetChatGPTAccountID()); chatgptAccountID != "" {
req.Header.Set("chatgpt-account-id", chatgptAccountID)
}
// Build simplified conversation request (no file uploads)
convReq := buildOpenAIImageTestConversationRequest(prompt, parentMessageID)
convHeaders := cloneHTTPHeader(headers)
convHeaders.Set("Accept", "text/event-stream")
convHeaders.Set("Content-Type", "application/json")
convHeaders.Set("openai-sentinel-chat-requirements-token", chatReqs.Token)
if conduitToken != "" {
convHeaders.Set("x-conduit-token", conduitToken)
}
if proofToken != "" {
convHeaders.Set("openai-sentinel-proof-token", proofToken)
proxyURL := ""
if account.ProxyID != nil && account.Proxy != nil {
proxyURL = account.Proxy.URL()
}
s.sendEvent(c, TestEvent{Type: "content", Text: "Generating image...\n"})
resp, err := client.R().
SetContext(ctx).
DisableAutoReadResponse().
SetHeaders(headerToMap(convHeaders)).
SetBodyJsonMarshal(convReq).
Post(openAIChatGPTConversationURL)
resp, err := s.httpUpstream.Do(req, proxyURL, account.ID, account.Concurrency)
if err != nil {
return s.sendErrorAndEnd(c, fmt.Sprintf("Conversation request failed: %s", err.Error()))
return s.sendErrorAndEnd(c, fmt.Sprintf("Responses API request failed: %s", err.Error()))
}
defer func() {
if resp != nil && resp.Body != nil {
......@@ -1223,49 +1199,35 @@ func (s *AccountTestService) testOpenAIImageOAuth(c *gin.Context, ctx context.Co
}
}()
if resp.StatusCode >= 400 {
return s.sendErrorAndEnd(c, fmt.Sprintf("Conversation API returned %d", resp.StatusCode))
body, _ := io.ReadAll(io.LimitReader(resp.Body, 2<<20))
message := strings.TrimSpace(extractUpstreamErrorMessage(body))
if message == "" {
message = fmt.Sprintf("Responses API returned %d", resp.StatusCode)
}
return s.sendErrorAndEnd(c, message)
}
startTime := time.Now()
conversationID, pointerInfos, _, _, err := readOpenAIImageConversationStream(resp, startTime)
body, err := io.ReadAll(resp.Body)
if err != nil {
return s.sendErrorAndEnd(c, fmt.Sprintf("Stream read failed: %s", err.Error()))
return s.sendErrorAndEnd(c, fmt.Sprintf("Failed to read image response: %s", err.Error()))
}
pointerInfos = mergeOpenAIImagePointerInfos(pointerInfos, nil)
if conversationID != "" && !hasOpenAIFileServicePointerInfos(pointerInfos) {
s.sendEvent(c, TestEvent{Type: "content", Text: "Waiting for image generation to complete...\n"})
polledPointers, pollErr := pollOpenAIImageConversation(ctx, client, headers, conversationID)
if pollErr != nil {
return s.sendErrorAndEnd(c, fmt.Sprintf("Poll failed: %s", pollErr.Error()))
}
pointerInfos = mergeOpenAIImagePointerInfos(pointerInfos, polledPointers)
results, _, _, _, _, err := collectOpenAIImagesFromResponsesBody(body)
if err != nil {
return s.sendErrorAndEnd(c, fmt.Sprintf("Failed to parse image response: %s", err.Error()))
}
pointerInfos = preferOpenAIFileServicePointerInfos(pointerInfos)
if len(pointerInfos) == 0 {
return s.sendErrorAndEnd(c, "No images returned from conversation")
if len(results) == 0 {
return s.sendErrorAndEnd(c, "No images returned from responses API")
}
s.sendEvent(c, TestEvent{Type: "content", Text: "Downloading generated image...\n"})
// Download and encode each image
for _, pointer := range pointerInfos {
downloadURL, err := fetchOpenAIImageDownloadURL(ctx, client, headers, conversationID, pointer.Pointer)
if err != nil {
return s.sendErrorAndEnd(c, fmt.Sprintf("Download URL fetch failed: %s", err.Error()))
}
data, err := downloadOpenAIImageBytes(ctx, client, headers, downloadURL)
if err != nil {
return s.sendErrorAndEnd(c, fmt.Sprintf("Image download failed: %s", err.Error()))
}
b64 := base64.StdEncoding.EncodeToString(data)
mimeType := http.DetectContentType(data)
if pointer.Prompt != "" {
s.sendEvent(c, TestEvent{Type: "content", Text: pointer.Prompt})
for _, item := range results {
if item.RevisedPrompt != "" {
s.sendEvent(c, TestEvent{Type: "content", Text: item.RevisedPrompt})
}
mimeType := openAIImageOutputMIMEType(item.OutputFormat)
s.sendEvent(c, TestEvent{
Type: "image",
ImageURL: "data:" + mimeType + ";base64," + b64,
ImageURL: "data:" + mimeType + ";base64," + item.Result,
MimeType: mimeType,
})
}
......@@ -1274,107 +1236,6 @@ func (s *AccountTestService) testOpenAIImageOAuth(c *gin.Context, ctx context.Co
return nil
}
// buildOpenAIBackendAPIHeadersForTest assembles the ChatGPT backend API headers used
// by the account test flow. It mirrors the logic of
// OpenAIGatewayService.buildOpenAIBackendAPIHeaders so tests do not need the full
// gateway service wired up.
func buildOpenAIBackendAPIHeadersForTest(ctx context.Context, account *Account, token string, repo AccountRepository) http.Header {
	// Make sure the account carries stable device/session identifiers; mint any
	// missing ones and persist them best-effort.
	devID := account.GetOpenAIDeviceID()
	sessID := account.GetOpenAISessionID()
	if devID == "" || sessID == "" {
		pending := map[string]any{}
		if devID == "" {
			devID = uuid.NewString()
			pending["openai_device_id"] = devID
		}
		if sessID == "" {
			sessID = uuid.NewString()
			pending["openai_session_id"] = sessID
		}
		if account.Extra == nil {
			account.Extra = map[string]any{}
		}
		for k, v := range pending {
			account.Extra[k] = v
		}
		if repo != nil {
			persistCtx, cancel := context.WithTimeout(ctx, 5*time.Second)
			defer cancel()
			// Best-effort persistence; header construction proceeds regardless of outcome.
			_ = repo.UpdateExtra(persistCtx, account.ID, pending)
		}
	}
	h := make(http.Header)
	h.Set("Authorization", "Bearer "+token)
	h.Set("Accept", "application/json")
	h.Set("Origin", "https://chatgpt.com")
	h.Set("Referer", "https://chatgpt.com/")
	h.Set("Sec-Fetch-Dest", "empty")
	h.Set("Sec-Fetch-Mode", "cors")
	h.Set("Sec-Fetch-Site", "same-origin")
	// Prefer the per-account User-Agent override when one is configured.
	if customUA := strings.TrimSpace(account.GetOpenAIUserAgent()); customUA != "" {
		h.Set("User-Agent", customUA)
	} else {
		h.Set("User-Agent", openAIImageBackendUserAgent)
	}
	if acctID := strings.TrimSpace(account.GetChatGPTAccountID()); acctID != "" {
		h.Set("chatgpt-account-id", acctID)
	}
	if devID != "" {
		h.Set("oai-device-id", devID)
		h.Set("Cookie", "oai-did="+devID)
	}
	if sessID != "" {
		h.Set("oai-session-id", sessID)
	}
	return h
}
// buildOpenAIImageTestConversationRequest creates a simplified image generation
// conversation request: a single user message carrying the picture_v2 system hint,
// with no file uploads attached.
func buildOpenAIImageTestConversationRequest(prompt, parentMessageID string) map[string]any {
	// Never send an empty prompt; fall back to a generic instruction.
	text := strings.TrimSpace(prompt)
	if text == "" {
		text = "Generate an image."
	}
	userMessage := map[string]any{
		"id":     uuid.NewString(),
		"author": map[string]any{"role": "user"},
		"content": map[string]any{
			"content_type": "text",
			"parts":        []any{text},
		},
		"metadata": map[string]any{
			"developer_mode_connector_ids": []any{},
			"selected_github_repos":        []any{},
			"selected_all_github_repos":    false,
			"system_hints":                 []string{"picture_v2"},
			"serialization_metadata": map[string]any{
				"custom_symbol_offsets": []any{},
			},
		},
		// create_time is expressed as fractional seconds since the epoch.
		"create_time": float64(time.Now().UnixMilli()) / 1000,
	}
	return map[string]any{
		"action":               "next",
		"client_prepare_state": "sent",
		"parent_message_id":    parentMessageID,
		"messages":             []any{userMessage},
		"model":                "auto",
		"timezone_offset_min":  openAITimezoneOffsetMinutes(),
		"timezone":             openAITimezoneName(),
		"conversation_mode":    map[string]any{"kind": "primary_assistant"},
		"system_hints":         []string{"picture_v2"},
		"supports_buffering":   true,
		"supported_encodings":  []string{"v1"},
		"client_contextual_info": map[string]any{"app_name": "chatgpt.com"},
		"force_nulligen":           false,
		"force_paragen":            false,
		"force_paragen_model_slug": "",
		"force_rate_limit":         false,
		"websocket_request_id":     uuid.NewString(),
	}
}
func (s *AccountTestService) sendEvent(c *gin.Context, event TestEvent) {
eventJSON, _ := json.Marshal(event)
if _, err := fmt.Fprintf(c.Writer, "data: %s\n\n", eventJSON); err != nil {
......
package service
import (
"context"
"io"
"net/http"
"net/http/httptest"
"strings"
"testing"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/require"
)
// TestAccountTestService_OpenAIImageOAuthHandlesOutputItemDoneFallback verifies that
// the Codex /responses SSE stream is handled when the image payload arrives via a
// response.output_item.done event while response.completed carries an empty output.
func TestAccountTestService_OpenAIImageOAuthHandlesOutputItemDoneFallback(t *testing.T) {
	gin.SetMode(gin.TestMode)

	// Stub upstream replying with the SSE stream described above.
	stream := "data: {\"type\":\"response.output_item.done\",\"item\":{\"id\":\"ig_123\",\"type\":\"image_generation_call\",\"result\":\"aGVsbG8=\",\"revised_prompt\":\"draw a cat\",\"output_format\":\"png\"}}\n\n" +
		"data: {\"type\":\"response.completed\",\"response\":{\"created_at\":1710000006,\"tool_usage\":{\"image_gen\":{\"images\":1}},\"output\":[]}}\n\n" +
		"data: [DONE]\n\n"
	upstream := &httpUpstreamRecorder{
		resp: &http.Response{
			StatusCode: http.StatusOK,
			Header:     http.Header{"Content-Type": []string{"text/event-stream"}},
			Body:       io.NopCloser(strings.NewReader(stream)),
		},
	}

	recorder := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(recorder)
	c.Request = httptest.NewRequest(http.MethodPost, "/api/v1/admin/accounts/1/test", nil)

	account := &Account{
		ID:       53,
		Name:     "openai-oauth",
		Platform: PlatformOpenAI,
		Type:     AccountTypeOAuth,
		Credentials: map[string]any{
			"access_token": "token-123",
		},
	}
	svc := &AccountTestService{httpUpstream: upstream}

	err := svc.testOpenAIImageOAuth(c, context.Background(), account, "gpt-image-2", "draw a cat")
	require.NoError(t, err)

	body := recorder.Body.String()
	require.Contains(t, body, "Calling Codex /responses image tool")
	require.Contains(t, body, "data:image/png;base64,aGVsbG8=")
	require.Contains(t, body, "\"success\":true")
}
......@@ -50,6 +50,7 @@ const (
openAIImageLifecycleTimeout = 2 * time.Minute
openAIImageMaxDownloadBytes = 20 << 20 // 20MB per image download
openAIImageMaxUploadPartSize = 20 << 20 // 20MB per multipart upload part
openAIImagesResponsesMainModel = "gpt-5.4-mini"
)
type OpenAIImagesCapability string
......@@ -81,10 +82,21 @@ type OpenAIImagesRequest struct {
ExplicitSize bool
SizeTier string
ResponseFormat string
Quality string
Background string
OutputFormat string
Moderation string
InputFidelity string
Style string
OutputCompression *int
PartialImages *int
HasMask bool
HasNativeOptions bool
RequiredCapability OpenAIImagesCapability
InputImageURLs []string
MaskImageURL string
Uploads []OpenAIImagesUpload
MaskUpload *OpenAIImagesUpload
Body []byte
bodyHash string
}
......@@ -188,7 +200,54 @@ func parseOpenAIImagesJSONRequest(body []byte, req *OpenAIImagesRequest) error {
req.ExplicitSize = req.Size != ""
}
req.ResponseFormat = strings.ToLower(strings.TrimSpace(gjson.GetBytes(body, "response_format").String()))
req.Quality = strings.TrimSpace(gjson.GetBytes(body, "quality").String())
req.Background = strings.TrimSpace(gjson.GetBytes(body, "background").String())
req.OutputFormat = strings.TrimSpace(gjson.GetBytes(body, "output_format").String())
req.Moderation = strings.TrimSpace(gjson.GetBytes(body, "moderation").String())
req.InputFidelity = strings.TrimSpace(gjson.GetBytes(body, "input_fidelity").String())
req.Style = strings.TrimSpace(gjson.GetBytes(body, "style").String())
req.HasMask = gjson.GetBytes(body, "mask").Exists()
if outputCompression := gjson.GetBytes(body, "output_compression"); outputCompression.Exists() {
if outputCompression.Type != gjson.Number {
return fmt.Errorf("invalid output_compression field type")
}
v := int(outputCompression.Int())
req.OutputCompression = &v
}
if partialImages := gjson.GetBytes(body, "partial_images"); partialImages.Exists() {
if partialImages.Type != gjson.Number {
return fmt.Errorf("invalid partial_images field type")
}
v := int(partialImages.Int())
req.PartialImages = &v
}
if req.IsEdits() {
images := gjson.GetBytes(body, "images")
if images.Exists() {
if !images.IsArray() {
return fmt.Errorf("invalid images field type")
}
for _, item := range images.Array() {
if imageURL := strings.TrimSpace(item.Get("image_url").String()); imageURL != "" {
req.InputImageURLs = append(req.InputImageURLs, imageURL)
continue
}
if item.Get("file_id").Exists() {
return fmt.Errorf("images[].file_id is not supported (use images[].image_url instead)")
}
}
}
if maskImageURL := strings.TrimSpace(gjson.GetBytes(body, "mask.image_url").String()); maskImageURL != "" {
req.MaskImageURL = maskImageURL
req.HasMask = true
}
if gjson.GetBytes(body, "mask.file_id").Exists() {
return fmt.Errorf("mask.file_id is not supported (use mask.image_url instead)")
}
if len(req.InputImageURLs) == 0 {
return fmt.Errorf("images[].image_url is required")
}
}
req.HasNativeOptions = hasOpenAINativeImageOptions(func(path string) bool {
return gjson.GetBytes(body, path).Exists()
})
......@@ -231,6 +290,16 @@ func parseOpenAIImagesMultipartRequest(body []byte, contentType string, req *Ope
partContentType := strings.TrimSpace(part.Header.Get("Content-Type"))
if name == "mask" && len(data) > 0 {
req.HasMask = true
width, height := parseOpenAIImageDimensions(part.Header)
maskUpload := OpenAIImagesUpload{
FieldName: name,
FileName: fileName,
ContentType: partContentType,
Data: data,
Width: width,
Height: height,
}
req.MaskUpload = &maskUpload
}
if name == "image" || strings.HasPrefix(name, "image[") {
width, height := parseOpenAIImageDimensions(part.Header)
......@@ -270,6 +339,38 @@ func parseOpenAIImagesMultipartRequest(body []byte, contentType string, req *Ope
return fmt.Errorf("n must be a positive integer")
}
req.N = n
case "quality":
req.Quality = value
req.HasNativeOptions = true
case "background":
req.Background = value
req.HasNativeOptions = true
case "output_format":
req.OutputFormat = value
req.HasNativeOptions = true
case "moderation":
req.Moderation = value
req.HasNativeOptions = true
case "input_fidelity":
req.InputFidelity = value
req.HasNativeOptions = true
case "style":
req.Style = value
req.HasNativeOptions = true
case "output_compression":
n, err := strconv.Atoi(value)
if err != nil {
return fmt.Errorf("invalid output_compression field value")
}
req.OutputCompression = &n
req.HasNativeOptions = true
case "partial_images":
n, err := strconv.Atoi(value)
if err != nil {
return fmt.Errorf("invalid partial_images field value")
}
req.PartialImages = &n
req.HasNativeOptions = true
default:
if isOpenAINativeImageOption(name) && value != "" {
req.HasNativeOptions = true
......@@ -359,6 +460,8 @@ func hasOpenAINativeImageOptions(exists func(path string) bool) bool {
"output_format",
"output_compression",
"moderation",
"input_fidelity",
"partial_images",
} {
if exists(path) {
return true
......@@ -369,7 +472,7 @@ func hasOpenAINativeImageOptions(exists func(path string) bool) bool {
func isOpenAINativeImageOption(name string) bool {
switch strings.TrimSpace(strings.ToLower(name)) {
case "background", "quality", "style", "output_format", "output_compression", "moderation":
case "background", "quality", "style", "output_format", "output_compression", "moderation", "input_fidelity", "partial_images":
return true
default:
return false
......@@ -782,156 +885,6 @@ func extractOpenAIImageCountFromJSONBytes(body []byte) int {
return 0
}
// forwardOpenAIImagesOAuth serves an OpenAI images request for an OAuth account by
// driving the ChatGPT web backend conversation API: it resolves the access token,
// satisfies the sentinel requirements (chat-requirements token, optional
// proof-of-work token), streams the conversation, then collects image pointers
// (polling the conversation if the stream yielded no file-service assets) and
// rewrites them into an images-API-style JSON response written to the gin context.
// NOTE(review): per this commit, the Codex /responses path replaces this flow —
// confirm remaining callers before relying on it.
func (s *OpenAIGatewayService) forwardOpenAIImagesOAuth(
ctx context.Context,
c *gin.Context,
account *Account,
parsed *OpenAIImagesRequest,
channelMappedModel string,
) (*OpenAIForwardResult, error) {
startTime := time.Now()
// Channel-level model mapping overrides the model from the parsed request.
requestModel := strings.TrimSpace(parsed.Model)
if mapped := strings.TrimSpace(channelMappedModel); mapped != "" {
requestModel = mapped
}
if err := validateOpenAIImagesModel(requestModel); err != nil {
return nil, err
}
logger.LegacyPrintf(
"service.openai_gateway",
"[OpenAI] Images request routing request_model=%s endpoint=%s account_type=%s uploads=%d",
requestModel,
parsed.Endpoint,
account.Type,
len(parsed.Uploads),
)
token, _, err := s.GetAccessToken(ctx, account)
if err != nil {
return nil, err
}
// Client is built per-account so the account's proxy (if any) is honored.
client, err := newOpenAIBackendAPIClient(resolveOpenAIProxyURL(account))
if err != nil {
return nil, err
}
headers, err := s.buildOpenAIBackendAPIHeaders(account, token)
if err != nil {
return nil, err
}
// Bootstrap is best-effort: a failure is logged but does not abort the request.
if bootstrapErr := bootstrapOpenAIBackendAPI(ctx, client, headers); bootstrapErr != nil {
logger.LegacyPrintf("service.openai_gateway", "OpenAI image bootstrap failed: %v", bootstrapErr)
}
// Sentinel phase: fetch chat requirements; an arkose challenge cannot be solved
// here, so it is surfaced as a synthetic 403.
chatReqs, err := fetchOpenAIChatRequirements(ctx, client, headers)
if err != nil {
return nil, s.wrapOpenAIImageBackendError(ctx, c, account, err)
}
if chatReqs.Arkose.Required {
return nil, s.wrapOpenAIImageBackendError(
ctx,
c,
account,
newOpenAIImageSyntheticStatusError(
http.StatusForbidden,
"chat-requirements requires unsupported challenge (arkose)",
openAIChatGPTChatRequirementsURL,
),
)
}
parentMessageID := uuid.NewString()
proofToken := generateOpenAIProofToken(chatReqs.ProofOfWork.Required, chatReqs.ProofOfWork.Seed, chatReqs.ProofOfWork.Difficulty, headers.Get("User-Agent"))
// Conversation init is best-effort; the prepare step yields the conduit token.
_ = initializeOpenAIImageConversation(ctx, client, headers)
conduitToken, err := prepareOpenAIImageConversation(ctx, client, headers, parsed.Prompt, parentMessageID, chatReqs.Token, proofToken)
if err != nil {
return nil, s.wrapOpenAIImageBackendError(ctx, c, account, err)
}
// Upload any input images (edits flow) before building the conversation request.
uploads, err := uploadOpenAIImageFiles(ctx, client, headers, parsed.Uploads)
if err != nil {
return nil, s.wrapOpenAIImageBackendError(ctx, c, account, err)
}
convReq := buildOpenAIImageConversationRequest(parsed, parentMessageID, uploads)
// Record the upstream request body for ops visibility (marshal errors ignored).
if parsedContent, err := json.Marshal(convReq); err == nil {
setOpsUpstreamRequestBody(c, parsedContent)
}
// Sentinel tokens ride on the conversation request headers.
convHeaders := cloneHTTPHeader(headers)
convHeaders.Set("Accept", "text/event-stream")
convHeaders.Set("Content-Type", "application/json")
convHeaders.Set("openai-sentinel-chat-requirements-token", chatReqs.Token)
if conduitToken != "" {
convHeaders.Set("x-conduit-token", conduitToken)
}
if proofToken != "" {
convHeaders.Set("openai-sentinel-proof-token", proofToken)
}
resp, err := client.R().
SetContext(ctx).
DisableAutoReadResponse().
SetHeaders(headerToMap(convHeaders)).
SetBodyJsonMarshal(convReq).
Post(openAIChatGPTConversationURL)
if err != nil {
return nil, fmt.Errorf("openai image conversation request failed: %w", err)
}
defer func() {
if resp != nil && resp.Body != nil {
_ = resp.Body.Close()
}
}()
if resp.StatusCode >= 400 {
return nil, s.wrapOpenAIImageBackendError(ctx, c, account, handleOpenAIImageBackendError(resp))
}
// Stream phase: read the SSE conversation stream for image pointers and usage.
conversationID, pointerInfos, usage, firstTokenMs, err := readOpenAIImageConversationStream(resp, startTime)
if err != nil {
return nil, err
}
pointerInfos = mergeOpenAIImagePointerInfos(pointerInfos, nil)
logger.LegacyPrintf(
"service.openai_gateway",
"[OpenAI] Image extraction stream conversation_id=%s total_assets=%d file_service_assets=%d direct_assets=%d",
conversationID,
len(pointerInfos),
countOpenAIFileServicePointerInfos(pointerInfos),
countOpenAIDirectImageAssets(pointerInfos),
)
// Polling/download use a detached context so they survive request cancellation
// up to openAIImageLifecycleTimeout.
lifecycleCtx, releaseLifecycleCtx := detachOpenAIImageLifecycleContext(ctx, openAIImageLifecycleTimeout)
defer releaseLifecycleCtx()
// If the stream produced no file-service assets, poll the conversation until
// generation completes and merge in any pointers found.
if conversationID != "" && !hasOpenAIFileServicePointerInfos(pointerInfos) {
polledPointers, pollErr := pollOpenAIImageConversation(lifecycleCtx, client, headers, conversationID)
if pollErr != nil {
return nil, s.wrapOpenAIImageBackendError(ctx, c, account, pollErr)
}
pointerInfos = mergeOpenAIImagePointerInfos(pointerInfos, polledPointers)
}
pointerInfos = preferOpenAIFileServicePointerInfos(pointerInfos)
if len(pointerInfos) == 0 {
logger.LegacyPrintf("service.openai_gateway", "[OpenAI] Image extraction yielded no assets conversation_id=%s", conversationID)
return nil, fmt.Errorf("openai image conversation returned no downloadable images")
}
// Download the assets and shape them into an images-API JSON body.
responseBody, imageCount, err := buildOpenAIImageResponse(lifecycleCtx, client, headers, conversationID, pointerInfos)
if err != nil {
return nil, s.wrapOpenAIImageBackendError(ctx, c, account, err)
}
c.Data(http.StatusOK, "application/json; charset=utf-8", responseBody)
return &OpenAIForwardResult{
RequestID: resp.Header.Get("x-request-id"),
Usage: usage,
Model: requestModel,
UpstreamModel: requestModel,
Stream: false,
Duration: time.Since(startTime),
FirstTokenMs: firstTokenMs,
ImageCount: imageCount,
ImageSize: parsed.SizeTier,
}, nil
}
func resolveOpenAIProxyURL(account *Account) string {
if account != nil && account.ProxyID != nil && account.Proxy != nil {
return account.Proxy.URL()
......
This diff is collapsed.
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment