Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Sign in / Register
Toggle navigation
Menu
Open sidebar
陈曦
sub2api
Commits
ef967d8f
Commit
ef967d8f
authored
Apr 23, 2026
by
shaw
Browse files
fix: 修复 golangci-lint 报告的 36 个问题
parent
27ffc7f3
Changes
3
Hide whitespace changes
Inline
Side-by-side
backend/internal/service/openai_gateway_service.go
View file @
ef967d8f
...
...
@@ -4643,32 +4643,6 @@ func (s *OpenAIGatewayService) calculateOpenAIRecordUsageCost(
return
s
.
billingService
.
CalculateCostWithServiceTier
(
billingModel
,
tokens
,
multiplier
,
serviceTier
)
}
// calculateOpenAIImageTokenCost prices an OpenAI image request's token usage.
//
// When a pricing resolver is configured AND the API key belongs to a group,
// billing goes through the unified path (CalculateCostUnified) so that
// group-specific pricing and the image size tier are taken into account.
// Otherwise it falls back to the plain per-model service-tier calculation,
// which ignores sizeTier.
//
// billingModel is the model name used for price lookup; multiplier is the
// rate multiplier applied on top of base pricing; serviceTier/sizeTier are
// passed through to the billing service unchanged.
func (s *OpenAIGatewayService) calculateOpenAIImageTokenCost(
	ctx context.Context,
	apiKey *APIKey,
	billingModel string,
	multiplier float64,
	tokens UsageTokens,
	serviceTier string,
	sizeTier string,
) (*CostBreakdown, error) {
	if s.resolver != nil && apiKey.Group != nil {
		// Copy the group ID to a local so the pointer passed into CostInput
		// does not alias apiKey.Group.ID.
		gid := apiKey.Group.ID
		return s.billingService.CalculateCostUnified(CostInput{
			Ctx:            ctx,
			Model:          billingModel,
			GroupID:        &gid,
			Tokens:         tokens,
			RequestCount:   1, // each call bills exactly one request
			SizeTier:       sizeTier,
			RateMultiplier: multiplier,
			ServiceTier:    serviceTier,
			Resolver:       s.resolver,
		})
	}
	// No resolver or no group: legacy per-model pricing (no size-tier support).
	return s.billingService.CalculateCostWithServiceTier(billingModel, tokens, multiplier, serviceTier)
}
func
(
s
*
OpenAIGatewayService
)
calculateOpenAIImageCost
(
ctx
context
.
Context
,
billingModel
string
,
...
...
@@ -4718,17 +4692,6 @@ func (s *OpenAIGatewayService) resolveOpenAIChannelPricing(ctx context.Context,
return
nil
}
func
hasOpenAIImageUsageTokens
(
result
*
OpenAIForwardResult
)
bool
{
if
result
==
nil
{
return
false
}
return
result
.
Usage
.
InputTokens
>
0
||
result
.
Usage
.
OutputTokens
>
0
||
result
.
Usage
.
CacheCreationInputTokens
>
0
||
result
.
Usage
.
CacheReadInputTokens
>
0
||
result
.
Usage
.
ImageOutputTokens
>
0
}
// ParseCodexRateLimitHeaders extracts Codex usage limits from response headers.
// Exported for use in ratelimit_service when handling OpenAI 429 responses.
func
ParseCodexRateLimitHeaders
(
headers
http
.
Header
)
*
OpenAICodexUsageSnapshot
{
...
...
backend/internal/service/openai_images.go
View file @
ef967d8f
...
...
@@ -5,27 +5,22 @@ import (
"bytes"
"context"
"crypto/sha256"
"crypto/sha3"
"encoding/base64"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"io"
"mime"
"mime/multipart"
"net/http"
"net/textproto"
"sort"
"strconv"
"strings"
"time"
"github.com/Wei-Shaw/sub2api/internal/pkg/logger"
"github.com/Wei-Shaw/sub2api/internal/pkg/proxyurl"
"github.com/Wei-Shaw/sub2api/internal/util/responseheaders"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"github.com/imroc/req/v3"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
...
...
@@ -38,18 +33,11 @@ const (
openAIImagesGenerationsURL
=
"https://api.openai.com/v1/images/generations"
openAIImagesEditsURL
=
"https://api.openai.com/v1/images/edits"
openAIChatGPTStartURL
=
"https://chatgpt.com/"
openAIChatGPTFilesURL
=
"https://chatgpt.com/backend-api/files"
openAIChatGPTConversationInitURL
=
"https://chatgpt.com/backend-api/conversation/init"
openAIChatGPTConversationURL
=
"https://chatgpt.com/backend-api/f/conversation"
openAIChatGPTConversationPrepareURL
=
"https://chatgpt.com/backend-api/f/conversation/prepare"
openAIChatGPTChatRequirementsURL
=
"https://chatgpt.com/backend-api/sentinel/chat-requirements"
openAIImageBackendUserAgent
=
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36"
openAIImageRequirementsDiff
=
"0fffff"
openAIImageLifecycleTimeout
=
2
*
time
.
Minute
openAIImageMaxDownloadBytes
=
20
<<
20
// 20MB per image download
openAIImageMaxUploadPartSize
=
20
<<
20
// 20MB per multipart upload part
openAIChatGPTStartURL
=
"https://chatgpt.com/"
openAIChatGPTFilesURL
=
"https://chatgpt.com/backend-api/files"
openAIImageBackendUserAgent
=
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36"
openAIImageMaxDownloadBytes
=
20
<<
20
// 20MB per image download
openAIImageMaxUploadPartSize
=
20
<<
20
// 20MB per multipart upload part
openAIImagesResponsesMainModel
=
"gpt-5.4-mini"
)
...
...
@@ -885,413 +873,6 @@ func extractOpenAIImageCountFromJSONBytes(body []byte) int {
return
0
}
func
resolveOpenAIProxyURL
(
account
*
Account
)
string
{
if
account
!=
nil
&&
account
.
ProxyID
!=
nil
&&
account
.
Proxy
!=
nil
{
return
account
.
Proxy
.
URL
()
}
return
""
}
func
newOpenAIBackendAPIClient
(
proxyURL
string
)
(
*
req
.
Client
,
error
)
{
client
:=
req
.
C
()
.
SetTimeout
(
180
*
time
.
Second
)
.
ImpersonateChrome
()
trimmed
,
_
,
err
:=
proxyurl
.
Parse
(
proxyURL
)
if
err
!=
nil
{
return
nil
,
err
}
if
trimmed
!=
""
{
client
.
SetProxyURL
(
trimmed
)
}
return
client
,
nil
}
// buildOpenAIBackendAPIHeaders assembles the browser-like header set required
// by chatgpt.com backend-api endpoints: bearer auth, CORS/Sec-Fetch headers,
// a Chrome User-Agent, and the per-account device/session identifiers.
// It always returns a nil error (the error slot is kept for interface
// symmetry with other header builders).
func (s *OpenAIGatewayService) buildOpenAIBackendAPIHeaders(account *Account, token string) (http.Header, error) {
	// Device/session IDs are lazily generated and persisted per account;
	// context.Background() is used deliberately so persistence is not tied
	// to the (possibly short-lived) request context.
	deviceID, sessionID := s.ensureOpenAIImageSessionCredentials(context.Background(), account)
	headers := make(http.Header)
	headers.Set("Authorization", "Bearer "+token)
	headers.Set("Accept", "application/json")
	headers.Set("Origin", "https://chatgpt.com")
	headers.Set("Referer", "https://chatgpt.com/")
	headers.Set("Sec-Fetch-Dest", "empty")
	headers.Set("Sec-Fetch-Mode", "cors")
	headers.Set("Sec-Fetch-Site", "same-origin")
	// Default UA first; an account-specific override replaces it below.
	headers.Set("User-Agent", openAIImageBackendUserAgent)
	if customUA := strings.TrimSpace(account.GetOpenAIUserAgent()); customUA != "" {
		headers.Set("User-Agent", customUA)
	}
	if chatgptAccountID := strings.TrimSpace(account.GetChatGPTAccountID()); chatgptAccountID != "" {
		headers.Set("chatgpt-account-id", chatgptAccountID)
	}
	if deviceID != "" {
		// The device ID is sent both as a header and as the oai-did cookie,
		// matching what the web client does.
		headers.Set("oai-device-id", deviceID)
		headers.Set("Cookie", "oai-did="+deviceID)
	}
	if sessionID != "" {
		headers.Set("oai-session-id", sessionID)
	}
	return headers, nil
}
// ensureOpenAIImageSessionCredentials returns the account's OpenAI device ID
// and session ID, generating fresh UUIDs for whichever is missing. Newly
// generated values are written into account.Extra in memory and then
// best-effort persisted via accountRepo.UpdateExtra; a persistence failure is
// only logged, never surfaced, so callers always get usable credentials.
// A nil account yields two empty strings.
func (s *OpenAIGatewayService) ensureOpenAIImageSessionCredentials(ctx context.Context, account *Account) (string, string) {
	if account == nil {
		return "", ""
	}
	deviceID := account.GetOpenAIDeviceID()
	sessionID := account.GetOpenAISessionID()
	if deviceID != "" && sessionID != "" {
		// Both already present: nothing to generate or persist.
		return deviceID, sessionID
	}
	// Collect only the fields that were actually generated so the repo
	// update touches nothing else.
	updates := map[string]any{}
	if deviceID == "" {
		deviceID = uuid.NewString()
		updates["openai_device_id"] = deviceID
	}
	if sessionID == "" {
		sessionID = uuid.NewString()
		updates["openai_session_id"] = sessionID
	}
	if account.Extra == nil {
		account.Extra = map[string]any{}
	}
	// Mirror the new values into the in-memory account first, so callers in
	// this process see them even if persistence below fails.
	for key, value := range updates {
		account.Extra[key] = value
	}
	// Skip persistence when nothing changed or the repo is unavailable
	// (e.g. in tests where the service is partially constructed).
	if len(updates) == 0 || s == nil || s.accountRepo == nil {
		return deviceID, sessionID
	}
	// Bound the DB write so a slow store cannot stall the request path.
	updateCtx, cancel := context.WithTimeout(ctx, 5*time.Second)
	defer cancel()
	if err := s.accountRepo.UpdateExtra(updateCtx, account.ID, updates); err != nil {
		logger.LegacyPrintf("service.openai_gateway", "persist openai image session creds failed: account=%d err=%v", account.ID, err)
	}
	return deviceID, sessionID
}
// bootstrapOpenAIBackendAPI performs a warm-up GET against the chatgpt.com
// start URL so the client picks up any cookies/session state before real
// backend-api calls. The response body is drained and closed (required for
// HTTP connection reuse) but its status and content are intentionally
// ignored — only transport-level errors are reported.
func bootstrapOpenAIBackendAPI(ctx context.Context, client *req.Client, headers http.Header) error {
	resp, err := client.R().
		SetContext(ctx).
		DisableAutoReadResponse().
		SetHeaders(headerToMap(headers)).
		Get(openAIChatGPTStartURL)
	if err != nil {
		return err
	}
	if resp != nil && resp.Body != nil {
		// Drain + close so the underlying connection can be reused.
		_, _ = io.Copy(io.Discard, resp.Body)
		_ = resp.Body.Close()
	}
	return nil
}
// initializeOpenAIImageConversation calls the backend-api conversation/init
// endpoint with the "picture_v2" system hint, priming a new image-generation
// conversation. A transport error is returned as-is; a non-2xx response is
// wrapped in an *openAIImageStatusError.
func initializeOpenAIImageConversation(ctx context.Context, client *req.Client, headers http.Header) error {
	// Payload mirrors the web client's init request; nil fields are sent
	// explicitly as JSON null.
	payload := map[string]any{
		"gizmo_id":                nil,
		"requested_default_model": nil,
		"conversation_id":         nil,
		"timezone_offset_min":     openAITimezoneOffsetMinutes(),
		"system_hints":            []string{"picture_v2"},
	}
	resp, err := client.R().
		SetContext(ctx).
		SetHeaders(headerToMap(headers)).
		SetBodyJsonMarshal(payload).
		Post(openAIChatGPTConversationInitURL)
	if err != nil {
		return err
	}
	if !resp.IsSuccessState() {
		return newOpenAIImageStatusError(resp, "conversation init failed")
	}
	return nil
}
// openAIChatRequirements mirrors the JSON body returned by the chatgpt.com
// sentinel "chat-requirements" endpoint. Token authorizes the subsequent
// conversation calls; the nested structs flag which anti-bot challenges
// (Turnstile, Arkose, proof-of-work) the caller must additionally solve.
type openAIChatRequirements struct {
	// Token is the sentinel token echoed back in later request headers.
	Token     string `json:"token"`
	Turnstile struct {
		Required bool `json:"required"`
	} `json:"turnstile"`
	Arkose struct {
		Required bool `json:"required"`
	} `json:"arkose"`
	// ProofOfWork carries the seed/difficulty for the hash challenge when
	// Required is true (see generateOpenAIProofToken).
	ProofOfWork struct {
		Required   bool   `json:"required"`
		Seed       string `json:"seed"`
		Difficulty string `json:"difficulty"`
	} `json:"proofofwork"`
}
func
fetchOpenAIChatRequirements
(
ctx
context
.
Context
,
client
*
req
.
Client
,
headers
http
.
Header
)
(
*
openAIChatRequirements
,
error
)
{
var
lastErr
error
for
_
,
payload
:=
range
[]
map
[
string
]
any
{
{
"p"
:
nil
},
{
"p"
:
generateOpenAIRequirementsToken
(
headers
.
Get
(
"User-Agent"
))},
}
{
var
result
openAIChatRequirements
resp
,
err
:=
client
.
R
()
.
SetContext
(
ctx
)
.
SetHeaders
(
headerToMap
(
headers
))
.
SetBodyJsonMarshal
(
payload
)
.
SetSuccessResult
(
&
result
)
.
Post
(
openAIChatGPTChatRequirementsURL
)
if
err
!=
nil
{
lastErr
=
err
continue
}
if
resp
.
IsSuccessState
()
&&
strings
.
TrimSpace
(
result
.
Token
)
!=
""
{
return
&
result
,
nil
}
lastErr
=
newOpenAIImageStatusError
(
resp
,
"chat-requirements failed"
)
}
if
lastErr
==
nil
{
lastErr
=
fmt
.
Errorf
(
"chat-requirements failed"
)
}
return
nil
,
lastErr
}
// prepareOpenAIImageConversation calls the backend-api f/conversation/prepare
// endpoint and returns the conduit token needed by the subsequent
// f/conversation request. chatToken/proofToken are the sentinel tokens (from
// chat-requirements and proof-of-work); either may be empty, in which case
// the corresponding header is omitted. The payload mirrors the web client's
// prepare request byte-for-byte — field values here are part of the wire
// protocol and must not be "cleaned up".
func prepareOpenAIImageConversation(
	ctx context.Context,
	client *req.Client,
	headers http.Header,
	prompt string,
	parentMessageID string,
	chatToken string,
	proofToken string,
) (string, error) {
	messageID := uuid.NewString()
	payload := map[string]any{
		"action":                "next",
		"client_prepare_state":  "success",
		"fork_from_shared_post": false,
		"parent_message_id":     parentMessageID,
		"model":                 "auto",
		"timezone_offset_min":   openAITimezoneOffsetMinutes(),
		"timezone":              openAITimezoneName(),
		"conversation_mode":     map[string]any{"kind": "primary_assistant"},
		"system_hints":          []string{"picture_v2"},
		"supports_buffering":    true,
		"supported_encodings":   []string{"v1"},
		"partial_query": map[string]any{
			"id":     messageID,
			"author": map[string]any{"role": "user"},
			"content": map[string]any{
				"content_type": "text",
				// Empty prompts fall back to a generic instruction.
				"parts": []string{coalesceOpenAIFileName(prompt, "Generate an image.")},
			},
		},
		"client_contextual_info": map[string]any{
			"app_name": "chatgpt.com",
		},
	}
	// Clone so sentinel headers don't leak into the caller's shared header set.
	prepareHeaders := cloneHTTPHeader(headers)
	prepareHeaders.Set("Accept", "*/*")
	prepareHeaders.Set("Content-Type", "application/json")
	if strings.TrimSpace(chatToken) != "" {
		prepareHeaders.Set("openai-sentinel-chat-requirements-token", strings.TrimSpace(chatToken))
	}
	if strings.TrimSpace(proofToken) != "" {
		prepareHeaders.Set("openai-sentinel-proof-token", strings.TrimSpace(proofToken))
	}
	var result struct {
		ConduitToken string `json:"conduit_token"`
	}
	resp, err := client.R().
		SetContext(ctx).
		SetHeaders(headerToMap(prepareHeaders)).
		SetBodyJsonMarshal(payload).
		SetSuccessResult(&result).
		Post(openAIChatGPTConversationPrepareURL)
	if err != nil {
		return "", err
	}
	if !resp.IsSuccessState() {
		return "", newOpenAIImageStatusError(resp, "conversation prepare failed")
	}
	return strings.TrimSpace(result.ConduitToken), nil
}
// openAIUploadedImage describes one image successfully uploaded to the
// chatgpt.com file service, as needed to reference it from a conversation
// message (asset pointer + attachment metadata).
type openAIUploadedImage struct {
	FileID   string // file-service ID assigned by the upload slot
	FileName string // resolved name (caller-supplied or "image.png" fallback)
	FileSize int    // size in bytes of the uploaded payload
	MimeType string // content type ("application/octet-stream" fallback)
	Width    int    // pixel width, 0 when unknown
	Height   int    // pixel height, 0 when unknown
}
// uploadOpenAIImageFiles pushes each input image through the chatgpt.com
// three-step file upload protocol:
//  1. POST /backend-api/files        — create an upload slot (file_id + signed upload_url)
//  2. PUT  upload_url                — raw bytes to Azure blob storage (x-ms-* headers)
//  3. POST /backend-api/files/{id}/uploaded — mark the upload complete
//
// It fails fast: the first error at any step aborts and returns it (earlier
// successful uploads are not rolled back). Returns (nil, nil) for an empty
// input slice.
func uploadOpenAIImageFiles(ctx context.Context, client *req.Client, headers http.Header, uploads []OpenAIImagesUpload) ([]openAIUploadedImage, error) {
	if len(uploads) == 0 {
		return nil, nil
	}
	results := make([]openAIUploadedImage, 0, len(uploads))
	for i := range uploads {
		item := uploads[i]
		fileName := coalesceOpenAIFileName(item.FileName, "image.png")
		// Step 1: reserve an upload slot.
		payload := map[string]any{
			"file_name": fileName,
			"file_size": len(item.Data),
			"use_case":  "multimodal",
		}
		var created struct {
			FileID    string `json:"file_id"`
			UploadURL string `json:"upload_url"`
		}
		resp, err := client.R().
			SetContext(ctx).
			SetHeaders(headerToMap(headers)).
			SetBodyJsonMarshal(payload).
			SetSuccessResult(&created).
			Post(openAIChatGPTFilesURL)
		if err != nil {
			return nil, err
		}
		if !resp.IsSuccessState() || strings.TrimSpace(created.FileID) == "" || strings.TrimSpace(created.UploadURL) == "" {
			return nil, newOpenAIImageStatusError(resp, "create upload slot failed")
		}
		// Step 2: PUT the raw bytes. Azure blob endpoints need the x-ms-*
		// headers; the account headers (auth etc.) are NOT forwarded here.
		uploadHeaders := map[string]string{
			"Content-Type":   coalesceOpenAIFileName(item.ContentType, "application/octet-stream"),
			"Origin":         "https://chatgpt.com",
			"x-ms-blob-type": "BlockBlob",
			"x-ms-version":   "2020-04-08",
			"User-Agent":     headers.Get("User-Agent"),
		}
		putResp, err := client.R().
			SetContext(ctx).
			SetHeaders(uploadHeaders).
			SetBody(item.Data).
			DisableAutoReadResponse().
			Put(created.UploadURL)
		if err != nil {
			return nil, err
		}
		if putResp.Response != nil && putResp.Body != nil {
			// Drain + close for connection reuse.
			_, _ = io.Copy(io.Discard, putResp.Body)
			_ = putResp.Body.Close()
		}
		if putResp.StatusCode < 200 || putResp.StatusCode >= 300 {
			return nil, newOpenAIImageStatusError(putResp, "upload image bytes failed")
		}
		// Step 3: tell the file service the bytes are in place.
		uploadedResp, err := client.R().
			SetContext(ctx).
			SetHeaders(headerToMap(headers)).
			SetBodyJsonMarshal(map[string]any{}).
			Post(fmt.Sprintf("%s/%s/uploaded", openAIChatGPTFilesURL, created.FileID))
		if err != nil {
			return nil, err
		}
		if !uploadedResp.IsSuccessState() {
			return nil, newOpenAIImageStatusError(uploadedResp, "mark upload complete failed")
		}
		results = append(results, openAIUploadedImage{
			FileID:   created.FileID,
			FileName: fileName,
			FileSize: len(item.Data),
			MimeType: coalesceOpenAIFileName(item.ContentType, "application/octet-stream"),
			Width:    item.Width,
			Height:   item.Height,
		})
	}
	return results, nil
}
// coalesceOpenAIFileName returns value trimmed of surrounding whitespace, or
// fallback when the trimmed value is empty. Despite the name it is used as a
// general "non-blank string or default" helper (file names, MIME types,
// prompts).
func coalesceOpenAIFileName(value string, fallback string) string {
	if trimmed := strings.TrimSpace(value); trimmed != "" {
		return trimmed
	}
	return fallback
}
// buildOpenAIImageConversationRequest constructs the f/conversation payload
// for an image generation or edit turn. With no uploads the message is plain
// text ("Generate an image." fallback prompt); with uploads it becomes
// multimodal: one image_asset_pointer part per upload followed by the prompt
// ("Edit this image." fallback), plus attachment metadata. All literal field
// names/values mirror the chatgpt.com web client and are wire protocol —
// do not normalize them.
func buildOpenAIImageConversationRequest(parsed *OpenAIImagesRequest, parentMessageID string, uploads []openAIUploadedImage) map[string]any {
	// Default: text-only turn.
	parts := []any{coalesceOpenAIFileName(parsed.Prompt, "Generate an image.")}
	attachments := make([]map[string]any, 0, len(uploads))
	if len(uploads) > 0 {
		// Rebuild parts as: [pointer, pointer, ..., prompt].
		parts = make([]any, 0, len(uploads)+1)
		for _, upload := range uploads {
			parts = append(parts, map[string]any{
				"content_type":  "image_asset_pointer",
				"asset_pointer": "file-service://" + upload.FileID,
				"size_bytes":    upload.FileSize,
				"width":         upload.Width,
				"height":        upload.Height,
			})
			attachment := map[string]any{
				"id":       upload.FileID,
				"mimeType": upload.MimeType,
				"name":     upload.FileName,
				"size":     upload.FileSize,
			}
			// Dimensions are attached only when known (> 0).
			if upload.Width > 0 {
				attachment["width"] = upload.Width
			}
			if upload.Height > 0 {
				attachment["height"] = upload.Height
			}
			attachments = append(attachments, attachment)
		}
		parts = append(parts, coalesceOpenAIFileName(parsed.Prompt, "Edit this image."))
	}
	contentType := "text"
	if len(uploads) > 0 {
		contentType = "multimodal_text"
	}
	metadata := map[string]any{
		"developer_mode_connector_ids": []any{},
		"selected_github_repos":        []any{},
		"selected_all_github_repos":    false,
		"system_hints":                 []string{"picture_v2"},
		"serialization_metadata": map[string]any{
			"custom_symbol_offsets": []any{},
		},
	}
	message := map[string]any{
		"id":     uuid.NewString(),
		"author": map[string]any{"role": "user"},
		"content": map[string]any{
			"content_type": contentType,
			"parts":        parts,
		},
		"metadata": metadata,
		// Fractional Unix seconds, as the web client sends.
		"create_time": float64(time.Now().UnixMilli()) / 1000,
	}
	// metadata is already referenced by message; mutating it here still
	// affects the serialized message.
	if len(attachments) > 0 {
		metadata["attachments"] = attachments
	}
	return map[string]any{
		"action":                               "next",
		"client_prepare_state":                 "sent",
		"parent_message_id":                    parentMessageID,
		"model":                                "auto",
		"timezone_offset_min":                  openAITimezoneOffsetMinutes(),
		"timezone":                             openAITimezoneName(),
		"conversation_mode":                    map[string]any{"kind": "primary_assistant"},
		"enable_message_followups":             true,
		"system_hints":                         []string{"picture_v2"},
		"supports_buffering":                   true,
		"supported_encodings":                  []string{"v1"},
		"paragen_cot_summary_display_override": "allow",
		"force_parallel_switch":                "auto",
		// Plausible browser viewport values; constants, not measured.
		"client_contextual_info": map[string]any{
			"is_dark_mode":      false,
			"time_since_loaded": 200,
			"page_height":       900,
			"page_width":        1440,
			"pixel_ratio":       1,
			"screen_height":     1080,
			"screen_width":      1920,
			"app_name":          "chatgpt.com",
		},
		"messages": []any{message},
	}
}
type
openAIImagePointerInfo
struct
{
Pointer
string
DownloadURL
string
...
...
@@ -1300,51 +881,6 @@ type openAIImagePointerInfo struct {
Prompt
string
}
// openAIImageToolMessage is one tool-authored image_gen message extracted
// from a conversation mapping, used to order and merge generated image
// pointers.
type openAIImageToolMessage struct {
	MessageID    string                   // key of the message in the conversation mapping
	CreateTime   float64                  // Unix seconds (fractional); used for chronological sort
	PointerInfos []openAIImagePointerInfo // deduplicated asset pointers found in the message parts
}
// readOpenAIImageConversationStream consumes the SSE stream of an
// f/conversation response, returning the conversation ID, the merged set of
// image asset pointers, accumulated usage, and the time-to-first-token in
// milliseconds (nil if no non-blank line ever arrived).
//
// Note the loop ordering: each line is processed BEFORE the read error is
// inspected, so a final line delivered together with io.EOF is not lost.
func readOpenAIImageConversationStream(resp *req.Response, startTime time.Time) (string, []openAIImagePointerInfo, OpenAIUsage, *int, error) {
	if resp == nil || resp.Body == nil {
		return "", nil, OpenAIUsage{}, nil, fmt.Errorf("empty conversation response")
	}
	reader := bufio.NewReader(resp.Body)
	var (
		conversationID string
		firstTokenMs   *int
		usage          OpenAIUsage
		pointers       []openAIImagePointerInfo
	)
	for {
		line, err := reader.ReadString('\n')
		// First non-blank line (of any kind) marks time-to-first-token.
		if strings.TrimSpace(line) != "" && firstTokenMs == nil {
			ms := int(time.Since(startTime).Milliseconds())
			firstTokenMs = &ms
		}
		if data, ok := extractOpenAISSEDataLine(strings.TrimRight(line, "\r\n")); ok && data != "" && data != "[DONE]" {
			dataBytes := []byte(data)
			if conversationID == "" {
				// Delta frames nest the ID under "v"; fall back to the
				// top-level field for full snapshots.
				conversationID = strings.TrimSpace(gjson.GetBytes(dataBytes, "v.conversation_id").String())
				if conversationID == "" {
					conversationID = strings.TrimSpace(gjson.GetBytes(dataBytes, "conversation_id").String())
				}
			}
			mergeOpenAIUsage(&usage, dataBytes)
			pointers = mergeOpenAIImagePointerInfos(pointers, collectOpenAIImagePointers(dataBytes))
		}
		if err == io.EOF {
			break
		}
		if err != nil {
			// firstTokenMs is still returned on error for latency metrics.
			return "", nil, OpenAIUsage{}, firstTokenMs, err
		}
	}
	return conversationID, pointers, usage, firstTokenMs, nil
}
func
collectOpenAIImagePointers
(
body
[]
byte
)
[]
openAIImagePointerInfo
{
if
len
(
body
)
==
0
{
return
nil
...
...
@@ -1470,222 +1006,6 @@ func mergeOpenAIImagePointerInfo(existing, next openAIImagePointerInfo) openAIIm
return
merged
}
func
hasOpenAIFileServicePointerInfos
(
items
[]
openAIImagePointerInfo
)
bool
{
for
_
,
item
:=
range
items
{
if
strings
.
HasPrefix
(
item
.
Pointer
,
"file-service://"
)
{
return
true
}
}
return
false
}
func
countOpenAIFileServicePointerInfos
(
items
[]
openAIImagePointerInfo
)
int
{
count
:=
0
for
_
,
item
:=
range
items
{
if
strings
.
HasPrefix
(
item
.
Pointer
,
"file-service://"
)
{
count
++
}
}
return
count
}
func
countOpenAIDirectImageAssets
(
items
[]
openAIImagePointerInfo
)
int
{
count
:=
0
for
_
,
item
:=
range
items
{
if
strings
.
TrimSpace
(
item
.
DownloadURL
)
!=
""
||
strings
.
TrimSpace
(
item
.
B64JSON
)
!=
""
{
count
++
}
}
return
count
}
func
preferOpenAIFileServicePointerInfos
(
items
[]
openAIImagePointerInfo
)
[]
openAIImagePointerInfo
{
if
!
hasOpenAIFileServicePointerInfos
(
items
)
{
return
items
}
out
:=
make
([]
openAIImagePointerInfo
,
0
,
len
(
items
))
for
_
,
item
:=
range
items
{
if
strings
.
HasPrefix
(
item
.
Pointer
,
"file-service://"
)
{
out
=
append
(
out
,
item
)
}
}
return
out
}
// extractOpenAIImageToolMessages scans a conversation "mapping" object and
// returns the tool messages that carry generated images, sorted by
// create_time ascending. A message qualifies only if: author.role == "tool",
// metadata.async_task_type == "image_gen", content.content_type ==
// "multimodal_text", and at least one part yields an asset pointer.
// Pointer prompts are taken from metadata.image_gen_title when present.
func extractOpenAIImageToolMessages(mapping map[string]any) []openAIImageToolMessage {
	if len(mapping) == 0 {
		return nil
	}
	out := make([]openAIImageToolMessage, 0, 4)
	for messageID, raw := range mapping {
		// Each filter below skips the node on a type mismatch or missing
		// field — the mapping mixes many node shapes.
		node, _ := raw.(map[string]any)
		if node == nil {
			continue
		}
		message, _ := node["message"].(map[string]any)
		if message == nil {
			continue
		}
		author, _ := message["author"].(map[string]any)
		metadata, _ := message["metadata"].(map[string]any)
		content, _ := message["content"].(map[string]any)
		if author == nil || metadata == nil || content == nil {
			continue
		}
		if role, _ := author["role"].(string); role != "tool" {
			continue
		}
		if asyncTaskType, _ := metadata["async_task_type"].(string); asyncTaskType != "image_gen" {
			continue
		}
		if contentType, _ := content["content_type"].(string); contentType != "multimodal_text" {
			continue
		}
		// image_gen_title doubles as the revised prompt for the image.
		prompt := ""
		if title, _ := metadata["image_gen_title"].(string); strings.TrimSpace(title) != "" {
			prompt = strings.TrimSpace(title)
		}
		item := openAIImageToolMessage{MessageID: messageID}
		if createTime, ok := message["create_time"].(float64); ok {
			item.CreateTime = createTime
		}
		parts, _ := content["parts"].([]any)
		for _, part := range parts {
			switch value := part.(type) {
			case map[string]any:
				// Structured part: pointers live in asset_pointer.
				if assetPointer, _ := value["asset_pointer"].(string); strings.TrimSpace(assetPointer) != "" {
					for _, pointer := range openAIImagePointerMatches([]byte(assetPointer)) {
						item.PointerInfos = append(item.PointerInfos, openAIImagePointerInfo{
							Pointer: pointer,
							Prompt:  prompt,
						})
					}
				}
			case string:
				// Plain-text part: scan the whole string for pointers.
				for _, pointer := range openAIImagePointerMatches([]byte(value)) {
					item.PointerInfos = append(item.PointerInfos, openAIImagePointerInfo{
						Pointer: pointer,
						Prompt:  prompt,
					})
				}
			}
		}
		if len(item.PointerInfos) == 0 {
			continue
		}
		// Deduplicate pointers within this one message.
		item.PointerInfos = mergeOpenAIImagePointerInfos(nil, item.PointerInfos)
		out = append(out, item)
	}
	// Map iteration order is random; sort chronologically for stable output.
	sort.Slice(out, func(i, j int) bool {
		return out[i].CreateTime < out[j].CreateTime
	})
	return out
}
// pollOpenAIImageConversation repeatedly fetches the conversation until the
// generated image assets appear, polling every 3s for up to 90s. It returns:
//   - the file-service pointers as soon as any exist (final assets), or
//   - any preview pointers once 15s have elapsed since the first tool
//     message was seen (the upstream sometimes never upgrades previews), or
//   - (nil, nil) for a blank conversation ID, or
//   - the last error when the deadline passes / the context is cancelled.
//
// Transient errors (transport, read, and 404 "conversation not found") are
// remembered in lastErr and retried via the waitNextPoll label; any other
// non-2xx response aborts immediately.
func pollOpenAIImageConversation(ctx context.Context, client *req.Client, headers http.Header, conversationID string) ([]openAIImagePointerInfo, error) {
	conversationID = strings.TrimSpace(conversationID)
	if conversationID == "" {
		return nil, nil
	}
	deadline := time.Now().Add(90 * time.Second)
	interval := 3 * time.Second
	previewWait := 15 * time.Second
	var (
		lastErr     error
		firstToolAt time.Time // when the first image_gen tool message was observed
	)
	for time.Now().Before(deadline) {
		resp, err := client.R().
			SetContext(ctx).
			SetHeaders(headerToMap(headers)).
			DisableAutoReadResponse().
			Get(fmt.Sprintf("https://chatgpt.com/backend-api/conversation/%s", conversationID))
		if err != nil {
			lastErr = err
		} else {
			if resp.StatusCode >= 200 && resp.StatusCode < 300 {
				body, readErr := io.ReadAll(resp.Body)
				_ = resp.Body.Close()
				if readErr != nil {
					lastErr = readErr
					goto waitNextPoll
				}
				// Pointers can appear both in raw body text and inside the
				// structured mapping; merge both sources.
				pointers := mergeOpenAIImagePointerInfos(nil, collectOpenAIImagePointers(body))
				var decoded map[string]any
				if err := json.Unmarshal(body, &decoded); err == nil {
					if mapping, _ := decoded["mapping"].(map[string]any); len(mapping) > 0 {
						toolMessages := extractOpenAIImageToolMessages(mapping)
						if len(toolMessages) > 0 && firstToolAt.IsZero() {
							firstToolAt = time.Now()
						}
						for _, msg := range toolMessages {
							pointers = mergeOpenAIImagePointerInfos(pointers, msg.PointerInfos)
						}
					}
				}
				// Final assets present: done.
				if hasOpenAIFileServicePointerInfos(pointers) {
					return preferOpenAIFileServicePointerInfos(pointers), nil
				}
				// Only previews so far: accept them after the grace period.
				if len(pointers) > 0 && !firstToolAt.IsZero() && time.Since(firstToolAt) >= previewWait {
					return pointers, nil
				}
			} else {
				statusErr := newOpenAIImageStatusError(resp, "conversation poll failed")
				// A freshly created conversation can briefly 404; retry.
				if isOpenAIImageTransientConversationNotFoundError(statusErr) {
					lastErr = statusErr
					goto waitNextPoll
				}
				return nil, statusErr
			}
		}
	waitNextPoll:
		// Sleep one interval, but wake immediately on context cancellation;
		// the timer is drained on Stop to avoid leaking its channel value.
		timer := time.NewTimer(interval)
		select {
		case <-ctx.Done():
			if !timer.Stop() {
				<-timer.C
			}
			return nil, ctx.Err()
		case <-timer.C:
		}
	}
	return nil, lastErr
}
// buildOpenAIImageResponse downloads every resolved image pointer and
// assembles an OpenAI images-API-compatible JSON body
// ({"created": ..., "data": [{"b64_json": ..., "revised_prompt": ...}]}).
// It returns the serialized body and the number of images included; the
// first download or marshal failure aborts the whole response.
func buildOpenAIImageResponse(
	ctx context.Context,
	client *req.Client,
	headers http.Header,
	conversationID string,
	pointers []openAIImagePointerInfo,
) ([]byte, int, error) {
	// Local response shape matching the public images API data items.
	type responseItem struct {
		B64JSON       string `json:"b64_json"`
		RevisedPrompt string `json:"revised_prompt,omitempty"`
	}
	items := make([]responseItem, 0, len(pointers))
	for _, pointer := range pointers {
		data, err := resolveOpenAIImageBytes(ctx, client, headers, conversationID, pointer)
		if err != nil {
			return nil, 0, err
		}
		items = append(items, responseItem{
			B64JSON:       base64.StdEncoding.EncodeToString(data),
			RevisedPrompt: pointer.Prompt,
		})
	}
	payload := map[string]any{
		"created": time.Now().Unix(),
		"data":    items,
	}
	body, err := json.Marshal(payload)
	if err != nil {
		return nil, 0, err
	}
	return body, len(items), nil
}
func
resolveOpenAIImageBytes
(
ctx
context
.
Context
,
client
*
req
.
Client
,
...
...
@@ -1805,17 +1125,6 @@ func isLikelyOpenAIImageDownloadURL(raw string) bool {
strings
.
Contains
(
lower
,
".webp"
)
}
// detachOpenAIImageLifecycleContext derives a context that survives
// cancellation of ctx (values are preserved via context.WithoutCancel) so
// image lifecycle work can outlive the originating request. A positive
// timeout bounds the detached context; otherwise a no-op cancel func is
// returned. A nil ctx falls back to context.Background().
func detachOpenAIImageLifecycleContext(ctx context.Context, timeout time.Duration) (context.Context, context.CancelFunc) {
	var base context.Context
	if ctx == nil {
		base = context.Background()
	} else {
		base = context.WithoutCancel(ctx)
	}
	if timeout > 0 {
		return context.WithTimeout(base, timeout)
	}
	return base, func() {}
}
func
fetchOpenAIImageDownloadURL
(
ctx
context
.
Context
,
client
*
req
.
Client
,
...
...
@@ -1910,10 +1219,6 @@ func downloadOpenAIImageBytes(ctx context.Context, client *req.Client, headers h
return
io
.
ReadAll
(
io
.
LimitReader
(
resp
.
Body
,
openAIImageMaxDownloadBytes
))
}
func
handleOpenAIImageBackendError
(
resp
*
req
.
Response
)
error
{
return
newOpenAIImageStatusError
(
resp
,
"backend-api request failed"
)
}
type
openAIImageStatusError
struct
{
StatusCode
int
Message
string
...
...
@@ -1981,23 +1286,6 @@ func newOpenAIImageStatusError(resp *req.Response, fallback string) error {
}
}
// newOpenAIImageSyntheticStatusError builds an *openAIImageStatusError from
// values rather than from an HTTP response — used when the failure is
// produced locally (no upstream response exists). The message is sanitized
// and defaulted, and a minimal {"detail": message} body is synthesized so
// downstream body-inspection logic keeps working.
func newOpenAIImageSyntheticStatusError(statusCode int, message string, requestURL string) *openAIImageStatusError {
	message = sanitizeUpstreamErrorMessage(strings.TrimSpace(message))
	if message == "" {
		message = "openai image backend request failed"
	}
	var body []byte
	// Marshal of map[string]string cannot realistically fail; body simply
	// stays nil if it does.
	if payload, err := json.Marshal(map[string]string{"detail": message}); err == nil {
		body = payload
	}
	return &openAIImageStatusError{
		StatusCode:   statusCode,
		Message:      message,
		ResponseBody: body,
		URL:          strings.TrimSpace(requestURL),
	}
}
func
isOpenAIImageTransientConversationNotFoundError
(
err
error
)
bool
{
statusErr
,
ok
:=
err
.
(
*
openAIImageStatusError
)
if
!
ok
||
statusErr
==
nil
||
statusErr
.
StatusCode
!=
http
.
StatusNotFound
{
...
...
@@ -2017,58 +1305,6 @@ func isOpenAIImageTransientConversationNotFoundError(err error) bool {
return
strings
.
Contains
(
bodyMsg
,
"conversation"
)
&&
strings
.
Contains
(
bodyMsg
,
"not found"
)
}
// wrapOpenAIImageBackendError translates an image-backend error into the
// gateway's error model. Non-status errors pass through untouched. For an
// *openAIImageStatusError it records ops telemetry (a "request_error" event
// plus the per-request upstream-error state), and when the status/body match
// the failover policy it additionally notifies the rate-limit service,
// records a "failover" event, and returns an *UpstreamFailoverError so the
// caller can retry on another account. Otherwise the status error itself is
// returned.
func (s *OpenAIGatewayService) wrapOpenAIImageBackendError(
	ctx context.Context,
	c *gin.Context,
	account *Account,
	err error,
) error {
	var statusErr *openAIImageStatusError
	if !errors.As(err, &statusErr) || statusErr == nil {
		// Not a backend status error; nothing to translate.
		return err
	}
	upstreamMsg := sanitizeUpstreamErrorMessage(statusErr.Message)
	appendOpsUpstreamError(c, OpsUpstreamErrorEvent{
		Platform:           account.Platform,
		AccountID:          account.ID,
		AccountName:        account.Name,
		UpstreamStatusCode: statusErr.StatusCode,
		UpstreamRequestID:  statusErr.RequestID,
		UpstreamURL:        safeUpstreamURL(statusErr.URL),
		Kind:               "request_error",
		Message:            upstreamMsg,
	})
	setOpsUpstreamError(c, statusErr.StatusCode, upstreamMsg, "")
	if s.shouldFailoverOpenAIUpstreamResponse(statusErr.StatusCode, upstreamMsg, statusErr.ResponseBody) {
		// Let the rate limiter react (cooldowns, account disabling, etc.).
		if s.rateLimitService != nil {
			s.rateLimitService.HandleUpstreamError(ctx, account, statusErr.StatusCode, statusErr.ResponseHeaders, statusErr.ResponseBody)
		}
		appendOpsUpstreamError(c, OpsUpstreamErrorEvent{
			Platform:           account.Platform,
			AccountID:          account.ID,
			AccountName:        account.Name,
			UpstreamStatusCode: statusErr.StatusCode,
			UpstreamRequestID:  statusErr.RequestID,
			UpstreamURL:        safeUpstreamURL(statusErr.URL),
			Kind:               "failover",
			Message:            upstreamMsg,
		})
		// Pool-mode accounts may retry on the same account for retryable
		// statuses — except for "unsupported challenge", which would just
		// fail again on this account.
		retryableOnSameAccount := account.IsPoolMode() && isPoolModeRetryableStatus(statusErr.StatusCode)
		if strings.Contains(strings.ToLower(statusErr.Message), "unsupported challenge") {
			retryableOnSameAccount = false
		}
		return &UpstreamFailoverError{
			StatusCode:             statusErr.StatusCode,
			ResponseBody:           statusErr.ResponseBody,
			RetryableOnSameAccount: retryableOnSameAccount,
		}
	}
	return statusErr
}
func
cloneHTTPHeader
(
src
http
.
Header
)
http
.
Header
{
dst
:=
make
(
http
.
Header
,
len
(
src
))
for
key
,
values
:=
range
src
{
...
...
@@ -2093,110 +1329,6 @@ func headerToMap(header http.Header) map[string]string {
return
result
}
// openAITimezoneOffsetMinutes returns the local timezone's current UTC
// offset expressed in minutes (e.g. UTC+8 -> 480).
func openAITimezoneOffsetMinutes() int {
	const secondsPerMinute = 60
	_, offsetSeconds := time.Now().Zone()
	return offsetSeconds / secondsPerMinute
}
// openAITimezoneName returns the local timezone location name as reported by
// the runtime (e.g. "Local" or an IANA name such as "Asia/Shanghai").
func openAITimezoneName() string {
	loc := time.Now().Location()
	return loc.String()
}
// generateOpenAIRequirementsToken produces a sentinel requirements token by
// solving the proof-of-work challenge over a browser-fingerprint-style
// config list. The element ORDER and exact values of config are part of the
// challenge format consumed by generateOpenAIChallengeAnswer (which slices
// it positionally) — do not reorder or remove entries. Returns "" when no
// solution is found within the iteration budget.
func generateOpenAIRequirementsToken(userAgent string) string {
	config := []any{
		"core" + strconv.Itoa(3008), // pseudo screen metric
		time.Now().UTC().Format(time.RFC1123),
		nil,
		0.123456, // fixed pseudo-random value
		coalesceOpenAIFileName(strings.TrimSpace(userAgent), openAIImageBackendUserAgent),
		nil,
		"prod-openai-images",
		"en-US",
		"en-US,en",
		0,
		"navigator.webdriver",
		"location",
		"document.body",
		float64(time.Now().UnixMilli()) / 1000, // fractional Unix seconds
		uuid.NewString(),
		"",
		8,
		time.Now().Unix(),
	}
	answer, solved := generateOpenAIChallengeAnswer(
		strconv.FormatInt(time.Now().UnixNano(), 10), // nanosecond seed
		openAIImageRequirementsDiff,
		config,
	)
	if solved {
		// "gAAAAAC" is the marker prefix for requirements-token answers.
		return "gAAAAAC" + answer
	}
	return ""
}
// generateOpenAIChallengeAnswer brute-forces the sentinel proof-of-work: it
// splices an incrementing counter into the JSON-encoded config, base64s the
// payload, and accepts the first candidate whose SHA3-512(seed+candidate)
// prefix is numerically <= the hex-decoded difficulty target. Returns
// (answer, true) on success or ("", false) after 100000 attempts or on an
// invalid difficulty string.
func generateOpenAIChallengeAnswer(seed string, difficulty string, config []any) (string, bool) {
	diffBytes, err := hex.DecodeString(difficulty)
	if err != nil {
		return "", false
	}
	// The config is split positionally; the counter i replaces config[3]
	// and i>>1 replaces config[9] in the reassembled JSON below. The slice
	// bounds therefore must match generateOpenAIRequirementsToken's layout.
	p1 := []byte(jsonCompactSlice(config[:3], true))    // "[a,b,c" (no closing bracket)
	p2 := []byte(jsonCompactSlice(config[4:9], false))  // "d,...,h]" minus leading bracket... joined between counters
	p3 := []byte(jsonCompactSlice(config[10:], false))  // tail elements
	seedBytes := []byte(seed)
	for i := 0; i < 100000; i++ {
		payload := fmt.Sprintf("%s%d,%s,%d,%s", p1, i, p2, i>>1, p3)
		encoded := base64.StdEncoding.EncodeToString([]byte(payload))
		sum := sha3.Sum512(append(seedBytes, []byte(encoded)...))
		// Lexicographic byte comparison of the hash prefix against the target.
		if bytes.Compare(sum[:len(diffBytes)], diffBytes) <= 0 {
			return encoded, true
		}
	}
	return "", false
}
// jsonCompactSlice marshals values to compact JSON and strips one bracket:
// the trailing "]" when trimSuffixComma is true, otherwise the leading "[".
// It exists so challenge payloads can splice counters between pre-encoded
// fragments. Marshal errors are deliberately ignored (inputs are JSON-safe).
func jsonCompactSlice(values []any, trimSuffixComma bool) string {
	raw, _ := json.Marshal(values)
	text := string(raw)
	if trimSuffixComma {
		return strings.TrimSuffix(text, "]")
	}
	return strings.TrimPrefix(text, "[")
}
// generateOpenAIProofToken solves the sentinel proof-of-work challenge for a
// conversation request. It returns "" when the challenge is not required or
// the seed/difficulty are blank. Otherwise it brute-forces a counter at
// proofToken[3] until the hex of SHA3-512(seed+base64(token)) compares <=
// difficulty (string comparison on the difficulty-length prefix), returning
// "gAAAAAB"+answer. If no solution is found in 100000 iterations, a
// hard-coded fallback token derived from the seed is returned (the upstream
// accepts it as a best-effort answer).
func generateOpenAIProofToken(required bool, seed string, difficulty string, userAgent string) string {
	if !required || strings.TrimSpace(seed) == "" || strings.TrimSpace(difficulty) == "" {
		return ""
	}
	// Pseudo screen metric varies with the seed's parity, mimicking the
	// web client's fingerprint variance.
	screen := 3008
	if len(seed)%2 == 0 {
		screen = 4010
	}
	// Positional layout matters: index 3 is the mutable PoW counter.
	proofToken := []any{
		screen,
		time.Now().UTC().Format(time.RFC1123),
		nil,
		0, // counter slot, overwritten each iteration
		coalesceOpenAIFileName(strings.TrimSpace(userAgent), openAIImageBackendUserAgent),
		"https://chatgpt.com/",
		"dpl=openai-images",
		"en",
		"en-US",
		nil,
		"plugins[object PluginArray]",
		"_reactListening",
		"alert",
	}
	diffLen := len(difficulty)
	for i := 0; i < 100000; i++ {
		proofToken[3] = i
		raw, _ := json.Marshal(proofToken) // inputs are JSON-safe; error ignored
		encoded := base64.StdEncoding.EncodeToString(raw)
		sum := sha3.Sum512([]byte(seed + encoded))
		if strings.Compare(hex.EncodeToString(sum[:])[:diffLen], difficulty) <= 0 {
			return "gAAAAAB" + encoded
		}
	}
	// Budget exhausted: emit the known-accepted fallback form.
	fallbackBase := base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%q", seed)))
	return "gAAAAABwQ8Lk5FbGpA2NcR9dShT6gYjU7VxZ4D" + fallbackBase
}
func
dedupeStrings
(
values
[]
string
)
[]
string
{
if
len
(
values
)
==
0
{
return
nil
...
...
backend/internal/service/openai_images_responses.go
View file @
ef967d8f
...
...
@@ -821,7 +821,7 @@ func (s *OpenAIGatewayService) forwardOpenAIImagesOAuth(
var
(
usage
OpenAIUsage
imageCount
=
parsed
.
N
imageCount
int
firstTokenMs
*
int
)
if
parsed
.
Stream
{
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment